From f02b737a304bd1c5be411043025d633c2e5ead9c Mon Sep 17 00:00:00 2001 From: Mohamed Kassem Date: Wed, 23 Nov 2022 18:02:04 -0800 Subject: [PATCH] rules/klayout/drc: improve drc failure mode and documentation --- rules/klayout/drc/README.md | 18 +- rules/klayout/drc/gf180mcu.drc | 251 ++++++++++-------- rules/klayout/drc/gf180mcu_antenna.drc | 36 +-- rules/klayout/drc/gf180mcu_density.drc | 14 +- rules/klayout/drc/run_drc.py | 43 ++- rules/klayout/drc/testing/Makefile | 18 +- rules/klayout/drc/testing/run_regression.py | 138 ++++++---- .../klayout/drc/testing/run_sc_regression.py | 116 +++++--- .../drc/testing/run_switch_checking.py | 127 ++++++--- 9 files changed, 448 insertions(+), 313 deletions(-) diff --git a/rules/klayout/drc/README.md b/rules/klayout/drc/README.md index 2e225076..000809a3 100644 --- a/rules/klayout/drc/README.md +++ b/rules/klayout/drc/README.md @@ -16,10 +16,22 @@ Explains how to use the runset. ``` ## Rule Deck Usage +The `run_drc.py` script takes a gds file to run DRC rule decks of GF180 technology with switches to select subsets of all checks. + +### Requirements +Please make sure to define PDK_ROOT and PDK environment variables to make it work. Example definition would be to work for this repo, go to the `rules/klayout` directory and run: +```bash +export PDK_ROOT=`pwd` +export PDK="drc" +``` +Also, please make sure to install the required python packages at `../requirements.test.txt` by using +```bash +pip install -r ../requirements.test.txt +``` -The `run_drc.py` script takes a gds file to run DRC rule decks of GF180 technology with switches to select subsets of all checks. -### **Switches** +### Switches +The list of switches used for running DRC: 1. **FEOL** : Default is on. Use it for checking Front End Of Line layers (wells, diffusion, polys, contacts). 2. **BEOL** : Default is on. Use it for checking Back End Of Line layers (metal layers, top metal layer, vias). 
@@ -44,7 +56,7 @@ The `run_drc.py` script takes a gds file to run DRC rule decks of GF180 technolo Example: ```bash - python3 run_drc.py --path=testing/switch_checking/switch_checking.gds --thr=16 --run_mode=flat --gf180mcu=A --antenna --no_offgrid + python3 run_drc.py --path=testing/switch_checking/simple_por.gds.gz --thr=16 --run_mode=flat --gf180mcu=A --antenna --no_offgrid ``` ### Options diff --git a/rules/klayout/drc/gf180mcu.drc b/rules/klayout/drc/gf180mcu.drc index 1e5a87a5..a319c712 100644 --- a/rules/klayout/drc/gf180mcu.drc +++ b/rules/klayout/drc/gf180mcu.drc @@ -49,16 +49,25 @@ if $report logger.info("GF180MCU Klayout DRC runset output at: %s" % [$report]) report("DRC Run Report at", $report) else - logger.info("GF180MCU Klayout DRC runset output at default location." % [File.join(File.dirname(RBA::CellView::active.filename), "gf180_drc.lyrdb").path]) + logger.info("GF180MCU Klayout DRC runset output at default location." % [File.join(File.dirname(RBA::CellView::active.filename), "gf180_drc.lyrdb")]) report("DRC Run Report at", File.join(File.dirname(RBA::CellView::active.filename), "gf180_drc.lyrdb")) end if $thr - logger.info("Number of threads to use %s" % [$thr]) threads($thr) + logger.info("Number of threads to use %s" % [$thr]) else - logger.info("Number of threads to use 16") - threads(16) + threads(%x("nproc")) + logger.info("Number of threads to use #{%x("nproc")}") +end + +#=== PRINT DETAILS === +if $verbose == "true" + logger.info("Verbose mode: #{$verbose}") + verbose(true) +else + verbose(false) + logger.info("Verbose mode: false") end # === TILING MODE === @@ -95,112 +104,112 @@ end # run_mode logger.info("Read in polygons from layers.") -comp = polygons(22 , 0 ) -dnwell = polygons(12 , 0 ) -nwell = polygons(21 , 0 ) -lvpwell = polygons(204, 0 ) -dualgate = polygons(55 , 0 ) -poly2 = polygons(30 , 0 ) -nplus = polygons(32 , 0 ) -pplus = polygons(31 , 0 ) -sab = polygons(49 , 0 ) -esd = polygons(24 , 0 ) -contact = polygons(33 , 0 ) 
-metal1 = polygons(34 , 0 ) -via1 = polygons(35 , 0 ) -metal2 = polygons(36 , 0 ) -via2 = polygons(38 , 0 ) -metal3 = polygons(42 , 0 ) -via3 = polygons(40 , 0 ) -metal4 = polygons(46 , 0 ) -via4 = polygons(41 , 0 ) -metal5 = polygons(81 , 0 ) -via5 = polygons(82 , 0 ) -metaltop = polygons(53 , 0 ) -pad = polygons(37 , 0 ) -resistor = polygons(62 , 0 ) -fhres = polygons(227, 0 ) -fusetop = polygons(75 , 0 ) -fusewindow_d = polygons(96 , 1 ) -polyfuse = polygons(220, 0 ) -mvsd = polygons(210, 0 ) -mvpsd = polygons(11 , 39) -nat = polygons(5 , 0 ) -comp_dummy = polygons(22 , 4 ) -poly2_dummy = polygons(30 , 4 ) -metal1_dummy = polygons(34 , 4 ) -metal2_dummy = polygons(36 , 4 ) -metal3_dummy = polygons(42 , 4 ) -metal4_dummy = polygons(46 , 4 ) -metal5_dummy = polygons(81 , 4 ) -metaltop_dummy = polygons(53 , 4 ) -comp_label = polygons(22 , 10) -poly2_label = polygons(30 , 10) -metal1_label = polygons(34 , 10) -metal2_label = polygons(36 , 10) -metal3_label = polygons(42 , 10) -metal4_label = polygons(46 , 10) -metal5_label = polygons(81 , 10) -metaltop_label = polygons(53 , 10) -metal1_slot = polygons(34 , 3 ) -metal2_slot = polygons(36 , 3 ) -metal3_slot = polygons(42 , 3 ) -metal4_slot = polygons(46 , 3 ) -metal5_slot = polygons(81 , 3 ) -metaltop_slot = polygons(53 , 3 ) -ubmpperi = polygons(183, 0 ) -ubmparray = polygons(184, 0 ) -ubmeplate = polygons(185, 0 ) -schottky_diode = polygons(241, 0 ) -zener = polygons(178, 0 ) -res_mk = polygons(110, 5 ) -opc_drc = polygons(124, 5 ) -ndmy = polygons(111, 5 ) -pmndmy = polygons(152, 5 ) -v5_xtor = polygons(112, 1 ) -cap_mk = polygons(117, 5 ) -mos_cap_mk = polygons(166, 5 ) -ind_mk = polygons(151, 5 ) -diode_mk = polygons(115, 5 ) -drc_bjt = polygons(127, 5 ) -lvs_bjt = polygons(118, 5 ) -mim_l_mk = polygons(117, 10) -latchup_mk = polygons(137, 5 ) -guard_ring_mk = polygons(167, 5 ) -otp_mk = polygons(173, 5 ) -mtpmark = polygons(122, 5 ) -neo_ee_mk = polygons(88 , 17) -sramcore = polygons(108, 5 ) -lvs_rf = 
polygons(100, 5 ) -lvs_drain = polygons(100, 7 ) -ind_mk = polygons(151, 5 ) -hvpolyrs = polygons(123, 5 ) -lvs_io = polygons(119, 5 ) -probe_mk = polygons(13 , 17) -esd_mk = polygons(24 , 5 ) -lvs_source = polygons(100, 8 ) -well_diode_mk = polygons(153, 51) -ldmos_xtor = polygons(226, 0 ) -plfuse = polygons(125, 5 ) -efuse_mk = polygons(80 , 5 ) -mcell_feol_mk = polygons(11 , 17) -ymtp_mk = polygons(86 , 17) -dev_wf_mk = polygons(128, 17) -metal1_blk = polygons(34 , 5 ) -metal2_blk = polygons(36 , 5 ) -metal3_blk = polygons(42 , 5 ) -metal4_blk = polygons(46 , 5 ) -metal5_blk = polygons(81 , 5 ) -metalt_blk = polygons(53 , 5 ) -pr_bndry = polygons(0 , 0 ) -mdiode = polygons(116, 5 ) -metal1_res = polygons(110, 11) -metal2_res = polygons(110, 12) -metal3_res = polygons(110, 13) -metal4_res = polygons(110, 14) -metal5_res = polygons(110, 15) -metal6_res = polygons(110, 16) -border = polygons(63 , 0 ) +comp = polygons(22 , 0 ).merged +dnwell = polygons(12 , 0 ).merged +nwell = polygons(21 , 0 ).merged +lvpwell = polygons(204, 0 ).merged +dualgate = polygons(55 , 0 ).merged +poly2 = polygons(30 , 0 ).merged +nplus = polygons(32 , 0 ).merged +pplus = polygons(31 , 0 ).merged +sab = polygons(49 , 0 ).merged +esd = polygons(24 , 0 ).merged +contact = polygons(33 , 0 ).merged +metal1 = polygons(34 , 0 ).merged +via1 = polygons(35 , 0 ).merged +metal2 = polygons(36 , 0 ).merged +via2 = polygons(38 , 0 ).merged +metal3 = polygons(42 , 0 ).merged +via3 = polygons(40 , 0 ).merged +metal4 = polygons(46 , 0 ).merged +via4 = polygons(41 , 0 ).merged +metal5 = polygons(81 , 0 ).merged +via5 = polygons(82 , 0 ).merged +metaltop = polygons(53 , 0 ).merged +pad = polygons(37 , 0 ).merged +resistor = polygons(62 , 0 ).merged +fhres = polygons(227, 0 ).merged +fusetop = polygons(75 , 0 ).merged +fusewindow_d = polygons(96 , 1 ).merged +polyfuse = polygons(220, 0 ).merged +mvsd = polygons(210, 0 ).merged +mvpsd = polygons(11 , 39).merged +nat = polygons(5 , 0 ).merged +comp_dummy = 
polygons(22 , 4 ).merged +poly2_dummy = polygons(30 , 4 ).merged +metal1_dummy = polygons(34 , 4 ).merged +metal2_dummy = polygons(36 , 4 ).merged +metal3_dummy = polygons(42 , 4 ).merged +metal4_dummy = polygons(46 , 4 ).merged +metal5_dummy = polygons(81 , 4 ).merged +metaltop_dummy = polygons(53 , 4 ).merged +comp_label = polygons(22 , 10).merged +poly2_label = polygons(30 , 10).merged +metal1_label = polygons(34 , 10).merged +metal2_label = polygons(36 , 10).merged +metal3_label = polygons(42 , 10).merged +metal4_label = polygons(46 , 10).merged +metal5_label = polygons(81 , 10).merged +metaltop_label = polygons(53 , 10).merged +metal1_slot = polygons(34 , 3 ).merged +metal2_slot = polygons(36 , 3 ).merged +metal3_slot = polygons(42 , 3 ).merged +metal4_slot = polygons(46 , 3 ).merged +metal5_slot = polygons(81 , 3 ).merged +metaltop_slot = polygons(53 , 3 ).merged +ubmpperi = polygons(183, 0 ).merged +ubmparray = polygons(184, 0 ).merged +ubmeplate = polygons(185, 0 ).merged +schottky_diode = polygons(241, 0 ).merged +zener = polygons(178, 0 ).merged +res_mk = polygons(110, 5 ).merged +opc_drc = polygons(124, 5 ).merged +ndmy = polygons(111, 5 ).merged +pmndmy = polygons(152, 5 ).merged +v5_xtor = polygons(112, 1 ).merged +cap_mk = polygons(117, 5 ).merged +mos_cap_mk = polygons(166, 5 ).merged +ind_mk = polygons(151, 5 ).merged +diode_mk = polygons(115, 5 ).merged +drc_bjt = polygons(127, 5 ).merged +lvs_bjt = polygons(118, 5 ).merged +mim_l_mk = polygons(117, 10).merged +latchup_mk = polygons(137, 5 ).merged +guard_ring_mk = polygons(167, 5 ).merged +otp_mk = polygons(173, 5 ).merged +mtpmark = polygons(122, 5 ).merged +neo_ee_mk = polygons(88 , 17).merged +sramcore = polygons(108, 5 ).merged +lvs_rf = polygons(100, 5 ).merged +lvs_drain = polygons(100, 7 ).merged +ind_mk = polygons(151, 5 ).merged +hvpolyrs = polygons(123, 5 ).merged +lvs_io = polygons(119, 5 ).merged +probe_mk = polygons(13 , 17).merged +esd_mk = polygons(24 , 5 ).merged +lvs_source = 
polygons(100, 8 ).merged +well_diode_mk = polygons(153, 51).merged +ldmos_xtor = polygons(226, 0 ).merged +plfuse = polygons(125, 5 ).merged +efuse_mk = polygons(80 , 5 ).merged +mcell_feol_mk = polygons(11 , 17).merged +ymtp_mk = polygons(86 , 17).merged +dev_wf_mk = polygons(128, 17).merged +metal1_blk = polygons(34 , 5 ).merged +metal2_blk = polygons(36 , 5 ).merged +metal3_blk = polygons(42 , 5 ).merged +metal4_blk = polygons(46 , 5 ).merged +metal5_blk = polygons(81 , 5 ).merged +metalt_blk = polygons(53 , 5 ).merged +pr_bndry = polygons(0 , 0 ).merged +mdiode = polygons(116, 5 ).merged +metal1_res = polygons(110, 11).merged +metal2_res = polygons(110, 12).merged +metal3_res = polygons(110, 13).merged +metal4_res = polygons(110, 14).merged +metal5_res = polygons(110, 15).merged +metal6_res = polygons(110, 16).merged +border = polygons(63 , 0 ).merged # ================= COUNT POLYGONS ================= poly_count = 0 @@ -474,7 +483,7 @@ logger.info("METAL_TOP Selected is %s" % [METAL_TOP]) if $metal_level METAL_LEVEL = $metal_level else - METAL_LEVEL = "6LM" + METAL_LEVEL = "5LM" end # METAL_LEVEL logger.info("METAL_STACK Selected is %s" % [METAL_LEVEL]) @@ -509,7 +518,7 @@ logger.info("Gold enabled %s" % [GOLD]) if $mim_option MIM_OPTION = $mim_option else - MIM_OPTION = "Nan" + MIM_OPTION = "B" end logger.info("MIM Option selected %s" % [MIM_OPTION]) @@ -836,7 +845,7 @@ nw1a_l1 = nwell.width(0.86.um, euclidian).polygons(0.001).overlapping(dualgate) nw1a_l1.output("NW.1a_5V", "NW.1a_5V : Min. Nwell Width (This is only for litho purpose on the generated area). : 0.86µm") nw1a_l1.forget -nw_1b = nwell.outside(dnwell).and(res_mk).not(comp).not(poly2) +nw_1b = nwell.outside(dnwell).and(res_mk) # Rule NW.1b_3.3V: Min. Nwell Width as a resistor (Outside DNWELL only). 
is 2µm logger.info("Executing rule NW.1b_3.3V") nw1b_l1 = nw_1b.width(2.um, euclidian).polygons(0.001).not_interacting(v5_xtor).not_interacting(dualgate) @@ -3071,6 +3080,12 @@ mt2a_l1 = metaltop.space(0.38.um, euclidian).polygons(0.001) mt2a_l1.output("MT.2a", "MT.2a : min. metaltop spacing : 0.38µm") mt2a_l1.forget +# Rule MT.2b: Space to wide Metal2 (length & width > 10um) is 0.5µm +logger.info("Executing rule MT.2b") +mt2b_l1 = metaltop.separation(metal2.not_interacting(metal2.edges.with_length(nil, 10.um)), 0.5.um, euclidian).polygons(0.001) +mt2b_l1.output("MT.2b", "MT.2b : Space to wide Metal2 (length & width > 10um) : 0.5µm") +mt2b_l1.forget + # Rule MT.4: Minimum MetalTop area is 0.5625µm² logger.info("Executing rule MT.4") mt4_l1 = metaltop.with_area(nil, 0.5625.um) @@ -3092,6 +3107,12 @@ mt2a_l1 = metaltop.space(0.46.um, euclidian).polygons(0.001) mt2a_l1.output("MT.2a", "MT.2a : min. metaltop spacing : 0.46µm") mt2a_l1.forget +# Rule MT.2b: Space to wide Metal2 (length & width > 10um) is 0.6µm +logger.info("Executing rule MT.2b") +mt2b_l1 = metaltop.separation(metaltop.not_interacting(metal2.edges.with_length(nil, 10.um)), 0.6.um, euclidian).polygons(0.001) +mt2b_l1.output("MT.2b", "MT.2b : Space to wide Metal2 (length & width > 10um) : 0.6µm") +mt2b_l1.forget + # Rule MT.4: Minimum MetalTop area is 0.5625µm² logger.info("Executing rule MT.4") mt4_l1 = metaltop.with_area(nil, 0.5625.um) @@ -3137,8 +3158,8 @@ mt305_l1 = top_metal.enclosing(top_via, 0.12.um, euclidian).polygons(0.001).or(t mt305_l1.output("MT30.5", "MT30.5 : Minimum thick MetalTop enclose underlying via (for example: via5 for 6LM case) [Outside Not Allowed].") mt305_l1.forget -mt30p6_cond = top_metal.drc( width <= 0.34.um) -mt30p6_eol = top_metal.edges.with_length(nil, 0.34.um).interacting(mt30p6_cond.first_edges).interacting(mt30p6_cond.second_edges).not(mt30p6_cond.first_edges).not(mt30p6_cond.second_edges) +mt30p6_cond = top_metal.drc( width < 2.5.um) +mt30p6_eol = 
top_metal.edges.with_length(nil, 2.5.um).interacting(mt30p6_cond.first_edges).interacting(mt30p6_cond.second_edges).not(mt30p6_cond.first_edges).not(mt30p6_cond.second_edges) # Rule MT30.6: Thick MetalTop end-of-line (width <2.5um) enclose underlying via (for example: via5 for 6LM case) [Outside Not Allowed]. logger.info("Executing rule MT30.6") mt306_l1 = mt30p6_eol.enclosing(top_via.edges,0.25.um, projection).polygons(0.001).or(top_via.not_inside(top_metal)) diff --git a/rules/klayout/drc/gf180mcu_antenna.drc b/rules/klayout/drc/gf180mcu_antenna.drc index 8e27e221..1dfc04a3 100644 --- a/rules/klayout/drc/gf180mcu_antenna.drc +++ b/rules/klayout/drc/gf180mcu_antenna.drc @@ -87,24 +87,24 @@ end # run_mode #--------------------------------------- LAYER DEFINITIONS -------------------------------------------- #====================================================================================================== -comp = polygons(22, 0) -dualgate = polygons(55, 0) -poly2 = polygons(30, 0) -nplus = polygons(32, 0) -pplus = polygons(31, 0) -contact = polygons(33, 0) -metal1 = polygons(34, 0) -via1 = polygons(35, 0) -metal2 = polygons(36, 0) -via2 = polygons(38, 0) -metal3 = polygons(42, 0) -via3 = polygons(40, 0) -metal4 = polygons(46, 0) -via4 = polygons(41, 0) -metal5 = polygons(81, 0) -via5 = polygons(82, 0) -metaltop = polygons(53, 0) -fusetop = polygons(75, 0) +comp = polygons(22, 0).merged +dualgate = polygons(55, 0).merged +poly2 = polygons(30, 0).merged +nplus = polygons(32, 0).merged +pplus = polygons(31, 0).merged +contact = polygons(33, 0).merged +metal1 = polygons(34, 0).merged +via1 = polygons(35, 0).merged +metal2 = polygons(36, 0).merged +via2 = polygons(38, 0).merged +metal3 = polygons(42, 0).merged +via3 = polygons(40, 0).merged +metal4 = polygons(46, 0).merged +via4 = polygons(41, 0).merged +metal5 = polygons(81, 0).merged +via5 = polygons(82, 0).merged +metaltop = polygons(53, 0).merged +fusetop = polygons(75, 0).merged 
#====================================================================================================== #--------------------------------------- LAYER DERIVATIONS -------------------------------------------- diff --git a/rules/klayout/drc/gf180mcu_density.drc b/rules/klayout/drc/gf180mcu_density.drc index 3956b42e..12b6e861 100644 --- a/rules/klayout/drc/gf180mcu_density.drc +++ b/rules/klayout/drc/gf180mcu_density.drc @@ -87,13 +87,13 @@ end # run_mode #--------------------------------------- LAYER DEFINITIONS -------------------------------------------- #====================================================================================================== -poly2 = polygons(30, 0) -metal1 = polygons(34, 0) -metal2 = polygons(36, 0) -metal3 = polygons(42, 0) -metal4 = polygons(46, 0) -metal5 = polygons(81, 0) -metaltop = polygons(53, 0) +poly2 = polygons(30, 0).merged +metal1 = polygons(34, 0).merged +metal2 = polygons(36, 0).merged +metal3 = polygons(42, 0).merged +metal4 = polygons(46, 0).merged +metal5 = polygons(81, 0).merged +metaltop = polygons(53, 0).merged #====================================================================================================== #--------------------------------------- LAYER DERIVATIONS -------------------------------------------- diff --git a/rules/klayout/drc/run_drc.py b/rules/klayout/drc/run_drc.py index 9dde969d..eda2862f 100644 --- a/rules/klayout/drc/run_drc.py +++ b/rules/klayout/drc/run_drc.py @@ -70,13 +70,10 @@ def get_results(rule_deck,rules,lyrdb, type): logging.info("Klayout GDS DRC Clean\n") def get_top_cell_names(gds_path): - # klayout -b -r script.rb -rd infile=./layouts/caravel.gds.gz - - pdk_root = os.environ['PDK_ROOT'] - pdk = os.environ['PDK'] + curr_path = os.path.dirname(os.path.abspath(__file__)) top_cell_names = list() - proc = subprocess.Popen(['klayout','-b', '-r', f"{pdk_root}/{pdk}/utils/get_top_cell_names.rb", "-rd", "infile={}".format(gds_path)], stdout=subprocess.PIPE) + proc = 
subprocess.Popen(['klayout','-b', '-r', f"{curr_path}/utils/get_top_cell_names.rb", "-rd", "infile={}".format(gds_path)], stdout=subprocess.PIPE) while True: line = proc.stdout.readline() if not line: @@ -88,15 +85,14 @@ def get_top_cell_names(gds_path): def clean_gds_from_many_top_cells(gds_path, topcell): # klayout -b -r keep_single_top_cell.rb -rd infile=./layouts/caravel.gds.gz -rd topcell=chip_io -rd outfile=test.gds.gz - pdk_root = os.environ['PDK_ROOT'] - pdk = os.environ['PDK'] + curr_path = os.path.dirname(os.path.abspath(__file__)) basename = os.path.basename(gds_path) dirname = os.path.dirname(gds_path) main_file_name = basename.split(".")[0] output_file_path = os.path.join(dirname, "{}_single_top.gds.gz".format(main_file_name)) - proc = subprocess.Popen(['klayout','-b', '-r', f"{pdk_root}/{pdk}/utils/keep_single_top_cell.rb", "-rd", "infile={}".format(gds_path), "-rd", "topcell={}".format(topcell), "-rd", "outfile={}".format(output_file_path)], stdout=subprocess.PIPE) + proc = subprocess.Popen(['klayout','-b', '-r', f"{curr_path}/utils/keep_single_top_cell.rb", "-rd", "infile={}".format(gds_path), "-rd", "topcell={}".format(topcell), "-rd", "outfile={}".format(output_file_path)], stdout=subprocess.PIPE) while True: line = proc.stdout.readline() @@ -114,14 +110,16 @@ def main(): logging.error("The input GDS file path doesn't exist, please recheck.") exit() - # Env. variables - pdk_root = os.environ['PDK_ROOT'] - pdk = os.environ['PDK'] - + curr_path = os.path.dirname(os.path.abspath(__file__)) + # ======= Checking Klayout version ======= klayout_v_ = os.popen("klayout -v").read() klayout_v_ = klayout_v_.split("\n")[0] - klayout_v = int (klayout_v_.split(".") [-1]) + if klayout_v_ == "": + logging.error("Klayout is not found. 
Please make sure klayout is installed.") + exit(1) + else: + klayout_v = int (klayout_v_.split(".") [-1]) logging.info(f"Your Klayout version is: {klayout_v_}" ) @@ -188,28 +186,28 @@ def main(): # Running DRC using klayout if (arguments["--antenna_only"]) and not (arguments["--density_only"]): logging.info(f"Running Global Foundries 180nm MCU antenna checks on design {name_clean} on cell {topcell_name}:") - os.system(f"klayout -b -r $PDK_ROOT/$PDK/gf180mcu_antenna.drc -rd input={path} -rd report={name_clean}_antenna_gf{arguments['--gf180mcu']}_gf{arguments['--gf180mcu']}.lyrdb -rd thr={thrCount} {switches}") + os.system(f"klayout -b -r {curr_path}/gf180mcu_antenna.drc -rd input={path} -rd report={name_clean}_antenna_gf{arguments['--gf180mcu']}_gf{arguments['--gf180mcu']}.lyrdb -rd thr={thrCount} {switches}") elif (arguments["--density_only"]) and not (arguments["--antenna_only"]): logging.info(f"Running Global Foundries 180nm MCU density checks on design {name_clean} on cell {topcell_name}:") - os.system(f"klayout -b -r $PDK_ROOT/$PDK/gf180mcu_density.drc -rd input={path} -rd report={name_clean}_density_gf{arguments['--gf180mcu']}.lyrdb -rd thr={thrCount} {switches}") + os.system(f"klayout -b -r {curr_path}/gf180mcu_density.drc -rd input={path} -rd report={name_clean}_density_gf{arguments['--gf180mcu']}.lyrdb -rd thr={thrCount} {switches}") elif arguments["--antenna_only"] and arguments["--density_only"]: logging.info(f"Running Global Foundries 180nm MCU antenna checks on design {name_clean} on cell {topcell_name}:") - os.system(f"klayout -b -r $PDK_ROOT/$PDK/gf180mcu_antenna.drc -rd input={path} -rd report={name_clean}_antenna_gf{arguments['--gf180mcu']}.lyrdb -rd thr={thrCount} {switches}") + os.system(f"klayout -b -r {curr_path}/gf180mcu_antenna.drc -rd input={path} -rd report={name_clean}_antenna_gf{arguments['--gf180mcu']}.lyrdb -rd thr={thrCount} {switches}") logging.info(f"Running Global Foundries 180nm MCU density checks on design {name_clean} on cell 
{topcell_name}:") - os.system(f"klayout -b -r $PDK_ROOT/$PDK/gf180mcu_density.drc -rd input={path} -rd report={name_clean}_density_gf{arguments['--gf180mcu']}.lyrdb -rd thr={thrCount} {switches}") + os.system(f"klayout -b -r {curr_path}/gf180mcu_density.drc -rd input={path} -rd report={name_clean}_density_gf{arguments['--gf180mcu']}.lyrdb -rd thr={thrCount} {switches}") else: logging.info(f"Running main Global Foundries 180nm MCU runset on design {name_clean} on cell {topcell_name}:") - os.system(f"klayout -b -r $PDK_ROOT/$PDK/gf180mcu.drc -rd input={path} -rd report={name_clean}_main_drc_gf{arguments['--gf180mcu']}.lyrdb -rd thr={thrCount} {switches}") + os.system(f"klayout -b -r {curr_path}/gf180mcu.drc -rd input={path} -rd report={name_clean}_main_drc_gf{arguments['--gf180mcu']}.lyrdb -rd thr={thrCount} {switches}") if arguments["--antenna"]: logging.info(f"Running Global Foundries 180nm MCU antenna checks on design {name_clean} on cell {topcell_name}:") - os.system(f"klayout -b -r $PDK_ROOT/$PDK/gf180mcu_antenna.drc -rd input={path} -rd report={name_clean}_antenna_gf{arguments['--gf180mcu']}.lyrdb -rd thr={thrCount} {switches}") + os.system(f"klayout -b -r {curr_path}/gf180mcu_antenna.drc -rd input={path} -rd report={name_clean}_antenna_gf{arguments['--gf180mcu']}.lyrdb -rd thr={thrCount} {switches}") if arguments["--density"]: logging.info(f"Running Global Foundries 180nm MCU density checks on design {name_clean} on cell {topcell_name}:") - os.system(f"klayout -b -r $PDK_ROOT/$PDK/gf180mcu_density.drc -rd input={path} -rd report={name_clean}_density_gf{arguments['--gf180mcu']}.lyrdb -rd thr={thrCount} {switches}") + os.system(f"klayout -b -r {curr_path}/gf180mcu_density.drc -rd input={path} -rd report={name_clean}_density_gf{arguments['--gf180mcu']}.lyrdb -rd thr={thrCount} {switches}") else: logging.error("Script only support gds files, please select one") exit() @@ -217,8 +215,9 @@ def main(): logging.error("No provided gds file, please add one") exit() - # 
======================== Reporting results ======================== - rule_deck_path = [f"{pdk_root}/{pdk}/gf180mcu.drc" , f"{pdk_root}/{pdk}/gf180mcu_antenna.drc" , f"{pdk_root}/{pdk}/gf180mcu_density.drc"] + # ======================== Reporting results ======================== + curr_path = os.path.dirname(os.path.abspath(__file__)) + rule_deck_path = [f"{curr_path}/gf180mcu.drc" , f"{curr_path}/gf180mcu_antenna.drc" , f"{curr_path}/gf180mcu_density.drc"] # Get rules from rule deck rules = [] diff --git a/rules/klayout/drc/testing/Makefile b/rules/klayout/drc/testing/Makefile index df53ce7a..58c843c0 100644 --- a/rules/klayout/drc/testing/Makefile +++ b/rules/klayout/drc/testing/Makefile @@ -29,13 +29,16 @@ test-DRC: test-DRC-switch test-DRC-SC test-DRC-main #================================= # --------- test-DRC-SC ---------- #================================= +.ONESHELL: +print_klayout_version: + @klayout -v .ONESHELL: test-DRC-SC : test-DRC-gf180mcu_fd_ip_sram test-DRC-gf180mcu_fd_io test-DRC-gf180mcu_fd_sc_mcu7t5v0 test-DRC-gf180mcu_fd_sc_mcu9t5v0 @echo "========== DRC-DRC-SC is done ==========" .ONESHELL: -test-DRC-gf180mcu_fd_ip_sram: +test-DRC-gf180mcu_fd_ip_sram: print_klayout_version @cd $(Testing_DIR) @echo "========== DRC-gf180mcu_fd_ip_sram testing ==========" @ python3 run_sc_regression.py \ @@ -45,7 +48,7 @@ test-DRC-gf180mcu_fd_ip_sram: --path=ip_testcases/gf180mcu_fd_ip_sram__sram512x8m8wm1.gds .ONESHELL: -test-DRC-gf180mcu_fd_io: +test-DRC-gf180mcu_fd_io: print_klayout_version @cd $(Testing_DIR) @echo "========== DRC-gf180mcu_fd_io testing ==========" @ python3 run_sc_regression.py \ @@ -54,13 +57,13 @@ test-DRC-gf180mcu_fd_io: --path=ip_testcases/gf180mcu_fd_io_5lm.gds .ONESHELL: -test-DRC-gf180mcu_fd_sc_mcu7t5v0: +test-DRC-gf180mcu_fd_sc_mcu7t5v0: print_klayout_version @cd $(Testing_DIR) @echo "========== DRC-gf180mcu_fd_sc_mcu7t5v0 testing ==========" @ python3 run_sc_regression.py --path=sc_testcases/gf180mcu_fd_sc_mcu7t5v0.gds .ONESHELL: 
-test-DRC-gf180mcu_fd_sc_mcu9t5v0: +test-DRC-gf180mcu_fd_sc_mcu9t5v0: print_klayout_version @cd $(Testing_DIR) @echo "========== DRC-gf180mcu_fd_sc_mcu9t5v0 testing ==========" @ python3 run_sc_regression.py --path=sc_testcases/gf180mcu_fd_sc_mcu9t5v0.gds @@ -70,7 +73,7 @@ test-DRC-gf180mcu_fd_sc_mcu9t5v0: #================================= .ONESHELL: -test-DRC-main : test-DRC-foundry-Option-1 test-DRC-foundry-Option-2 test-DRC-foundry-Option-3 test-DRC-foundry-Option-4 test-DRC-Option-A test-DRC-Option-B test-DRC-Option-C +test-DRC-main : print_klayout_version test-DRC-foundry-Option-1 test-DRC-foundry-Option-2 test-DRC-foundry-Option-3 test-DRC-foundry-Option-4 test-DRC-Option-A test-DRC-Option-B test-DRC-Option-C @echo "========== DRC-Regression is done ==========" .ONESHELL: @@ -143,11 +146,10 @@ test-DRC-Option-C: #================================= .ONESHELL: -test-DRC-switch: +test-DRC-switch: print_klayout_version @cd $(Testing_DIR) @echo "========== DRC-Switch testing ==========" - @python3 run_switch_checking.py - @rm -rf pattern.csv + python3 run_switch_checking.py && rm -rf pattern.csv #=============================== # --------- Clean ALL ---------- diff --git a/rules/klayout/drc/testing/run_regression.py b/rules/klayout/drc/testing/run_regression.py index 5582f727..14f5c69f 100644 --- a/rules/klayout/drc/testing/run_regression.py +++ b/rules/klayout/drc/testing/run_regression.py @@ -32,6 +32,8 @@ --run_name= Select your run name. 
""" +from subprocess import check_call + from docopt import docopt import os import datetime @@ -39,21 +41,21 @@ import csv import time import re - -from sympy import arg +import pandas as pd +import logging def call_regression(rule_deck_path, path): + t0 = time.time() marker_gen = [] - rules =[] - ly = 0 + rules = [] + ly = 0 # set folder structure for each run x = f"{datetime.datetime.now()}" x = x.replace(" ", "_") - name_ext = str(rule_deck_path).replace(".drc","").split("/")[-1] - os.system(f"mkdir run_{x}_{name_ext}") + check_call(f"mkdir run_{x}_{name_ext}", shell=True) # Get the same rule deck with gds output with open(rule_deck_path, 'r') as f: @@ -86,13 +88,13 @@ def call_regression(rule_deck_path, path): iname = path.split('.gds') if '/' in iname[0]: file = iname[0].split('/') - os.system(f"klayout -b -r run_{x}_{name_ext}/markers.drc -rd input={path} -rd report={file[-1]}.lyrdb -rd thr={thrCount} {switches} ") + check_call(f"klayout -b -r run_{x}_{name_ext}/markers.drc -rd input={path} -rd report={file[-1]}.lyrdb -rd thr={thrCount} {switches} ", shell=True) else: - os.system(f"klayout -b -r run_{x}_{name_ext}/markers.drc -rd input={path} -rd report={iname[0]}.lyrdb -rd thr={thrCount} {switches} ") + check_call(f"klayout -b -r run_{x}_{name_ext}/markers.drc -rd input={path} -rd report={iname[0]}.lyrdb -rd thr={thrCount} {switches} ", shell=True) marker_gen = [] - ly = 0 - remove_if = False + ly = 0 + remove_if = False # Get the small rule deck with gds output with open(rule_deck_path, 'r') as f: @@ -132,27 +134,32 @@ def call_regression(rule_deck_path, path): marker_file.close() # Generate databases - os.system(f"klayout -b -r run_{x}_{name_ext}/regression.drc -rd input=run_{x}_{name_ext}/merged_output.gds -rd report=database.lyrdb -rd thr={thrCount} {switches}") - - # Cleaning directories - # os.system(f"rm -rf regression.drc markers.drc merged_output.gds") + check_call(f"klayout -b -r run_{x}_{name_ext}/regression.drc -rd 
input=run_{x}_{name_ext}/merged_output.gds -rd report=database.lyrdb -rd thr={thrCount} {switches}", shell=True) mytree = ET.parse(f'run_{x}_{name_ext}/database.lyrdb') myroot = mytree.getroot() - report = [["Rule_Name", "False_Positive", "False_Negative", "Total_Violations", "Not_Tested"]] + report = [["Rule_Name", "False_Positive", "False_Negative", "Total_Violations", "Not_Tested" , "Known_issues"]] conc = [["Rule_Name", "Status"]] - passed = 0 - failed = 0 - not_tested_counter = 0 + # Initial counters + not_tested_c = 0 + passed = 0 + failed = 0 + known_issues = 0 + + # Get known issues list + known_issues_df = pd.read_csv("DRC_Known_issues.csv", usecols=["Rule "]) + known_issues_list = known_issues_df["Rule "].tolist() + for lrule in rules: + # Values of each rule in results - falseNeg = 0 - falsePos = 0 - not_tested = 0 - not_run = 1 - total = 0 + not_run = 1 + not_tested = 0 + falseNeg = 0 + falsePos = 0 + total_violation = 0 # Check whether the rule was run or not for z in myroot[5]: @@ -170,20 +177,24 @@ def call_regression(rule_deck_path, path): falsePos += 1 if f"'{lrule}_false_negative'" == f"{z[1].text}":#(f"{rule}" in f"{z[1].text}") and ("negative" in f"{z[1].text}"): falseNeg += 1 - - total = falsePos + falseNeg - report.append([lrule, falsePos, falseNeg, total, not_tested]) - if total == 0 and not_tested == 0: + + # failed rules + total_violation = falsePos + falseNeg + + report.append([lrule, falsePos, falseNeg, total_violation, not_tested , known_issues]) + if total_violation == 0 and not_tested == 0: conc.append([lrule, "Pass"]) passed += 1 elif not_tested != 0: conc.append([lrule, "Not_Tested"]) - not_tested_counter += 1 + not_tested_c += 1 + elif lrule in known_issues_list: + conc.append([lrule, "known_issues"]) + known_issues +=1 else: conc.append([lrule, "Fail"]) failed += 1 - # Create final reports files with open(f'run_{x}_{name_ext}/report.csv', 'w') as f: writer = csv.writer(f, delimiter=',') @@ -193,26 +204,52 @@ def 
call_regression(rule_deck_path, path): writer = csv.writer(f, delimiter=',') writer.writerows(conc) - print(f"\n Total rules in {name_ext} for {path}: {len(conc)} \n") - print(f"{passed} passed rules ") - print(f"{failed} failed rules ") - print(f"{not_tested_counter} Not tested rules \n") + logging.info(f"========= Summary Report in {name_ext} for {path} =========") + logging.info(f"Total rules: {len(conc)}") + logging.info(f"{not_tested_c} not tested rules") + logging.info(f"{passed} passed rules ") + logging.info(f"{known_issues} known_issues rules ") + logging.info(f"{failed} failed rules ") + t1 = time.time() - - print(f'Execution time {t1 - t0} s') + logging.info(f'Execution time {t1 - t0} s') + logging.info(f"===============================================================") + + if failed > 0: + logging.info("Some unit tests has failed. Failing regression:") + df = pd.read_csv(f'run_{x}_{name_ext}/conclusion.csv') + pd.set_option('display.max_columns', None) + pd.set_option('display.max_rows', None) + pd.set_option("max_colwidth", None) + pd.set_option('display.width', 1000) + logging.info("## Full report:") + print(df) + + print("\n") + logging.info("## Only failed") + print(df[df["Status"] == "Fail"]) + + exit(1) return report if __name__ == "__main__": - sub_report = [] - full_report = [] + # logs format + logging.basicConfig(level=logging.DEBUG, format=f"%(asctime)s | %(levelname)-7s | %(message)s", datefmt='%d-%b-%Y %H:%M:%S') + + # Initial values for DRC report + sub_report = [] + full_report = [] final_report = [["Rule_Name", "Status"]] - final_detailed_report = [["Rule_Name", "False_Postive", "False_Negative", "Total_Violations", "Not_Tested"]] + final_detailed_report = [["Rule_Name", "False_Postive", "False_Negative", "Total_Violations", "Not_Tested" , "Known_issues"]] + # Start of execution time t0 = time.time() + # Reading docopt arguments args = docopt(__doc__) + # DRC switches definitions switches = '' if args["--no_feol"]: @@ -230,41 +267,33 @@ 
def call_regression(rule_deck_path, path): else: switches = switches + '-rd offgrid=true ' - # Getting threads count - if args["--thr"]: - thrCount = args["--thr"] - else: - thrCount = os.cpu_count() * 2 - if args["--metal_top"] in ["6K" , "9K", "11K", "30K"]: switches = switches + f'-rd metal_top={args["--metal_top"]} ' else: - print("Top metal thickness allowed values are (6K , 9K, 11K, 30K) only") + logging.error("Top metal thickness allowed values are (6K , 9K, 11K, 30K) only") exit() if args["--mim_option"] in ["A" , "B", "NO_MIM"]: switches = switches + f'-rd mim_option={args["--mim_option"]} ' else: - print("MIM capacitor option allowed values are (A, B, NO_MIM) only") + logging.error("MIM capacitor option allowed values are (A, B, NO_MIM) only") exit() if args["--metal_level"] in ["2" , "3", "4", "5" , "6"]: switches = switches + f'-rd metal_level={args["--metal_level"]}LM ' else: - print("The number of metal layers in stack allowed values are (2, 3, 4, 5, 6) only") + logging.error("The number of metal layers in stack allowed values are (2, 3, 4, 5, 6) only") exit() - - os.system("klayout -v") - + # Starting regression + # Getting drc rule decks rule_deck_path = [] - files = os.listdir(f'..') - for file in files: if ".drc" in file: rule_deck_path.append(f"../{file}") + # Running regression for path in args["--path"]: for runset in rule_deck_path: return_report = call_regression(runset, path) @@ -272,7 +301,6 @@ def call_regression(rule_deck_path, path): full_report.append(sub_report) sub_report = [] - rule_num = 0 for rule in full_report[0]: @@ -306,6 +334,6 @@ def call_regression(rule_deck_path, path): writer = csv.writer(f, delimiter=',') writer.writerows(final_report) + # End of execution time t1 = time.time() - - print(f'Total execution time {t1 - t0} s') + logging.info(f'Total execution time {t1 - t0} s') diff --git a/rules/klayout/drc/testing/run_sc_regression.py b/rules/klayout/drc/testing/run_sc_regression.py index 8bd49343..60d48322 100644 --- 
a/rules/klayout/drc/testing/run_sc_regression.py +++ b/rules/klayout/drc/testing/run_sc_regression.py @@ -25,15 +25,17 @@ --thr= The number of threads used in run. """ +from subprocess import check_call + from docopt import docopt import os import xml.etree.ElementTree as ET -import csv +import pandas as pd import time import concurrent.futures -def get_results(rule_deck_path, iname, file, x): +def get_results(results_file_path): """ The function takes the path of the rule deck and the path of the gds file as inputs, then it runs the DRC using the rule deck on the gds file and returns the name of the gds file, the name of the @@ -43,13 +45,12 @@ def get_results(rule_deck_path, iname, file, x): :param path: The path to the GDS file you want to check :return: the file name, the rule deck name, the violated rules and the status of the file. """ - mytree = ET.parse(f"{iname[0]}_{x}.lyrdb") + mytree = ET.parse(results_file_path) myroot = mytree.getroot() violated = [] for lrule in rules: - # Loop on database to get the violations of required rule for z in myroot[7]: if f"'{lrule}'" == f"{z[1].text}": @@ -57,15 +58,11 @@ def get_results(rule_deck_path, iname, file, x): break if len(violated) > 0: - status = "Not_clean" + status = "not_clean" else: - status = "Clean" - - rule_deck = rule_deck_path.split("../") + status = "clean" - print(f"\n The file {file[-1]} has violated rule deck {rule_deck[-1]} in: {len(violated)} rule/s which are: {violated} \n") - print(f" The file {file[-1]} with rule deck {rule_deck[-1]} is {status} \n") - return file[-1], rule_deck[-1], ' '.join(violated), status + return ' '.join(violated), len(violated), status def call_simulator(arg): """ @@ -76,7 +73,13 @@ def call_simulator(arg): :param path: The path to the GDS file you want to simulate :param thrCount: number of threads to use """ - os.system(arg) + try: + check_call(arg, shell=True) + return True + except Exception as e: + print("## Run generated exception: ", arg) + print(str(e)) + 
return False if __name__ == "__main__": @@ -86,7 +89,7 @@ def call_simulator(arg): if os.path.exists("sc"): os.system("rm -rf sc") - report = [["File_Name", "Rule Deck", "Rules", "Status"]] + report_header = ["File_Name", "Rule Deck", "Rules", "Status"] # Get threads count if args["--thr"]: @@ -94,11 +97,9 @@ def call_simulator(arg): else: thrCount = os.cpu_count() * 2 - os.system("klayout -v") - rule_deck_path = [] rules = [] - runs = [] + runs = dict() files = os.listdir('..') @@ -136,14 +137,18 @@ def call_simulator(arg): iname = path.split('.gds') file = iname[0].split('/') if "sc" in file[-1]: + print("## Extracting top cells for : ", path) os.system(f"klayout -b -r split_gds.rb -rd input={path}") print(f"File {path} was splitted into multiple gds files") + else: + print(f"## {path} Not a standard cells library GDS. We will use the full GDS. No splitting required.") + ## If this was a standard cells library, get the new list of files. if os.path.exists("sc"): other_files = os.listdir('sc') args["--path"] = args["--path"] + other_files - # Get input data for simulator + # Get input data for klayout runs and create the run list. 
for path in args["--path"]: x = 0 iname = path.split('.gds') @@ -156,34 +161,53 @@ def call_simulator(arg): file = iname[0].split('/') for runset in rule_deck_path: arg = f"klayout -b -r {runset} -rd input={path} -rd report={file[-1]}_{x}.lyrdb -rd thr={thrCount} -rd conn_drc=true" - runs.append(arg) - x += 1 - - # Run DRC - with concurrent.futures.ProcessPoolExecutor(max_workers=thrCount) as executor: - for run in runs: - executor.submit(call_simulator, run) - - # Get results - for path in args["--path"]: - x = 0 - iname = path.split('.gds') - file = iname[0].split('/') - if "sc" in file[-1]: - continue - if "/" not in path: - path = f"sc/{path}" - iname = path.split('.gds') - file = iname[0].split('/') - for runset in rule_deck_path: - if os.path.exists(f"{iname[0]}_{x}.lyrdb"): - file, rule_deck, violations, status = get_results(runset, iname, file, x) - report.append([file, rule_deck, violations, status]) + run_id = f"{runset}|{path}|{file[-1]}_{x}.lyrdb" + runs[run_id] = arg x += 1 - with open(f'sc_drc_report.csv', 'w') as f: - writer = csv.writer(f, delimiter=',') - writer.writerows(report) + print("## We will run klayout {} runs".format(len(runs))) + + # Run All DRC runs. 
+ report = [] + + with concurrent.futures.ThreadPoolExecutor(max_workers=thrCount) as executor: + # Start the load operations and mark each future with its URL + future_to_run_id = {executor.submit(call_simulator, runs[r]): r for r in runs} + for future in concurrent.futures.as_completed(future_to_run_id): + run_id = future_to_run_id[future] + run_info = run_id.split("|") + info = dict() + info["file_path"] = run_info[1] + info["runset"] = run_info[0] + info["results_file"] = run_info[2] + results_db_path = os.path.join(os.path.dirname(os.path.abspath(run_info[1])), run_info[2]) + + try: + run_status = future.result() + if run_status: + violators, num_rules_violated, db_status = get_results(results_db_path) + info["rules_violated"] = violators + info["num_rules_violated"] = num_rules_violated + info["run_status"] = db_status + else: + info["rules_violated"] = "" + info["num_rules_violated"] = 0 + info["run_status"] = "run throws exception" + + report.append(info) + + except Exception as exc: + print('%r generated an exception: %s' % (run_id, exc)) + + + df = pd.DataFrame(report) + df.to_csv("ip_cells_run_report.csv", index=False) + pd.set_option('display.max_columns', None) + pd.set_option('display.max_rows', None) + pd.set_option("max_colwidth", None) + pd.set_option('display.width', 1000) + + print(df) if os.path.exists("split_gds.rb"): os.remove("split_gds.rb") @@ -193,3 +217,9 @@ def call_simulator(arg): t1 = time.time() print(f'Total execution time {t1 - t0} s') + + if (df["run_status"] != "clean").any(): + print("## Run failed as there are failures or violations.") + exit(1) + else: + print("## Run passed with no violations or failures.") diff --git a/rules/klayout/drc/testing/run_switch_checking.py b/rules/klayout/drc/testing/run_switch_checking.py index 87ef89ae..8c6dc600 100644 --- a/rules/klayout/drc/testing/run_switch_checking.py +++ b/rules/klayout/drc/testing/run_switch_checking.py @@ -12,7 +12,7 @@ # See the License for the specific language governing 
permissions and # limitations under the License. -import os +from subprocess import check_call import pandas as pd import subprocess import logging @@ -22,38 +22,39 @@ # ====================== Generation of switches patterns ==================================== def gen_patterns(): + test_name = ["no_feol", + "no_beol", + "base_A_variant", + "base_B_variant", + "base_C_variant", + "connectivity_enabled", + "no_offgrid"] + switches = [ "--gf180mcu=A --no_feol" , "--gf180mcu=A --no_beol" ,"--gf180mcu=A" ,"--gf180mcu=B" ,"--gf180mcu=C" ,"--gf180mcu=C --connectivity" - , "--gf180mcu=A --no_offgrid" + ,"--gf180mcu=A --no_offgrid" ] - expected_logs = [ "FEOL is disabled" + expected_logs = [ + "FEOL is disabled" ,"BEOL is disabled" ,"METAL_TOP Selected is 30K" + ,"METAL_TOP Selected is 11K" ,"METAL_TOP Selected is 9K" - , "connectivity rules are disabled" - , "Offgrid enabled false" + ,"connectivity rules are enabled" + ,"Offgrid enabled false" ] - mydataset = {'test_case_name': [ switches[0]+"_switch" ], - 'switches': [ switches[0]], - 'expected_output':['pass'], - 'expected_logs':[ expected_logs[0]] - } - - initial_dataframe = pd.DataFrame(mydataset) - - for i,switch in enumerate(switches [1: len(switches)]): - - add_list = {'test_case_name':switch+"_switch",'switches':switch,'expected_output':'pass', 'expected_logs':expected_logs[i]} - initial_dataframe = initial_dataframe.append(add_list,ignore_index=True) - - initial_dataframe[["test_case_name","switches","expected_output","expected_logs"]].to_csv("pattern.csv",index=False) + ds = {"test_case_name": test_name, "switches": switches, "expected_logs": expected_logs} + pdf = pd.DataFrame(ds) + pdf["expected_output"] = "pass" + pdf.to_csv("pattern.csv",index=False) + print(pdf) # ================================== Running patterns and checking results ================================== @@ -69,47 +70,89 @@ def run_switches(): LOG_FILE_NAME = "run.log" def run_test_case(test_case, switches, expected_result,expected_log): - 
test_case = test_case.split("--")[-1] - run_directory = RUN_DIRECTORY + test_case - os.system('mkdir -p {0}'.format(run_directory)) - - log_file = run_directory + "/" + test_case + ".log" - os.system('python3 ../run_drc.py --path=switch_checking/simple_por.gds.gz {0} >> {1}'.format(switches, log_file)) - - check_output_log(log_file, expected_result) - - def check_output_log(log_file, expected_result): + try: + test_case = test_case.split("--")[-1] + run_directory = RUN_DIRECTORY + test_case + check_call(f'mkdir -p {run_directory}', shell=True) + + log_file = run_directory + "/" + test_case + ".log" + check_call(f'python3 ../run_drc.py --path=switch_checking/simple_por.gds.gz {switches} >> {log_file}', shell=True) + + res = check_output_log(test_case, log_file, expected_result, expected_log) + return res + except Exception as e: + print("## Run generated exception: ", test_case) + print(str(e)) + return False + + def check_output_log(test_case, log_file, expected_result, expected_log): proc = subprocess.Popen('grep -irn "{0}" {1}'.format(expected_log, log_file), shell=True, stdout=subprocess.PIPE) (out, err) = proc.communicate() out = str(out.decode()) - if len(out) == 0 and expected_result == PASS: - logging.info("Test case passed.") - os.system('echo Test case passed. >> {0}{1}'.format(RUN_DIRECTORY,LOG_FILE_NAME)) + if len(out) > 0 and expected_result == PASS: + logging.info("## Found the expected log: " + str(out)) + logging.info("Test case passed. {}".format(test_case)) + logging.info(f'echo Test case passed. >> {RUN_DIRECTORY}{LOG_FILE_NAME}') + return True + elif len(out) < 1 and expected_result == PASS: + logging.error("Test case passed as expected but didn't generate the expected log: {}".format(test_case)) + return False else: - logging.error("Test case FAILD check log file for more info.") - os.system('echo Test case FAILD check log file for more info. 
>> {0}{1}'.format(RUN_DIRECTORY,LOG_FILE_NAME)) + logging.error("Test case FAILED check log file for more info. {}".format(test_case)) + logging.error(f'echo Test case FAILED check log file for more info. >> {RUN_DIRECTORY}{LOG_FILE_NAME}') + return False ## read testcases from csv file df = pd.read_csv(csv_path) + pd.set_option('display.max_columns', None) + pd.set_option('display.max_rows', None) + pd.set_option("max_colwidth", None) + pd.set_option('display.width', 1000) + + print("## All Switch checking patterns:") + print(df) + test_cases = df["test_case_name"] switches = df["switches"] expected_results = df["expected_output"] expected_log = df["expected_logs"] ## init log file - os.system('mkdir -p {0}'.format(RUN_DIRECTORY)) - os.system('touch {0}temp.log'.format(RUN_DIRECTORY)) - os.system('mv -f {0}temp.log {0}{1}'.format(RUN_DIRECTORY,LOG_FILE_NAME)) + check_call( + f''' +mkdir -p {RUN_DIRECTORY} +touch {RUN_DIRECTORY}temp.log +mv -f {RUN_DIRECTORY}temp.log {RUN_DIRECTORY}{LOG_FILE_NAME} +''', + shell=True + ) + + run_status = [] + sw_failed = False for test_case_index in range(0,len(test_cases)): test_case_switches = switches[test_case_index] running_msg = "\nrunning: "+test_cases[test_case_index] + " with switches: " + test_case_switches + "..." logging.info(running_msg) - os.system('echo "{0}" >> {1}{2}'.format(running_msg, RUN_DIRECTORY, LOG_FILE_NAME)) + check_call(f'echo "{running_msg}" >> {RUN_DIRECTORY}{LOG_FILE_NAME}', shell=True) + + case_res = run_test_case(test_cases[test_case_index], test_case_switches, expected_results[test_case_index],expected_log[test_case_index]) + run_status.append(case_res) - run_test_case(test_cases[test_case_index], test_case_switches, expected_results[test_case_index],expected_log[test_case_index]) + if not case_res: + sw_failed = True + df["run_status"] = run_status + + if sw_failed: + logging.error("## One of the test cases failed. 
Exit with failure:") + print(df) + exit(1) + else: + logging.info("## All test cases passed.") + logging.info("## All Switch checking patterns:") + print(df) def main(): @@ -117,13 +160,13 @@ def main(): logging.basicConfig(level=logging.DEBUG, format=f"%(asctime)s | %(levelname)-7s | %(message)s", datefmt='%d-%b-%Y %H:%M:%S') # Remove old files - os.system("rm -rf patterns.csv switch_checking/run_switch_results") + check_call("rm -rf pattern.csv switch_checking/run_switch_results", shell=True) # Gen. patterns - gen_patterns () + gen_patterns() # Run. switches - run_switches () + run_switches() if __name__ == "__main__": main()