Coverage for klayout_pex/kpex_cli.py: 68%
469 statements
« prev ^ index » next coverage.py v7.6.9, created at 2024-12-17 17:24 +0000
1#! /usr/bin/env python3
2#
3# --------------------------------------------------------------------------------
4# SPDX-FileCopyrightText: 2024 Martin Jan Köhler and Harald Pretl
5# Johannes Kepler University, Institute for Integrated Circuits.
6#
7# This file is part of KPEX
8# (see https://github.com/martinjankoehler/klayout-pex).
9#
10# This program is free software: you can redistribute it and/or modify
11# it under the terms of the GNU General Public License as published by
12# the Free Software Foundation, either version 3 of the License, or
13# (at your option) any later version.
14#
15# This program is distributed in the hope that it will be useful,
16# but WITHOUT ANY WARRANTY; without even the implied warranty of
17# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
18# GNU General Public License for more details.
19#
20# You should have received a copy of the GNU General Public License
21# along with this program. If not, see <http://www.gnu.org/licenses/>.
22# SPDX-License-Identifier: GPL-3.0-or-later
23# --------------------------------------------------------------------------------
24#
26import argparse
27from datetime import datetime
28from enum import StrEnum
29from functools import cached_property
30import logging
31import os
32import os.path
34import rich.console
35import rich.markdown
36import rich.text
37from rich_argparse import RichHelpFormatter
38import shlex
39import shutil
40import sys
41from typing import *
43import klayout.db as kdb
44import klayout.rdb as rdb
46from .fastercap.fastercap_input_builder import FasterCapInputBuilder
47from .fastercap.fastercap_model_generator import FasterCapModelGenerator
48from .fastercap.fastercap_runner import run_fastercap, fastercap_parse_capacitance_matrix
49from .fastcap.fastcap_runner import run_fastcap, fastcap_parse_capacitance_matrix
50from .klayout.lvs_runner import LVSRunner
51from .klayout.lvsdb_extractor import KLayoutExtractionContext, KLayoutExtractedLayerInfo
52from .klayout.netlist_expander import NetlistExpander
53from .klayout.netlist_csv import NetlistCSVWriter
54from .klayout.netlist_reducer import NetlistReducer
55from .klayout.repair_rdb import repair_rdb
56from .log import (
57 LogLevel,
58 set_log_level,
59 register_additional_handler,
60 deregister_additional_handler,
61 # console,
62 # debug,
63 info,
64 warning,
65 subproc,
66 error,
67 rule
68)
69from .magic.magic_runner import MagicPEXMode, run_magic, prepare_magic_script
70from .pdk_config import PDKConfig
71from .rcx25.extractor import RCExtractor, ExtractionResults
72from .tech_info import TechInfo
73from .util.multiple_choice import MultipleChoicePattern
74from .util.argparse_helpers import render_enum_help, true_or_false
75from .version import __version__
78# ------------------------------------------------------------------------------------
80PROGRAM_NAME = "kpex"
class ArgumentValidationError(Exception):
    """Raised when CLI argument validation fails.

    All individual problems are reported via error() first; this exception
    then aborts the run (see KpexCLI.validate_args / KpexCLI.setup_logging).
    """
    pass
class InputMode(StrEnum):
    """How the layout/netlist input is provided to the PEX flow."""
    LVSDB = "lvsdb"  # use an existing KLayout LVS database (bypasses LVS)
    GDS = "gds"      # run KLayout LVS on a GDS input file first
# TODO: this should be externally configurable
class PDK(StrEnum):
    """Supported process design kits (PDKs)."""
    IHP_SG13G2 = 'ihp_sg13g2'
    SKY130A = 'sky130A'

    @cached_property
    def config(self) -> PDKConfig:
        """Resolve the PDKConfig (LVS script + tech proto JSON paths) for this PDK.

        Computed once per enum member (cached_property).
        """
        # NOTE: installation paths of resources in the distribution wheel differ from the source repo
        base_dir = os.path.dirname(os.path.realpath(__file__))

        repo_marker = os.path.join(base_dir, '..', '.git')
        if os.path.isdir(repo_marker):
            # running from a source checkout
            base_dir = os.path.dirname(base_dir)
            tech_pb_json_dir = os.path.join(base_dir, 'build')
        else:
            # site-packages/klayout_pex -> site-packages/klayout_pex_protobuf
            tech_pb_json_dir = os.path.join(os.path.dirname(base_dir), 'klayout_pex_protobuf')

        # per-PDK LVS script file name
        # NOTE(review): 'sg130g2.lvs' looks like a possible typo of 'sg13g2' — confirm against the PDK tree
        lvs_script_name = {
            PDK.IHP_SG13G2: 'sg130g2.lvs',
            PDK.SKY130A: 'sky130.lvs',
        }[self]

        return PDKConfig(
            name=self,
            pex_lvs_script_path=os.path.join(base_dir, 'pdk', self, 'libs.tech', 'kpex', lvs_script_name),
            tech_pb_json_path=os.path.join(tech_pb_json_dir, f"{self}_tech.pb.json")
        )
122class KpexCLI:
123 @staticmethod
124 def parse_args(arg_list: List[str] = None) -> argparse.Namespace:
125 # epilog = f"See '{PROGRAM_NAME} <subcommand> -h' for help on subcommand"
126 epilog = """
127| Variable | Example | Description |
128| -------- | -------------------- | --------------------------------------- |
129| PDKPATH | (e.g. $HOME/.volare) | Optional (required for default magicrc) |
130| PDK | (e.g. sky130A) | Optional (required for default magicrc) |
131"""
132 epilog_md = rich.console.Group(
133 rich.text.Text('Environmental variables:', style='argparse.groups'),
134 rich.markdown.Markdown(epilog, style='argparse.text')
135 )
136 main_parser = argparse.ArgumentParser(description=f"{PROGRAM_NAME}: "
137 f"KLayout-integrated Parasitic Extraction Tool",
138 epilog=epilog_md,
139 add_help=False,
140 formatter_class=RichHelpFormatter)
142 group_special = main_parser.add_argument_group("Special options")
143 group_special.add_argument("--help", "-h", action='help', help="show this help message and exit")
144 group_special.add_argument("--version", "-v", action='version', version=f'{PROGRAM_NAME} {__version__}')
145 group_special.add_argument("--log_level", dest='log_level', default='subprocess',
146 help=render_enum_help(topic='log_level', enum_cls=LogLevel))
147 group_special.add_argument("--threads", dest='num_threads', type=int,
148 default=os.cpu_count() * 4,
149 help="number of threads (e.g. for FasterCap) (default is %(default)s)")
150 group_special.add_argument('--klayout', dest='klayout_exe_path', default='klayout',
151 help="Path to klayout executable (default is '%(default)s')")
153 group_pex = main_parser.add_argument_group("Parasitic Extraction Setup")
154 group_pex.add_argument("--pdk", dest="pdk", required=True, type=PDK,
155 help=render_enum_help(topic='pdk', enum_cls=PDK))
157 group_pex.add_argument("--out_dir", "-o", dest="output_dir_base_path", default="output",
158 help="Output directory path (default is '%(default)s')")
160 group_pex_input = main_parser.add_argument_group("Parasitic Extraction Input",
161 description="Either LVS is run, or an existing LVSDB is used")
162 group_pex_input.add_argument("--gds", "-g", dest="gds_path", help="GDS path (for LVS)")
163 group_pex_input.add_argument("--schematic", "-s", dest="schematic_path",
164 help="Schematic SPICE netlist path (for LVS)")
165 group_pex_input.add_argument("--lvsdb", "-l", dest="lvsdb_path", help="KLayout LVSDB path (bypass LVS)")
166 group_pex_input.add_argument("--cell", "-c", dest="cell_name", default=None,
167 help="Cell (default is the top cell)")
169 group_pex_input.add_argument("--cache-lvs", dest="cache_lvs",
170 type=true_or_false, default=True,
171 help="Used cached LVSDB (for given input GDS) (default is %(default)s)")
172 group_pex_input.add_argument("--cache-dir", dest="cache_dir_path", default=None,
173 help="Path for cached LVSDB (default is .kpex_cache within --out_dir)")
175 group_pex_options = main_parser.add_argument_group("Parasitic Extraction Options")
176 group_pex_options.add_argument("--blackbox", dest="blackbox_devices",
177 type=true_or_false, default=False, # TODO: in the future this should be True by default
178 help="Blackbox devices like MIM/MOM caps, as they are handled by SPICE models "
179 "(default is %(default)s for testing now)")
180 group_pex_options.add_argument("--fastercap", dest="run_fastercap",
181 type=true_or_false, default=False,
182 help="Run FasterCap engine (default is %(default)s)")
183 group_pex_options.add_argument("--fastcap", dest="run_fastcap",
184 type=true_or_false, default=False,
185 help="Run FastCap2 engine (default is %(default)s)")
186 group_pex_options.add_argument("--magic", dest="run_magic",
187 type=true_or_false, default=False,
188 help="Run MAGIC engine (default is %(default)s)")
189 group_pex_options.add_argument("--2.5D", dest="run_2_5D",
190 type=true_or_false, default=False,
191 help="Run 2.5D analytical engine (default is %(default)s)")
193 group_fastercap = main_parser.add_argument_group("FasterCap options")
194 group_fastercap.add_argument("--k_void", "-k", dest="k_void",
195 type=float, default=3.9,
196 help="Dielectric constant of void (default is %(default)s)")
197 group_fastercap.add_argument("--delaunay_amax", "-a", dest="delaunay_amax",
198 type=float, default=50,
199 help="Delaunay triangulation maximum area (default is %(default)s)")
200 group_fastercap.add_argument("--delaunay_b", "-b", dest="delaunay_b",
201 type=float, default=0.5,
202 help="Delaunay triangulation b (default is %(default)s)")
203 group_fastercap.add_argument("--geo_check", dest="geometry_check",
204 type=true_or_false, default=False,
205 help=f"Validate geometries before passing to FasterCap "
206 f"(default is False)")
207 group_fastercap.add_argument("--diel", dest="dielectric_filter",
208 type=str, default="all",
209 help=f"Comma separated list of dielectric filter patterns. "
210 f"Allowed patterns are: (none, all, -dielname1, +dielname2) "
211 f"(default is %(default)s)")
213 group_fastercap.add_argument("--tolerance", dest="fastercap_tolerance",
214 type=float, default=0.05,
215 help="FasterCap -aX error tolerance (default is %(default)s)")
216 group_fastercap.add_argument("--d_coeff", dest="fastercap_d_coeff",
217 type=float, default=0.5,
218 help=f"FasterCap -d direct potential interaction coefficient to mesh refinement "
219 f"(default is %(default)s)")
220 group_fastercap.add_argument("--mesh", dest="fastercap_mesh_refinement_value",
221 type=float, default=0.5,
222 help="FasterCap -m Mesh relative refinement value (default is %(default)s)")
223 group_fastercap.add_argument("--ooc", dest="fastercap_ooc_condition",
224 type=float, default=2,
225 help="FasterCap -f out-of-core free memory to link memory condition "
226 "(0 = don't go OOC, default is %(default)s)")
227 group_fastercap.add_argument("--auto_precond", dest="fastercap_auto_preconditioner",
228 type=true_or_false, default=True,
229 help=f"FasterCap -ap Automatic preconditioner usage (default is %(default)s)")
230 group_fastercap.add_argument("--galerkin", dest="fastercap_galerkin_scheme",
231 action='store_true', default=False,
232 help=f"FasterCap -g Use Galerkin scheme (default is %(default)s)")
233 group_fastercap.add_argument("--jacobi", dest="fastercap_jacobi_preconditioner",
234 action='store_true', default=False,
235 help="FasterCap -pj Use Jacobi preconditioner (default is %(default)s)")
237 PDKPATH = os.environ.get('PDKPATH', None)
238 default_magicrc_path = \
239 None if PDKPATH is None \
240 else os.path.abspath(f"{PDKPATH}/libs.tech/magic/{os.environ['PDK']}.magicrc")
241 group_magic = main_parser.add_argument_group("MAGIC options")
242 group_magic.add_argument('--magicrc', dest='magicrc_path', default=default_magicrc_path,
243 help=f"Path to magicrc configuration file (default is '%(default)s')")
244 group_magic.add_argument("--magic_mode", dest='magic_pex_mode', default='CC',
245 help=render_enum_help(topic='log_level', enum_cls=MagicPEXMode))
246 group_magic.add_argument("--magic_cthresh", dest="magic_cthresh",
247 type=float, default=0.01,
248 help="Threshold for ignored parasitic capacitances (default is %(default)s)")
249 group_magic.add_argument("--magic_rthresh", dest="magic_rthresh",
250 type=float, default=100.0,
251 help="Threshold for ignored parasitic resistances (default is %(default)s)")
252 group_magic.add_argument("--magic_halo", dest="magic_halo",
253 type=float, default=None,
254 help="Custom sidewall halo distance in µm "
255 "(MAGIC command: extract halo <value>) (default is no custom halo)")
256 group_magic.add_argument('--magic_exe', dest='magic_exe_path', default='magic',
257 help="Path to magic executable (default is '%(default)s')")
259 if arg_list is None:
260 arg_list = sys.argv[1:]
261 args = main_parser.parse_args(arg_list)
262 return args
264 @staticmethod
265 def validate_args(args: argparse.Namespace):
266 found_errors = False
268 pdk_config: PDKConfig = args.pdk.config
269 args.tech_pbjson_path = pdk_config.tech_pb_json_path
270 args.lvs_script_path = pdk_config.pex_lvs_script_path
272 if not os.path.isfile(args.klayout_exe_path):
273 path = shutil.which(args.klayout_exe_path)
274 if not path:
275 error(f"Can't locate KLayout executable at {args.klayout_exe_path}")
276 found_errors = True
278 if not os.path.isfile(args.tech_pbjson_path):
279 error(f"Can't read technology file at path {args.tech_pbjson_path}")
280 found_errors = True
282 rule('Input Layout')
284 # input mode: LVS or existing LVSDB?
285 if args.gds_path:
286 info(f"GDS input file passed, running in LVS mode")
287 args.input_mode = InputMode.GDS
288 if not os.path.isfile(args.gds_path):
289 error(f"Can't read GDS file (LVS input) at path {args.gds_path}")
290 found_errors = True
291 else:
292 args.layout = kdb.Layout()
293 args.layout.read(args.gds_path)
295 top_cells = args.layout.top_cells()
297 if args.cell_name: # explicit user-specified cell name
298 args.effective_cell_name = args.cell_name
300 found_cell: Optional[kdb.Cell] = None
301 for cell in args.layout.cells('*'):
302 if cell.name == args.effective_cell_name:
303 found_cell = cell
304 break
305 if not found_cell:
306 error(f"Could not find cell {args.cell_name} in GDS {args.gds_path}")
307 found_errors = True
309 is_only_top_cell = len(top_cells) == 1 and top_cells[0].name == args.cell_name
310 if is_only_top_cell:
311 info(f"Found cell {args.cell_name} in GDS {args.gds_path} (only top cell)")
312 else: # there are other cells => extract the top cell to a tmp layout
313 args.effective_gds_path = os.path.join(args.output_dir_path, f"{args.cell_name}_exported.gds.gz")
314 info(f"Found cell {args.cell_name} in GDS {args.gds_path}, "
315 f"but it is not the only top cell, "
316 f"so layout is exported to: {args.effective_gds_path}")
318 found_cell.write(args.effective_gds_path)
319 else: # find top cell
320 if len(top_cells) == 1:
321 args.effective_cell_name = top_cells[0].name
322 info(f"No explicit top cell specified, using top cell '{args.effective_cell_name}'")
323 else:
324 args.effective_cell_name = 'TOP'
325 error(f"Could not determine the default top cell in GDS {args.gds_path}, "
326 f"there are multiple: {', '.join([c.name for c in top_cells])}. "
327 f"Use --cell to specify the cell")
328 found_errors = True
330 args.effective_gds_path = args.gds_path
331 else:
332 info(f"LVSDB input file passed, bypassing LVS")
333 args.input_mode = InputMode.LVSDB
334 if not hasattr(args, 'lvsdb_path'):
335 error(f"LVSDB input path not specified (argument --lvsdb)")
336 found_errors = True
337 elif not os.path.isfile(args.lvsdb_path):
338 error(f"Can't read KLayout LVSDB file at path {args.lvsdb_path}")
339 found_errors = True
340 else:
341 lvsdb = kdb.LayoutVsSchematic()
342 lvsdb.read(args.lvsdb_path)
343 top_cell: kdb.Cell = lvsdb.internal_top_cell()
344 args.effective_cell_name = top_cell.name
346 def input_file_stem(path: str):
347 # could be *.gds, or *.gds.gz, so remove all extensions
348 return os.path.basename(path).split(sep='.')[0]
350 if hasattr(args, 'effective_cell_name'):
351 run_dir_id: str
352 match args.input_mode:
353 case InputMode.GDS:
354 run_dir_id = f"{input_file_stem(args.gds_path)}__{args.effective_cell_name}"
355 case InputMode.LVSDB:
356 run_dir_id = f"{input_file_stem(args.lvsdb_path)}__{args.effective_cell_name}"
357 case _:
358 raise NotImplementedError(f"Unknown input mode {args.input_mode}")
360 args.output_dir_path = os.path.join(args.output_dir_base_path, run_dir_id)
361 os.makedirs(args.output_dir_path, exist_ok=True)
362 if args.input_mode == InputMode.GDS:
363 if args.schematic_path:
364 args.effective_schematic_path = args.schematic_path
365 if not os.path.isfile(args.schematic_path):
366 error(f"Can't read schematic (LVS input) at path {args.schematic_path}")
367 found_errors = True
368 else:
369 info(f"LVS input schematic not specified (argument --schematic), using dummy schematic")
370 args.effective_schematic_path = os.path.join(args.output_dir_path,
371 f"{args.effective_cell_name}_dummy_schematic.spice")
372 with open(args.effective_schematic_path, 'w') as f:
373 f.writelines([
374 f".subckt {args.effective_cell_name} VDD VSS",
375 '.ends',
376 '.end'
377 ])
379 try:
380 args.log_level = LogLevel[args.log_level.upper()]
381 except KeyError:
382 error(f"Requested log level {args.log_level.lower()} does not exist, "
383 f"{render_enum_help(topic='log_level', enum_cls=LogLevel, print_default=False)}")
384 found_errors = True
386 try:
387 pattern_string: str = args.dielectric_filter
388 args.dielectric_filter = MultipleChoicePattern(pattern=pattern_string)
389 except ValueError as e:
390 error("Failed to parse --diel arg", e)
391 found_errors = True
393 # at least one engine must be activated
395 if not (args.run_magic or args.run_fastcap or args.run_fastercap or args.run_2_5D):
396 error("No PEX engines activated")
397 engine_help = """
398| Argument | Description |
399| -------------- | --------------------------------------- |
400| --fastercap y | Run kpex/FasterCap engine |
401| --2.5D y | Run kpex/2.5D engine |
402| --magic y | Run MAGIC engine |
403"""
404 subproc(f"\nPlease activate one or more engines using the arguments:\n{engine_help}")
405 found_errors = True
407 if args.cache_dir_path is None:
408 args.cache_dir_path = os.path.join(args.output_dir_base_path, '.kpex_cache')
410 if found_errors:
411 raise ArgumentValidationError("Argument validation failed")
413 def build_fastercap_input(self,
414 args: argparse.Namespace,
415 pex_context: KLayoutExtractionContext,
416 tech_info: TechInfo) -> str:
417 rule('Process stackup')
418 fastercap_input_builder = FasterCapInputBuilder(pex_context=pex_context,
419 tech_info=tech_info,
420 k_void=args.k_void,
421 delaunay_amax=args.delaunay_amax,
422 delaunay_b=args.delaunay_b)
423 gen: FasterCapModelGenerator = fastercap_input_builder.build()
425 rule('FasterCap Input File Generation')
426 faster_cap_input_dir_path = os.path.join(args.output_dir_path, 'FasterCap_Input_Files')
427 os.makedirs(faster_cap_input_dir_path, exist_ok=True)
429 lst_file = gen.write_fastcap(output_dir_path=faster_cap_input_dir_path, prefix='FasterCap_Input_')
431 rule('STL File Generation')
432 geometry_dir_path = os.path.join(args.output_dir_path, 'Geometries')
433 os.makedirs(geometry_dir_path, exist_ok=True)
434 gen.dump_stl(output_dir_path=geometry_dir_path, prefix='')
436 if args.geometry_check:
437 rule('Geometry Validation')
438 gen.check()
440 return lst_file
443 def run_fastercap_extraction(self,
444 args: argparse.Namespace,
445 pex_context: KLayoutExtractionContext,
446 lst_file: str):
447 rule('FasterCap Execution')
448 info(f"Configure number of OpenMP threads (environmental variable OMP_NUM_THREADS) as {args.num_threads}")
449 os.environ['OMP_NUM_THREADS'] = f"{args.num_threads}"
451 exe_path = "FasterCap"
452 log_path = os.path.join(args.output_dir_path, f"{args.effective_cell_name}_FasterCap_Output.txt")
453 raw_csv_path = os.path.join(args.output_dir_path, f"{args.effective_cell_name}_FasterCap_Result_Matrix_Raw.csv")
454 avg_csv_path = os.path.join(args.output_dir_path, f"{args.effective_cell_name}_FasterCap_Result_Matrix_Avg.csv")
455 expanded_netlist_path = os.path.join(args.output_dir_path,
456 f"{args.effective_cell_name}_FasterCap_Expanded_Netlist.cir")
457 expanded_netlist_csv_path = os.path.join(args.output_dir_path,
458 f"{args.effective_cell_name}_FasterCap_Expanded_Netlist.csv")
459 reduced_netlist_path = os.path.join(args.output_dir_path, f"{args.effective_cell_name}_FasterCap_Reduced_Netlist.cir")
461 run_fastercap(exe_path=exe_path,
462 lst_file_path=lst_file,
463 log_path=log_path,
464 tolerance=args.fastercap_tolerance,
465 d_coeff=args.fastercap_d_coeff,
466 mesh_refinement_value=args.fastercap_mesh_refinement_value,
467 ooc_condition=args.fastercap_ooc_condition,
468 auto_preconditioner=args.fastercap_auto_preconditioner,
469 galerkin_scheme=args.fastercap_galerkin_scheme,
470 jacobi_preconditioner=args.fastercap_jacobi_preconditioner)
472 cap_matrix = fastercap_parse_capacitance_matrix(log_path)
473 cap_matrix.write_csv(raw_csv_path)
475 cap_matrix = cap_matrix.averaged_off_diagonals()
476 cap_matrix.write_csv(avg_csv_path)
478 netlist_expander = NetlistExpander()
479 expanded_netlist = netlist_expander.expand(
480 extracted_netlist=pex_context.lvsdb.netlist(),
481 top_cell_name=pex_context.top_cell.name,
482 cap_matrix=cap_matrix,
483 blackbox_devices=args.blackbox_devices
484 )
486 # create a nice CSV for reports, useful for spreadsheets
487 netlist_csv_writer = NetlistCSVWriter()
488 netlist_csv_writer.write_csv(netlist=expanded_netlist,
489 top_cell_name=pex_context.top_cell.name,
490 output_path=expanded_netlist_csv_path)
492 rule("Extended netlist (CSV format):")
493 with open(expanded_netlist_csv_path, 'r') as f:
494 for line in f.readlines():
495 subproc(line[:-1]) # abusing subproc, simply want verbatim
496 rule()
498 info(f"Wrote expanded netlist CSV to: {expanded_netlist_csv_path}")
500 spice_writer = kdb.NetlistSpiceWriter()
501 spice_writer.use_net_names = True
502 spice_writer.with_comments = False
503 expanded_netlist.write(expanded_netlist_path, spice_writer)
504 info(f"Wrote expanded netlist to: {expanded_netlist_path}")
506 netlist_reducer = NetlistReducer()
507 reduced_netlist = netlist_reducer.reduce(netlist=expanded_netlist,
508 top_cell_name=pex_context.top_cell.name)
509 reduced_netlist.write(reduced_netlist_path, spice_writer)
510 info(f"Wrote reduced netlist to: {reduced_netlist_path}")
512 self._fastercap_extracted_csv_path = expanded_netlist_csv_path
514 def run_magic_extraction(self,
515 args: argparse.Namespace):
516 if args.input_mode != InputMode.GDS:
517 error(f"MAGIC engine only works with GDS input mode"
518 f" (currently {args.input_mode})")
519 return
521 magic_run_dir = os.path.join(args.output_dir_path, f"magic_{args.magic_pex_mode}")
522 magic_log_path = os.path.join(magic_run_dir, f"{args.effective_cell_name}_MAGIC_CC_Output.txt")
523 magic_script_path = os.path.join(magic_run_dir, f"{args.effective_cell_name}_MAGIC_CC_Script.tcl")
525 output_netlist_path = f"{magic_run_dir}/{args.effective_cell_name}.pex.spice"
527 os.makedirs(magic_run_dir, exist_ok=True)
529 prepare_magic_script(gds_path=args.effective_gds_path,
530 cell_name=args.effective_cell_name,
531 run_dir_path=magic_run_dir,
532 script_path=magic_script_path,
533 output_netlist_path=output_netlist_path,
534 pex_mode=args.magic_pex_mode,
535 c_threshold=args.magic_cthresh,
536 r_threshold=args.magic_rthresh,
537 halo=args.magic_halo)
539 run_magic(exe_path=args.magic_exe_path,
540 magicrc_path=args.magicrc_path,
541 script_path=magic_script_path,
542 log_path=magic_log_path)
544 subproc(f"SPICE netlist saved at: {output_netlist_path}")
545 rule("MAGIC PEX SPICE netlist")
546 with open(output_netlist_path, 'r') as f:
547 subproc(f.read())
548 rule()
550 def run_fastcap_extraction(self,
551 args: argparse.Namespace,
552 pex_context: KLayoutExtractionContext,
553 lst_file: str):
554 rule('FastCap2 Execution')
555 exe_path = "fastcap"
556 log_path = os.path.join(args.output_dir_path, f"{args.effective_cell_name}_FastCap2_Output.txt")
557 raw_csv_path = os.path.join(args.output_dir_path, f"{args.effective_cell_name}_FastCap2_Result_Matrix_Raw.csv")
558 avg_csv_path = os.path.join(args.output_dir_path, f"{args.effective_cell_name}_FastCap2_Result_Matrix_Avg.csv")
559 expanded_netlist_path = os.path.join(args.output_dir_path,
560 f"{args.effective_cell_name}_FastCap2_Expanded_Netlist.cir")
561 reduced_netlist_path = os.path.join(args.output_dir_path,
562 f"{args.effective_cell_name}_FastCap2_Reduced_Netlist.cir")
564 run_fastcap(exe_path=exe_path,
565 lst_file_path=lst_file,
566 log_path=log_path)
568 cap_matrix = fastcap_parse_capacitance_matrix(log_path)
569 cap_matrix.write_csv(raw_csv_path)
571 cap_matrix = cap_matrix.averaged_off_diagonals()
572 cap_matrix.write_csv(avg_csv_path)
574 netlist_expander = NetlistExpander()
575 expanded_netlist = netlist_expander.expand(
576 extracted_netlist=pex_context.lvsdb.netlist(),
577 top_cell_name=pex_context.top_cell.name,
578 cap_matrix=cap_matrix,
579 blackbox_devices=args.blackbox_devices
580 )
582 spice_writer = kdb.NetlistSpiceWriter()
583 spice_writer.use_net_names = True
584 spice_writer.with_comments = False
585 expanded_netlist.write(expanded_netlist_path, spice_writer)
586 info(f"Wrote expanded netlist to: {expanded_netlist_path}")
588 netlist_reducer = NetlistReducer()
589 reduced_netlist = netlist_reducer.reduce(netlist=expanded_netlist,
590 top_cell_name=pex_context.top_cell.name)
591 reduced_netlist.write(reduced_netlist_path, spice_writer)
592 info(f"Wrote reduced netlist to: {reduced_netlist_path}")
594 def run_kpex_2_5d_engine(self,
595 args: argparse.Namespace,
596 pex_context: KLayoutExtractionContext,
597 tech_info: TechInfo,
598 report_path: str,
599 netlist_csv_path: str):
600 extractor = RCExtractor(pex_context=pex_context,
601 tech_info=tech_info,
602 report_path=report_path)
603 extraction_results = extractor.extract()
605 with open(netlist_csv_path, 'w') as f:
606 f.write('Device;Net1;Net2;Capacitance [fF]\n')
607 # f.write('Device;Net1;Net2;Capacitance [F];Capacitance [fF]\n')
608 summary = extraction_results.summarize()
609 for idx, (key, cap_value) in enumerate(summary.capacitances.items()):
610 # f.write(f"C{idx + 1};{key.net1};{key.net2};{cap_value / 1e15};{round(cap_value, 3)}\n")
611 f.write(f"C{idx + 1};{key.net1};{key.net2};{round(cap_value, 3)}\n")
613 rule("kpex/2.5D extracted netlist (CSV format):")
614 with open(netlist_csv_path, 'r') as f:
615 for line in f.readlines():
616 subproc(line[:-1]) # abusing subproc, simply want verbatim
618 rule("Extracted netlist CSV")
619 subproc(f"{netlist_csv_path}")
622 # NOTE: there was a KLayout bug that some of the categories were lost,
623 # so that the marker browser could not load the report file
624 try:
625 report = rdb.ReportDatabase('')
626 report.load(report_path) # try loading rdb
627 except Exception as e:
628 rule("Repair broken marker DB")
629 warning(f"Detected KLayout bug: RDB can't be loaded due to exception {e}")
630 repair_rdb(report_path)
632 return extraction_results
634 def setup_logging(self, args: argparse.Namespace):
635 def register_log_file_handler(log_path: str,
636 formatter: Optional[logging.Formatter]) -> logging.Handler:
637 handler = logging.FileHandler(log_path)
638 handler.setLevel(LogLevel.SUBPROCESS)
639 if formatter:
640 handler.setFormatter(formatter)
641 register_additional_handler(handler)
642 return handler
644 def reregister_log_file_handler(handler: logging.Handler,
645 log_path: str,
646 formatter: Optional[logging.Formatter]):
647 deregister_additional_handler(handler)
648 handler.flush()
649 handler.close()
650 os.makedirs(args.output_dir_path, exist_ok=True)
651 new_path = os.path.join(args.output_dir_path, os.path.basename(log_path))
652 if os.path.exists(new_path):
653 ctime = os.path.getctime(new_path)
654 dt = datetime.fromtimestamp(ctime)
655 timestamp = dt.strftime('%Y-%m-%d_%H-%M-%S')
656 backup_path = f"{new_path[:-4]}_{timestamp}.bak.log"
657 shutil.move(new_path, backup_path)
658 log_path = shutil.move(log_path, new_path)
659 register_log_file_handler(log_path, formatter)
661 # setup preliminary logger
662 cli_log_path_plain = os.path.join(args.output_dir_base_path, f"kpex_plain.log")
663 cli_log_path_formatted = os.path.join(args.output_dir_base_path, f"kpex.log")
664 formatter = logging.Formatter('[%(asctime)s] [%(levelname)s] %(message)s')
665 file_handler_plain = register_log_file_handler(cli_log_path_plain, None)
666 file_handler_formatted = register_log_file_handler(cli_log_path_formatted, formatter)
667 try:
668 self.validate_args(args)
669 except ArgumentValidationError:
670 if hasattr(args, 'output_dir_path'):
671 reregister_log_file_handler(file_handler_plain, cli_log_path_plain, None)
672 reregister_log_file_handler(file_handler_formatted, cli_log_path_formatted, formatter)
673 sys.exit(1)
674 reregister_log_file_handler(file_handler_plain, cli_log_path_plain, None)
675 reregister_log_file_handler(file_handler_formatted, cli_log_path_formatted, formatter)
677 set_log_level(args.log_level)
679 @staticmethod
680 def modification_date(filename: str) -> datetime:
681 t = os.path.getmtime(filename)
682 return datetime.fromtimestamp(t)
684 def create_lvsdb(self, args: argparse.Namespace) -> kdb.LayoutVsSchematic:
685 lvsdb = kdb.LayoutVsSchematic()
687 match args.input_mode:
688 case InputMode.LVSDB:
689 lvsdb.read(args.lvsdb_path)
690 case InputMode.GDS:
691 lvs_log_path = os.path.join(args.output_dir_path, f"{args.effective_cell_name}_lvs.log")
692 lvsdb_path = os.path.join(args.output_dir_path, f"{args.effective_cell_name}.lvsdb.gz")
693 lvsdb_cache_path = os.path.join(args.cache_dir_path, args.pdk,
694 os.path.splitroot(os.path.abspath(args.gds_path))[-1],
695 f"{args.effective_cell_name}.lvsdb.gz")
697 lvs_needed = True
699 if args.cache_lvs:
700 if not os.path.exists(lvsdb_cache_path):
701 info(f"Cache miss: extracted LVSDB does not exist")
702 subproc(lvsdb_cache_path)
703 elif self.modification_date(lvsdb_cache_path) <= self.modification_date(args.gds_path):
704 info(f"Cache miss: extracted LVSDB is older than the input GDS")
705 subproc(lvsdb_cache_path)
706 else:
707 warning(f"Cache hit: Reusing cached LVSDB")
708 subproc(lvsdb_cache_path)
709 lvs_needed = False
711 if lvs_needed:
712 lvs_runner = LVSRunner()
713 lvs_runner.run_klayout_lvs(exe_path=args.klayout_exe_path,
714 lvs_script=args.lvs_script_path,
715 gds_path=args.effective_gds_path,
716 schematic_path=args.effective_schematic_path,
717 log_path=lvs_log_path,
718 lvsdb_path=lvsdb_path)
719 if args.cache_lvs:
720 cache_dir_path = os.path.dirname(lvsdb_cache_path)
721 if not os.path.exists(cache_dir_path):
722 os.makedirs(cache_dir_path, exist_ok=True)
723 shutil.copy(lvsdb_path, lvsdb_cache_path)
725 lvsdb.read(lvsdb_path)
726 return lvsdb
728 def main(self, argv: List[str]):
729 if '-v' not in argv and \
730 '--version' not in argv and \
731 '-h' not in argv and \
732 '--help' not in argv:
733 rule('Command line arguments')
734 subproc(' '.join(map(shlex.quote, sys.argv)))
736 args = self.parse_args(argv[1:])
738 os.makedirs(args.output_dir_base_path, exist_ok=True)
739 self.setup_logging(args)
741 tech_info = TechInfo.from_json(args.tech_pbjson_path,
742 dielectric_filter=args.dielectric_filter)
744 if args.run_magic:
745 rule('MAGIC')
746 self.run_magic_extraction(args)
748 # no need to run LVS etc if only running magic engine
749 if not (args.run_fastcap or args.run_fastercap or args.run_2_5D):
750 return
752 rule('Prepare LVSDB')
753 lvsdb = self.create_lvsdb(args)
755 pex_context = KLayoutExtractionContext.prepare_extraction(top_cell=args.effective_cell_name,
756 lvsdb=lvsdb,
757 tech=tech_info,
758 blackbox_devices=args.blackbox_devices)
759 rule('Non-empty layers in LVS database')
760 for gds_pair, layer_info in pex_context.extracted_layers.items():
761 names = [l.lvs_layer_name for l in layer_info.source_layers]
762 info(f"{gds_pair} -> ({' '.join(names)})")
764 gds_path = os.path.join(args.output_dir_path, f"{args.effective_cell_name}_l2n_extracted.gds.gz")
765 pex_context.target_layout.write(gds_path)
767 gds_path = os.path.join(args.output_dir_path, f"{args.effective_cell_name}_l2n_internal.gds.gz")
768 pex_context.lvsdb.internal_layout().write(gds_path)
770 def dump_layers(cell: str,
771 layers: List[KLayoutExtractedLayerInfo],
772 layout_dump_path: str):
773 layout = kdb.Layout()
774 layout.dbu = lvsdb.internal_layout().dbu
776 top_cell = layout.create_cell(cell)
777 for ulyr in layers:
778 li = kdb.LayerInfo(*ulyr.gds_pair)
779 li.name = ulyr.lvs_layer_name
780 layer = layout.insert_layer(li)
781 layout.insert(top_cell.cell_index(), layer, ulyr.region.dup())
783 layout.write(layout_dump_path)
785 if len(pex_context.unnamed_layers) >= 1:
786 layout_dump_path = os.path.join(args.output_dir_path, f"{args.effective_cell_name}_unnamed_LVS_layers.gds.gz")
787 dump_layers(cell=args.effective_cell_name,
788 layers=pex_context.unnamed_layers,
789 layout_dump_path=layout_dump_path)
791 if len(pex_context.extracted_layers) >= 1:
792 layout_dump_path = os.path.join(args.output_dir_path, f"{args.effective_cell_name}_nonempty_LVS_layers.gds.gz")
793 nonempty_layers = [l \
794 for layers in pex_context.extracted_layers.values() \
795 for l in layers.source_layers]
796 dump_layers(cell=args.effective_cell_name,
797 layers=nonempty_layers,
798 layout_dump_path=layout_dump_path)
799 else:
800 error("No extracted layers found")
801 sys.exit(1)
803 if args.run_fastcap or args.run_fastercap:
804 lst_file = self.build_fastercap_input(args=args,
805 pex_context=pex_context,
806 tech_info=tech_info)
807 if args.run_fastercap:
808 self.run_fastercap_extraction(args=args,
809 pex_context=pex_context,
810 lst_file=lst_file)
811 if args.run_fastcap:
812 self.run_fastcap_extraction(args=args,
813 pex_context=pex_context,
814 lst_file=lst_file)
816 if args.run_2_5D:
817 rule("kpex/2.5D PEX Engine")
818 report_path = os.path.join(args.output_dir_path, f"{args.effective_cell_name}_k25d_pex_report.rdb.gz")
819 netlist_csv_path = os.path.abspath(os.path.join(args.output_dir_path, f"{args.effective_cell_name}_k25d_pex_netlist.csv"))
821 self._rcx25_extraction_results = self.run_kpex_2_5d_engine( # NOTE: store for test case
822 args=args,
823 pex_context=pex_context,
824 tech_info=tech_info,
825 report_path=report_path,
826 netlist_csv_path=netlist_csv_path
827 )
829 self._rcx25_extracted_csv_path = netlist_csv_path
831 @property
832 def rcx25_extraction_results(self) -> ExtractionResults:
833 if not hasattr(self, '_rcx25_extraction_results'):
834 raise Exception('rcx25_extraction_results is not initialized, was run_kpex_2_5d_engine called?')
835 return self._rcx25_extraction_results
837 @property
838 def rcx25_extracted_csv_path(self) -> str:
839 if not hasattr(self, '_rcx25_extracted_csv_path'):
840 raise Exception('rcx25_extracted_csv_path is not initialized, was run_kpex_2_5d_engine called?')
841 return self._rcx25_extracted_csv_path
843 @property
844 def fastercap_extracted_csv_path(self) -> str:
845 if not hasattr(self, '_fastercap_extracted_csv_path'):
846 raise Exception('fastercap_extracted_csv_path is not initialized, was run_fastercap_extraction called?')
847 return self._fastercap_extracted_csv_path
if __name__ == "__main__":
    # Script entry point: hand the full argv (including the program name)
    # to the CLI driver.
    KpexCLI().main(sys.argv)