Coverage for klayout_pex/kpex_cli.py: 14%

524 statements  

coverage.py v7.10.2, created at 2025-08-08 18:54 +0000

#! /usr/bin/env python3
#
# --------------------------------------------------------------------------------
# SPDX-FileCopyrightText: 2024-2025 Martin Jan Köhler and Harald Pretl
# Johannes Kepler University, Institute for Integrated Circuits.
#
# This file is part of KPEX
# (see https://github.com/martinjankoehler/klayout-pex).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# SPDX-License-Identifier: GPL-3.0-or-later
# --------------------------------------------------------------------------------
#

import argparse
from datetime import datetime
from enum import StrEnum
from functools import cached_property
import logging
import os
import os.path
from pathlib import Path
import rich.console
import rich.markdown
import rich.text
from rich_argparse import RichHelpFormatter
import shlex
import shutil
import sys
from typing import *

import klayout.db as kdb
import klayout.rdb as rdb

from .env import EnvVar, Env
from .fastercap.fastercap_input_builder import FasterCapInputBuilder
from .fastercap.fastercap_model_generator import FasterCapModelGenerator
from .fastercap.fastercap_runner import run_fastercap, fastercap_parse_capacitance_matrix
from .fastcap.fastcap_runner import run_fastcap, fastcap_parse_capacitance_matrix
from .klayout.lvs_runner import LVSRunner
from .klayout.lvsdb_extractor import KLayoutExtractionContext, KLayoutExtractedLayerInfo
from .klayout.netlist_expander import NetlistExpander
from .klayout.netlist_csv import NetlistCSVWriter
from .klayout.netlist_reducer import NetlistReducer
from .klayout.repair_rdb import repair_rdb
from .log import (
    LogLevel,
    set_log_level,
    register_additional_handler,
    deregister_additional_handler,
    # console,
    # debug,
    info,
    warning,
    subproc,
    error,
    rule
)
from .magic.magic_ext_file_parser import parse_magic_pex_run
from .magic.magic_runner import (
    MagicPEXMode,
    MagicShortMode,
    MagicMergeMode,
    run_magic,
    prepare_magic_script,
)
from .magic.magic_log_analyzer import MagicLogAnalyzer
from .pdk_config import PDKConfig
from .rcx25.extractor import RCX25Extractor, ExtractionResults
from .rcx25.netlist_expander import RCX25NetlistExpander
from .rcx25.pex_mode import PEXMode
from .tech_info import TechInfo
from .util.multiple_choice import MultipleChoicePattern
from .util.argparse_helpers import render_enum_help, true_or_false
from .version import __version__


# ------------------------------------------------------------------------------------

PROGRAM_NAME = "kpex"


class ArgumentValidationError(Exception):
    pass


class InputMode(StrEnum):
    LVSDB = "lvsdb"
    GDS = "gds"


# TODO: this should be externally configurable
class PDK(StrEnum):
    IHP_SG13G2 = 'ihp_sg13g2'
    SKY130A = 'sky130A'

    @cached_property
    def config(self) -> PDKConfig:
        # NOTE: installation paths of resources in the distribution wheel differ from the source repo
        base_dir = os.path.dirname(os.path.realpath(__file__))

        # NOTE: .git can be dir (standalone clone), or file (in case of submodule)
        if os.path.exists(os.path.join(base_dir, '..', '.git')):  # in source repo
            base_dir = os.path.dirname(base_dir)
            tech_pb_json_dir = os.path.join(base_dir, 'klayout_pex_protobuf')
        else:  # site-packages/klayout_pex -> site-packages/klayout_pex_protobuf
            tech_pb_json_dir = os.path.join(os.path.dirname(base_dir), 'klayout_pex_protobuf')

        match self:
            case PDK.IHP_SG13G2:
                return PDKConfig(
                    name=self,
                    pex_lvs_script_path=os.path.join(base_dir, 'pdk', self, 'libs.tech', 'kpex', 'sg13g2.lvs'),
                    tech_pb_json_path=os.path.join(tech_pb_json_dir, f"{self}_tech.pb.json")
                )
            case PDK.SKY130A:
                return PDKConfig(
                    name=self,
                    pex_lvs_script_path=os.path.join(base_dir, 'pdk', self, 'libs.tech', 'kpex', 'sky130.lvs'),
                    tech_pb_json_path=os.path.join(tech_pb_json_dir, f"{self}_tech.pb.json")
                )

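# Example (illustrative only, not part of the module): assuming a hypothetical source
# checkout at /work/klayout-pex, PDK.SKY130A.config would resolve roughly to
#   pex_lvs_script_path = /work/klayout-pex/pdk/sky130A/libs.tech/kpex/sky130.lvs
#   tech_pb_json_path   = /work/klayout-pex/klayout_pex_protobuf/sky130A_tech.pb.json
# because the .git check above detects a source repo and moves base_dir one level up.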

class KpexCLI:
    @staticmethod
    def parse_args(arg_list: List[str],
                   env: Env) -> argparse.Namespace:
        # epilog = f"See '{PROGRAM_NAME} <subcommand> -h' for help on subcommand"
        epilog = EnvVar.help_epilog_table()
        epilog_md = rich.console.Group(
            rich.text.Text('Environmental variables:', style='argparse.groups'),
            rich.markdown.Markdown(epilog, style='argparse.text')
        )
        main_parser = argparse.ArgumentParser(description=f"{PROGRAM_NAME}: "
                                                          f"KLayout-integrated Parasitic Extraction Tool",
                                              epilog=epilog_md,
                                              add_help=False,
                                              formatter_class=RichHelpFormatter)

        group_special = main_parser.add_argument_group("Special options")
        group_special.add_argument("--help", "-h", action='help', help="show this help message and exit")
        group_special.add_argument("--version", "-v", action='version', version=f'{PROGRAM_NAME} {__version__}')
        group_special.add_argument("--log_level", dest='log_level', default='subprocess',
                                   help=render_enum_help(topic='log_level', enum_cls=LogLevel))
        group_special.add_argument("--threads", dest='num_threads', type=int,
                                   default=os.cpu_count() * 4,
                                   help="number of threads (e.g. for FasterCap) (default is %(default)s)")

        group_pex = main_parser.add_argument_group("Parasitic Extraction Setup")
        group_pex.add_argument("--pdk", dest="pdk", required=True,
                               type=PDK, choices=list(PDK),
                               help=render_enum_help(topic='pdk', enum_cls=PDK))

        group_pex.add_argument("--out_dir", "-o", dest="output_dir_base_path", default="output",
                               help="Output directory path (default is '%(default)s')")

        group_pex_input = main_parser.add_argument_group("Parasitic Extraction Input",
                                                         description="Either LVS is run, or an existing LVSDB is used")
        group_pex_input.add_argument("--gds", "-g", dest="gds_path", default=None,
                                     help="GDS path (for LVS)")
        group_pex_input.add_argument("--schematic", "-s", dest="schematic_path",
                                     help="Schematic SPICE netlist path (for LVS). "
                                          "If none given, a dummy schematic will be created")
        group_pex_input.add_argument("--lvsdb", "-l", dest="lvsdb_path", default=None,
                                     help="KLayout LVSDB path (bypass LVS)")
        group_pex_input.add_argument("--cell", "-c", dest="cell_name", default=None,
                                     help="Cell (default is the top cell)")

        group_pex_input.add_argument("--cache-lvs", dest="cache_lvs",
                                     type=true_or_false, default=True,
                                     help="Use cached LVSDB (for given input GDS) (default is %(default)s)")
        group_pex_input.add_argument("--cache-dir", dest="cache_dir_path", default=None,
                                     help="Path for cached LVSDB (default is .kpex_cache within --out_dir)")
        group_pex_input.add_argument("--lvs-verbose", dest="klayout_lvs_verbose",
                                     type=true_or_false, default=False,
                                     help="Verbose KLayout LVS output (default is %(default)s)")

        group_pex_options = main_parser.add_argument_group("Parasitic Extraction Options")
        group_pex_options.add_argument("--blackbox", dest="blackbox_devices",
                                       type=true_or_false, default=False,  # TODO: in the future this should be True by default
                                       help="Blackbox devices like MIM/MOM caps, as they are handled by SPICE models "
                                            "(default is %(default)s for testing now)")
        group_pex_options.add_argument("--fastercap", dest="run_fastercap",
                                       action='store_true', default=False,
                                       help="Run FasterCap engine (default is %(default)s)")
        group_pex_options.add_argument("--fastcap", dest="run_fastcap",
                                       action='store_true', default=False,
                                       help="Run FastCap2 engine (default is %(default)s)")
        group_pex_options.add_argument("--magic", dest="run_magic",
                                       action='store_true', default=False,
                                       help="Run MAGIC engine (default is %(default)s)")
        group_pex_options.add_argument("--2.5D", dest="run_2_5D",
                                       action='store_true', default=False,
                                       help="Run 2.5D analytical engine (default is %(default)s)")

        group_fastercap = main_parser.add_argument_group("FasterCap options")
        group_fastercap.add_argument("--k_void", "-k", dest="k_void",
                                     type=float, default=3.9,
                                     help="Dielectric constant of void (default is %(default)s)")

        # TODO: reflect that these are also now used by KPEX/2.5D engine!
        group_fastercap.add_argument("--delaunay_amax", "-a", dest="delaunay_amax",
                                     type=float, default=50,
                                     help="Delaunay triangulation maximum area (default is %(default)s)")
        group_fastercap.add_argument("--delaunay_b", "-b", dest="delaunay_b",
                                     type=float, default=0.5,
                                     help="Delaunay triangulation b (default is %(default)s)")
        group_fastercap.add_argument("--geo_check", dest="geometry_check",
                                     type=true_or_false, default=False,
                                     help=f"Validate geometries before passing to FasterCap "
                                          f"(default is False)")
        group_fastercap.add_argument("--diel", dest="dielectric_filter",
                                     type=str, default="all",
                                     help=f"Comma separated list of dielectric filter patterns. "
                                          f"Allowed patterns are: (none, all, -dielname1, +dielname2) "
                                          f"(default is %(default)s)")

        group_fastercap.add_argument("--tolerance", dest="fastercap_tolerance",
                                     type=float, default=0.05,
                                     help="FasterCap -aX error tolerance (default is %(default)s)")
        group_fastercap.add_argument("--d_coeff", dest="fastercap_d_coeff",
                                     type=float, default=0.5,
                                     help=f"FasterCap -d direct potential interaction coefficient to mesh refinement "
                                          f"(default is %(default)s)")
        group_fastercap.add_argument("--mesh", dest="fastercap_mesh_refinement_value",
                                     type=float, default=0.5,
                                     help="FasterCap -m Mesh relative refinement value (default is %(default)s)")
        group_fastercap.add_argument("--ooc", dest="fastercap_ooc_condition",
                                     type=float, default=2,
                                     help="FasterCap -f out-of-core free memory to link memory condition "
                                          "(0 = don't go OOC, default is %(default)s)")
        group_fastercap.add_argument("--auto_precond", dest="fastercap_auto_preconditioner",
                                     type=true_or_false, default=True,
                                     help=f"FasterCap -ap Automatic preconditioner usage (default is %(default)s)")
        group_fastercap.add_argument("--galerkin", dest="fastercap_galerkin_scheme",
                                     action='store_true', default=False,
                                     help=f"FasterCap -g Use Galerkin scheme (default is %(default)s)")
        group_fastercap.add_argument("--jacobi", dest="fastercap_jacobi_preconditioner",
                                     action='store_true', default=False,
                                     help="FasterCap -pj Use Jacobi preconditioner (default is %(default)s)")

        PDKPATH = os.environ.get('PDKPATH', None)
        default_magicrc_path = \
            None if PDKPATH is None \
            else os.path.abspath(f"{PDKPATH}/libs.tech/magic/{os.environ['PDK']}.magicrc")
        group_magic = main_parser.add_argument_group("MAGIC options")
        group_magic.add_argument('--magicrc', dest='magicrc_path', default=default_magicrc_path,
                                 help=f"Path to magicrc configuration file (default is '%(default)s')")
        group_magic.add_argument("--magic_mode", dest='magic_pex_mode',
                                 default=MagicPEXMode.DEFAULT, type=MagicPEXMode, choices=list(MagicPEXMode),
                                 help=render_enum_help(topic='magic_mode', enum_cls=MagicPEXMode))
        group_magic.add_argument("--magic_cthresh", dest="magic_cthresh",
                                 type=float, default=0.01,
                                 help="Threshold (in fF) for ignored parasitic capacitances (default is %(default)s). "
                                      "(MAGIC command: ext2spice cthresh <value>)")
        group_magic.add_argument("--magic_rthresh", dest="magic_rthresh",
                                 type=int, default=100,
                                 help="Threshold (in Ω) for ignored parasitic resistances (default is %(default)s). "
                                      "(MAGIC command: ext2spice rthresh <value>)")
        group_magic.add_argument("--magic_tolerance", dest="magic_tolerance",
                                 type=float, default=1,
                                 help="Set ratio between resistor and device tolerance (default is %(default)s). "
                                      "(MAGIC command: extresist tolerance <value>)")
        group_magic.add_argument("--magic_halo", dest="magic_halo",
                                 type=float, default=None,
                                 help="Custom sidewall halo distance (in µm) "
                                      "(MAGIC command: extract halo <value>) (default is no custom halo)")
        group_magic.add_argument("--magic_short", dest='magic_short_mode',
                                 default=MagicShortMode.DEFAULT, type=MagicShortMode, choices=list(MagicShortMode),
                                 help=render_enum_help(topic='magic_short', enum_cls=MagicShortMode))
        group_magic.add_argument("--magic_merge", dest='magic_merge_mode',
                                 default=MagicMergeMode.DEFAULT, type=MagicMergeMode, choices=list(MagicMergeMode),
                                 help=render_enum_help(topic='magic_merge', enum_cls=MagicMergeMode))

        group_25d = main_parser.add_argument_group("2.5D options")
        group_25d.add_argument("--mode", dest='pex_mode',
                               default=PEXMode.DEFAULT, type=PEXMode, choices=list(PEXMode),
                               help=render_enum_help(topic='mode', enum_cls=PEXMode))
        group_25d.add_argument("--halo", dest="halo",
                               type=float, default=None,
                               help="Custom sidewall halo distance (in µm) to override tech info "
                                    "(default is no custom halo)")
        group_25d.add_argument("--scale", dest="scale_ratio_to_fit_halo",
                               type=true_or_false, default=True,
                               help=f"Scale fringe ratios, so that halo distance is 100%% (default is %(default)s)")

        if arg_list is None:
            arg_list = sys.argv[1:]
        args = main_parser.parse_args(arg_list)

        # environmental variables and their defaults
        args.fastcap_exe_path = env[EnvVar.FASTCAP_EXE]
        args.fastercap_exe_path = env[EnvVar.FASTERCAP_EXE]
        args.klayout_exe_path = env[EnvVar.KLAYOUT_EXE]
        args.magic_exe_path = env[EnvVar.MAGIC_EXE]

        return args

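    # Example invocation (illustrative only; file and cell names are made up):
    #   kpex --pdk sky130A --gds inverter.gds.gz --cell INV --2.5D --magic
    # This runs LVS on inverter.gds.gz (cell INV) and then both the KPEX/2.5D and
    # MAGIC engines, writing all artifacts below --out_dir (default 'output').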

    @staticmethod
    def validate_args(args: argparse.Namespace):
        found_errors = False

        pdk_config: PDKConfig = args.pdk.config
        args.tech_pbjson_path = pdk_config.tech_pb_json_path
        args.lvs_script_path = pdk_config.pex_lvs_script_path

        def input_file_stem(path: str):
            # could be *.gds, or *.gds.gz, so remove all extensions
            return os.path.basename(path).split(sep='.')[0]

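        # Example (illustrative): input_file_stem('designs/inverter.gds.gz') returns
        # 'inverter' -- everything after the first '.' of the basename is dropped.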

        if not os.path.isfile(args.klayout_exe_path):
            path = shutil.which(args.klayout_exe_path)
            if not path:
                error(f"Can't locate KLayout executable at {args.klayout_exe_path}")
                found_errors = True

        if not os.path.isfile(args.tech_pbjson_path):
            error(f"Can't read technology file at path {args.tech_pbjson_path}")
            found_errors = True

        if not os.path.isfile(args.lvs_script_path):
            error(f"Can't locate LVS script path at {args.lvs_script_path}")
            found_errors = True

        rule('Input Layout')

        # check engines vs. input possibilities
        match (args.run_magic, args.run_fastcap, args.run_fastercap, args.run_2_5D,
               args.gds_path, args.lvsdb_path):
            case (True, _, _, _, None, _):
                error(f"Running PEX engine MAGIC requires --gds (--lvsdb not possible)")
                found_errors = True
            case (False, False, False, False, _, _):  # at least one engine must be activated
                error("No PEX engines activated")
                engine_help = """
| Argument    | Description                     |
| ----------- | ------------------------------- |
| --2.5D      | Run KPEX/2.5D analytical engine |
| --fastcap   | Run KPEX/FastCap 3D engine      |
| --fastercap | Run KPEX/FasterCap 3D engine    |
| --magic     | Run MAGIC wrapper engine        |
"""
                subproc(f"\n\nPlease activate one or more engines using the arguments:")
                rich.print(rich.markdown.Markdown(engine_help, style='argparse.text'))
                found_errors = True
            case (_, _, _, _, None, None):
                error(f"Neither GDS nor LVSDB was provided")
                found_errors = True

        # input mode: LVS or existing LVSDB?
        if args.gds_path:
            info(f"GDS input file passed, running in LVS mode")
            args.input_mode = InputMode.GDS
            if not os.path.isfile(args.gds_path):
                error(f"Can't read GDS file (LVS input) at path {args.gds_path}")
                found_errors = True
            else:
                args.layout = kdb.Layout()
                args.layout.read(args.gds_path)

                top_cells = args.layout.top_cells()

                if args.cell_name:  # explicit user-specified cell name
                    args.effective_cell_name = args.cell_name

                    found_cell: Optional[kdb.Cell] = None
                    for cell in args.layout.cells('*'):
                        if cell.name == args.effective_cell_name:
                            found_cell = cell
                            break
                    if not found_cell:
                        error(f"Could not find cell {args.cell_name} in GDS {args.gds_path}")
                        found_errors = True

                    is_only_top_cell = len(top_cells) == 1 and top_cells[0].name == args.cell_name
                    if is_only_top_cell:
                        info(f"Found cell {args.cell_name} in GDS {args.gds_path} (only top cell)")
                    else:  # there are other cells => extract the top cell to a tmp layout
                        run_dir_id = f"{input_file_stem(args.gds_path)}__{args.effective_cell_name}"
                        args.output_dir_path = os.path.join(args.output_dir_base_path, run_dir_id)
                        os.makedirs(args.output_dir_path, exist_ok=True)
                        args.effective_gds_path = os.path.join(args.output_dir_path,
                                                               f"{args.cell_name}_exported.gds.gz")
                        info(f"Found cell {args.cell_name} in GDS {args.gds_path}, "
                             f"but it is not the only top cell, "
                             f"so layout is exported to: {args.effective_gds_path}")

                        found_cell.write(args.effective_gds_path)
                else:  # find top cell
                    if len(top_cells) == 1:
                        args.effective_cell_name = top_cells[0].name
                        info(f"No explicit top cell specified, using top cell '{args.effective_cell_name}'")
                    else:
                        args.effective_cell_name = 'TOP'
                        error(f"Could not determine the default top cell in GDS {args.gds_path}, "
                              f"there are multiple: {', '.join([c.name for c in top_cells])}. "
                              f"Use --cell to specify the cell")
                        found_errors = True

                if not hasattr(args, 'effective_gds_path'):
                    args.effective_gds_path = args.gds_path
        elif args.lvsdb_path is not None:
            info(f"LVSDB input file passed, bypassing LVS")
            args.input_mode = InputMode.LVSDB
            if not os.path.isfile(args.lvsdb_path):
                error(f"Can't read KLayout LVSDB file at path {args.lvsdb_path}")
                found_errors = True
            else:
                lvsdb = kdb.LayoutVsSchematic()
                lvsdb.read(args.lvsdb_path)
                top_cell: kdb.Cell = lvsdb.internal_top_cell()
                args.effective_cell_name = top_cell.name

        if hasattr(args, 'effective_cell_name'):
            run_dir_id: str
            match args.input_mode:
                case InputMode.GDS:
                    run_dir_id = f"{input_file_stem(args.gds_path)}__{args.effective_cell_name}"
                case InputMode.LVSDB:
                    run_dir_id = f"{input_file_stem(args.lvsdb_path)}__{args.effective_cell_name}"
                case _:
                    raise NotImplementedError(f"Unknown input mode {args.input_mode}")

            args.output_dir_path = os.path.join(args.output_dir_base_path, run_dir_id)
            os.makedirs(args.output_dir_path, exist_ok=True)
            if args.input_mode == InputMode.GDS:
                if args.schematic_path:
                    args.effective_schematic_path = args.schematic_path
                    if not os.path.isfile(args.schematic_path):
                        error(f"Can't read schematic (LVS input) at path {args.schematic_path}")
                        found_errors = True
                else:
                    info(f"LVS input schematic not specified (argument --schematic), using dummy schematic")
                    args.effective_schematic_path = os.path.join(args.output_dir_path,
                                                                 f"{args.effective_cell_name}_dummy_schematic.spice")
                    with open(args.effective_schematic_path, 'w', encoding='utf-8') as f:
                        f.writelines([
                            f".subckt {args.effective_cell_name} VDD VSS\n",
                            '.ends\n',
                            '.end\n'
                        ])

        try:
            args.log_level = LogLevel[args.log_level.upper()]
        except KeyError:
            error(f"Requested log level {args.log_level.lower()} does not exist, "
                  f"{render_enum_help(topic='log_level', enum_cls=LogLevel, print_default=False)}")
            found_errors = True

        try:
            pattern_string: str = args.dielectric_filter
            args.dielectric_filter = MultipleChoicePattern(pattern=pattern_string)
        except ValueError as e:
            error("Failed to parse --diel arg", e)
            found_errors = True

        if args.cache_dir_path is None:
            args.cache_dir_path = os.path.join(args.output_dir_base_path, '.kpex_cache')

        if found_errors:
            raise ArgumentValidationError("Argument validation failed")

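    # Example (illustrative): with --out_dir output, --gds inverter.gds.gz and
    # --cell INV, validate_args() derives run_dir_id "inverter__INV", so all
    # run artifacts land in output/inverter__INV/ (and the LVSDB cache, unless
    # --cache-dir is given, in output/.kpex_cache/).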

    def build_fastercap_input(self,
                              args: argparse.Namespace,
                              pex_context: KLayoutExtractionContext,
                              tech_info: TechInfo) -> str:
        rule('Process stackup')
        fastercap_input_builder = FasterCapInputBuilder(pex_context=pex_context,
                                                        tech_info=tech_info,
                                                        k_void=args.k_void,
                                                        delaunay_amax=args.delaunay_amax,
                                                        delaunay_b=args.delaunay_b)
        gen: FasterCapModelGenerator = fastercap_input_builder.build()

        rule('FasterCap Input File Generation')
        faster_cap_input_dir_path = os.path.join(args.output_dir_path, 'FasterCap_Input_Files')
        os.makedirs(faster_cap_input_dir_path, exist_ok=True)

        lst_file = gen.write_fastcap(output_dir_path=faster_cap_input_dir_path, prefix='FasterCap_Input_')

        rule('STL File Generation')
        geometry_dir_path = os.path.join(args.output_dir_path, 'Geometries')
        os.makedirs(geometry_dir_path, exist_ok=True)
        gen.dump_stl(output_dir_path=geometry_dir_path, prefix='')

        if args.geometry_check:
            rule('Geometry Validation')
            gen.check()

        return lst_file


    def run_fastercap_extraction(self,
                                 args: argparse.Namespace,
                                 pex_context: KLayoutExtractionContext,
                                 lst_file: str):
        rule('FasterCap Execution')
        info(f"Configure number of OpenMP threads (environmental variable OMP_NUM_THREADS) as {args.num_threads}")
        os.environ['OMP_NUM_THREADS'] = f"{args.num_threads}"

        log_path = os.path.join(args.output_dir_path, f"{args.effective_cell_name}_FasterCap_Output.txt")
        raw_csv_path = os.path.join(args.output_dir_path, f"{args.effective_cell_name}_FasterCap_Result_Matrix_Raw.csv")
        avg_csv_path = os.path.join(args.output_dir_path, f"{args.effective_cell_name}_FasterCap_Result_Matrix_Avg.csv")
        expanded_netlist_path = os.path.join(args.output_dir_path,
                                             f"{args.effective_cell_name}_FasterCap_Expanded_Netlist.cir")
        expanded_netlist_csv_path = os.path.join(args.output_dir_path,
                                                 f"{args.effective_cell_name}_FasterCap_Expanded_Netlist.csv")
        reduced_netlist_path = os.path.join(args.output_dir_path, f"{args.effective_cell_name}_FasterCap_Reduced_Netlist.cir")

        run_fastercap(exe_path=args.fastercap_exe_path,
                      lst_file_path=lst_file,
                      log_path=log_path,
                      tolerance=args.fastercap_tolerance,
                      d_coeff=args.fastercap_d_coeff,
                      mesh_refinement_value=args.fastercap_mesh_refinement_value,
                      ooc_condition=args.fastercap_ooc_condition,
                      auto_preconditioner=args.fastercap_auto_preconditioner,
                      galerkin_scheme=args.fastercap_galerkin_scheme,
                      jacobi_preconditioner=args.fastercap_jacobi_preconditioner)

        cap_matrix = fastercap_parse_capacitance_matrix(log_path)
        cap_matrix.write_csv(raw_csv_path)

        cap_matrix = cap_matrix.averaged_off_diagonals()
        cap_matrix.write_csv(avg_csv_path)

        netlist_expander = NetlistExpander()
        expanded_netlist = netlist_expander.expand(
            extracted_netlist=pex_context.lvsdb.netlist(),
            top_cell_name=pex_context.annotated_top_cell.name,
            cap_matrix=cap_matrix,
            blackbox_devices=args.blackbox_devices
        )

        # create a nice CSV for reports, useful for spreadsheets
        netlist_csv_writer = NetlistCSVWriter()
        netlist_csv_writer.write_csv(netlist=expanded_netlist,
                                     top_cell_name=pex_context.annotated_top_cell.name,
                                     output_path=expanded_netlist_csv_path)

        rule("Extended netlist (CSV format):")
        with open(expanded_netlist_csv_path, 'r') as f:
            for line in f.readlines():
                subproc(line[:-1])  # abusing subproc, simply want verbatim
        rule()

        info(f"Wrote expanded netlist CSV to: {expanded_netlist_csv_path}")

        spice_writer = kdb.NetlistSpiceWriter()
        spice_writer.use_net_names = True
        spice_writer.with_comments = False
        expanded_netlist.write(expanded_netlist_path, spice_writer)
        info(f"Wrote expanded netlist to: {expanded_netlist_path}")

        netlist_reducer = NetlistReducer()
        reduced_netlist = netlist_reducer.reduce(netlist=expanded_netlist,
                                                 top_cell_name=pex_context.annotated_top_cell.name)
        reduced_netlist.write(reduced_netlist_path, spice_writer)
        info(f"Wrote reduced netlist to: {reduced_netlist_path}")

        self._fastercap_extracted_csv_path = expanded_netlist_csv_path

    def run_magic_extraction(self,
                             args: argparse.Namespace):
        if args.input_mode != InputMode.GDS:
            error(f"MAGIC engine only works with GDS input mode"
                  f" (currently {args.input_mode})")
            return

        magic_run_dir = os.path.join(args.output_dir_path, f"magic_{args.magic_pex_mode}")
        magic_log_path = os.path.join(magic_run_dir,
                                      f"{args.effective_cell_name}_MAGIC_{args.magic_pex_mode}_Output.txt")
        magic_script_path = os.path.join(magic_run_dir,
                                         f"{args.effective_cell_name}_MAGIC_{args.magic_pex_mode}_Script.tcl")

        output_netlist_path = os.path.join(magic_run_dir, f"{args.effective_cell_name}.pex.spice")
        report_db_path = os.path.join(magic_run_dir, f"{args.effective_cell_name}_MAGIC_report.rdb.gz")

        os.makedirs(magic_run_dir, exist_ok=True)

        prepare_magic_script(gds_path=args.effective_gds_path,
                             cell_name=args.effective_cell_name,
                             run_dir_path=magic_run_dir,
                             script_path=magic_script_path,
                             output_netlist_path=output_netlist_path,
                             pex_mode=args.magic_pex_mode,
                             c_threshold=args.magic_cthresh,
                             r_threshold=args.magic_rthresh,
                             tolerance=args.magic_tolerance,
                             halo=args.magic_halo,
                             short_mode=args.magic_short_mode,
                             merge_mode=args.magic_merge_mode)

        run_magic(exe_path=args.magic_exe_path,
                  magicrc_path=args.magicrc_path,
                  script_path=magic_script_path,
                  log_path=magic_log_path)

        magic_pex_run = parse_magic_pex_run(Path(magic_run_dir))

        layout = kdb.Layout()
        layout.read(args.effective_gds_path)

        report = rdb.ReportDatabase('')
        magic_log_analyzer = MagicLogAnalyzer(magic_pex_run=magic_pex_run,
                                              report=report,
                                              dbu=layout.dbu)
        magic_log_analyzer.analyze()
        report.save(report_db_path)

        rule("Paths")
        subproc(f"Report DB saved at: {report_db_path}")
        subproc(f"SPICE netlist saved at: {output_netlist_path}")

        rule("MAGIC PEX SPICE netlist")
        with open(output_netlist_path, 'r') as f:
            subproc(f.read())
        rule()

    def run_fastcap_extraction(self,
                               args: argparse.Namespace,
                               pex_context: KLayoutExtractionContext,
                               lst_file: str):
        rule('FastCap2 Execution')

        log_path = os.path.join(args.output_dir_path, f"{args.effective_cell_name}_FastCap2_Output.txt")
        raw_csv_path = os.path.join(args.output_dir_path, f"{args.effective_cell_name}_FastCap2_Result_Matrix_Raw.csv")
        avg_csv_path = os.path.join(args.output_dir_path, f"{args.effective_cell_name}_FastCap2_Result_Matrix_Avg.csv")
        expanded_netlist_path = os.path.join(args.output_dir_path,
                                             f"{args.effective_cell_name}_FastCap2_Expanded_Netlist.cir")
        reduced_netlist_path = os.path.join(args.output_dir_path,
                                            f"{args.effective_cell_name}_FastCap2_Reduced_Netlist.cir")

        run_fastcap(exe_path=args.fastcap_exe_path,
                    lst_file_path=lst_file,
                    log_path=log_path)

        cap_matrix = fastcap_parse_capacitance_matrix(log_path)
        cap_matrix.write_csv(raw_csv_path)

        cap_matrix = cap_matrix.averaged_off_diagonals()
        cap_matrix.write_csv(avg_csv_path)

        netlist_expander = NetlistExpander()
        expanded_netlist = netlist_expander.expand(
            extracted_netlist=pex_context.lvsdb.netlist(),
            top_cell_name=pex_context.annotated_top_cell.name,
            cap_matrix=cap_matrix,
            blackbox_devices=args.blackbox_devices
        )

        spice_writer = kdb.NetlistSpiceWriter()
        spice_writer.use_net_names = True
        spice_writer.with_comments = False
        expanded_netlist.write(expanded_netlist_path, spice_writer)
        info(f"Wrote expanded netlist to: {expanded_netlist_path}")

        netlist_reducer = NetlistReducer()
        reduced_netlist = netlist_reducer.reduce(netlist=expanded_netlist,
                                                 top_cell_name=pex_context.annotated_top_cell.name)
        reduced_netlist.write(reduced_netlist_path, spice_writer)
        info(f"Wrote reduced netlist to: {reduced_netlist_path}")

    def run_kpex_2_5d_engine(self,
                             args: argparse.Namespace,
                             pex_context: KLayoutExtractionContext,
                             tech_info: TechInfo,
                             report_path: str,
                             netlist_csv_path: Optional[str],
                             expanded_netlist_path: Optional[str]):
        # TODO: make this separately configurable
        # for now we use 0
        args.rcx25d_delaunay_amax = 0
        args.rcx25d_delaunay_b = 0.5

        extractor = RCX25Extractor(pex_context=pex_context,
                                   pex_mode=args.pex_mode,
                                   delaunay_amax=args.rcx25d_delaunay_amax,
                                   delaunay_b=args.rcx25d_delaunay_b,
                                   scale_ratio_to_fit_halo=args.scale_ratio_to_fit_halo,
                                   tech_info=tech_info,
                                   report_path=report_path)
        extraction_results = extractor.extract()

        if netlist_csv_path is not None:
            # TODO: merge this with klayout_pex/klayout/netlist_csv.py

            with open(netlist_csv_path, 'w', encoding='utf-8') as f:
                summary = extraction_results.summarize()

                f.write('Device;Net1;Net2;Capacitance [fF];Resistance [Ω]\n')
                for idx, (key, cap_value) in enumerate(sorted(summary.capacitances.items())):
                    f.write(f"C{idx + 1};{key.net1};{key.net2};{round(cap_value, 3)};\n")
                for idx, (key, res_value) in enumerate(sorted(summary.resistances.items())):
                    f.write(f"R{idx + 1};{key.net1};{key.net2};;{round(res_value, 3)}\n")

            rule('kpex/2.5D extracted netlist (CSV format)')
            with open(netlist_csv_path, 'r') as f:
                for line in f.readlines():
                    subproc(line[:-1])  # abusing subproc, simply want verbatim

            rule('Extracted netlist CSV')
            subproc(f"{netlist_csv_path}")

        if expanded_netlist_path is not None:
            rule('kpex/2.5D extracted netlist (SPICE format)')
            netlist_expander = RCX25NetlistExpander()
            expanded_netlist = netlist_expander.expand(
                extracted_netlist=pex_context.lvsdb.netlist(),
                top_cell_name=pex_context.annotated_top_cell.name,
                extraction_results=extraction_results,
                blackbox_devices=args.blackbox_devices
            )

            spice_writer = kdb.NetlistSpiceWriter()
            spice_writer.use_net_names = True
            spice_writer.with_comments = False
            expanded_netlist.write(expanded_netlist_path, spice_writer)
            subproc(f"Wrote expanded netlist to: {expanded_netlist_path}")

        # NOTE: there was a KLayout bug that some of the categories were lost,
        #       so that the marker browser could not load the report file
        try:
            report = rdb.ReportDatabase('')
            report.load(report_path)  # try loading rdb
        except Exception as e:
            rule("Repair broken marker DB")
            warning(f"Detected KLayout bug: RDB can't be loaded due to exception {e}")
            repair_rdb(report_path)

        return extraction_results

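    # Example (illustrative) of the CSV emitted above, with made-up net names/values:
    #   Device;Net1;Net2;Capacitance [fF];Resistance [Ω]
    #   C1;OUT;VDD;0.123;
    #   R1;IN;IN_drv;;12.5
    # Capacitance rows leave the resistance column empty and vice versa.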

    def setup_logging(self, args: argparse.Namespace):
        def register_log_file_handler(log_path: str,
                                      formatter: Optional[logging.Formatter]) -> logging.Handler:
            handler = logging.FileHandler(log_path)
            handler.setLevel(LogLevel.SUBPROCESS)
            if formatter:
                handler.setFormatter(formatter)
            register_additional_handler(handler)
            return handler

        def reregister_log_file_handler(handler: logging.Handler,
                                        log_path: str,
                                        formatter: Optional[logging.Formatter]):
            deregister_additional_handler(handler)
            handler.flush()
            handler.close()
            os.makedirs(args.output_dir_path, exist_ok=True)
            new_path = os.path.join(args.output_dir_path, os.path.basename(log_path))
            if os.path.exists(new_path):
                ctime = os.path.getctime(new_path)
                dt = datetime.fromtimestamp(ctime)
                timestamp = dt.strftime('%Y-%m-%d_%H-%M-%S')
                backup_path = f"{new_path[:-4]}_{timestamp}.bak.log"
                shutil.move(new_path, backup_path)
            log_path = shutil.move(log_path, new_path)
            register_log_file_handler(log_path, formatter)

        # setup preliminary logger
        cli_log_path_plain = os.path.join(args.output_dir_base_path, f"kpex_plain.log")
        cli_log_path_formatted = os.path.join(args.output_dir_base_path, f"kpex.log")
        formatter = logging.Formatter('[%(asctime)s] [%(levelname)s] %(message)s')
        file_handler_plain = register_log_file_handler(cli_log_path_plain, None)
        file_handler_formatted = register_log_file_handler(cli_log_path_formatted, formatter)
        try:
            self.validate_args(args)
        except ArgumentValidationError:
            if hasattr(args, 'output_dir_path'):
                reregister_log_file_handler(file_handler_plain, cli_log_path_plain, None)
                reregister_log_file_handler(file_handler_formatted, cli_log_path_formatted, formatter)
            sys.exit(1)
        reregister_log_file_handler(file_handler_plain, cli_log_path_plain, None)
        reregister_log_file_handler(file_handler_formatted, cli_log_path_formatted, formatter)

        set_log_level(args.log_level)

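    # Example (illustrative): if output/inverter__INV/kpex.log already exists from an
    # earlier run, reregister_log_file_handler() above first renames it to something
    # like kpex_2025-01-01_12-00-00.bak.log (based on its creation timestamp) before
    # the freshly written kpex.log is moved into place.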

    @staticmethod
    def modification_date(filename: str) -> datetime:
        t = os.path.getmtime(filename)
        return datetime.fromtimestamp(t)

    def create_lvsdb(self, args: argparse.Namespace) -> kdb.LayoutVsSchematic:
        lvsdb = kdb.LayoutVsSchematic()

        match args.input_mode:
            case InputMode.LVSDB:
                lvsdb.read(args.lvsdb_path)
            case InputMode.GDS:
                lvs_log_path = os.path.join(args.output_dir_path, f"{args.effective_cell_name}_lvs.log")
                lvsdb_path = os.path.join(args.output_dir_path, f"{args.effective_cell_name}.lvsdb.gz")
                lvsdb_cache_path = os.path.join(args.cache_dir_path, args.pdk,
                                                os.path.splitroot(os.path.abspath(args.gds_path))[-1],
                                                f"{args.effective_cell_name}.lvsdb.gz")

                lvs_needed = True

                if args.cache_lvs:
                    if not os.path.exists(lvsdb_cache_path):
                        info(f"Cache miss: extracted LVSDB does not exist")
                        subproc(lvsdb_cache_path)
                    elif self.modification_date(lvsdb_cache_path) <= self.modification_date(args.gds_path):
                        info(f"Cache miss: extracted LVSDB is older than the input GDS")
                        subproc(lvsdb_cache_path)
                    else:
                        warning(f"Cache hit: Reusing cached LVSDB")
                        subproc(lvsdb_cache_path)
                        lvs_needed = False

                if lvs_needed:
                    lvs_runner = LVSRunner()
                    lvs_runner.run_klayout_lvs(exe_path=args.klayout_exe_path,
                                               lvs_script=args.lvs_script_path,
                                               gds_path=args.effective_gds_path,
                                               schematic_path=args.effective_schematic_path,
                                               log_path=lvs_log_path,
                                               lvsdb_path=lvsdb_path,
                                               verbose=args.klayout_lvs_verbose)
                    if args.cache_lvs:
                        cache_dir_path = os.path.dirname(lvsdb_cache_path)
                        if not os.path.exists(cache_dir_path):
                            os.makedirs(cache_dir_path, exist_ok=True)
                        shutil.copy(lvsdb_path, lvsdb_cache_path)

                lvsdb.read(lvsdb_path)
        return lvsdb

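    # Example (illustrative): with the default cache dir output/.kpex_cache,
    # --pdk sky130A, cell INV and an input GDS at /home/user/designs/inverter.gds.gz,
    # the cached LVSDB ends up at
    #   output/.kpex_cache/sky130A/home/user/designs/inverter.gds.gz/INV.lvsdb.gz
    # i.e. the absolute GDS path (minus its root) is reused as a directory tree.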

    def main(self, argv: List[str]):
        if '-v' not in argv and \
           '--version' not in argv and \
           '-h' not in argv and \
           '--help' not in argv:
            rule('Command line arguments')
            subproc(' '.join(map(shlex.quote, sys.argv)))

        env = Env.from_os_environ()
        args = self.parse_args(arg_list=argv[1:], env=env)

        os.makedirs(args.output_dir_base_path, exist_ok=True)
        self.setup_logging(args)

        tech_info = TechInfo.from_json(args.tech_pbjson_path,
                                       dielectric_filter=args.dielectric_filter)

        if args.halo is not None:
            tech_info.tech.process_parasitics.side_halo = args.halo

        if args.run_magic:
            rule('MAGIC')
            self.run_magic_extraction(args)

        # no need to run LVS etc if only running magic engine
        if not (args.run_fastcap or args.run_fastercap or args.run_2_5D):
            return

        rule('Prepare LVSDB')
        lvsdb = self.create_lvsdb(args)

        pex_context = KLayoutExtractionContext.prepare_extraction(top_cell=args.effective_cell_name,
                                                                  lvsdb=lvsdb,
                                                                  tech=tech_info,
                                                                  blackbox_devices=args.blackbox_devices)
        rule('Non-empty layers in LVS database')
        for gds_pair, layer_info in pex_context.extracted_layers.items():
            names = [l.lvs_layer_name for l in layer_info.source_layers]
            info(f"{gds_pair} -> ({' '.join(names)})")

        gds_path = os.path.join(args.output_dir_path, f"{args.effective_cell_name}_l2n_extracted.oas")
        pex_context.annotated_layout.write(gds_path)

        gds_path = os.path.join(args.output_dir_path, f"{args.effective_cell_name}_l2n_internal.oas")
        pex_context.lvsdb.internal_layout().write(gds_path)

        def dump_layers(cell: str,
                        layers: List[KLayoutExtractedLayerInfo],
                        layout_dump_path: str):
            layout = kdb.Layout()
            layout.dbu = lvsdb.internal_layout().dbu

            top_cell = layout.create_cell(cell)
            for ulyr in layers:
                li = kdb.LayerInfo(*ulyr.gds_pair)
                li.name = ulyr.lvs_layer_name
                layer = layout.insert_layer(li)
                layout.insert(top_cell.cell_index(), layer, ulyr.region.dup())

            layout.write(layout_dump_path)

        if len(pex_context.unnamed_layers) >= 1:
            layout_dump_path = os.path.join(args.output_dir_path, f"{args.effective_cell_name}_unnamed_LVS_layers.gds.gz")
            dump_layers(cell=args.effective_cell_name,
                        layers=pex_context.unnamed_layers,
                        layout_dump_path=layout_dump_path)

        if len(pex_context.extracted_layers) >= 1:
            layout_dump_path = os.path.join(args.output_dir_path, f"{args.effective_cell_name}_nonempty_LVS_layers.gds.gz")
            nonempty_layers = [l
                               for layers in pex_context.extracted_layers.values()
                               for l in layers.source_layers]
            dump_layers(cell=args.effective_cell_name,
                        layers=nonempty_layers,
                        layout_dump_path=layout_dump_path)
        else:
            error("No extracted layers found")
            sys.exit(1)

        if args.run_fastcap or args.run_fastercap:
            lst_file = self.build_fastercap_input(args=args,
                                                  pex_context=pex_context,
                                                  tech_info=tech_info)
            if args.run_fastercap:
                self.run_fastercap_extraction(args=args,
                                              pex_context=pex_context,
                                              lst_file=lst_file)
            if args.run_fastcap:
                self.run_fastcap_extraction(args=args,
                                            pex_context=pex_context,
                                            lst_file=lst_file)

        if args.run_2_5D:
            rule("kpex/2.5D PEX Engine")
            report_path = os.path.join(args.output_dir_path, f"{args.effective_cell_name}_k25d_pex_report.rdb.gz")
            netlist_csv_path = os.path.abspath(os.path.join(args.output_dir_path,
                                                            f"{args.effective_cell_name}_k25d_pex_netlist.csv"))
            netlist_spice_path = os.path.abspath(os.path.join(args.output_dir_path,
                                                              f"{args.effective_cell_name}_k25d_pex_netlist.spice"))

            self._rcx25_extraction_results = self.run_kpex_2_5d_engine(  # NOTE: store for test case
                args=args,
                pex_context=pex_context,
                tech_info=tech_info,
                report_path=report_path,
                netlist_csv_path=netlist_csv_path,
                expanded_netlist_path=netlist_spice_path
            )

            self._rcx25_extracted_csv_path = netlist_csv_path

    @property
    def rcx25_extraction_results(self) -> ExtractionResults:
        if not hasattr(self, '_rcx25_extraction_results'):
            raise Exception('rcx25_extraction_results is not initialized, was run_kpex_2_5d_engine called?')
        return self._rcx25_extraction_results

    @property
    def rcx25_extracted_csv_path(self) -> str:
        if not hasattr(self, '_rcx25_extracted_csv_path'):
            raise Exception('rcx25_extracted_csv_path is not initialized, was run_kpex_2_5d_engine called?')
        return self._rcx25_extracted_csv_path

    @property
    def fastercap_extracted_csv_path(self) -> str:
        if not hasattr(self, '_fastercap_extracted_csv_path'):
            raise Exception('fastercap_extracted_csv_path is not initialized, was run_fastercap_extraction called?')
        return self._fastercap_extracted_csv_path


if __name__ == "__main__":
    cli = KpexCLI()
    cli.main(sys.argv)