Coverage for klayout_pex/kpex_cli.py: 73%

519 statements  

coverage.py v7.8.0, created at 2025-05-12 13:45 +0000

1#! /usr/bin/env python3 

2# 

3# -------------------------------------------------------------------------------- 

4# SPDX-FileCopyrightText: 2024 Martin Jan Köhler and Harald Pretl 

5# Johannes Kepler University, Institute for Integrated Circuits. 

6# 

7# This file is part of KPEX  

8# (see https://github.com/martinjankoehler/klayout-pex). 

9# 

10# This program is free software: you can redistribute it and/or modify 

11# it under the terms of the GNU General Public License as published by 

12# the Free Software Foundation, either version 3 of the License, or 

13# (at your option) any later version. 

14# 

15# This program is distributed in the hope that it will be useful, 

16# but WITHOUT ANY WARRANTY; without even the implied warranty of 

17# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 

18# GNU General Public License for more details. 

19# 

20# You should have received a copy of the GNU General Public License 

21# along with this program. If not, see <http://www.gnu.org/licenses/>. 

22# SPDX-License-Identifier: GPL-3.0-or-later 

23# -------------------------------------------------------------------------------- 

24# 

25 

26import argparse 

27from datetime import datetime 

28from enum import StrEnum 

29from functools import cached_property 

30import logging 

31import os 

32import os.path 

33from pathlib import Path 

34import rich.console 

35import rich.markdown 

36import rich.text 

37from rich_argparse import RichHelpFormatter 

38import shlex 

39import shutil 

40import sys 

41from typing import * 

42 

43import klayout.db as kdb 

44import klayout.rdb as rdb 

45 

46from .env import EnvVar, Env 

47from .fastercap.fastercap_input_builder import FasterCapInputBuilder 

48from .fastercap.fastercap_model_generator import FasterCapModelGenerator 

49from .fastercap.fastercap_runner import run_fastercap, fastercap_parse_capacitance_matrix 

50from .fastcap.fastcap_runner import run_fastcap, fastcap_parse_capacitance_matrix 

51from .klayout.lvs_runner import LVSRunner 

52from .klayout.lvsdb_extractor import KLayoutExtractionContext, KLayoutExtractedLayerInfo 

53from .klayout.netlist_expander import NetlistExpander 

54from .klayout.netlist_csv import NetlistCSVWriter 

55from .klayout.netlist_reducer import NetlistReducer 

56from .klayout.repair_rdb import repair_rdb 

57from .log import ( 

58 LogLevel, 

59 set_log_level, 

60 register_additional_handler, 

61 deregister_additional_handler, 

62 # console, 

63 # debug, 

64 info, 

65 warning, 

66 subproc, 

67 error, 

68 rule 

69) 

70from .magic.magic_ext_file_parser import parse_magic_pex_run 

71from .magic.magic_runner import ( 

72 MagicPEXMode, 

73 MagicShortMode, 

74 MagicMergeMode, 

75 run_magic, 

76 prepare_magic_script, 

77) 

78from .magic.magic_log_analyzer import MagicLogAnalyzer 

79from .pdk_config import PDKConfig 

80from .rcx25.extractor import RCX25Extractor, ExtractionResults 

81from .rcx25.netlist_expander import RCX25NetlistExpander 

82from .rcx25.pex_mode import PEXMode 

83from .tech_info import TechInfo 

84from .util.multiple_choice import MultipleChoicePattern 

85from .util.argparse_helpers import render_enum_help, true_or_false 

86from .version import __version__ 

87 

88 

89# ------------------------------------------------------------------------------------ 

90 

91PROGRAM_NAME = "kpex" 

92 

93 

94class ArgumentValidationError(Exception): 

95 pass 

96 

97 

98class InputMode(StrEnum): 

99 LVSDB = "lvsdb" 

100 GDS = "gds" 

101 

102 

103# TODO: this should be externally configurable 

104class PDK(StrEnum): 

105 IHP_SG13G2 = 'ihp_sg13g2' 

106 SKY130A = 'sky130A' 

107 

108 @cached_property 

109 def config(self) -> PDKConfig: 

110 # NOTE: installation paths of resources in the distribution wheel differs from source repo 

111 base_dir = os.path.dirname(os.path.realpath(__file__)) 

112 

113 # NOTE: .git can be dir (standalone clone), or file (in case of submodule) 

114 if os.path.exists(os.path.join(base_dir, '..', '.git')): # in source repo 

115 base_dir = os.path.dirname(base_dir) 

116 tech_pb_json_dir = os.path.join(base_dir, 'klayout_pex_protobuf') 

117 else: # site-packages/klayout_pex -> site-packages/klayout_pex_protobuf 

118 tech_pb_json_dir = os.path.join(os.path.dirname(base_dir), 'klayout_pex_protobuf') 

119 

120 match self: 

121 case PDK.IHP_SG13G2: 

122 return PDKConfig( 

123 name=self, 

124 pex_lvs_script_path=os.path.join(base_dir, 'pdk', self, 'libs.tech', 'kpex', 'sg13g2.lvs'), 

125 tech_pb_json_path=os.path.join(tech_pb_json_dir, f"{self}_tech.pb.json") 

126 ) 

127 case PDK.SKY130A: 

128 return PDKConfig( 

129 name=self, 

130 pex_lvs_script_path=os.path.join(base_dir, 'pdk', self, 'libs.tech', 'kpex', 'sky130.lvs'), 

131 tech_pb_json_path=os.path.join(tech_pb_json_dir, f"{self}_tech.pb.json") 

132 ) 

133 

134 

135 
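# Illustrative use of the PDK enum above (paths are hypothetical and depend on the
# installation layout resolved in PDK.config):
#   cfg = PDK.SKY130A.config
#   cfg.pex_lvs_script_path   # e.g. <base_dir>/pdk/sky130A/libs.tech/kpex/sky130.lvs
#   cfg.tech_pb_json_path     # e.g. <tech_pb_json_dir>/sky130A_tech.pb.json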

136class KpexCLI: 

137 @staticmethod 

138 def parse_args(arg_list: Optional[List[str]],

139 env: Env) -> argparse.Namespace: 

140 # epilog = f"See '{PROGRAM_NAME} <subcommand> -h' for help on subcommand" 

141 epilog = EnvVar.help_epilog_table() 

142 epilog_md = rich.console.Group( 

143 rich.text.Text('Environmental variables:', style='argparse.groups'), 

144 rich.markdown.Markdown(epilog, style='argparse.text') 

145 ) 

146 main_parser = argparse.ArgumentParser(description=f"{PROGRAM_NAME}: " 

147 f"KLayout-integrated Parasitic Extraction Tool", 

148 epilog=epilog_md, 

149 add_help=False, 

150 formatter_class=RichHelpFormatter) 

151 

152 group_special = main_parser.add_argument_group("Special options") 

153 group_special.add_argument("--help", "-h", action='help', help="show this help message and exit") 

154 group_special.add_argument("--version", "-v", action='version', version=f'{PROGRAM_NAME} {__version__}') 

155 group_special.add_argument("--log_level", dest='log_level', default='subprocess', 

156 help=render_enum_help(topic='log_level', enum_cls=LogLevel)) 

157 group_special.add_argument("--threads", dest='num_threads', type=int, 

158 default=(os.cpu_count() or 1) * 4,

159 help="number of threads (e.g. for FasterCap) (default is %(default)s)") 

160 

161 group_pex = main_parser.add_argument_group("Parasitic Extraction Setup") 

162 group_pex.add_argument("--pdk", dest="pdk", required=True, 

163 type=PDK, choices=list(PDK), 

164 help=render_enum_help(topic='pdk', enum_cls=PDK)) 

165 

166 group_pex.add_argument("--out_dir", "-o", dest="output_dir_base_path", default="output", 

167 help="Output directory path (default is '%(default)s')") 

168 

169 group_pex_input = main_parser.add_argument_group("Parasitic Extraction Input", 

170 description="Either LVS is run, or an existing LVSDB is used") 

171 group_pex_input.add_argument("--gds", "-g", dest="gds_path", help="GDS path (for LVS)") 

172 group_pex_input.add_argument("--schematic", "-s", dest="schematic_path", 

173 help="Schematic SPICE netlist path (for LVS). " 

174 "If none given, a dummy schematic will be created") 

175 group_pex_input.add_argument("--lvsdb", "-l", dest="lvsdb_path", help="KLayout LVSDB path (bypass LVS)") 

176 group_pex_input.add_argument("--cell", "-c", dest="cell_name", default=None, 

177 help="Cell (default is the top cell)") 

178 

179 group_pex_input.add_argument("--cache-lvs", dest="cache_lvs", 

180 type=true_or_false, default=True, 

181 help="Used cached LVSDB (for given input GDS) (default is %(default)s)") 

182 group_pex_input.add_argument("--cache-dir", dest="cache_dir_path", default=None, 

183 help="Path for cached LVSDB (default is .kpex_cache within --out_dir)") 

184 group_pex_input.add_argument("--lvs-verbose", dest="klayout_lvs_verbose", 

185 type=true_or_false, default=False, 

186 help="Verbose KLayout LVS output (default is %(default)s)") 

187 

188 group_pex_options = main_parser.add_argument_group("Parasitic Extraction Options") 

189 group_pex_options.add_argument("--blackbox", dest="blackbox_devices", 

190 type=true_or_false, default=False, # TODO: in the future this should be True by default 

191 help="Blackbox devices like MIM/MOM caps, as they are handled by SPICE models " 

192 "(default is %(default)s for testing now)") 

193 group_pex_options.add_argument("--fastercap", dest="run_fastercap", 

194 action='store_true', default=False, 

195 help="Run FasterCap engine (default is %(default)s)") 

196 group_pex_options.add_argument("--fastcap", dest="run_fastcap", 

197 action='store_true', default=False, 

198 help="Run FastCap2 engine (default is %(default)s)") 

199 group_pex_options.add_argument("--magic", dest="run_magic", 

200 action='store_true', default=False, 

201 help="Run MAGIC engine (default is %(default)s)") 

202 group_pex_options.add_argument("--2.5D", dest="run_2_5D", 

203 action='store_true', default=False, 

204 help="Run 2.5D analytical engine (default is %(default)s)") 

205 

206 group_fastercap = main_parser.add_argument_group("FasterCap options") 

207 group_fastercap.add_argument("--k_void", "-k", dest="k_void", 

208 type=float, default=3.9, 

209 help="Dielectric constant of void (default is %(default)s)") 

210 

211 # TODO: reflect that these are also now used by KPEX/2.5D engine! 

212 group_fastercap.add_argument("--delaunay_amax", "-a", dest="delaunay_amax", 

213 type=float, default=50, 

214 help="Delaunay triangulation maximum area (default is %(default)s)") 

215 group_fastercap.add_argument("--delaunay_b", "-b", dest="delaunay_b", 

216 type=float, default=0.5, 

217 help="Delaunay triangulation b (default is %(default)s)") 

218 group_fastercap.add_argument("--geo_check", dest="geometry_check", 

219 type=true_or_false, default=False, 

220 help=f"Validate geometries before passing to FasterCap " 

221 f"(default is False)") 

222 group_fastercap.add_argument("--diel", dest="dielectric_filter", 

223 type=str, default="all", 

224 help=f"Comma separated list of dielectric filter patterns. " 

225 f"Allowed patterns are: (none, all, -dielname1, +dielname2) " 

226 f"(default is %(default)s)") 

227 

228 group_fastercap.add_argument("--tolerance", dest="fastercap_tolerance", 

229 type=float, default=0.05, 

230 help="FasterCap -aX error tolerance (default is %(default)s)") 

231 group_fastercap.add_argument("--d_coeff", dest="fastercap_d_coeff", 

232 type=float, default=0.5, 

233 help=f"FasterCap -d direct potential interaction coefficient to mesh refinement " 

234 f"(default is %(default)s)") 

235 group_fastercap.add_argument("--mesh", dest="fastercap_mesh_refinement_value", 

236 type=float, default=0.5, 

237 help="FasterCap -m Mesh relative refinement value (default is %(default)s)") 

238 group_fastercap.add_argument("--ooc", dest="fastercap_ooc_condition", 

239 type=float, default=2, 

240 help="FasterCap -f out-of-core free memory to link memory condition " 

241 "(0 = don't go OOC, default is %(default)s)") 

242 group_fastercap.add_argument("--auto_precond", dest="fastercap_auto_preconditioner", 

243 type=true_or_false, default=True, 

244 help=f"FasterCap -ap Automatic preconditioner usage (default is %(default)s)") 

245 group_fastercap.add_argument("--galerkin", dest="fastercap_galerkin_scheme", 

246 action='store_true', default=False, 

247 help=f"FasterCap -g Use Galerkin scheme (default is %(default)s)") 

248 group_fastercap.add_argument("--jacobi", dest="fastercap_jacobi_preconditioner", 

249 action='store_true', default=False, 

250 help="FasterCap -pj Use Jacobi preconditioner (default is %(default)s)") 

251 

252 PDKPATH = os.environ.get('PDKPATH', None) 

253 default_magicrc_path = \ 

254 None if PDKPATH is None or 'PDK' not in os.environ \

255 else os.path.abspath(f"{PDKPATH}/libs.tech/magic/{os.environ['PDK']}.magicrc") 

256 group_magic = main_parser.add_argument_group("MAGIC options") 

257 group_magic.add_argument('--magicrc', dest='magicrc_path', default=default_magicrc_path, 

258 help=f"Path to magicrc configuration file (default is '%(default)s')") 

259 group_magic.add_argument("--magic_mode", dest='magic_pex_mode', 

260 default=MagicPEXMode.DEFAULT, type=MagicPEXMode, choices=list(MagicPEXMode), 

261 help=render_enum_help(topic='magic_mode', enum_cls=MagicPEXMode)) 

262 group_magic.add_argument("--magic_cthresh", dest="magic_cthresh", 

263 type=float, default=0.01, 

264 help="Threshold (in fF) for ignored parasitic capacitances (default is %(default)s). " 

265 "(MAGIC command: ext2spice cthresh <value>)") 

266 group_magic.add_argument("--magic_rthresh", dest="magic_rthresh", 

267 type=int, default=100, 

268 help="Threshold (in Ω) for ignored parasitic resistances (default is %(default)s). " 

269 "(MAGIC command: ext2spice rthresh <value>)") 

270 group_magic.add_argument("--magic_tolerance", dest="magic_tolerance", 

271 type=float, default=1, 

272 help="Set ratio between resistor and device tolerance (default is %(default)s). " 

273 "(MAGIC command: extresist tolerance <value>)") 

274 group_magic.add_argument("--magic_halo", dest="magic_halo", 

275 type=float, default=None, 

276 help="Custom sidewall halo distance (in µm) " 

277 "(MAGIC command: extract halo <value>) (default is no custom halo)") 

278 group_magic.add_argument("--magic_short", dest='magic_short_mode', 

279 default=MagicShortMode.DEFAULT, type=MagicShortMode, choices=list(MagicShortMode), 

280 help=render_enum_help(topic='magic_short', enum_cls=MagicShortMode)) 

281 group_magic.add_argument("--magic_merge", dest='magic_merge_mode', 

282 default=MagicMergeMode.DEFAULT, type=MagicMergeMode, choices=list(MagicMergeMode), 

283 help=render_enum_help(topic='magic_merge', enum_cls=MagicMergeMode)) 

284 

285 group_25d = main_parser.add_argument_group("2.5D options") 

286 group_25d.add_argument("--mode", dest='pex_mode', 

287 default=PEXMode.DEFAULT, type=PEXMode, choices=list(PEXMode), 

288 help=render_enum_help(topic='mode', enum_cls=PEXMode)) 

289 group_25d.add_argument("--halo", dest="halo", 

290 type=float, default=None, 

291 help="Custom sidewall halo distance (in µm) to override tech info " 

292 "(default is no custom halo)") 

293 group_25d.add_argument("--scale", dest="scale_ratio_to_fit_halo", 

294 type=true_or_false, default=True, 

295 help=f"Scale fringe ratios, so that halo distance is 100%% (default is %(default)s)") 

296 

297 if arg_list is None: 

298 arg_list = sys.argv[1:] 

299 args = main_parser.parse_args(arg_list) 

300 

301 # environmental variables and their defaults 

302 args.fastcap_exe_path = env[EnvVar.FASTCAP_EXE] 

303 args.fastercap_exe_path = env[EnvVar.FASTERCAP_EXE] 

304 args.klayout_exe_path = env[EnvVar.KLAYOUT_EXE] 

305 args.magic_exe_path = env[EnvVar.MAGIC_EXE] 

306 

307 return args 

308 
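# Illustrative invocation (argument values are hypothetical):
#   args = KpexCLI.parse_args(['--pdk', 'sky130A', '--gds', 'design.gds.gz', '--2.5D'],
#                             env=Env.from_os_environ())
#   args.fastercap_exe_path  # resolved from the corresponding environment variable (see EnvVar)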

309 @staticmethod 

310 def validate_args(args: argparse.Namespace): 

311 found_errors = False 

312 

313 pdk_config: PDKConfig = args.pdk.config 

314 args.tech_pbjson_path = pdk_config.tech_pb_json_path 

315 args.lvs_script_path = pdk_config.pex_lvs_script_path 

316 

317 def input_file_stem(path: str): 

318 # could be *.gds, or *.gds.gz, so remove all extensions 

319 return os.path.basename(path).split(sep='.')[0] 

320 
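# e.g. input_file_stem('/designs/inverter.gds.gz') -> 'inverter' (illustrative path)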

321 if not os.path.isfile(args.klayout_exe_path): 

322 path = shutil.which(args.klayout_exe_path) 

323 if not path: 

324 error(f"Can't locate KLayout executable at {args.klayout_exe_path}") 

325 found_errors = True 

326 

327 if not os.path.isfile(args.tech_pbjson_path): 

328 error(f"Can't read technology file at path {args.tech_pbjson_path}") 

329 found_errors = True 

330 

331 if not os.path.isfile(args.lvs_script_path): 

332 error(f"Can't locate LVS script path at {args.lvs_script_path}") 

333 found_errors = True 

334 

335 rule('Input Layout') 

336 

337 # input mode: LVS or existing LVSDB? 

338 if args.gds_path: 

339 info(f"GDS input file passed, running in LVS mode") 

340 args.input_mode = InputMode.GDS 

341 if not os.path.isfile(args.gds_path): 

342 error(f"Can't read GDS file (LVS input) at path {args.gds_path}") 

343 found_errors = True 

344 else: 

345 args.layout = kdb.Layout() 

346 args.layout.read(args.gds_path) 

347 

348 top_cells = args.layout.top_cells() 

349 

350 if args.cell_name: # explicit user-specified cell name 

351 args.effective_cell_name = args.cell_name 

352 

353 found_cell: Optional[kdb.Cell] = None 

354 for cell in args.layout.cells('*'): 

355 if cell.name == args.effective_cell_name: 

356 found_cell = cell 

357 break 

358 if not found_cell: 

359 error(f"Could not find cell {args.cell_name} in GDS {args.gds_path}") 

360 found_errors = True 

361 

362 is_only_top_cell = len(top_cells) == 1 and top_cells[0].name == args.cell_name 

363 if is_only_top_cell: 

364 info(f"Found cell {args.cell_name} in GDS {args.gds_path} (only top cell)") 

365 elif found_cell: # there are other cells => extract the top cell to a tmp layout

366 run_dir_id = f"{input_file_stem(args.gds_path)}__{args.effective_cell_name}" 

367 args.output_dir_path = os.path.join(args.output_dir_base_path, run_dir_id) 

368 os.makedirs(args.output_dir_path, exist_ok=True) 

369 args.effective_gds_path = os.path.join(args.output_dir_path, 

370 f"{args.cell_name}_exported.gds.gz") 

371 info(f"Found cell {args.cell_name} in GDS {args.gds_path}, " 

372 f"but it is not the only top cell, " 

373 f"so layout is exported to: {args.effective_gds_path}") 

374 

375 found_cell.write(args.effective_gds_path) 

376 else: # find top cell 

377 if len(top_cells) == 1: 

378 args.effective_cell_name = top_cells[0].name 

379 info(f"No explicit top cell specified, using top cell '{args.effective_cell_name}'") 

380 else: 

381 args.effective_cell_name = 'TOP' 

382 error(f"Could not determine the default top cell in GDS {args.gds_path}, " 

383 f"there are multiple: {', '.join([c.name for c in top_cells])}. " 

384 f"Use --cell to specify the cell") 

385 found_errors = True 

386 

387 if not hasattr(args, 'effective_gds_path'): 

388 args.effective_gds_path = args.gds_path 

389 else: 

390 info(f"LVSDB input file passed, bypassing LVS") 

391 args.input_mode = InputMode.LVSDB 

392 if not args.lvsdb_path:

393 error(f"LVSDB input path not specified (argument --lvsdb)") 

394 found_errors = True 

395 elif not os.path.isfile(args.lvsdb_path): 

396 error(f"Can't read KLayout LVSDB file at path {args.lvsdb_path}") 

397 found_errors = True 

398 else: 

399 lvsdb = kdb.LayoutVsSchematic() 

400 lvsdb.read(args.lvsdb_path) 

401 top_cell: kdb.Cell = lvsdb.internal_top_cell() 

402 args.effective_cell_name = top_cell.name 

403 

404 if hasattr(args, 'effective_cell_name'): 

405 run_dir_id: str 

406 match args.input_mode: 

407 case InputMode.GDS: 

408 run_dir_id = f"{input_file_stem(args.gds_path)}__{args.effective_cell_name}" 

409 case InputMode.LVSDB: 

410 run_dir_id = f"{input_file_stem(args.lvsdb_path)}__{args.effective_cell_name}" 

411 case _: 

412 raise NotImplementedError(f"Unknown input mode {args.input_mode}") 

413 

414 args.output_dir_path = os.path.join(args.output_dir_base_path, run_dir_id) 

415 os.makedirs(args.output_dir_path, exist_ok=True) 

416 if args.input_mode == InputMode.GDS: 

417 if args.schematic_path: 

418 args.effective_schematic_path = args.schematic_path 

419 if not os.path.isfile(args.schematic_path): 

420 error(f"Can't read schematic (LVS input) at path {args.schematic_path}") 

421 found_errors = True 

422 else: 

423 info(f"LVS input schematic not specified (argument --schematic), using dummy schematic") 

424 args.effective_schematic_path = os.path.join(args.output_dir_path, 

425 f"{args.effective_cell_name}_dummy_schematic.spice") 

426 with open(args.effective_schematic_path, 'w', encoding='utf-8') as f: 

427 f.writelines([ 

428 f".subckt {args.effective_cell_name} VDD VSS\n", 

429 '.ends\n', 

430 '.end\n' 

431 ]) 

432 

433 try: 

434 args.log_level = LogLevel[args.log_level.upper()] 

435 except KeyError: 

436 error(f"Requested log level {args.log_level.lower()} does not exist, " 

437 f"{render_enum_help(topic='log_level', enum_cls=LogLevel, print_default=False)}") 

438 found_errors = True 

439 

440 try: 

441 pattern_string: str = args.dielectric_filter 

442 args.dielectric_filter = MultipleChoicePattern(pattern=pattern_string) 

443 except ValueError as e: 

444 error("Failed to parse --diel arg", e) 

445 found_errors = True 

446 

447 # at least one engine must be activated 

448 

449 if not (args.run_magic or args.run_fastcap or args.run_fastercap or args.run_2_5D): 

450 error("No PEX engines activated") 

451 engine_help = """ 

452| Argument | Description | 

453| ------------ | ------------------------- | 

454| --fastercap | Run kpex/FasterCap engine | 

455| --2.5D | Run kpex/2.5D engine | 

456| --magic | Run MAGIC engine | 

457""" 

458 subproc(f"\n\nPlease activate one or more engines using the arguments:") 

459 rich.print(rich.markdown.Markdown(engine_help, style='argparse.text')) 

460 found_errors = True 

461 

462 if args.cache_dir_path is None: 

463 args.cache_dir_path = os.path.join(args.output_dir_base_path, '.kpex_cache') 

464 

465 if found_errors: 

466 raise ArgumentValidationError("Argument validation failed") 

467 
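# After successful validation the namespace carries derived attributes used further below:
# input_mode, effective_cell_name, output_dir_path, cache_dir_path, tech_pbjson_path,
# lvs_script_path, and (in GDS input mode) effective_gds_path and effective_schematic_path.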

468 def build_fastercap_input(self, 

469 args: argparse.Namespace, 

470 pex_context: KLayoutExtractionContext, 

471 tech_info: TechInfo) -> str: 

472 rule('Process stackup') 

473 fastercap_input_builder = FasterCapInputBuilder(pex_context=pex_context, 

474 tech_info=tech_info, 

475 k_void=args.k_void, 

476 delaunay_amax=args.delaunay_amax, 

477 delaunay_b=args.delaunay_b) 

478 gen: FasterCapModelGenerator = fastercap_input_builder.build() 

479 

480 rule('FasterCap Input File Generation') 

481 faster_cap_input_dir_path = os.path.join(args.output_dir_path, 'FasterCap_Input_Files') 

482 os.makedirs(faster_cap_input_dir_path, exist_ok=True) 

483 

484 lst_file = gen.write_fastcap(output_dir_path=faster_cap_input_dir_path, prefix='FasterCap_Input_') 

485 

486 rule('STL File Generation') 

487 geometry_dir_path = os.path.join(args.output_dir_path, 'Geometries') 

488 os.makedirs(geometry_dir_path, exist_ok=True) 

489 gen.dump_stl(output_dir_path=geometry_dir_path, prefix='') 

490 

491 if args.geometry_check: 

492 rule('Geometry Validation') 

493 gen.check() 

494 

495 return lst_file 

496 
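# The returned list file (passed to FasterCap below) is generated under
# <output_dir_path>/FasterCap_Input_Files/; STL geometry dumps go to <output_dir_path>/Geometries/.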

497 

498 def run_fastercap_extraction(self, 

499 args: argparse.Namespace, 

500 pex_context: KLayoutExtractionContext, 

501 lst_file: str): 

502 rule('FasterCap Execution') 

503 info(f"Configure number of OpenMP threads (environmental variable OMP_NUM_THREADS) as {args.num_threads}") 

504 os.environ['OMP_NUM_THREADS'] = f"{args.num_threads}" 

505 

506 log_path = os.path.join(args.output_dir_path, f"{args.effective_cell_name}_FasterCap_Output.txt") 

507 raw_csv_path = os.path.join(args.output_dir_path, f"{args.effective_cell_name}_FasterCap_Result_Matrix_Raw.csv") 

508 avg_csv_path = os.path.join(args.output_dir_path, f"{args.effective_cell_name}_FasterCap_Result_Matrix_Avg.csv") 

509 expanded_netlist_path = os.path.join(args.output_dir_path, 

510 f"{args.effective_cell_name}_FasterCap_Expanded_Netlist.cir") 

511 expanded_netlist_csv_path = os.path.join(args.output_dir_path, 

512 f"{args.effective_cell_name}_FasterCap_Expanded_Netlist.csv") 

513 reduced_netlist_path = os.path.join(args.output_dir_path, f"{args.effective_cell_name}_FasterCap_Reduced_Netlist.cir") 

514 

515 run_fastercap(exe_path=args.fastercap_exe_path, 

516 lst_file_path=lst_file, 

517 log_path=log_path, 

518 tolerance=args.fastercap_tolerance, 

519 d_coeff=args.fastercap_d_coeff, 

520 mesh_refinement_value=args.fastercap_mesh_refinement_value, 

521 ooc_condition=args.fastercap_ooc_condition, 

522 auto_preconditioner=args.fastercap_auto_preconditioner, 

523 galerkin_scheme=args.fastercap_galerkin_scheme, 

524 jacobi_preconditioner=args.fastercap_jacobi_preconditioner) 

525 

526 cap_matrix = fastercap_parse_capacitance_matrix(log_path) 

527 cap_matrix.write_csv(raw_csv_path) 

528 

529 cap_matrix = cap_matrix.averaged_off_diagonals() 

530 cap_matrix.write_csv(avg_csv_path) 

531 

532 netlist_expander = NetlistExpander() 

533 expanded_netlist = netlist_expander.expand( 

534 extracted_netlist=pex_context.lvsdb.netlist(), 

535 top_cell_name=pex_context.annotated_top_cell.name, 

536 cap_matrix=cap_matrix, 

537 blackbox_devices=args.blackbox_devices 

538 ) 

539 

540 # create a nice CSV for reports, useful for spreadsheets 

541 netlist_csv_writer = NetlistCSVWriter() 

542 netlist_csv_writer.write_csv(netlist=expanded_netlist, 

543 top_cell_name=pex_context.annotated_top_cell.name, 

544 output_path=expanded_netlist_csv_path) 

545 

546 rule("Extended netlist (CSV format):") 

547 with open(expanded_netlist_csv_path, 'r') as f: 

548 for line in f.readlines(): 

549 subproc(line.rstrip('\n')) # abusing subproc, simply want verbatim

550 rule() 

551 

552 info(f"Wrote expanded netlist CSV to: {expanded_netlist_csv_path}") 

553 

554 spice_writer = kdb.NetlistSpiceWriter() 

555 spice_writer.use_net_names = True 

556 spice_writer.with_comments = False 

557 expanded_netlist.write(expanded_netlist_path, spice_writer) 

558 info(f"Wrote expanded netlist to: {expanded_netlist_path}") 

559 

560 netlist_reducer = NetlistReducer() 

561 reduced_netlist = netlist_reducer.reduce(netlist=expanded_netlist, 

562 top_cell_name=pex_context.annotated_top_cell.name) 

563 reduced_netlist.write(reduced_netlist_path, spice_writer) 

564 info(f"Wrote reduced netlist to: {reduced_netlist_path}") 

565 

566 self._fastercap_extracted_csv_path = expanded_netlist_csv_path 

567 

568 def run_magic_extraction(self, 

569 args: argparse.Namespace): 

570 if args.input_mode != InputMode.GDS: 

571 error(f"MAGIC engine only works with GDS input mode" 

572 f" (currently {args.input_mode})") 

573 return 

574 

575 magic_run_dir = os.path.join(args.output_dir_path, f"magic_{args.magic_pex_mode}") 

576 magic_log_path = os.path.join(magic_run_dir, 

577 f"{args.effective_cell_name}_MAGIC_{args.magic_pex_mode}_Output.txt") 

578 magic_script_path = os.path.join(magic_run_dir, 

579 f"{args.effective_cell_name}_MAGIC_{args.magic_pex_mode}_Script.tcl") 

580 

581 output_netlist_path = os.path.join(magic_run_dir, f"{args.effective_cell_name}.pex.spice") 

582 report_db_path = os.path.join(magic_run_dir, f"{args.effective_cell_name}_MAGIC_report.rdb.gz") 

583 

584 os.makedirs(magic_run_dir, exist_ok=True) 

585 

586 prepare_magic_script(gds_path=args.effective_gds_path, 

587 cell_name=args.effective_cell_name, 

588 run_dir_path=magic_run_dir, 

589 script_path=magic_script_path, 

590 output_netlist_path=output_netlist_path, 

591 pex_mode=args.magic_pex_mode, 

592 c_threshold=args.magic_cthresh, 

593 r_threshold=args.magic_rthresh, 

594 tolerance=args.magic_tolerance, 

595 halo=args.magic_halo, 

596 short_mode=args.magic_short_mode, 

597 merge_mode=args.magic_merge_mode) 

598 

599 run_magic(exe_path=args.magic_exe_path, 

600 magicrc_path=args.magicrc_path, 

601 script_path=magic_script_path, 

602 log_path=magic_log_path) 

603 

604 magic_pex_run = parse_magic_pex_run(Path(magic_run_dir)) 

605 

606 layout = kdb.Layout() 

607 layout.read(args.effective_gds_path) 

608 

609 report = rdb.ReportDatabase('') 

610 magic_log_analyzer = MagicLogAnalyzer(magic_pex_run=magic_pex_run, 

611 report=report, 

612 dbu=layout.dbu) 

613 magic_log_analyzer.analyze() 

614 report.save(report_db_path) 

615 

616 rule("Paths") 

617 subproc(f"Report DB saved at: {report_db_path}") 

618 subproc(f"SPICE netlist saved at: {output_netlist_path}") 

619 

620 rule("MAGIC PEX SPICE netlist") 

621 with open(output_netlist_path, 'r') as f: 

622 subproc(f.read()) 

623 rule() 

624 

625 def run_fastcap_extraction(self, 

626 args: argparse.Namespace, 

627 pex_context: KLayoutExtractionContext, 

628 lst_file: str): 

629 rule('FastCap2 Execution') 

630 

631 log_path = os.path.join(args.output_dir_path, f"{args.effective_cell_name}_FastCap2_Output.txt") 

632 raw_csv_path = os.path.join(args.output_dir_path, f"{args.effective_cell_name}_FastCap2_Result_Matrix_Raw.csv") 

633 avg_csv_path = os.path.join(args.output_dir_path, f"{args.effective_cell_name}_FastCap2_Result_Matrix_Avg.csv") 

634 expanded_netlist_path = os.path.join(args.output_dir_path, 

635 f"{args.effective_cell_name}_FastCap2_Expanded_Netlist.cir") 

636 reduced_netlist_path = os.path.join(args.output_dir_path, 

637 f"{args.effective_cell_name}_FastCap2_Reduced_Netlist.cir") 

638 

639 run_fastcap(exe_path=args.fastcap_exe_path, 

640 lst_file_path=lst_file, 

641 log_path=log_path) 

642 

643 cap_matrix = fastcap_parse_capacitance_matrix(log_path) 

644 cap_matrix.write_csv(raw_csv_path) 

645 

646 cap_matrix = cap_matrix.averaged_off_diagonals() 

647 cap_matrix.write_csv(avg_csv_path) 

648 

649 netlist_expander = NetlistExpander() 

650 expanded_netlist = netlist_expander.expand( 

651 extracted_netlist=pex_context.lvsdb.netlist(), 

652 top_cell_name=pex_context.annotated_top_cell.name, 

653 cap_matrix=cap_matrix, 

654 blackbox_devices=args.blackbox_devices 

655 ) 

656 

657 spice_writer = kdb.NetlistSpiceWriter() 

658 spice_writer.use_net_names = True 

659 spice_writer.with_comments = False 

660 expanded_netlist.write(expanded_netlist_path, spice_writer) 

661 info(f"Wrote expanded netlist to: {expanded_netlist_path}") 

662 

663 netlist_reducer = NetlistReducer() 

664 reduced_netlist = netlist_reducer.reduce(netlist=expanded_netlist, 

665 top_cell_name=pex_context.annotated_top_cell.name) 

666 reduced_netlist.write(reduced_netlist_path, spice_writer) 

667 info(f"Wrote reduced netlist to: {reduced_netlist_path}") 

668 

669 def run_kpex_2_5d_engine(self, 

670 args: argparse.Namespace, 

671 pex_context: KLayoutExtractionContext, 

672 tech_info: TechInfo, 

673 report_path: str, 

674 netlist_csv_path: Optional[str], 

675 expanded_netlist_path: Optional[str]): 

676 # TODO: make this separately configurable

677 # for now we use 0 

678 args.rcx25d_delaunay_amax = 0 

679 args.rcx25d_delaunay_b = 0.5 

680 

681 extractor = RCX25Extractor(pex_context=pex_context, 

682 pex_mode=args.pex_mode, 

683 delaunay_amax=args.rcx25d_delaunay_amax, 

684 delaunay_b=args.rcx25d_delaunay_b, 

685 scale_ratio_to_fit_halo=args.scale_ratio_to_fit_halo, 

686 tech_info=tech_info, 

687 report_path=report_path) 

688 extraction_results = extractor.extract() 

689 

690 if netlist_csv_path is not None: 

691 # TODO: merge this with klayout_pex/klayout/netlist_csv.py 

692 

693 with open(netlist_csv_path, 'w', encoding='utf-8') as f: 

694 summary = extraction_results.summarize() 

695 

696 f.write('Device;Net1;Net2;Capacitance [fF];Resistance [Ω]\n') 

697 for idx, (key, cap_value) in enumerate(sorted(summary.capacitances.items())): 

698 f.write(f"C{idx + 1};{key.net1};{key.net2};{round(cap_value, 3)};\n") 

699 for idx, (key, res_value) in enumerate(sorted(summary.resistances.items())): 

700 f.write(f"R{idx + 1};{key.net1};{key.net2};;{round(res_value, 3)}\n") 

701 
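# Illustrative rows of the semicolon-separated summary written above (net names hypothetical):
#   Device;Net1;Net2;Capacitance [fF];Resistance [Ω]
#   C1;VDD;VSS;0.123;
#   R1;net_a;net_b;;12.345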

702 rule('kpex/2.5D extracted netlist (CSV format)') 

703 with open(netlist_csv_path, 'r') as f: 

704 for line in f.readlines(): 

705 subproc(line.rstrip('\n')) # abusing subproc, simply want verbatim

706 

707 rule('Extracted netlist CSV') 

708 subproc(f"{netlist_csv_path}") 

709 

710 if expanded_netlist_path is not None: 

711 rule('kpex/2.5D extracted netlist (SPICE format)') 

712 netlist_expander = RCX25NetlistExpander() 

713 expanded_netlist = netlist_expander.expand( 

714 extracted_netlist=pex_context.lvsdb.netlist(), 

715 top_cell_name=pex_context.annotated_top_cell.name, 

716 extraction_results=extraction_results, 

717 blackbox_devices=args.blackbox_devices 

718 ) 

719 

720 spice_writer = kdb.NetlistSpiceWriter() 

721 spice_writer.use_net_names = True 

722 spice_writer.with_comments = False 

723 expanded_netlist.write(expanded_netlist_path, spice_writer) 

724 subproc(f"Wrote expanded netlist to: {expanded_netlist_path}") 

725 

726 # NOTE: there was a KLayout bug where some of the categories were lost,

727 # so that the marker browser could not load the report file 

728 try: 

729 report = rdb.ReportDatabase('') 

730 report.load(report_path) # try loading rdb 

731 except Exception as e: 

732 rule("Repair broken marker DB") 

733 warning(f"Detected KLayout bug: RDB can't be loaded due to exception {e}") 

734 repair_rdb(report_path) 

735 

736 return extraction_results 

737 

738 def setup_logging(self, args: argparse.Namespace): 

739 def register_log_file_handler(log_path: str, 

740 formatter: Optional[logging.Formatter]) -> logging.Handler: 

741 handler = logging.FileHandler(log_path) 

742 handler.setLevel(LogLevel.SUBPROCESS) 

743 if formatter: 

744 handler.setFormatter(formatter) 

745 register_additional_handler(handler) 

746 return handler 

747 

748 def reregister_log_file_handler(handler: logging.Handler, 

749 log_path: str, 

750 formatter: Optional[logging.Formatter]): 

751 deregister_additional_handler(handler) 

752 handler.flush() 

753 handler.close() 

754 os.makedirs(args.output_dir_path, exist_ok=True) 

755 new_path = os.path.join(args.output_dir_path, os.path.basename(log_path)) 

756 if os.path.exists(new_path): 

757 ctime = os.path.getctime(new_path) 

758 dt = datetime.fromtimestamp(ctime) 

759 timestamp = dt.strftime('%Y-%m-%d_%H-%M-%S') 

760 backup_path = f"{new_path[:-4]}_{timestamp}.bak.log" 

761 shutil.move(new_path, backup_path) 

762 log_path = shutil.move(log_path, new_path) 

763 register_log_file_handler(log_path, formatter) 

764 

765 # setup preliminary logger 

766 cli_log_path_plain = os.path.join(args.output_dir_base_path, f"kpex_plain.log") 

767 cli_log_path_formatted = os.path.join(args.output_dir_base_path, f"kpex.log") 

768 formatter = logging.Formatter('[%(asctime)s] [%(levelname)s] %(message)s') 

769 file_handler_plain = register_log_file_handler(cli_log_path_plain, None) 

770 file_handler_formatted = register_log_file_handler(cli_log_path_formatted, formatter) 

771 try: 

772 self.validate_args(args) 

773 except ArgumentValidationError: 

774 if hasattr(args, 'output_dir_path'): 

775 reregister_log_file_handler(file_handler_plain, cli_log_path_plain, None) 

776 reregister_log_file_handler(file_handler_formatted, cli_log_path_formatted, formatter) 

777 sys.exit(1) 

778 reregister_log_file_handler(file_handler_plain, cli_log_path_plain, None) 

779 reregister_log_file_handler(file_handler_formatted, cli_log_path_formatted, formatter) 

780 

781 set_log_level(args.log_level) 

782 

783 @staticmethod 

784 def modification_date(filename: str) -> datetime: 

785 t = os.path.getmtime(filename) 

786 return datetime.fromtimestamp(t) 

787 

788 def create_lvsdb(self, args: argparse.Namespace) -> kdb.LayoutVsSchematic: 

789 lvsdb = kdb.LayoutVsSchematic() 

790 

791 match args.input_mode: 

792 case InputMode.LVSDB: 

793 lvsdb.read(args.lvsdb_path) 

794 case InputMode.GDS: 

795 lvs_log_path = os.path.join(args.output_dir_path, f"{args.effective_cell_name}_lvs.log") 

796 lvsdb_path = os.path.join(args.output_dir_path, f"{args.effective_cell_name}.lvsdb.gz") 

797 lvsdb_cache_path = os.path.join(args.cache_dir_path, args.pdk, 

798 os.path.splitroot(os.path.abspath(args.gds_path))[-1], 

799 f"{args.effective_cell_name}.lvsdb.gz") 

800 
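# Illustrative cache layout (paths hypothetical):
#   <cache_dir_path>/sky130A/<absolute GDS path with its root stripped>/<cell>.lvsdb.gz
# NOTE: os.path.splitroot requires Python 3.12 or newer.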

801 lvs_needed = True 

802 

803 if args.cache_lvs: 

804 if not os.path.exists(lvsdb_cache_path): 

805 info(f"Cache miss: extracted LVSDB does not exist") 

806 subproc(lvsdb_cache_path) 

807 elif self.modification_date(lvsdb_cache_path) <= self.modification_date(args.gds_path): 

808 info(f"Cache miss: extracted LVSDB is older than the input GDS") 

809 subproc(lvsdb_cache_path) 

810 else: 

811 warning(f"Cache hit: Reusing cached LVSDB") 

812 subproc(lvsdb_cache_path) 

813 lvs_needed = False 

814 

815 if lvs_needed: 

816 lvs_runner = LVSRunner() 

817 lvs_runner.run_klayout_lvs(exe_path=args.klayout_exe_path, 

818 lvs_script=args.lvs_script_path, 

819 gds_path=args.effective_gds_path, 

820 schematic_path=args.effective_schematic_path, 

821 log_path=lvs_log_path, 

822 lvsdb_path=lvsdb_path, 

823 verbose=args.klayout_lvs_verbose) 

824 if args.cache_lvs: 

825 cache_dir_path = os.path.dirname(lvsdb_cache_path) 

826 if not os.path.exists(cache_dir_path): 

827 os.makedirs(cache_dir_path, exist_ok=True) 

828 shutil.copy(lvsdb_path, lvsdb_cache_path) 

829 

830 lvsdb.read(lvsdb_path) 

831 return lvsdb 

832 

833 def main(self, argv: List[str]): 

834 if '-v' not in argv and \ 

835 '--version' not in argv and \ 

836 '-h' not in argv and \ 

837 '--help' not in argv: 

838 rule('Command line arguments') 

839 subproc(' '.join(map(shlex.quote, sys.argv))) 

840 

841 env = Env.from_os_environ() 

842 args = self.parse_args(arg_list=argv[1:], env=env) 

843 

844 os.makedirs(args.output_dir_base_path, exist_ok=True) 

845 self.setup_logging(args) 

846 

847 tech_info = TechInfo.from_json(args.tech_pbjson_path, 

848 dielectric_filter=args.dielectric_filter) 

849 

850 if args.halo is not None: 

851 tech_info.tech.process_parasitics.side_halo = args.halo 

852 

853 if args.run_magic: 

854 rule('MAGIC') 

855 self.run_magic_extraction(args) 

856 

857 # no need to run LVS etc if only running magic engine 

858 if not (args.run_fastcap or args.run_fastercap or args.run_2_5D): 

859 return 

860 

861 rule('Prepare LVSDB') 

862 lvsdb = self.create_lvsdb(args) 

863 

864 pex_context = KLayoutExtractionContext.prepare_extraction(top_cell=args.effective_cell_name, 

865 lvsdb=lvsdb, 

866 tech=tech_info, 

867 blackbox_devices=args.blackbox_devices) 

868 rule('Non-empty layers in LVS database') 

869 for gds_pair, layer_info in pex_context.extracted_layers.items(): 

870 names = [l.lvs_layer_name for l in layer_info.source_layers] 

871 info(f"{gds_pair} -> ({' '.join(names)})") 

872 

873 gds_path = os.path.join(args.output_dir_path, f"{args.effective_cell_name}_l2n_extracted.oas") 

874 pex_context.annotated_layout.write(gds_path) 

875 

876 gds_path = os.path.join(args.output_dir_path, f"{args.effective_cell_name}_l2n_internal.oas") 

877 pex_context.lvsdb.internal_layout().write(gds_path) 

878 

879 def dump_layers(cell: str, 

880 layers: List[KLayoutExtractedLayerInfo], 

881 layout_dump_path: str): 

882 layout = kdb.Layout() 

883 layout.dbu = lvsdb.internal_layout().dbu 

884 

885 top_cell = layout.create_cell(cell) 

886 for ulyr in layers: 

887 li = kdb.LayerInfo(*ulyr.gds_pair) 

888 li.name = ulyr.lvs_layer_name 

889 layer = layout.insert_layer(li) 

890 layout.insert(top_cell.cell_index(), layer, ulyr.region.dup()) 

891 

892 layout.write(layout_dump_path) 

893 

894 if len(pex_context.unnamed_layers) >= 1: 

895 layout_dump_path = os.path.join(args.output_dir_path, f"{args.effective_cell_name}_unnamed_LVS_layers.gds.gz") 

896 dump_layers(cell=args.effective_cell_name, 

897 layers=pex_context.unnamed_layers, 

898 layout_dump_path=layout_dump_path) 

899 

900 if len(pex_context.extracted_layers) >= 1: 

901 layout_dump_path = os.path.join(args.output_dir_path, f"{args.effective_cell_name}_nonempty_LVS_layers.gds.gz") 

902 nonempty_layers = [l \ 

903 for layers in pex_context.extracted_layers.values() \ 

904 for l in layers.source_layers] 

905 dump_layers(cell=args.effective_cell_name, 

906 layers=nonempty_layers, 

907 layout_dump_path=layout_dump_path) 

908 else: 

909 error("No extracted layers found") 

910 sys.exit(1) 

911 

912 if args.run_fastcap or args.run_fastercap: 

913 lst_file = self.build_fastercap_input(args=args, 

914 pex_context=pex_context, 

915 tech_info=tech_info) 

916 if args.run_fastercap: 

917 self.run_fastercap_extraction(args=args, 

918 pex_context=pex_context, 

919 lst_file=lst_file) 

920 if args.run_fastcap: 

921 self.run_fastcap_extraction(args=args, 

922 pex_context=pex_context, 

923 lst_file=lst_file) 

924 

925 if args.run_2_5D: 

926 rule("kpex/2.5D PEX Engine") 

927 report_path = os.path.join(args.output_dir_path, f"{args.effective_cell_name}_k25d_pex_report.rdb.gz") 

928 netlist_csv_path = os.path.abspath(os.path.join(args.output_dir_path, 

929 f"{args.effective_cell_name}_k25d_pex_netlist.csv")) 

930 netlist_spice_path = os.path.abspath(os.path.join(args.output_dir_path, 

931 f"{args.effective_cell_name}_k25d_pex_netlist.spice")) 

932 

933 self._rcx25_extraction_results = self.run_kpex_2_5d_engine( # NOTE: store for test case 

934 args=args, 

935 pex_context=pex_context, 

936 tech_info=tech_info, 

937 report_path=report_path, 

938 netlist_csv_path=netlist_csv_path, 

939 expanded_netlist_path=netlist_spice_path 

940 ) 

941 

942 self._rcx25_extracted_csv_path = netlist_csv_path 

943 

944 @property 

945 def rcx25_extraction_results(self) -> ExtractionResults: 

946 if not hasattr(self, '_rcx25_extraction_results'): 

947 raise Exception('rcx25_extraction_results is not initialized, was run_kpex_2_5d_engine called?') 

948 return self._rcx25_extraction_results 

949 

950 @property 

951 def rcx25_extracted_csv_path(self) -> str: 

952 if not hasattr(self, '_rcx25_extracted_csv_path'): 

953 raise Exception('rcx25_extracted_csv_path is not initialized, was run_kpex_2_5d_engine called?') 

954 return self._rcx25_extracted_csv_path 

955 

956 @property 

957 def fastercap_extracted_csv_path(self) -> str: 

958 if not hasattr(self, '_fastercap_extracted_csv_path'): 

959 raise Exception('fastercap_extracted_csv_path is not initialized, was run_fastercap_extraction called?') 

960 return self._fastercap_extracted_csv_path 

961 

962 

963if __name__ == "__main__": 

964 cli = KpexCLI() 

965 cli.main(sys.argv)
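# Illustrative command line (file and cell names are hypothetical):
#   kpex --pdk sky130A --gds inverter.gds.gz --cell INVERTER --2.5D --out_dir output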