Refactor deltadebug #5223
Draft

habibayassin wants to merge 7 commits into The-OpenROAD-Project:master from habibayassin:refactor-deltadebug
Changes from 3 commits

Commits (7):
5c81c2f  refactor deltadebug (habibayassin)
e729cca  fix def file path (habibayassin)
b5a57cb  replace all instances (habibayassin)
77a9e90  format and cleanup (habibayassin)
a9ccf96  prev refactor (habibayassin)
7cb43a7  take args dynamically (habibayassin)
2a8b7cf  parse vars (habibayassin)
@@ -19,8 +19,10 @@
################################

import odb
import re
from openroad import Design
import os
import glob
import sys
import signal
import subprocess
@@ -72,12 +74,17 @@
    help=
    'Exit early on unrelated errors to speed things up, but risks exiting on false negatives.'
)
parser.add_argument('--lib_path',
                    type=str,
                    help='Path to the library files (.lib)')
parser.add_argument('--lef_path',
                    type=str,
                    help='Path to the macro files (.lef)')
parser.add_argument(
    '--dump_def',
    action='store_true',
    help='Determines whether to dump def at each step in addition to the odb')


class cutLevel(enum.Enum):
    Nets = 0
    Insts = 1
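Usage sketch (assumptions, not part of the diff: the script file is assumed to be deltaDebug.py, and --base_db_path and --error_string are assumed to be the existing required options that this diff only references as opt.base_db_path and opt.error_string; all paths are placeholders). The new flags would then be passed roughly as:

    python3 deltaDebug.py --base_db_path ./results/base.odb \
        --error_string "<error text>" \
        --lib_path ./platform/lib \
        --lef_path ./platform/lef \
        --dump_def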
@@ -94,6 +101,11 @@ def __init__(self, opt):
        base_db_name = os.path.basename(opt.base_db_path)
        self.base_db_file = opt.base_db_path

        self.lib_directory = opt.lib_path
        self.lef_directory = opt.lef_path
        self.reduced_lib = False
        self.reduced_lef = False

        self.error_string = opt.error_string
        self.use_stdout = opt.use_stdout
        self.exit_early_on_error = opt.exit_early_on_error
@@ -114,10 +126,14 @@ def __init__(self, opt):
            base_db_directory, f"deltaDebug_base_temp_{base_db_name}")

        # The name of the result file after running deltaDebug
        self.deltaDebug_result_def_file = os.path.join(
            base_db_directory, f"deltaDebug_base_result_def.def")

        # # The name of the result file after running deltaDebug
        self.deltaDebug_result_base_file = os.path.join(
            base_db_directory, f"deltaDebug_base_result_{base_db_name}")

        # This determines whether design def shall be dumped or not
        # # This determines whether design def shall be dumped or not
        self.dump_def = opt.dump_def
        if (self.dump_def != 0):
            self.base_def_file = self.base_db_file[:-3] + "def"
@@ -196,14 +212,16 @@ def debug(self):

        # Change deltaDebug resultant base_db file name to a representative name
        if os.path.exists(self.temp_base_db_file):
            self.write_final_def()
            os.rename(self.temp_base_db_file, self.deltaDebug_result_base_file)

        # Restoring the original base_db file
        if os.path.exists(self.original_base_db_file):
            os.rename(self.original_base_db_file, self.base_db_file)

        print("___________________________________")
        print(f"Resultant file is {self.deltaDebug_result_base_file}")
        print(f"Resultant odb file is {self.deltaDebug_result_base_file}")
        print(f"Resultant def file is {self.deltaDebug_result_def_file}")
        print("Delta Debugging Done!")

    # A function that do a cut in the db, writes the base db to disk
@@ -213,6 +231,12 @@ def perform_step(self, cut_index=-1):
        self.base_db = Design.createDetachedDb()
        self.base_db = odb.read_db(self.base_db, self.temp_base_db_file)

        # reduce .lib and .lef files
        if (not self.reduced_lib):
            self.reduce_lib_files()
        if (not self.reduced_lef):
            self.reduce_lef_files()

        # Cut the block with the given step index.
        # if cut index of -1 is provided it means
        # that no cut will be made.
@@ -223,8 +247,7 @@ def perform_step(self, cut_index=-1):
        odb.write_db(self.base_db, self.base_db_file)
        if (self.dump_def != 0):
            print("Writing def file")
            odb.write_def(self.base_db.getChip().getBlock(),
                          self.base_def_file)
            self.write_dump_def(self.base_def_file)

        cuts = self.get_cuts() if cut_index != -1 else None
@@ -406,13 +429,161 @@ def remove_unused_masters(self):

        if (self.dump_def != 0):
            print("Writing def file")
            odb.write_def(self.base_db.getChip().getBlock(),
                          self.temp_base_db_file[:-3] + "def")
            self.write_dump_def(self.temp_base_db_file[:-3] + "def")

        if (self.base_db is not None):
            self.base_db.destroy(self.base_db)
            self.base_db = None

    def reduce_lib_files(self):
        print("Attempt to reduce lib files in", self.lib_directory)
        if not os.path.exists(self.lib_directory):
            return
        for lib_file in glob.glob(os.path.join( self.lib_directory, "*.lib")):
Review comment on the line above: run through a python formatter
            used_cells = self.get_used_cells()
            with open(lib_file, 'r') as f:
                lines = f.readlines()

            with open(lib_file, 'w') as f:
                write_lines = False
                for line in lines:
                    if any(cell in line for cell in used_cells):
                        write_lines = True
                    if 'cell (' in line and not any(cell in line for cell in used_cells):
                        write_lines = False
                    if write_lines:
                        f.write(line)
        self.reduced_lib = True

    def reduce_lef_files(self):
        print("Attempt to reduce lef files in", self.lef_directory)
        if not os.path.exists(self.lef_directory):
            return

        for lef_file in glob.glob(os.path.join( self.lef_directory, "*.lef")):
            with open(lef_file, 'r') as infile:
                lines = infile.readlines()

            in_layer_block = False
            essential_lines = []

            for line in lines:
                if re.match(r'\s*LAYER\s+\w+', line):
                    in_layer_block = True
                    essential_lines.append(line)
                elif in_layer_block and re.match(r'\s*END\s+\w+', line):
                    essential_lines.append(line)
                    in_layer_block = False
                elif in_layer_block:
                    essential_lines.append(line)

            with open(lef_file, 'w') as outfile:
                outfile.writelines(essential_lines)

        self.reduced_lef = True

    def get_used_cells(self):
        block = self.base_db.getChip().getBlock()
        used_cells = set()
        for inst in block.getInsts():
            master = inst.getMaster()
            if master:
                used_cells.add(master.getName())
        return used_cells

    def get_used_macros(self):
        block = self.base_db.getChip().getBlock()
        used_macros = set()
        for inst in block.getInsts():
            master = inst.getMaster()
            if master and master.isMacro():
                used_macros.add(master.getName())
        return used_macros

    def write_dump_def(self, output_file):
        if self.base_db is None:
            raise ValueError("Database is not loaded.")

        block = self.base_db.getChip().getBlock()
        if block is None:
            raise ValueError("Block is not present in the database.")
        odb.write_def(block, output_file)
        self.mangle_def_file(output_file)

    def write_final_def(self):
        self.base_db = odb.read_db(self.base_db, self.temp_base_db_file)
        if self.base_db is None:
            raise ValueError("Database is not loaded.")

        block = self.base_db.getChip().getBlock()
        if block is None:
            raise ValueError("Block is not present in the database.")

        odb.write_def(block, self.deltaDebug_result_def_file)
        self.mangle_def_file(self.deltaDebug_result_def_file)

        if (self.base_db is not None):
            self.base_db.destroy(self.base_db)
            self.base_db = None

    def mangle_def_file(self, input_def_file):
        patterns = {
            'nets': r'(-\s+(\S+)\s+\(.*?\)\s+\+\s+USE\s+\S+\s*;)',
            'components': r'(-\s+(\S+)\s+\S+\s+\+\s+PLACED\s+\(\s+\d+\s+\d+\s+\)\s+\S\s*;)'
        }

        net_count = 1
        element_count = 1
        net_mapping = {}
        element_mapping = {}

        def rename_nets(text):
            nonlocal net_count
            def repl(match):
                nonlocal net_count
                original_name = match.group(2)
                if original_name not in net_mapping:
                    new_name = f"net{net_count}"
                    net_mapping[original_name] = new_name
                    net_count += 1
                return match.group(1).replace(original_name, net_mapping[original_name])
            return re.sub(patterns['nets'], repl, text)

        def rename_elements(text):
            nonlocal element_count
            def repl(match):
                nonlocal element_count
                original_name = match.group(2)
                if original_name not in element_mapping:
                    new_name = f"element{element_count}"
                    element_mapping[original_name] = new_name
                    element_count += 1
                return match.group(1).replace(original_name, element_mapping[original_name])
            return re.sub(patterns['components'], repl, text)

        with open(input_def_file, 'r') as file:
            content = file.read()

        content = rename_nets(content)
        content = rename_elements(content)

        def replace_all(text, mapping):
            for original, new in mapping.items():
                text = re.sub(rf'\b{re.escape(original)}\b', new, text)
            return text

        content = replace_all(content, net_mapping)
        content = replace_all(content, element_mapping)

        base_name = os.path.splitext(input_def_file)[0]
        output_def_file = f"{base_name}_mangled.def"

        with open(output_def_file, 'w') as file:
            file.write(content)

        print(f"Mangled DEF file has been written to {output_def_file}")


if __name__ == '__main__':
    opt = parser.parse_args()
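To illustrate what mangle_def_file does to a DEF record, here is a minimal standalone sketch of the net-renaming pass, reusing the 'nets' pattern from the diff; the sample DEF line and the original net name are hypothetical:

    import re

    # Same 'nets' pattern as in mangle_def_file: group 1 captures the whole NET
    # record, group 2 captures the net name that gets replaced.
    pattern = r'(-\s+(\S+)\s+\(.*?\)\s+\+\s+USE\s+\S+\s*;)'
    sample = "- _net_42_ ( u1 A ) ( u2 Y ) + USE SIGNAL ;"

    # Rename the captured net name inside the full match, mirroring repl() above.
    mangled = re.sub(pattern, lambda m: m.group(1).replace(m.group(2), "net1"), sample)
    print(mangled)  # - net1 ( u1 A ) ( u2 Y ) + USE SIGNAL ;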
Review comment: nit: Extraneous "# #"