import json
import argparse
import re
+import sys

def move_solidity_files(base_dir):
    for root, dirs, files in os.walk(base_dir):
        if root == base_dir:
            continue  # Skip the base directory itself
-
+
        # Determine the parent directory (bridge directory)
        parent_dir = os.path.dirname(root)
        source_code_dir = os.path.join(parent_dir, "source-code")
-
+
        # Create 'source-code' directory if it does not exist
        os.makedirs(source_code_dir, exist_ok=True)
-
+
        for file in files:
            if file.endswith(".sol"):
                src_path = os.path.join(root, file)
                dest_path = os.path.join(source_code_dir, file)
-
+
                # Handle duplicates by renaming files
                counter = 1
                while os.path.exists(dest_path):
                    file_name, file_ext = os.path.splitext(file)
                    new_file_name = f"{file_name}_{counter}{file_ext}"
                    dest_path = os.path.join(source_code_dir, new_file_name)
                    counter += 1
-
+
                shutil.move(src_path, dest_path)
                print(f"Moved: {src_path} -> {dest_path}")
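# Illustrative effect of move_solidity_files (hypothetical paths, not taken from
# the repository):
#   <base_dir>/some-bridge/contracts/Bridge.sol
#       -> <base_dir>/some-bridge/source-code/Bridge.sol
# If source-code/Bridge.sol already exists, the incoming file is renamed
# Bridge_1.sol, then Bridge_2.sol, and so on.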
@@ -61,7 +62,7 @@ def analyze_vulnerabilities_bridge(json_path):
        num_contracts = bridge.get("number_of_contracts", "N/A")
        print(f"Bridge: {name}")
        print(f"  Number of contracts: {num_contracts}")
-
+
        # Print aggregate vulnerability data
        aggregate_vulns = bridge.get("bridge_vulnerabilities", {})
        if not aggregate_vulns:
@@ -70,15 +71,15 @@ def analyze_vulnerabilities_bridge(json_path):
            print("  Aggregate vulnerabilities:")
            for key, value in aggregate_vulns.items():
                print(f"    {key}: {value}")
-
+
        # Print per-contract vulnerability data
        contracts_vulns = bridge.get("contract_vulnerabilities", [])
        if not contracts_vulns:
            print("  No contract vulnerability data available.")
        else:
            total_contracts = len(contracts_vulns)
            print(f"  Vulnerability data for {total_contracts} contracts:")
-
+
            # Sum vulnerabilities per type across contracts
            total_vulnerabilities = {}
            for vuln_entry in contracts_vulns:
@@ -127,25 +128,25 @@ def simplify_imports_in_solidity_files(base_dir):
    Explores all .sol files in the given directory and removes folder paths from import statements.
    """
    solidity_file_pattern = re.compile(r'import\s+["\'](.+?/.+?)["\'];')
-
+
    for root, _, files in os.walk(base_dir):
        for file in files:
            if file.endswith(".sol"):
                file_path = os.path.join(root, file)
-
+
                with open(file_path, 'r', encoding='utf-8') as f:
                    content = f.read()
-
+
                # Replace import paths with only the file name
                modified_content = solidity_file_pattern.sub(
                    lambda match: f'import "./{os.path.basename(match.group(1))}";',
                    content
                )
-
+
                # Write the modified content back to the file
                with open(file_path, 'w', encoding='utf-8') as f:
                    f.write(modified_content)
-
+
                print(f"Processed imports in: {file_path}")
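# Illustrative effect of the substitution above (hypothetical import, not taken
# from the repository): an import carrying a folder path such as
#   import "@openzeppelin/contracts/token/ERC20/IERC20.sol";
# is rewritten to reference only the file name in the current directory:
#   import "./IERC20.sol";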

def simplify_named_imports(base_dir):
@@ -159,20 +160,20 @@ def simplify_named_imports(base_dir):
        for file in files:
            if file.endswith(".sol"):
                file_path = os.path.join(root, file)
-
+
                with open(file_path, 'r', encoding='utf-8') as f:
                    content = f.read()
-
+
                # Replace import paths with only the file name
                modified_content = solidity_named_import_pattern.sub(
                    lambda match: f'import {{{match.group(1).strip()}}} from "./{os.path.basename(match.group(2))}";',
                    content
                )
-
+
                # Write the modified content back to the file
                with open(file_path, 'w', encoding='utf-8') as f:
                    f.write(modified_content)
-
+
                print(f"Processed named imports in: {file_path}")
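# Illustrative effect of the substitution above, assuming solidity_named_import_pattern
# captures the braced names in group 1 and the import path in group 2 (hypothetical
# import, not taken from the repository):
#   import {IERC20} from "@openzeppelin/contracts/token/ERC20/IERC20.sol";
# becomes
#   import {IERC20} from "./IERC20.sol";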

def replace_specific_imports(base_dir):
@@ -194,30 +195,100 @@ def replace_specific_imports(base_dir):
        for file in files:
            if file.endswith(".sol"):
                file_path = os.path.join(root, file)
-
+
                with open(file_path, 'r', encoding='utf-8') as f:
                    content = f.read()
-
+
                # Replace specific imports
                modified_content = solidity_import_pattern.sub(
                    lambda match: f'import "{import_replacements[match.group(1)]}";'
                    if match.group(1) in import_replacements else match.group(0),
                    content
                )
-
+
                # Write the modified content back to the file
                with open(file_path, 'w', encoding='utf-8') as f:
                    f.write(modified_content)
-
+
                print(f"Processed imports in: {file_path}")
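# Illustrative effect of the substitution above, assuming a hypothetical mapping
# entry import_replacements = {"IERC20.sol": "@openzeppelin/contracts/token/ERC20/IERC20.sol"}:
# an import whose captured path is "IERC20.sol" would be rewritten to
#   import "@openzeppelin/contracts/token/ERC20/IERC20.sol";
# while imports not listed in the mapping are left unchanged (match.group(0)).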

+def remove_duplicate_vulnerabilities(json_path):
+    """
+    Reads a JSON file, iterates through the data structure, and removes duplicate
+    strings from the 'vulnerabilities' list inside each contract.
+
+    Args:
+        json_path (str): Path to the input JSON file.
+
+    Returns:
+        list or None: The modified JSON data structure (a list of bridge objects)
+                      with duplicate vulnerabilities removed, or None if an error
+                      occurs while reading or parsing, or if the format is not
+                      as expected.
+    """
+    try:
+        with open(json_path, "r", encoding="utf-8") as file:
+            data = json.load(file)
+    except FileNotFoundError:
+        print(f"Error: file not found at '{json_path}'", file=sys.stderr)
+        return None
+    except json.JSONDecodeError as e:
+        print(f"Error: could not decode JSON from '{json_path}'. Details: {e}", file=sys.stderr)
+        return None
+    except Exception as e:
+        print(f"Unexpected error while reading '{json_path}': {e}", file=sys.stderr)
+        return None
+
+    if not isinstance(data, list):
+        print(f"Error: the JSON input in '{json_path}' must be a list of bridge objects.", file=sys.stderr)
+        return None
+
+    modified = False  # Track whether any changes were made
+    for bridge in data:
+        # Check that 'contracts' exists and is a list
+        if 'contracts' not in bridge or not isinstance(bridge['contracts'], list):
+            # Warn if the structure is not as expected, but keep going
+            print(f"Warning: bridge '{bridge.get('bridge', 'Unknown')}' in '{json_path}' has no valid 'contracts' list. Skipping.", file=sys.stderr)
+            continue
+
+        for contract in bridge['contracts']:
+            # Check that 'vulnerabilities' exists and is a list
+            if 'vulnerabilities' in contract and isinstance(contract['vulnerabilities'], list):
+                original_count = len(contract['vulnerabilities'])
+                # Use dict.fromkeys to remove duplicates while preserving order
+                unique_vulnerabilities = list(dict.fromkeys(contract['vulnerabilities']))
+                if len(unique_vulnerabilities) < original_count:
+                    modified = True
+                    contract['vulnerabilities'] = unique_vulnerabilities
+            # else:  # Optional: warn if the 'vulnerabilities' key is missing or not a list
+            #     print(f"Warning: contract '{contract.get('bytecode-name', 'Unknown')}' in bridge '{bridge.get('bridge', 'Unknown')}' has no valid 'vulnerabilities' list.", file=sys.stderr)
+
+    if not modified:
+        print(f"No duplicate vulnerabilities found to remove in '{json_path}'.")
+
+    return data  # Return the modified data structure (or the original if there were no duplicates)
+
+
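# Illustrative behaviour of the deduplication above (hypothetical vulnerability
# labels): dict.fromkeys keeps the first occurrence of each string and preserves
# insertion order, so
#   list(dict.fromkeys(["reentrancy", "unchecked-call", "reentrancy"]))
# returns
#   ["reentrancy", "unchecked-call"]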
if __name__ == "__main__":
-    json_file_path = "benchmark_results.json"
-    analyze_vulnerabilities(json_file_path)
+    json_file_path = "benchmark_results.json"
+    analyze_vulnerabilities_bridge(json_file_path)

-    base_directory = "cross-chain/smartaxe"
+    # base_directory = "cross-chain/smartaxe"
    # move_solidity_files(base_directory)
    # rename_files_in_directory(base_directory)
    # simplify_imports_in_solidity_files(base_directory)
    # simplify_named_imports(base_directory)
-    # replace_specific_imports(base_directory)
+    # replace_specific_imports(base_directory)
+
+    # output_json_path = "benchmark_results_function_updated.json"
+    # modified_data = remove_duplicate_vulnerabilities("benchmark_results_function-missing-notification.json")
+
+    # output_json_str = json.dumps(modified_data, indent=4)  # Use indent for pretty printing
+
+    # try:
+    #     with open(output_json_path, 'w', encoding='utf-8') as f:
+    #         f.write(output_json_str)
+    #     print(f"Successfully processed and saved modified JSON to '{output_json_path}'")
+    # except Exception as e:
+    #     print(f"Error writing output file '{output_json_path}': {e}")
+