-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathcore_logic.py
1688 lines (1488 loc) · 91.9 KB
/
core_logic.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
750
751
752
753
754
755
756
757
758
759
760
761
762
763
764
765
766
767
768
769
770
771
772
773
774
775
776
777
778
779
780
781
782
783
784
785
786
787
788
789
790
791
792
793
794
795
796
797
798
799
800
801
802
803
804
805
806
807
808
809
810
811
812
813
814
815
816
817
818
819
820
821
822
823
824
825
826
827
828
829
830
831
832
833
834
835
836
837
838
839
840
841
842
843
844
845
846
847
848
849
850
851
852
853
854
855
856
857
858
859
860
861
862
863
864
865
866
867
868
869
870
871
872
873
874
875
876
877
878
879
880
881
882
883
884
885
886
887
888
889
890
891
892
893
894
895
896
897
898
899
900
901
902
903
904
905
906
907
908
909
910
911
912
913
914
915
916
917
918
919
920
921
922
923
924
925
926
927
928
929
930
931
932
933
934
935
936
937
938
939
940
941
942
943
944
945
946
947
948
949
950
951
952
953
954
955
956
957
958
959
960
961
962
963
964
965
966
967
968
969
970
971
972
973
974
975
976
977
978
979
980
981
982
983
984
985
986
987
988
989
990
991
992
993
994
995
996
997
998
999
1000
# core_logic.py
# -*- coding: utf-8 -*-
from datetime import datetime
import logging
import os
import json
import config
import re
import platform
import glob

# Optional dependency: 'thefuzz' enables fuzzy title matching elsewhere in the
# app. The availability flag lets callers degrade gracefully when it is absent.
try:
    from thefuzz import fuzz
    THEFUZZ_AVAILABLE = True
    logging.info("Libreria 'thefuzz' trovata e caricata.")
except ImportError:
    THEFUZZ_AVAILABLE = False
    # Log the warning only once, here at startup, if the library is missing.
    logging.warning("Libreria 'thefuzz' non trovata. Il fuzzy matching sarà disabilitato.")
    logging.warning("Installala con: pip install thefuzz[speedup]")

# --- Profiles file path definition ---
PROFILES_FILENAME = "game_save_profiles.json"
APP_DATA_FOLDER = config.get_app_data_folder()  # Base app-data folder
if APP_DATA_FOLDER:  # Check that it is valid
    PROFILES_FILE_PATH = os.path.join(APP_DATA_FOLDER, PROFILES_FILENAME)
else:
    # Fallback: resolve relative to the current working directory.
    logging.error("Unable to determine APP_DATA_FOLDER, use relative path for game_save_profiles.json.")
    PROFILES_FILE_PATH = os.path.abspath(PROFILES_FILENAME)
logging.info(f"Profile file path in use: {PROFILES_FILE_PATH}")
# --- End definition ---
# <<< Function generating multiple abbreviations >>>
def generate_abbreviations(name, game_install_dir=None):
    """
    Generate a list of possible abbreviations/alternative names for a game.

    Candidates include the sanitized title, a no-space variant, an
    alphanumeric-only variant, acronyms of significant words, acronyms of the
    part after a colon (subtitle), and the base name of the game executable
    found under *game_install_dir* (when provided).

    Args:
        name: Full game title.
        game_install_dir: Optional install directory to scan for an executable.

    Returns:
        list[str]: Unique candidates of length >= 2, sorted longest first.
    """
    abbreviations = set()
    if not name: return []
    # Clean base name: drop trademark symbols and ':' for the base processing.
    sanitized_name = re.sub(r'[™®©:]', '', name).strip()
    sanitized_name_nospace = re.sub(r'\s+', '', sanitized_name)
    abbreviations.add(sanitized_name)
    abbreviations.add(sanitized_name_nospace)
    abbreviations.add(re.sub(r'[^a-zA-Z0-9]', '', sanitized_name))  # Alphanumeric only
    # Words ignored when building acronyms (overridable via config).
    ignore_words = getattr(config, 'SIMILARITY_IGNORE_WORDS',
                           {'a', 'an', 'the', 'of', 'and', 'remake', 'intergrade',
                            'edition', 'goty', 'demo', 'trial', 'play', 'launch',
                            'definitive', 'enhanced', 'complete', 'collection',
                            'hd', 'ultra', 'deluxe', 'game', 'year'})
    ignore_words_lower = {w.lower() for w in ignore_words}
    # --- Standard acronym logic ---
    words = re.findall(r'\b\w+\b', sanitized_name)
    significant_words = [w for w in words if w.lower() not in ignore_words_lower and len(w) > 1]
    significant_words_capitalized = [w for w in significant_words if w and w[0].isupper()]  # Guard: w non-empty
    if significant_words:
        acr_all = "".join(w[0] for w in significant_words if w).upper()
        if len(acr_all) >= 2: abbreviations.add(acr_all)
    if significant_words_capitalized:
        acr_caps = "".join(w[0] for w in significant_words_capitalized if w).upper()
        if len(acr_caps) >= 2: abbreviations.add(acr_caps)  # E.g.: HMCC
    # --- NEW: post-colon acronym logic ---
    if ':' in name:  # Use the original name, which still contains ':'
        parts = name.split(':', 1)
        if len(parts) > 1 and parts[1].strip():
            name_after_colon = parts[1].strip()
            logging.debug(f"Found colon, analyzing part: '{name_after_colon}'")
            words_after_colon = re.findall(r'\b\w+\b', name_after_colon)
            sig_words_after_colon = [w for w in words_after_colon if w.lower() not in ignore_words_lower and len(w) > 1]
            sig_words_caps_after_colon = [w for w in sig_words_after_colon if w and w[0].isupper()]
            if sig_words_caps_after_colon:
                # Build an acronym only from the capitalized words AFTER the colon.
                acr_caps_after_colon = "".join(w[0] for w in sig_words_caps_after_colon if w).upper()
                if len(acr_caps_after_colon) >= 2:
                    logging.info(f"Derived abbreviation from capitalized words after colon: {acr_caps_after_colon}")
                    abbreviations.add(acr_caps_after_colon)  # E.g.: MCC from "Master Chief Collection"
    # --- NEW: improved executable-name parsing ---
    if game_install_dir and os.path.isdir(game_install_dir):
        exe_base_name = None
        try:
            # Search for a likely game executable using glob patterns below.
            common_suffixes = ['Win64-Shipping.exe', 'Win32-Shipping.exe', '.exe']
            found_exe_path = None
            # (e.g. may find 'mcclauncher.exe')
            # --- Executable search block ---
            found_exe = None
            exe_search_patterns = [
                os.path.join(game_install_dir, f"*{suffix}") for suffix in common_suffixes
            ] + [
                os.path.join(game_install_dir, "Binaries", "Win64", f"*{suffix}") for suffix in common_suffixes
            ] + [
                os.path.join(game_install_dir, "bin", f"*{suffix}") for suffix in common_suffixes
            ]
            for pattern in exe_search_patterns:
                executables = glob.glob(pattern)
                if executables:
                    # Prefer a plausible executable (skip tiny stubs < 100 KiB).
                    valid_exes = [e for e in executables if os.path.getsize(e) > 100*1024]
                    if valid_exes: found_exe = os.path.basename(valid_exes[0])
                    elif executables: found_exe = os.path.basename(executables[0])  # Fallback: first match
                if found_exe: break
            # --- End search block ---
            if found_exe:
                logging.info(f"Found executable: {found_exe}")
                # Extract the base name by removing known suffixes.
                exe_base_name_temp = found_exe
                for suffix in common_suffixes + ['-Win64-Shipping', '-Win32-Shipping', '-Shipping']:
                    if exe_base_name_temp.lower().endswith(suffix.lower()):  # Case insensitive
                        exe_base_name_temp = exe_base_name_temp[:-len(suffix)]
                        break
                exe_base_name_temp = re.sub(r'[-_]+$', '', exe_base_name_temp)  # Drop trailing dashes/underscores
                # <<< NEW: drop common trailing keywords such as 'launcher' >>>
                common_exe_keywords = ['launcher', 'server', 'client', 'editor']
                processed_name = exe_base_name_temp
                keyword_removed = False
                for keyword in common_exe_keywords:
                    if processed_name.lower().endswith(keyword):
                        processed_name = processed_name[:-len(keyword)]
                        keyword_removed = True
                        break  # Remove only the first matching keyword
                # Clean any separators left dangling at the end.
                processed_name = re.sub(r'[-_]+$', '', processed_name)
                if len(processed_name) >= 2:
                    exe_base_name = processed_name  # Use the processed name
                    logging.info(f"Derived abbreviation from executable: {exe_base_name}")
                    abbreviations.add(exe_base_name)
                elif len(exe_base_name_temp) >= 2:
                    # Fallback: keyword removal made the name too short, keep pre-removal name.
                    exe_base_name = exe_base_name_temp
                    logging.info(f"Derived abbreviation from executable (fallback): {exe_base_name}")
                    abbreviations.add(exe_base_name)
        except Exception as e_exe:
            logging.warning(f"Could not derive name from executable: {e_exe}")
    # Drop None/short entries and sort longest first.
    final_list = sorted(list(filter(lambda x: x and len(x) >= 2, abbreviations)), key=len, reverse=True)
    logging.debug(f"Generated abbreviations for '{name}': {final_list}")
    return final_list
# <<< Helper checking a sequence of initials >>>
def matches_initial_sequence(folder_name, game_title_words):
    """
    Check whether folder_name (e.g. "ME") EXACTLY matches the sequence of
    initials of game_title_words (e.g. ["Metro", "Exodus"]).

    Returns False for empty inputs or on any unexpected error.
    """
    if not folder_name or not game_title_words:
        return False
    try:
        # Concatenate the first letter of each (non-empty) word, uppercased.
        expected_sequence = "".join(word[0] for word in game_title_words if word).upper()
        # Case-insensitive comparison against the folder name.
        return folder_name.upper() == expected_sequence
    except Exception as e:
        # Log any unexpected processing error and treat it as a non-match.
        logging.error(f"Error in matches_initial_sequence ('{folder_name}', {game_title_words}): {e}")
        return False
def sanitize_foldername(name):
    """Remove or replace characters invalid in file/folder names,
    keeping interior dots while trimming leading/trailing ones."""
    if not isinstance(name, str):
        # Non-string input gets the fallback name.
        return "_invalid_profile_name_"
    # 1. Strip characters that are universally illegal in file/folder names
    #    ( <>:"/\|?* ); letters, digits, spaces, _, - and . are kept.
    cleaned = re.sub(r'[<>:"/\\|?*]', '', name)
    # 2. Trim surrounding whitespace.
    cleaned = cleaned.strip()
    if cleaned:
        # 3+4. Trim outer dots (handles "..name..") and then any whitespace
        #      exposed by removing them (handles ". name .").
        cleaned = cleaned.strip('.').strip()
    # 5. If nothing usable remains, fall back to a safe placeholder.
    return cleaned if cleaned and not cleaned.isspace() else "_invalid_profile_name_"
# --- Profile Management ---
def get_profile_backup_summary(profile_name, backup_base_dir):
    """
    Return a summary of the backups for a profile.

    Returns: tuple (count: int, last_backup_datetime: datetime | None)
    """
    # list_available_backups already sorts newest-first.
    backups = list_available_backups(profile_name, backup_base_dir)
    if not backups:
        return 0, None
    # Tuple layout is (file_name, full_path, mtime_datetime); index 1 is the path.
    most_recent_backup_path = backups[0][1]
    last_backup_dt = None
    try:
        last_backup_dt = datetime.fromtimestamp(os.path.getmtime(most_recent_backup_path))
    except FileNotFoundError:
        # File vanished between listing and stat.
        logging.error(f"Last backup file not found ({most_recent_backup_path}) during getmtime for {profile_name}.")
    except Exception as e:
        logging.error(f"Unable to get last backup date for {profile_name} by '{most_recent_backup_path}': {e}")
    return len(backups), last_backup_dt
# --- Function loading the profiles ---
def load_profiles():
    """Load profiles from PROFILES_FILE_PATH, ensuring each value is a dict
    that contains at least the 'path' key.

    Supports two on-disk layouts: the new one ({'__metadata__': ..., 'profiles': {...}})
    and the legacy one (plain dict name -> path string). Legacy entries are
    converted; entries with invalid paths are skipped or get an empty path.

    Returns:
        dict: name -> profile dict (always containing 'path').
    """
    profiles_data = {}  # Raw JSON content
    # First attempt to load the raw JSON file content.
    if os.path.exists(PROFILES_FILE_PATH):
        try:
            with open(PROFILES_FILE_PATH, 'r', encoding='utf-8') as f:
                profiles_data = json.load(f)
            logging.debug(f"File '{PROFILES_FILE_PATH}' caricato.")
        except json.JSONDecodeError:
            logging.warning(f"File profili '{PROFILES_FILE_PATH}' corrotto o vuoto. Sarà sovrascritto al prossimo salvataggio.")
            profiles_data = {}  # Treat as empty if corrupted
        except Exception as e:
            logging.error(f"Errore imprevisto durante la lettura iniziale di '{PROFILES_FILE_PATH}': {e}")
            profiles_data = {}  # Treat as empty for any other error
    # Now process the loaded data (profiles_data).
    loaded_profiles = {}  # Validated/converted profiles
    profiles_dict_source = {}  # Source dict the actual profiles are read from
    try:
        if isinstance(profiles_data, dict):
            # Detect new format (with metadata) vs. legacy format.
            if "__metadata__" in profiles_data and "profiles" in profiles_data:
                profiles_dict_source = profiles_data.get("profiles", {})  # New format
                logging.debug("Processing profiles from new format (with metadata).")
            elif "__metadata__" not in profiles_data:
                # No metadata: assume legacy format (dict name -> path string).
                profiles_dict_source = profiles_data
                logging.debug("Processing profiles assuming old format (name -> path string).")
            else:
                # Metadata present but no 'profiles' key — unexpected shape.
                logging.warning("Profile file has '__metadata__' but missing 'profiles' key. Treating as empty.")
                profiles_dict_source = {}
            # --- Conversion and validation loop ---
            for name, path_or_data in profiles_dict_source.items():
                if isinstance(path_or_data, str):
                    # Legacy format: convert to a minimal dict.
                    logging.debug(f"Converting old format profile '{name}' to dict.")
                    # Verify the path before adding the profile.
                    if os.path.isdir(path_or_data):
                        loaded_profiles[name] = {'path': path_or_data}
                    else:
                        logging.warning(f"Path '{path_or_data}' for profile '{name}' (old format) is invalid. Skipping.")
                elif isinstance(path_or_data, dict):
                    # New format (or already converted): ensure 'path' exists and is valid.
                    profile_path = path_or_data.get('path')
                    if profile_path and isinstance(profile_path, str) and os.path.isdir(profile_path):
                        # Valid path: keep a copy of the dict (defensive copy).
                        loaded_profiles[name] = path_or_data.copy()
                    elif profile_path:
                        # Path present but not a valid directory.
                        logging.warning(f"Path '{profile_path}' in profile dict for '{name}' is invalid. Setting empty path.")
                        temp_profile = path_or_data.copy()
                        temp_profile['path'] = ""  # Clear the invalid path
                        loaded_profiles[name] = temp_profile
                    else:
                        # 'path' key missing entirely.
                        logging.warning(f"Profile '{name}' is a dict but missing 'path' key or path is invalid. Setting empty path.")
                        temp_profile = path_or_data.copy()
                        temp_profile['path'] = ""  # Clear the missing path
                        loaded_profiles[name] = temp_profile
                else:
                    # Unexpected value type for this profile.
                    logging.warning(f"Unrecognized profile format for '{name}'. Skipping.")
                    continue  # Skip this problematic profile
        else:
            # The JSON file did not contain a top-level dictionary.
            logging.error(f"Profile file '{PROFILES_FILE_PATH}' content is not a valid JSON dictionary.")
            loaded_profiles = {}
    except Exception as e:
        # Catch errors raised while processing the loaded dictionary.
        logging.error(f"Error processing loaded profile data: {e}", exc_info=True)
        loaded_profiles = {}  # Reset on processing error
    logging.info(f"Loaded and processed {len(loaded_profiles)} profiles from '{PROFILES_FILE_PATH}'.")
    return loaded_profiles
# --- Function saving the profiles ---
def save_profiles(profiles):
    """Save the profiles to PROFILES_FILE_PATH.

    The data is wrapped in an envelope with a '__metadata__' section and is
    written atomically (temp file + os.replace), so a crash or disk-full error
    mid-write can no longer leave a truncated/corrupted JSON behind — the
    corruption case load_profiles() has to recover from.

    Args:
        profiles (dict): Mapping profile name -> profile data dict.

    Returns:
        bool: True on success, False on any error (logged).
    """
    data_to_save = {
        "__metadata__": {
            "version": 1,  # Format version marker
            "saved_at": datetime.now().isoformat()
        },
        "profiles": profiles
    }
    temp_path = PROFILES_FILE_PATH + ".tmp"
    try:
        # Ensure the target directory exists.
        os.makedirs(os.path.dirname(PROFILES_FILE_PATH), exist_ok=True)
        # Write to a sibling temp file first, then atomically swap it in.
        with open(temp_path, 'w', encoding='utf-8') as f:
            json.dump(data_to_save, f, indent=4, ensure_ascii=False)
        os.replace(temp_path, PROFILES_FILE_PATH)
        logging.info(f"Saved {len(profiles)} profiles in '{PROFILES_FILE_PATH}'.")
        return True
    except Exception as e:
        logging.error(f"Error saving profiles in '{PROFILES_FILE_PATH}': {e}")
        # Best-effort cleanup of the orphaned temp file.
        try:
            if os.path.exists(temp_path):
                os.remove(temp_path)
        except OSError:
            pass
        return False
# --- Function deleting a profile ---
def delete_profile(profiles, profile_name):
    """Remove a profile from the dict. Return True if removed, False otherwise."""
    try:
        # EAFP: attempt the removal and handle the missing-key case.
        del profiles[profile_name]
    except KeyError:
        logging.warning(f"Attempt to delete non-existing profile: '{profile_name}'.")
        return False
    logging.info(f"Profile '{profile_name}' removed from memory.")
    return True
# --- Backup/Restore Operations ---
def manage_backups(profile_name, backup_base_dir, max_backups):
    """Delete the oldest .zip backups when their count exceeds the limit.

    Returns:
        list[str]: File names of the backups actually deleted.
    """
    deleted_files = []
    profile_backup_dir = os.path.join(backup_base_dir, sanitize_foldername(profile_name))
    logging.debug(f"ManageBackups - Original name: '{profile_name}', Folder searched: '{profile_backup_dir}'")
    try:
        if not os.path.isdir(profile_backup_dir):
            return deleted_files
        logging.info(f"Checking outdated (.zip) backups in: {profile_backup_dir}")
        backup_files = [
            entry for entry in os.listdir(profile_backup_dir)
            if entry.startswith("Backup_") and entry.endswith(".zip")
        ]
        num_to_delete = len(backup_files) - max_backups
        if num_to_delete <= 0:
            logging.info(f"Found {len(backup_files)} backup (.zip) (<= limit {max_backups}).")
            return deleted_files
        # Oldest first, so the first num_to_delete entries are the victims.
        backup_files.sort(key=lambda entry: os.path.getmtime(os.path.join(profile_backup_dir, entry)))
        logging.info(f"Deleting {num_to_delete} older (.zip) backup...")
        for victim in backup_files[:num_to_delete]:
            victim_path = os.path.join(profile_backup_dir, victim)
            try:
                logging.info(f" Deleting: {victim}")
                os.remove(victim_path)
                deleted_files.append(victim)
            except Exception as e:
                logging.error(f" Error deleting {victim}: {e}")
        logging.info(f"Deleted {len(deleted_files)} outdated (.zip) backups.")
    except Exception as e:
        logging.error(f"Error managing outdated (.zip) backups for '{profile_name}': {e}")
    return deleted_files
# --- Backup function ---
def perform_backup(profile_name, save_folder_path, backup_base_dir, max_backups, max_source_size_mb, compression_mode="standard"):
    """Run a backup using zipfile. Return (success: bool, message: str).

    Steps: validate source folder, optionally enforce a source-size limit,
    create the per-profile backup folder, zip the source tree (skipping
    __pycache__/compiled files), then prune old backups via manage_backups().

    Args:
        profile_name: Display name of the profile (sanitized for folder use).
        save_folder_path: Folder whose contents are archived.
        backup_base_dir: Root folder that contains per-profile backup folders.
        max_backups: Retention limit passed to manage_backups().
        max_source_size_mb: Size limit in MB; -1 disables the check.
        compression_mode: "standard" (deflate 6), "maximum" (deflate 9) or "stored".
    """
    import zipfile
    logging.info(f"Starting perform_backup for: '{profile_name}'")
    sanitized_folder_name = sanitize_foldername(profile_name)
    profile_backup_dir = os.path.join(backup_base_dir, sanitized_folder_name)
    logging.info(f"Original Name: '{profile_name}', Sanitized Folder Name: '{sanitized_folder_name}'")
    logging.debug(f"Backup path built: '{profile_backup_dir}'")
    save_folder_path = os.path.normpath(save_folder_path)
    if not os.path.isdir(save_folder_path):
        msg = f"ERROR: Source save folder is not valid: '{save_folder_path}'"
        logging.error(msg)
        return False, msg
    # --- Source size check ---
    logging.info(f"Checking source size '{save_folder_path}' (Limit: {'None' if max_source_size_mb == -1 else str(max_source_size_mb) + ' MB'})...")
    if max_source_size_mb != -1:
        max_source_size_bytes = max_source_size_mb * 1024 * 1024
        current_size_bytes = get_directory_size(save_folder_path)
        if current_size_bytes == -1:
            # get_directory_size signals failure with -1.
            msg = f"ERROR: Unable to calculate source size '{save_folder_path}'."
            logging.error(msg)
            return False, msg
        current_size_mb = current_size_bytes / (1024*1024)
        logging.info(f"Current source size: {current_size_mb:.2f} MB")
        if current_size_bytes > max_source_size_bytes:
            msg = (f"ERROR: Backup cancelled!\n"
                   f"Source size ({current_size_mb:.2f} MB) exceeds limit ({max_source_size_mb} MB).")
            logging.error(msg)
            return False, msg
    else:
        logging.info("Source size check skipped (No Limit set).")
    # --- END size check ---
    # --- Backup folder creation ---
    try:
        logging.info(f"Attempting to create/verify folder: '{profile_backup_dir}'")
        os.makedirs(profile_backup_dir, exist_ok=True)
        logging.info(f"Backup folder verified/created: '{profile_backup_dir}'")
    except Exception as e:
        msg = f"ERROR creating backup folder '{profile_backup_dir}': {e}"
        logging.error(msg, exc_info=True)
        logging.error(f"(Original problematic profile name: '{profile_name}')")
        return False, msg
    # --- END folder creation ---
    # --- ZIP archive creation ---
    timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
    safe_profile_name_for_zip = sanitize_foldername(profile_name)
    if not safe_profile_name_for_zip: safe_profile_name_for_zip = "_backup_"  # Fallback name
    archive_name = f"Backup_{safe_profile_name_for_zip}_{timestamp}.zip"
    archive_path = os.path.join(profile_backup_dir, archive_name)
    logging.info(f"Starting backup (ZIP) for '{profile_name}': From '{save_folder_path}' to '{archive_path}'")
    zip_compression = zipfile.ZIP_DEFLATED
    zip_compresslevel = 6  # Default: Standard
    if compression_mode == "stored":
        zip_compression = zipfile.ZIP_STORED
        zip_compresslevel = None  # Ignored for ZIP_STORED
        logging.info("Compression Mode: None (Stored)")
    elif compression_mode == "maximum":
        zip_compresslevel = 9
        logging.info("Compression Mode: Maximum (Deflate Level 9)")
    else:
        logging.info("Compression Mode: Standard (Deflate Level 6)")
    try:
        with zipfile.ZipFile(archive_path, 'w', compression=zip_compression, compresslevel=zip_compresslevel) as zipf:
            for root, dirs, files in os.walk(save_folder_path):
                # <<< NEW: skip __pycache__ and compiled Python artifacts >>>
                dirs[:] = [d for d in dirs if d != '__pycache__']  # Do not descend into pycache
                files = [f for f in files if not f.endswith(('.pyc', '.pyo'))]  # Skip compiled files
                for file in files:
                    file_path_absolute = os.path.join(root, file)
                    # Store paths relative to the source folder root.
                    arcname = os.path.relpath(file_path_absolute, save_folder_path)
                    try:
                        logging.debug(f" Adding: '{file_path_absolute}' as '{arcname}'")
                        zipf.write(file_path_absolute, arcname=arcname)
                    except FileNotFoundError:
                        logging.warning(f" Skipped adding file (not found?): '{file_path_absolute}'")
                    except Exception as e_write:
                        # Per-file errors are logged but do not abort the backup.
                        logging.error(f" Error adding file '{file_path_absolute}' to zip: {e_write}")
        # Prune old backups only after the archive closed successfully.
        deleted = manage_backups(profile_name, backup_base_dir, max_backups)
        deleted_msg = " Deleted {0} outdated backups.".format(len(deleted)) if deleted else ""
        msg = "Backup (ZIP) for '{0}' completed successfully.".format(profile_name) + deleted_msg
        logging.info(msg)
        return True, msg
    except (IOError, OSError, zipfile.BadZipFile) as e:
        msg = f"ERROR during ZIP backup creation '{archive_name}': {e}"
        logging.exception(msg)
        # Try to remove the half-written archive.
        if os.path.exists(archive_path):
            try: os.remove(archive_path); logging.warning(f"Failed ZIP archive removed: {archive_name}")
            except Exception as rem_e: logging.error(f"Unable to remove failed ZIP archive: {rem_e}")
        return False, msg
    except Exception as e:
        msg = f"ERROR unexpected during ZIP backup '{profile_name}': {e}"
        logging.exception(msg)
        if os.path.exists(archive_path):
            try: os.remove(archive_path); logging.warning(f"Failed ZIP archive removed: {archive_name}")
            except Exception as rem_e: logging.error(f"Unable to remove failed ZIP archive: {rem_e}")
        return False, msg
# --- List the available backups for a profile ---
def list_available_backups(profile_name, backup_base_dir):
    """Return (file_name, full_path, modification_datetime) tuples for a
    profile's .zip backups, sorted newest first.

    modification_datetime is a datetime, or None when the file's mtime could
    not be read (e.g. it vanished between listing and stat).
    """
    backups = []
    sanitized_folder_name = sanitize_foldername(profile_name)
    profile_backup_dir = os.path.join(backup_base_dir, sanitized_folder_name)
    logging.debug(f"ListBackups - Original name: '{profile_name}', Folder searched: '{profile_backup_dir}'")
    if not os.path.isdir(profile_backup_dir):
        return backups  # No folder = no backups
    try:
        backup_files = [f for f in os.listdir(profile_backup_dir) if f.startswith("Backup_") and f.endswith(".zip")]
        # Sort newest first.
        backup_files.sort(key=lambda f: os.path.getmtime(os.path.join(profile_backup_dir, f)), reverse=True)
        for fname in backup_files:
            fpath = os.path.join(profile_backup_dir, fname)
            try:
                mtime = os.path.getmtime(fpath)
                backup_datetime = datetime.fromtimestamp(mtime)
            except Exception:
                # BUG FIX: the original set an unused 'date_str' here and then
                # appended 'backup_datetime', which was either undefined
                # (NameError on the first file) or stale from a previous
                # iteration. Use an explicit None instead.
                backup_datetime = None
            backups.append((fname, fpath, backup_datetime))
    except Exception as e:
        logging.error(f"Error listing backups for '{profile_name}': {e}")
    return backups
# --- Restore function ---
def perform_restore(profile_name, save_folder_path, archive_to_restore_path):
    """Restore saves from a ZIP archive. Return (success: bool, message: str)."""
    import zipfile
    save_folder_path = os.path.normpath(save_folder_path)
    # The archive must exist and actually be a ZIP file.
    archive_ok = os.path.exists(archive_to_restore_path) and zipfile.is_zipfile(archive_to_restore_path)
    if not archive_ok:
        msg = f"ERROR: Archive to restore not found or is not a valid ZIP: '{archive_to_restore_path}'"
        logging.error(msg)
        return False, msg
    # Make sure the destination is a directory, creating it when missing.
    try:
        if os.path.exists(save_folder_path) and not os.path.isdir(save_folder_path):
            # A plain file at the target path cannot be used as a restore target.
            msg = f"ERROR: Target path '{save_folder_path}' exists but is a file, not a directory. Cannot restore."
            logging.error(msg)
            return False, msg
        os.makedirs(save_folder_path, exist_ok=True)
    except Exception as e:
        msg = f"ERROR: Could not create or access target restore folder '{save_folder_path}': {e}"
        logging.error(msg)
        return False, msg
    logging.info(f"Starting restore for: '{profile_name}'")
    logging.info(f"Restoring from: '{archive_to_restore_path}' to: '{save_folder_path}'")
    # Extraction overwrites files that already exist at the destination.
    logging.warning(f"Files in '{save_folder_path}' matching the archive content WILL BE OVERWRITTEN.")
    try:
        with zipfile.ZipFile(archive_to_restore_path, 'r') as zipf:
            logging.info(f"Extracting '{archive_to_restore_path}' to '{save_folder_path}'...")
            zipf.extractall(path=save_folder_path)
    except (zipfile.BadZipFile, IOError, OSError) as e:
        msg = f"ERROR during ZIP extraction '{os.path.basename(archive_to_restore_path)}': {e}"
        logging.exception(msg)
        return False, msg
    except Exception as e:
        msg = f"ERROR unexpected during ZIP restore '{profile_name}': {e}"
        logging.exception(msg)
        return False, msg
    msg = "Restore (ZIP) for '{0}' completed successfully.".format(profile_name)
    logging.info(msg)
    return True, msg
# --- Steam Detection Logic ---
# Module-level caches internal to core_logic: populated lazily by the lookup
# functions below and reused on subsequent calls.
_steam_install_path = None  # str | None: cached Steam install directory
_steam_libraries = None  # list[str] | None: cached Steam library base paths
_installed_steam_games = None  # cached installed-games mapping
_steam_userdata_path = None  # cached userdata folder path
_steam_id3 = None  # cached Steam user ID3
_cached_possible_ids = None  # cached candidate user IDs
_cached_id_details = None  # cached details per candidate ID
# Find the Steam installation path
def get_steam_install_path():
    """Locate the Steam installation directory via the Windows registry.

    The result is cached in the module-level `_steam_install_path`.

    Returns:
        str | None: Normalized install path, or None when Steam is not found
        or the platform is not Windows.

    Fixes over the previous version:
    - The platform check now runs BEFORE importing winreg, so the
      "skipped (Not on Windows)" log is actually reachable on non-Windows
      (previously the ImportError path returned first).
    - Removed the dead `if winreg is None` check (winreg could never be None
      past the import guard).
    - Registry keys are opened with a context manager, so the handle is closed
      even when QueryValueEx raises (the old code leaked it in that case).
    """
    global _steam_install_path
    if _steam_install_path: return _steam_install_path
    if platform.system() != "Windows":
        logging.info("Steam registry check skipped (Not on Windows).")
        # Linux/Mac logic (e.g. ~/.steam/steam) could be added here later.
        return None
    try:
        import winreg  # Windows-only stdlib module
    except ImportError:
        logging.warning("Modulo winreg non disponibile su questa piattaforma.")
        return None
    try:
        key_path = r"Software\Valve\Steam"
        potential_hives = [(winreg.HKEY_CURRENT_USER, "HKCU"), (winreg.HKEY_LOCAL_MACHINE, "HKLM")]
        for hive, hive_name in potential_hives:
            try:
                # OpenKey supports the context-manager protocol: the handle is
                # closed automatically, including on exceptions.
                with winreg.OpenKey(hive, key_path) as hkey:
                    path_value, _ = winreg.QueryValueEx(hkey, "SteamPath")
                # Normalize separators and only accept an existing directory.
                norm_path = os.path.normpath(path_value.replace('/', '\\'))
                if os.path.isdir(norm_path):
                    _steam_install_path = norm_path
                    logging.info(f"Found Steam installation ({hive_name}): {_steam_install_path}")
                    return _steam_install_path
            except (FileNotFoundError, OSError): continue
            except Exception as e: logging.warning(f"Error reading registry ({hive_name}): {e}")
        logging.error("Steam installation not found in registry.")
        return None
    except Exception as e:
        logging.error(f"Unexpected error searching for Steam: {e}")
        return None
# Trova il percorso userdata di Steam
def _parse_vdf(file_path):
"""Helper interno per parsare VDF. Restituisce dict o None."""
try:
import vdf # <--- SPOSTATO QUI DENTRO
except ImportError:
vdf = None # Imposta a None se fallisce per gestirlo dopo
if vdf is None:
# Logga l'errore solo se si prova effettivamente a parsare
logging.error("Libreria 'vdf' non trovata. Impossibile parsare file VDF.")
return None
if not os.path.isfile(file_path): return None
try:
with open(file_path, 'r', encoding='utf-8') as f:
content = f.read()
# Rimuovi commenti C-style se presenti (semplice rimozione linea)
content = '\n'.join(line for line in content.splitlines() if not line.strip().startswith('//'))
return vdf.loads(content, mapper=dict) # Usa mapper=dict per standardizzare
except FileNotFoundError: return None # Già controllato, ma per sicurezza
except UnicodeDecodeError:
logging.warning(f"Encoding error reading VDF '{os.path.basename(file_path)}'. Trying fallback encoding...")
try:
# Prova con un encoding diverso se UTF-8 fallisce (raro per VDF)
with open(file_path, 'r', encoding='latin-1') as f:
content = f.read()
content = '\n'.join(line for line in content.splitlines() if not line.strip().startswith('//'))
return vdf.loads(content, mapper=dict)
except Exception as e_fallback:
logging.error(f"ERROR parsing VDF '{os.path.basename(file_path)}' (fallback failed): {e_fallback}")
return None
except Exception as e:
logging.error(f"ERROR parsing VDF '{os.path.basename(file_path)}': {e}")
return None
# Trova librerie Steam
def find_steam_libraries():
    """Find Steam library base paths. Returns a list of paths (cached).

    Reads config/libraryfolders.vdf for additional libraries. Handles
    both the new format, where each numeric entry is a dict with a
    'path' key, and the legacy format, where the entry value is the
    library path string directly (e.g. "1" "D:\\SteamLibrary").
    """
    global _steam_libraries
    if _steam_libraries is not None: return _steam_libraries
    steam_path = get_steam_install_path()
    libs = []
    if not steam_path:
        _steam_libraries = []
        return libs
    # Main library (where Steam itself is installed)
    main_lib_steamapps = os.path.join(steam_path, 'steamapps')
    if os.path.isdir(main_lib_steamapps):
        libs.append(steam_path)  # add Steam's base path
    # Read libraryfolders.vdf for additional libraries
    vdf_path = os.path.join(steam_path, 'config', 'libraryfolders.vdf')
    logging.info(f"Reading libraries from: {vdf_path}")
    data = _parse_vdf(vdf_path)
    added_libs_count = 0
    if data:
        # Root key may be 'libraryfolders' or the numeric indices may sit at top level.
        lib_folders_data = data.get('libraryfolders', data)
        if isinstance(lib_folders_data, dict):
            for key, value in lib_folders_data.items():
                lib_path_raw = None
                if isinstance(value, dict) and 'path' in value:
                    lib_path_raw = value['path']  # new format: {'path': ..., ...}
                elif key.isdigit() and isinstance(value, str):
                    lib_path_raw = value  # legacy format: value is the path itself
                if not lib_path_raw:
                    continue
                # Normalize and require an actual steamapps folder inside.
                lib_path = os.path.normpath(lib_path_raw.replace('\\\\', '\\'))
                lib_steamapps_path = os.path.join(lib_path, 'steamapps')
                if os.path.isdir(lib_steamapps_path) and lib_path not in libs:
                    libs.append(lib_path)  # add the library's base path
                    added_libs_count += 1
    logging.info(f"Found {len(libs)} total Steam libraries ({added_libs_count} from VDF).")
    _steam_libraries = list(dict.fromkeys(libs))  # de-duplicate, preserving order
    return _steam_libraries
# Find installed games
def find_installed_steam_games():
    """Find installed Steam games. Returns dict {appid: {'name':..., 'installdir':...}} (cached)."""
    global _installed_steam_games
    if _installed_steam_games is not None: return _installed_steam_games
    library_paths = find_steam_libraries()
    games = {}
    if not library_paths:
        _installed_steam_games = {}
        return games
    logging.info("Scanning libraries for installed Steam games...")
    total_games_found = 0
    processed_appids = set()  # avoid overwrites when a game appears in multiple libraries
    for lib_path in library_paths:
        steamapps_path = os.path.join(lib_path, 'steamapps')
        if not os.path.isdir(steamapps_path): continue
        try:
            manifests = [f for f in os.listdir(steamapps_path)
                         if f.startswith('appmanifest_') and f.endswith('.acf')]
        except Exception as e:
            logging.error(f"Error scanning games in '{steamapps_path}': {e}")
            continue
        for filename in manifests:
            # Per-manifest guard: one corrupt ACF (bad VDF, non-numeric
            # StateFlags, ...) must not abort the rest of the library scan.
            try:
                acf_path = os.path.join(steamapps_path, filename)
                data = _parse_vdf(acf_path)
                if not data or 'AppState' not in data:
                    continue
                app_state = data['AppState']
                # Require all fields needed below before touching them.
                if not (all(k in app_state for k in ['appid', 'name', 'installdir']) and 'StateFlags' in app_state):
                    continue
                appid = app_state['appid']
                if appid in processed_appids: continue  # already found (likely in the main library)
                installdir_relative = app_state['installdir']
                # Build the absolute path relative to the CURRENT library.
                installdir_absolute = os.path.normpath(os.path.join(steamapps_path, 'common', installdir_relative))
                # StateFlags 4 = installed, 1026 = installed+update, 6 = installed+validation?
                # Also require the folder to physically exist as a fallback check.
                state_flags = int(app_state.get('StateFlags', 0))
                is_installed = (state_flags in [4, 6, 1026]) or \
                               (state_flags == 2 and os.path.isdir(installdir_absolute))  # 2=UpdateRequired?
                if is_installed and os.path.isdir(installdir_absolute):  # double check
                    name = app_state.get('name', f"Unknown Game {appid}").replace('™', '').replace('®', '').strip()
                    games[appid] = {'name': name, 'installdir': installdir_absolute}
                    processed_appids.add(appid)
                    total_games_found += 1
                    logging.debug(f"Found game: {name} (AppID: {appid}) in '{lib_path}'")
            except Exception as e:
                logging.error(f"Error parsing manifest '{filename}' in '{steamapps_path}': {e}")
    logging.info(f"Found {total_games_found} installed Steam games.")
    _installed_steam_games = games
    return games
# Offset between a SteamID3 (the userdata folder name) and the 64-bit SteamID64:
# SteamID64 = SteamID3 + STEAM_ID64_BASE.
STEAM_ID64_BASE = 76561197960265728
def find_steam_userdata_info():
    """Locate the Steam userdata folder and candidate user IDs.

    Returns a 4-tuple (userdata_path, likely_steam_id3, possible_ids,
    id_details), where id_details maps each SteamID3 to its most recent
    modification time, a formatted date string, and a display name
    resolved from loginusers.vdf when available. The "likely" ID is the
    one with the most recent activity. Results are cached at module
    level; on failure returns (None, None, [], {}).
    """
    try:
        import vdf
    except ImportError:
        vdf = None
    global _steam_userdata_path, _steam_id3, _cached_possible_ids, _cached_id_details
    # Serve from cache only when display names were already resolved.
    if (_steam_userdata_path and _steam_id3 and
        _cached_possible_ids is not None and _cached_id_details is not None and
        all('display_name' in v for v in _cached_id_details.values())):
        logging.debug("Using cached Steam userdata info (with display names).")
        return _steam_userdata_path, _steam_id3, _cached_possible_ids, _cached_id_details
    # Reset the cache before a fresh scan
    _steam_userdata_path = None; _steam_id3 = None
    _cached_possible_ids = None; _cached_id_details = None
    logging.info("Starting new Steam userdata scan (including profile names)...")
    steam_path = get_steam_install_path()
    if not steam_path:
        logging.error("ERROR: Unable to find Steam installation path for userdata scan.")
        return None, None, [], {}
    userdata_base = os.path.join(steam_path, 'userdata')
    if not os.path.isdir(userdata_base):
        logging.warning(f"Steam 'userdata' folder not found in '{steam_path}'.")
        return None, None, [], {}
    # --- Read loginusers.vdf to resolve profile names ---
    loginusers_path = os.path.join(steam_path, 'config', 'loginusers.vdf')
    user_persona_names = {}  # SteamID64 -> PersonaName
    if vdf:
        logging.info(f"Reading profile names from: {loginusers_path}")
        loginusers_data = _parse_vdf(loginusers_path)
        if loginusers_data and 'users' in loginusers_data:
            for steam_id64_str, user_data in loginusers_data['users'].items():
                if isinstance(user_data, dict) and 'PersonaName' in user_data:
                    user_persona_names[steam_id64_str] = user_data['PersonaName']
            logging.info(f"Found {len(user_persona_names)} profile names in loginusers.vdf.")
        else:
            logging.warning("Format 'loginusers.vdf' not recognized or file empty/corrupted.")
    else:
        logging.warning("Library 'vdf' not available, unable to read Steam profile names.")
    # --- END name lookup ---
    possible_ids = []
    last_modified_time = 0
    likely_id = None
    id_details = {}
    logging.info(f"Searching Steam user IDs in: {userdata_base}")
    try:
        for entry in os.listdir(userdata_base):  # entry is a SteamID3
            user_path = os.path.join(userdata_base, entry)
            if entry.isdigit() and entry != '0' and os.path.isdir(user_path):
                possible_ids.append(entry)
                current_mtime = 0
                last_mod_str = "N/D"
                display_name = f"ID: {entry}"  # fallback when no name is found
                # --- Resolve PersonaName via the ID3 -> ID64 conversion ---
                try:
                    steam_id3_int = int(entry)
                    steam_id64 = steam_id3_int + STEAM_ID64_BASE
                    steam_id64_str = str(steam_id64)
                    if steam_id64_str in user_persona_names:
                        display_name = user_persona_names[steam_id64_str]  # use the resolved name
                        logging.debug(f"Matched ID3 {entry} to Name: {display_name}")
                except ValueError:
                    logging.warning(f"User ID found in userdata is not numeric: {entry}")
                except Exception as e_name:
                    logging.error(f"ERROR retrieving name for ID {entry}: {e_name}")
                # --- END PersonaName resolution ---
                # --- Determine last-modified time ---
                config_vdf_path = os.path.join(user_path, 'config', 'localconfig.vdf')
                check_paths = [config_vdf_path, user_path]  # check the VDF first, then the user's base folder
                for check_path in check_paths:
                    try:
                        if os.path.exists(check_path):
                            mtime = os.path.getmtime(check_path)
                            if mtime > current_mtime:  # keep the most recent timestamp between VDF and folder
                                current_mtime = mtime
                                try:
                                    last_mod_str = datetime.fromtimestamp(mtime).strftime('%d/%m/%Y %H:%M')
                                except ValueError: last_mod_str = "Invalid Date"
                    except Exception: pass  # ignore mtime read errors for individual paths
                # Store details for this ID
                id_details[entry] = {
                    'mtime': current_mtime,
                    'last_mod_str': last_mod_str,
                    'display_name': display_name  # resolved name (or the ID itself)
                }
                # Track the most recently active ID as the likely one
                if current_mtime > last_modified_time:
                    last_modified_time = current_mtime
                    likely_id = entry
    except Exception as e:
        logging.error(f"ERROR scanning 'userdata': {e}")
        return None, None, [], {}  # reset on serious errors
    # Cache and return
    _steam_userdata_path = userdata_base
    _steam_id3 = likely_id
    _cached_possible_ids = possible_ids
    _cached_id_details = id_details
    logging.info(f"Found {len(possible_ids)} IDs in userdata. Most likely ID: {likely_id}")
    for uid, details in id_details.items():
        logging.info(f"  - ID: {uid}, Name: {details.get('display_name', '?')}, Last Mod: {details.get('last_mod_str', '?')}")
    return userdata_base, likely_id, possible_ids, id_details
# <<< are_names_similar also applies the initial-letter sequence check >>>
def are_names_similar(name1, name2, min_match_words=2, fuzzy_threshold=88, game_title_words_for_seq=None):
    """Decide whether two names likely refer to the same game.

    Applies four heuristics in order and returns True on the first match:
    1. at least `min_match_words` significant words in common;
    2. space-free equality or one name being a prefix of the other;
    3. `name2` matching the initial letters of the title words
       (only when `game_title_words_for_seq` is provided);
    4. fuzzy token-sort ratio >= `fuzzy_threshold` (when thefuzz is available).
    Returns False when nothing matches or on any internal error.
    """
    try:
        # Keep only letters, digits and spaces, lowercase, then collapse
        # runs of whitespace into single spaces.
        strip_pattern = r'[^a-zA-Z0-9\s]'
        cleaned_a = re.sub(r'\s+', ' ', re.sub(strip_pattern, '', name1).lower()).strip()
        cleaned_b = re.sub(r'\s+', ' ', re.sub(strip_pattern, '', name2).lower()).strip()
        try:
            configured = getattr(config, 'SIMILARITY_IGNORE_WORDS', {'a', 'an', 'the', 'of', 'and', 'remake', 'intergrade', 'edition', 'goty', 'demo', 'trial', 'play', 'launch', 'definitive', 'enhanced', 'complete', 'collection', 'hd', 'ultra', 'deluxe', 'game', 'year'})
            skip_words = {w.lower() for w in configured}
        except Exception as e_config:
            logging.error(f"ARE_NAMES_SIMILAR: Error getting ignore words from config: {e_config}")
            skip_words = {'a', 'an', 'the', 'of', 'and'}  # safe fallback
        # Extract significant words (length > 1, not in the ignore list).
        word_pattern = r'\b\w+\b'  # \w covers digits and underscore, fine here
        tokens_a = {w for w in re.findall(word_pattern, cleaned_a) if w not in skip_words and len(w) > 1}
        tokens_b = {w for w in re.findall(word_pattern, cleaned_b) if w not in skip_words and len(w) > 1}
        # Heuristic 1: enough significant words in common.
        if len(tokens_a & tokens_b) >= min_match_words:
            return True
        # Heuristic 2: space-free equality or prefix relationship.
        compact_a = cleaned_a.replace(' ', '')
        compact_b = cleaned_b.replace(' ', '')
        MIN_PREFIX_LEN = 3
        if len(compact_a) >= MIN_PREFIX_LEN and len(compact_b) >= MIN_PREFIX_LEN:
            # Exact equality without spaces takes precedence over startswith.
            if compact_a == compact_b:
                return True
            if len(compact_a) > len(compact_b) and compact_a.startswith(compact_b):
                return True
            if len(compact_b) > len(compact_a) and compact_b.startswith(compact_a):
                return True
        # Heuristic 3: initial-letter sequence match against the title words.
        if game_title_words_for_seq and matches_initial_sequence(name2, game_title_words_for_seq):
            return True
        # Heuristic 4: fuzzy token-sort matching (thresholds > 100 disable it).
        if THEFUZZ_AVAILABLE and fuzzy_threshold <= 100:
            if fuzz.token_sort_ratio(cleaned_a, cleaned_b) >= fuzzy_threshold:
                return True
        # No heuristic matched.
        return False
    except Exception as e_sim:
        logging.error(f"ARE_NAMES_SIMILAR: === Error comparing '{name1}' vs '{name2}': {e_sim} ===", exc_info=True)
        return False  # fail closed on any unexpected error
# <<< Funzione per indovinare i percorsi di salvataggio >>>
def guess_save_path(game_name, game_install_dir, appid=None, steam_userdata_path=None, steam_id3_to_use=None, is_steam_game=True, installed_steam_games_dict=None):
"""
Tenta di indovinare i possibili percorsi di salvataggio per un gioco usando varie euristiche.
Chiama le funzioni esterne `clean_for_comparison` e `final_sort_key` per l'elaborazione e l'ordinamento.
Args:
game_name (str): Nome del gioco.
game_install_dir (str|None): Percorso di installazione del gioco (se noto).
appid (str|None): Steam AppID del gioco (se noto).
steam_userdata_path (str|None): Percorso base della cartella userdata di Steam.
steam_id3_to_use (str|None): SteamID3 dell'utente da usare per la ricerca in userdata.
is_steam_game (bool): Flag che indica se è un gioco Steam.
installed_steam_games_dict (dict|None): Dizionario {appid: {'name':..., 'installdir':...}}
dei giochi Steam installati.
Returns:
list[tuple[str, int]]: Lista di tuple (percorso_trovato, punteggio) ordinate per probabilità decrescente.
"""
guesses_data = {}
checked_paths = set()
# --- Variabili comuni (accessibili anche da final_sort_key tramite il dizionario) ---
sanitized_name_base = re.sub(r'^(Play |Launch )', '', game_name, flags=re.IGNORECASE)
sanitized_name = re.sub(r'[™®©:]', '', sanitized_name_base).strip()
game_abbreviations = generate_abbreviations(sanitized_name, game_install_dir)
if sanitized_name not in game_abbreviations: game_abbreviations.insert(0, sanitized_name)
# --- Calcola set maiuscoli/minuscoli ---
game_abbreviations_upper = set(a.upper() for a in game_abbreviations if a) # Aggiunto 'if a' per sicurezza
game_abbreviations_lower = set(a.lower() for a in game_abbreviations if a) # Aggiunto 'if a' per sicurezza
# --- FINE ---
# Carica configurazioni
try:
ignore_words = getattr(config, 'SIMILARITY_IGNORE_WORDS', set())
common_save_extensions = getattr(config, 'COMMON_SAVE_EXTENSIONS', set())
common_save_filenames = getattr(config, 'COMMON_SAVE_FILENAMES', set())
common_save_subdirs = getattr(config, 'COMMON_SAVE_SUBDIRS', [])
common_publishers = getattr(config, 'COMMON_PUBLISHERS', [])
common_publishers_set = set(p.lower() for p in common_publishers if p) # Converti in minuscolo per check case-insensitive
BANNED_FOLDER_NAMES_LOWER = getattr(config, 'BANNED_FOLDER_NAMES_LOWER', {
"microsoft", "nvidia corporation", "intel", "amd", "google", "mozilla",
"common files", "internet explorer", "windows", "system32", "syswow64",