- from .media_processing import *
- from .exceptions import *
+ from .media_processing import (
+     merge_mp4_with_overlay,
+     merge_jpg_with_overlay
+ )
+ from .metadata import (
+     write_exif,
+     set_file_timestamp
+ )
+ from .exceptions import (
+     ZipExtractionError,
+     DownloadError,
+     VideoProcessingError,
+     ImageProcessingError,
+     DependencyError
+ )
from pathlib import Path
- from .metadata import *
import requests
import shutil
import time
    FileNotFoundError: If ZIP file doesn't exist
    ZipExtractionError: If extraction or processing fails
"""
- def handle_zip(filepath: Path, name: str, memory: dict[str, str, str, str, str]) -> None:
+
+
+ def handle_zip(
+     filepath: Path,
+     name: str,
+     memory: dict[str, str],
+ ) -> None:

    if not filepath.exists():
        raise FileNotFoundError(f"ZIP file not found: {filepath}")

-
    # Create folder path for extracted files
    new_folder = Path(f"./memories/{name}")

    if new_folder.exists():
-         print(f"Folder already exists: {new_folder.name}, skipping extraction")
+         print(
+             f"Folder already exists: {new_folder.name}, skipping extraction"
+         )

    # Create folder
    try:
@@ -54,7 +73,9 @@ def handle_zip(filepath: Path, name: str, memory: dict[str, str, str, str, str])
    try:
        os.remove(filepath)
    except OSError as e:
-         print(f"Warning: Could not delete ZIP file {filepath.name}: {e}")
+         print(
+             f"Warning: Could not delete ZIP file {filepath.name}: {e}"
+         )

    # Track what files we found
    main_mp4 = None
@@ -92,17 +113,23 @@ def handle_zip(filepath: Path, name: str, memory: dict[str, str, str, str, str])
                new_path = new_folder / new_name
                file.rename(new_path)
            except OSError as e:
-                 print(f"Warning: Could not rename {old_name} to {new_name}: {e}")
+                 print(
+                     f"Warning: Could not rename {old_name} "
+                     f"to {new_name}: {e}"
+                 )
                continue

        # Verify we found expected files
        if not main_mp4 and not main_jpg:
            raise ZipExtractionError(
                f"No main media file found in {filepath.name}. "
-                 f"Exprected file ending with '-main.mp4' or '-main.jpg'"
+                 f"Expected file ending with '-main.mp4' or '-main.jpg'"
            )
        if not overlay_png:
-             print(f"Warning: No overlay PNG found in {filepath.name}")
+             print(
+                 f"Warning: No overlay PNG found in "
+                 f"{filepath.name}"
+             )

        # Get Memory metadata values
        date_str = memory["date"]
@@ -111,9 +138,15 @@ def handle_zip(filepath: Path, name: str, memory: dict[str, str, str, str, str])

        # Make sure valid metadata
        if not date_str:
-             raise ValueError("Date string not found in Memory {filepath.name}.")
+             raise ValueError(
+                 f"Date string not found in Memory "
+                 f"{filepath.name}."
+             )
        if not lat or not lon:
-             raise ValueError("GPS coordinates not found in Memory {filepath.name}.")
+             raise ValueError(
+                 f"GPS coordinates not found in Memory "
+                 f"{filepath.name}."
+             )

        # Process MP4 if found
        if main_mp4 and main_mp4.exists():
@@ -123,15 +156,23 @@ def handle_zip(filepath: Path, name: str, memory: dict[str, str, str, str, str])

                if overlay_png and overlay_png.exists():
                    try:
-                         combined_path = merge_mp4_with_overlay(main_mp4, overlay_png)
+                         combined_path = merge_mp4_with_overlay(
+                             main_mp4, overlay_png
+                         )
                        write_exif(combined_path, date_str, lat, lon)
                    except (VideoProcessingError, DependencyError) as e:
-                         print(f"Warning: Failed to merge MP4 with overlay: {e}")
+                         print(
+                             f"Warning: Failed to merge MP4 with overlay: {e}"
+                         )
                    except Exception as e:
-                         print(f"Warning: Unexpected error merging MP4: {e}")
+                         print(
+                             f"Warning: Unexpected error merging MP4: {e}"
+                         )

            except Exception as e:
-                 print(f"Warning: Failed to process MP4: {e}")
+                 print(
+                     f"Warning: Failed to process MP4: {e}"
+                 )

        # Process JPG if found
        if main_jpg and main_jpg.exists():
@@ -141,30 +182,41 @@ def handle_zip(filepath: Path, name: str, memory: dict[str, str, str, str, str])

                if overlay_png and overlay_png.exists():
                    try:
-                         combined_path = merge_jpg_with_overlay(main_jpg, overlay_png)
+                         combined_path = merge_jpg_with_overlay(
+                             main_jpg, overlay_png
+                         )
                        write_exif(combined_path, date_str, lat, lon)
                    except ImageProcessingError as e:
-                         print(f"Warning: Failed to merge JPG with overlay: {e}")
+                         print(
+                             f"Warning: Failed to merge JPG with overlay: {e}"
+                         )
                    except Exception as e:
-                         print(f"Warning: Unexpected error merging JPG: {e}")
+                         print(
+                             f"Warning: Unexpected error merging JPG: {e}"
+                         )

            except Exception as e:
-                 print(f"Warning: Failed to process JPG: {e}")
+                 print(
+                     f"Warning: Failed to process JPG: {e}"
+                 )

        # Set folder timestamp to match content
-         try: # not sure this is right
+         try:  # not sure this is right
            timestamp_date = date_str.replace(" UTC", "").strip()
            set_file_timestamp(new_folder, timestamp_date)
        except Exception as e:
-             print(f"Warning: Could not set folder timestamp: {e}")
+             print(
+                 f"Warning: Could not set folder timestamp: {e}"
+             )

    except ZipExtractionError:
-         # Re-raise our custom errors
+         # Re-raise our custom errors
        raise
    except Exception as e:
-         # Catch unexpected errors
+         # Catch unexpected errors
        raise ZipExtractionError(f"Failed to process extracted files: {e}")

+
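
A minimal usage sketch for handle_zip, assuming a memory dict shaped like the keys read above ("date" ending in " UTC", plus "lat" and "lon") and a ZIP already saved under ./memories; the example values and filename are hypothetical:

    from pathlib import Path

    # Hypothetical values; "date" must end in " UTC" for the timestamp handling above.
    memory = {
        "date": "2023-07-04 18:21:33 UTC",
        "lat": "40.7128",
        "lon": "-74.0060",
    }

    zip_path = Path("./memories/2023-07-04-182133.zip")  # assumed download location
    handle_zip(zip_path, zip_path.stem, memory)
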
# =========================================================================== #

"""
@@ -178,7 +230,11 @@ def handle_zip(filepath: Path, name: str, memory: dict[str, str, str, str, str])
    DownloadError: If download fails
    NetworkError: If network connection fails
"""
- def memory_download(memories: list[dict[str, str, str, str, str]]) -> None:
+
+
+ def memory_download(
+     memories: list[dict[str, str]],
+ ) -> None:

    total_files = len(memories)
    if not memories or total_files <= 0:
@@ -219,20 +275,27 @@ def memory_download(memories: list[dict[str, str, str, str, str]]) -> None:
            name = date_str.replace(" ", "-")[:-4]
            name = name.replace(":", "")
        except Exception as e:
-             print(f"\nMemory {idx}: Invalid date format '{date_str}', skipping")
+             print(
+                 f"\nMemory {idx}: Invalid date format '{date_str}', skipping"
+             )
            failed_downloads.append((idx, f"Invalid date: {e}"))
            continue

        # Implement retries if a download fails
        max_retries = 3
-         retry_delay = 2 # seconds
+         retry_delay = 2  # seconds

        for attempt in range(0, max_retries):
            try:
-                 print(f"\rDownloading {idx + 1}/{total_files}: {name}...", end="", flush=True)
+                 print(
+                     f"\rDownloading {idx + 1}/{total_files}: {name}...",
+                     end="",
+                     flush=True
+                 )

                with requests.get(url, stream=True, timeout=30) as r:
-                     r.raise_for_status() # Raise exception for 4xx/5xx status codes
+                     # Raise exception for 4xx/5xx status codes
+                     r.raise_for_status()

                    # Determine file extension from Content-Type header
                    content_type = r.headers.get("Content-Type", "").lower()
@@ -245,29 +308,38 @@ def memory_download(memories: list[dict[str, str, str, str, str]]) -> None:
                    elif "zip" in content_type:
                        ext = ".zip"
                    else:
-                         print(f"Memory {idx}: Unknown file type '{content_type}', skipping\n")
-                         failed_downloads.append((idx, f"Unknown type: {content_type}"))
+                         print(
+                             f"Memory {idx}: Unknown Content-Type "
+                             f"'{content_type}', skipping\n"
+                         )
+                         failed_downloads.append(
+                             (idx, f"Unknown type: {content_type}")
+                         )
                        break

                    filepath = out_dir / f"{name}{ext}"
                    filepath_no_ext = out_dir / name

                    if filepath.exists() or filepath_no_ext.exists():
-                         print(f"\nMemory {idx}: File already exists, skipping\n")
+                         print(
+                             f"\nMemory {idx}: File already exists, skipping\n"
+                         )
                        download_count += 1
                        break

                    try:
                        with open(filepath, 'wb') as f:
-                             for chunk in r.iter_content(chunk_size=8192): # 8 KB chunks
-                                 if chunk: # filter out keep-alive new chunks
+                             # 8 KB chunks
+                             for chunk in r.iter_content(chunk_size=8192):
+                                 if chunk:  # filter out keep-alive new chunks
                                    f.write(chunk)
                    except OSError as e:
                        raise DownloadError(f"Failed to write file: {e}")

                    if not filepath.exists() or filepath.stat().st_size == 0:
-                         raise DownloadError("Downloaded file is empty or missing\n")
-
+                         raise DownloadError(
+                             "Downloaded file is empty or missing\n"
+                         )

                    # Process the downloaded file
                    try:
@@ -277,56 +349,87 @@ def memory_download(memories: list[dict[str, str, str, str, str]]) -> None:
                        write_exif(filepath, date_str, lat, lon)

                    except Exception as e:
-                         print(f"\nMemory {idx}: Post-processing failed: {e}\n")
+                         print(
+                             f"\nMemory {idx}: Post-processing failed: {e}\n"
+                         )

                    # successful download and processing, move onto next file
                    download_count += 1
                    break

            except requests.exceptions.Timeout:
                if attempt < max_retries:
-                     print(f"\nMemory {idx}: Timeout, retrying ({attempt}/{max_retries})...\n")
+                     print(
+                         f"\nMemory {idx}: Timeout, retrying "
+                         f"({attempt}/{max_retries})...\n"
+                     )
                    time.sleep(retry_delay)
                else:
-                     print(f"\nMemory {idx}: Timeout after {max_retries} attempts, skipping\n")
+                     print(
+                         f"\nMemory {idx}: Timeout after {max_retries}"
+                         f" attempts, skipping\n"
+                     )
                    failed_downloads.append((idx, "Timeout"))

            except requests.exceptions.ConnectionError:
                if attempt < max_retries:
-                     print(f"\nMemory {idx}: Connection error, retrying ({attempt}/{max_retries})...\n")
+                     print(
+                         f"\nMemory {idx}: Connection error, retrying "
+                         f"({attempt}/{max_retries})...\n"
+                     )
                    time.sleep(retry_delay)
                else:
-                     print(f"\nMemory {idx}: Connection failed after {max_retries} attempts, skipping\n")
+                     print(
+                         f"\nMemory {idx}: Connection failed after"
+                         f" {max_retries} attempts, skipping\n"
+                     )
                    failed_downloads.append((idx, "Connection error"))

            except requests.exceptions.HTTPError as e:
                # Don't retry on 404, 403, etc.

-                 # Retry on server errors. This seems to be most prevalent error when downloading
+                 # Retry on server errors.
+                 # This seems to be the most prevalent error when downloading
                status = e.response.status_code
                if 500 <= status < 600:
                    if attempt < max_retries:
-                         print(f"\nMemory {idx}: Server error {status}, retry attempt {attempt}/{max_retries}")
+                         print(
+                             f"\nMemory {idx}: Server error {status}, "
+                             f"retry attempt {attempt}/{max_retries}"
+                         )
                        time.sleep(retry_delay)
                        continue

-                 print(f"\nMemory {idx}: HTTP error {e.response.status_code}, skipping\n")
-                 failed_downloads.append((idx, f"HTTP {e.response.status_code}"))
+                 print(
+                     f"\nMemory {idx}: HTTP error "
+                     f"{e.response.status_code}, skipping\n"
+                 )
+                 failed_downloads.append(
+                     (idx, f"HTTP {e.response.status_code}")
+                 )
                break

            except requests.exceptions.RequestException as e:
-                 print(f"\nMemory {idx}: Download failed: {e}, skipping\n")
+                 print(
+                     f"\nMemory {idx}: Download failed: {e}, "
+                     f"skipping\n"
+                 )
                failed_downloads.append((idx, str(e)))
                break

            except Exception as e:
-                 print(f"\nMemory {idx}: Unexpected error: {e}, skipping\n")
+                 print(
+                     f"\nMemory {idx}: Unexpected error: {e}, "
+                     f"skipping\n"
+                 )
                failed_downloads.append((idx, str(e)))
                break

    # Final summary
    print(f"\n\n{'=' * 50}")
-     print(f"Successfully downloaded: {download_count}/{total_files}")
+     print(
+         f"Successfully downloaded: {download_count}/{total_files}"
+     )

    if failed_downloads:
        print(f"Failed downloads: {len(failed_downloads)}")
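
For reference, a minimal sketch of driving memory_download; the export filename, field names, and the "url" key are assumptions (the URL lookup itself is not shown in this diff):

    import json

    # Hypothetical export file and field names; adjust to the real data source.
    with open("json/memories_history.json", encoding="utf-8") as f:
        data = json.load(f)

    memories = [
        {
            "date": m["Date"],           # e.g. "2023-07-04 18:21:33 UTC"
            "lat": m["Latitude"],
            "lon": m["Longitude"],
            "url": m["Download Link"],   # assumed key for the download URL
        }
        for m in data["Saved Media"]
    ]

    memory_download(memories)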