Skip to content

Commit 30b4e03

Browse files
committed
renames a few variables to conform to snake_case naming conventions (e.g. pSizes → photo_sizes, jsonFname → json_fname, and the builtin-shadowing `set` → photoset)
1 parent 8d5ffbd commit 30b4e03

File tree

1 file changed

+17
-17
lines changed

1 file changed

+17
-17
lines changed

flickr_download/flick_download.py

Lines changed: 17 additions & 17 deletions
Original file line numberDiff line numberDiff line change
@@ -108,8 +108,8 @@ def _get_metadata_db(dirname: str) -> sqlite3.Connection:
108108
def _get_size_and_suffix(photo: Photo, size_label: Optional[str]) -> Tuple[Optional[str], str]:
109109
photo_size_label: Optional[str] = size_label
110110
if photo.get("video"):
111-
pSizes = _get_photo_sizes(photo)
112-
if pSizes and "HD MP4" in pSizes:
111+
photo_sizes = _get_photo_sizes(photo)
112+
if photo_sizes and "HD MP4" in photo_sizes:
113113
photo_size_label = "HD MP4"
114114
else:
115115
# Fall back for old 'short videos'. This might not exist, but
@@ -124,8 +124,8 @@ def _get_size_and_suffix(photo: Photo, size_label: Optional[str]) -> Tuple[Optio
124124
# to find the original type it seems is through the source filename. This
125125
# is not pretty...
126126
if photo_size_label == "Original" or not photo_size_label:
127-
pSizes = _get_photo_sizes(photo)
128-
meta = pSizes and pSizes.get("Original")
127+
photo_sizes = _get_photo_sizes(photo)
128+
meta = photo_sizes and photo_sizes.get("Original")
129129
if meta and meta["source"]:
130130
ext = os.path.splitext(meta["source"])[1]
131131
if ext:
@@ -249,7 +249,7 @@ def do_download_photo(
249249
return
250250

251251
fname = get_full_path(dirname, get_filename(pset, photo, suffix))
252-
jsonFname = fname + ".json"
252+
json_fname = fname + ".json"
253253

254254
if not photo["loaded"]:
255255
# trying not trigger two calls to Photo.getInfo here, as it will if it was already loaded
@@ -261,14 +261,14 @@ def do_download_photo(
261261

262262
if save_json:
263263
try:
264-
if Path(jsonFname).exists():
265-
logging.info("Skipping %s, as it exists already", jsonFname)
264+
if Path(json_fname).exists():
265+
logging.info("Skipping %s, as it exists already", json_fname)
266266
else:
267-
with open(jsonFname, "w", encoding="utf-8") as jsonFile:
268-
logging.info("Saving photo info: %s", jsonFname)
267+
with open(json_fname, "w", encoding="utf-8") as json_file:
268+
logging.info("Saving photo info: %s", json_fname)
269269
photo_data = photo.__dict__.copy()
270270
photo_data["exif"] = photo.getExif()
271-
jsonFile.write(
271+
json_file.write(
272272
json.dumps(photo_data, default=serialize_json, indent=2, sort_keys=True)
273273
)
274274
except Exception:
@@ -392,8 +392,8 @@ def print_sets(username: str) -> None:
392392
"""
393393
user = find_user(username)
394394
photosets = Walker(user.getPhotosets)
395-
for set in photosets:
396-
print(f"{set.id} - {set.title}")
395+
for photoset in photosets:
396+
print(f"{photoset.id} - {photoset.title}")
397397

398398

399399
def get_cache(path: str) -> SimpleCache:
@@ -404,19 +404,19 @@ def get_cache(path: str) -> SimpleCache:
404404
return cache
405405

406406
with cache_path.open("rb") as handle:
407-
db = pickle.load(handle)
408-
cache.storage = db["storage"]
407+
database = pickle.load(handle)
408+
cache.storage = database["storage"]
409409
logging.debug("Cache loaded from: %s", cache_path.resolve())
410-
cache.expire_info = db["expire_info"]
410+
cache.expire_info = database["expire_info"]
411411
return cache
412412

413413

414414
def save_cache(path: str, cache: SimpleCache) -> bool:
415415
"""Saves the cache to disk."""
416-
db = {"storage": cache.storage, "expire_info": cache.expire_info}
416+
database = {"storage": cache.storage, "expire_info": cache.expire_info}
417417
cache_path = Path(path)
418418
with cache_path.open("wb") as handle:
419-
pickle.dump(db, handle, protocol=pickle.HIGHEST_PROTOCOL)
419+
pickle.dump(database, handle, protocol=pickle.HIGHEST_PROTOCOL)
420420

421421
logging.debug("Cache saved to %s", cache_path.resolve())
422422
return True

0 commit comments

Comments
 (0)