diff --git a/.github/workflows/build-nightly.yml b/.github/workflows/build-nightly.yml index 282403e87..0ad134970 100644 --- a/.github/workflows/build-nightly.yml +++ b/.github/workflows/build-nightly.yml @@ -1,47 +1,62 @@ -# This is quick action to build apk and share it with artifacts +# This is quick action to build exe and share it with artifacts # It's for convenience and nightly-testing with people name: Build nightly on: workflow_dispatch: + push: + branches: + - prerelease jobs: build: strategy: matrix: - os: [ ubuntu-latest, macos-13, macos-latest, windows-latest ] + os: [ubuntu-latest, macos-latest, windows-latest] + arch: [x86_64, arm64] runs-on: ${{ matrix.os }} steps: - - uses: actions/checkout@v3 - - name: Get exe name - id: exe_name + - uses: actions/checkout@v4.2.2 + - uses: dart-lang/setup-dart@v1.7.1 + - run: dart pub get + - name: Get version + id: version shell: bash run: | - if [ "$RUNNER_OS" == "Linux" ]; then - echo "name=gpth-linux" >> $GITHUB_OUTPUT - elif [ "$RUNNER_OS" == "Windows" ]; then - echo "name=gpth-windoza.exe" >> $GITHUB_OUTPUT - elif [ "$RUNNER_OS" == "macOS" ]; then - arch=$(uname -m) - if [ "$arch" == "arm64" ]; then - echo "name=gpth-macos-arm64" >> $GITHUB_OUTPUT - elif [ "$arch" == "x86_64" ]; then - echo "name=gpth-macos-intel" >> $GITHUB_OUTPUT - else - echo "Unknown macOS architecture: $arch" >> $GITHUB_OUTPUT - exit 1 - fi + if [[ "$RUNNER_OS" == "Windows" ]]; then + VERSION=$(powershell -Command "(Get-Content pubspec.yaml | Select-String '^version:').ToString().Split(':')[1].Trim()") else - echo "Unknown OS: $RUNNER_OS" - exit 69 + VERSION=$(grep '^version:' pubspec.yaml | head -n1 | cut -d' ' -f2) fi - - uses: dart-lang/setup-dart@v1 - - run: dart pub get + echo "version=$VERSION" >> $GITHUB_OUTPUT - name: Build exe - run: dart compile exe bin/gpth.dart -o ./${{ steps.exe_name.outputs.name }} - - name: Upload apk as artifact - uses: actions/upload-artifact@v3 + shell: bash + run: | + version=${{ 
steps.version.outputs.version }} + osname=${{ matrix.os }} + archname=${{ matrix.arch }} + # Normalize osname for output file + if [[ "$osname" == "ubuntu-latest" ]]; then + osname=linux + elif [[ "$osname" == "macos-latest" ]]; then + osname=macos + elif [[ "$osname" == "windows-latest" ]]; then + osname=windows + fi + outname=gpth-v$version-nightly-$osname-$archname + if [[ "$osname" == "windows" ]]; then + outname="$outname.exe" + fi + dart compile exe bin/gpth.dart -o ./$outname + - name: Upload exe as artifact + uses: actions/upload-artifact@v4.6.2 with: - name: gpth-nightly-${{ runner.os }} - path: ./${{ steps.exe_name.outputs.name }} + name: gpth-v${{ steps.version.outputs.version }}-nightly-${{ matrix.os == 'ubuntu-latest' && 'linux' || matrix.os == 'macos-latest' && 'macos' || matrix.os == 'windows-latest' && 'windows' }}-${{ matrix.arch }} + path: | + ./gpth-v${{ steps.version.outputs.version }}-nightly-linux-${{ matrix.arch }} + ./gpth-v${{ steps.version.outputs.version }}-nightly-linux-${{ matrix.arch }}.exe + ./gpth-v${{ steps.version.outputs.version }}-nightly-macos-${{ matrix.arch }} + ./gpth-v${{ steps.version.outputs.version }}-nightly-macos-${{ matrix.arch }}.exe + ./gpth-v${{ steps.version.outputs.version }}-nightly-windows-${{ matrix.arch }} + ./gpth-v${{ steps.version.outputs.version }}-nightly-windows-${{ matrix.arch }}.exe if-no-files-found: error diff --git a/.github/workflows/dart-test.yaml b/.github/workflows/dart-test.yaml index 18c4422e3..5aa7f3370 100644 --- a/.github/workflows/dart-test.yaml +++ b/.github/workflows/dart-test.yaml @@ -1,25 +1,47 @@ # Main Dart tests and checks -name: Dart tests +name: Dart test on: + workflow_dispatch: push: - branches: [ master ] + branches: [main, prerelease] pull_request: - branches: [ master ] - + branches: [main, prerelease] jobs: - tests: - runs-on: ubuntu-latest + test: + runs-on: ${{ matrix.os }} + strategy: + matrix: + os: [ubuntu-latest, windows-latest, macos-latest] + sdk: [3.8] + steps: - - 
uses: actions/checkout@v3 - - uses: dart-lang/setup-dart@v1 - - run: dart pub get - - run: dart test + - uses: actions/checkout@v4.2.2 + - uses: dart-lang/setup-dart@v1.7.1 + with: + sdk: ${{ matrix.sdk }} + + - name: Install exiftool (Linux) + if: runner.os == 'Linux' + run: sudo apt-get update && sudo apt-get install -y exiftool + + - name: Install exiftool (macOS) + if: runner.os == 'macOS' + run: brew install exiftool + + - name: Install exiftool (Windows) + if: runner.os == 'Windows' + run: choco install exiftool -y + + - name: Install dependencies + run: dart pub get + - name: Run tests + run: dart test formatting-and-analysis: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 - - uses: dart-lang/setup-dart@v1 + - uses: actions/checkout@v4.2.2 + - uses: dart-lang/setup-dart@v1.7.1 - run: dart pub get - name: Verify formatting run: dart format --output=none --set-exit-if-changed . diff --git a/.github/workflows/new-release.yml b/.github/workflows/new-release.yml index bd3338833..b26bf52d0 100644 --- a/.github/workflows/new-release.yml +++ b/.github/workflows/new-release.yml @@ -8,7 +8,7 @@ jobs: make-release: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4.2.2 - name: Get clean tag name id: clean_tag run: echo "tag=$(echo ${{ github.ref }} | sed 's/refs\/tags\///')" >> $GITHUB_OUTPUT @@ -19,7 +19,7 @@ jobs: # id: tag_message # run: echo "name=message=$(git tag -l --format='%(contents)' ${{ github.ref }})" >> $GITHUB_OUTPUT - name: Create GH-Release - uses: softprops/action-gh-release@v1 + uses: softprops/action-gh-release@v2.2.2 with: body_path: ./body-file.txt fail_on_unmatched_files: true @@ -28,67 +28,67 @@ jobs: needs: make-release strategy: matrix: - os: [ ubuntu-latest, macos-13, macos-latest, windows-latest ] + os: [ ubuntu-latest, macos-latest, windows-latest ] + arch: [x86_64, arm64] runs-on: ${{ matrix.os }} steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4.2.2 - name: Get clean tag 
name id: clean_tag run: echo "tag=$(echo ${{ github.ref }} | sed 's/refs\/tags\///')" >> $GITHUB_OUTPUT - - name: Get exe name - id: exe_name + - uses: dart-lang/setup-dart@v1.7.1 + - run: dart pub get + - name: Get version + id: version shell: bash run: | - if [ "$RUNNER_OS" == "Linux" ]; then - echo "name=gpth-linux" >> $GITHUB_OUTPUT - elif [ "$RUNNER_OS" == "Windows" ]; then - echo "name=gpth-windoza.exe" >> $GITHUB_OUTPUT - elif [ "$RUNNER_OS" == "macOS" ]; then - arch=$(uname -m) - if [ "$arch" == "arm64" ]; then - echo "name=gpth-macos-arm64" >> $GITHUB_OUTPUT - elif [ "$arch" == "x86_64" ]; then - echo "name=gpth-macos-intel" >> $GITHUB_OUTPUT - else - echo "Unknown macOS architecture: $arch" >> $GITHUB_OUTPUT - exit 1 - fi + if [[ "$RUNNER_OS" == "Windows" ]]; then + VERSION=$(powershell -Command "(Get-Content pubspec.yaml | Select-String '^version:').ToString().Split(':')[1].Trim()") else - echo "Unknown OS: $RUNNER_OS" - exit 69 + VERSION=$(grep '^version:' pubspec.yaml | head -n1 | cut -d' ' -f2) fi - - uses: dart-lang/setup-dart@v1 - - run: dart pub get + echo "version=$VERSION" >> $GITHUB_OUTPUT - name: Build exe - run: dart compile exe bin/gpth.dart -o ./${{ steps.exe_name.outputs.name }} - - name: Code sign a windoza exe file - if: matrix.os == 'windows-latest' - uses: dlemstra/code-sign-action@v1 - with: - # Note: this is a self-signed certificate that i generated on my computer - # This is worthless i think, since windoza still get angry, but best i can do, - # because proper certificates cost money (a lot :/ ) - certificate: ${{ secrets.WINDOZA_CERT_BASE64 }} - password: ${{ secrets.WINDOZA_CERT_PASSWORD }} - folder: '.' 
+ shell: bash + run: | + version=${{ steps.version.outputs.version }} + osname=${{ matrix.os }} + archname=${{ matrix.arch }} + if [[ "$osname" == "ubuntu-latest" ]]; then + osname=linux + elif [[ "$osname" == "macos-latest" ]]; then + osname=macos + elif [[ "$osname" == "windows-latest" ]]; then + osname=windows + fi + outname=gpth-v$version-release-$osname-$archname + if [[ "$osname" == "windows" ]]; then + outname="$outname.exe" + fi + dart compile exe bin/gpth.dart -o ./$outname - name: Add files to GH-Release - uses: softprops/action-gh-release@v1 + uses: softprops/action-gh-release@v2.2.2 with: - # this supports globs :> - files: ./gpth-* + files: | + ./gpth-v${{ steps.version.outputs.version }}-release-linux-${{ matrix.arch }} + ./gpth-v${{ steps.version.outputs.version }}-release-linux-${{ matrix.arch }}.exe + ./gpth-v${{ steps.version.outputs.version }}-release-macos-${{ matrix.arch }} + ./gpth-v${{ steps.version.outputs.version }}-release-macos-${{ matrix.arch }}.exe + ./gpth-v${{ steps.version.outputs.version }}-release-windows-${{ matrix.arch }} + ./gpth-v${{ steps.version.outputs.version }}-release-windows-${{ matrix.arch }}.exe fail_on_unmatched_files: true # if linux, upload to aur - - name: Generate PKGBUILD - if: matrix.os == 'ubuntu-latest' - run: ./scripts/gen-pkgbuild.bash ./${{ steps.exe_name.outputs.name }} - - name: Upload to AUR - if: matrix.os == 'ubuntu-latest' - uses: KSXGitHub/github-actions-deploy-aur@v2 - with: - pkgname: gpth-bin - pkgbuild: ./PKGBUILD - commit_username: 'TheLastGimbus' - commit_email: 'mateusz.soszynski@tuta.io' - ssh_private_key: ${{ secrets.AUR_SSH_PRIVATE_KEY }} - commit_message: ${{ steps.clean_tag.outputs.tag }} - test: true + #- name: Generate PKGBUILD + # if: matrix.os == 'ubuntu-latest' + # run: ./scripts/gen-pkgbuild.bash ./gpth-${{ matrix.os }}-${{ matrix.arch }} + #- name: Upload to AUR + # if: matrix.os == 'ubuntu-latest' + # uses: KSXGitHub/github-actions-deploy-aur@v4.1.1 + # with: + # pkgname: gpth-bin 
+ # pkgbuild: ./PKGBUILD + # commit_username: 'TheLastGimbus' + # commit_email: 'mateusz.soszynski@tuta.io' + # ssh_private_key: ${{ secrets.AUR_SSH_PRIVATE_KEY }} + # commit_message: ${{ steps.clean_tag.outputs.tag }} + # test: true diff --git a/.gitignore b/.gitignore index 870e8757c..2fab9815d 100644 --- a/.gitignore +++ b/.gitignore @@ -7,9 +7,8 @@ # Conventional directory for build output. build/ - -photos/ - -ALL_PHOTOS/ output/ *.log +pubspec.lock +/test/generated +/test/.temp_exif diff --git a/CHANGELOG.md b/CHANGELOG.md index 65c9f9c73..360865b20 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,217 @@ +## 4.0.0-wacheee (by Xentraxx) + +### Fork/Alternate version + +### This change is a big overhaul of the project, so only the major improvements or potential breaking changes are mentioned + +#### Tl;dr + +- Added support for reading EXIF data from JXL (JPEG XL), ARW, RAW, DNG, CRW, CR3, NRW, NEF and RAF files internally. +- Adeded support for reading and writing coordinates and DateTime from and to exif for almost all file formats. +- Added a "--write-exif" flag which will write missing EXIF information (coordinates and DateTime) from json to EXIF for jpg and jpeg files +- Added support to get DateTime from .MOV, .MP4 and probably many other video formats through exiftool. You need to download it yourself (e.g. from here: https://exiftool.org/), rename it to exiftool.exe and make sure the folder you keep it in is in your $PATH variable or in the same folder as gpth. +- Added verbose mode (--verbose or -v) +- File size is not limited anymore by default but can be limited using the --limit-filesize flag for systems with low RAM (like a NAS). 
+ +#### General improvements + +- upgraded dependencies and fixed breaking changes +- updated dart to a minimum version of 3.8.0 of the dart SDK +- included image, intl and coordinate_converter packages +- applied a list of coding best practices through lint rules to code +- added/edited a bunch of comments and changed unnecessary print() to log() for debugging and a better user experience +- Divided code in steps through comments and included steps in output for readability, debuggability and to make it easier to follow the code +- checked TODOs in README.md +- Added TODOs to look into in code through //TODO comments +- moved json_extractor file into date_extractor folder +- added unit tests for new write-exif functionality +- made CLI --help output more readable through line breaks +- renamed some variables/functions to better reflect their purpose +- moved step 8 (update creation time) before final output +- added output how often DateTime and Coordinates have been written in EXIF at the final output +- changed that test data will be created in test subfolder instead of project root directory +- Added consistent log levels to log output to quickly differenciate between informational and error logs +- Added logging of elapsed time for each step. +- Exposed the maxFileSize flag as an argument (--limit-filesize) to set if necessary, It's now deactivated by default to support larger files like videos. +- Added DateTime extraction method statistics to the final output - shows how many files had their dates extracted through which method +- Added elapsed time logging for each processing step +- Improved Github actions + +#### Bug fixes + +- fixed existing unit tests which would fail on windows +- Fixed Github Actions + +#### Added functionality + +- Support for writing coordinates and DateTime to EXIF + - Added new CLI option "--write-exif". 
+ - When enabled, the script will check if the associated json of any given file contains coordinates and if the file does not yet have them in its EXIF data, the script will add them. + - When enabled, the script will check if a DateTime has been extracted from any of the given extraction methods and if the file has no EXIF DateTime set, it will add the DateTime to the EXIF data 'DateTime', 'DateTimeOriginal'and 'DateTimeDigitized'. + - Added verbose mode (--verbose or -v) with log levels info, warning and error. + +- Moved from the stale "exif" package to "exif_reader" for dart local exif reading, the image library for local jpeg exif writing and the external exiftool for all other EXIF reading and writing (images and videos) + - The move to exif_reader adds support for extracting DateTime from JXL (JPEG XL), ARW, RAW, DNG, CRW, CR3, NRW, NEF and RAF files, and video formats like MOV, MP4, etc. + - Exiftool needs to be in $PATH variable or in the same folder as the running binary. If not, that's okay. Then we fall back to exif_reader. But if you have ExifTool locally, Google Photos Takeout Helper now supports reading CreatedDateTime EXIF data for almost all media formats. 
+ +- Added new interactive prompts: + - Option to write EXIF data to files (--write-exif) + - Option to limit file size for systems with low RAM (--limit-filesize) + +##### *Previous fixes and improvement (from 3.4.3-wacheee to 4.0.0-wacheee)* +- *added macOS executables supporting both ARM64 and Intel architectures https://github.com/TheLastGimbus/GooglePhotosTakeoutHelper/issues/310 https://github.com/TheLastGimbus/GooglePhotosTakeoutHelper/issues/396#issuecomment-2787459117* +- *fixed an exception when using GPTH with command-line arguments https://github.com/Wacheee/GooglePhotosTakeoutHelper/issues/5 https://github.com/Wacheee/GooglePhotosTakeoutHelper/issues/8* +- *the "fix JSON metadata files" option can now be configured using command-line arguments https://github.com/Wacheee/GooglePhotosTakeoutHelper/issues/7 https://github.com/Wacheee/GooglePhotosTakeoutHelper/issues/9* +- *if `shortcut` option is selected, shortcut creation will be 10 times faster on Windows platforms (new creation method that avoids using PowerShell). For more details: [TheLastGimbus#390](https://github.com/TheLastGimbus/GooglePhotosTakeoutHelper/pull/390)* +- *fixed issues with folder names containing emojis 💖🤖🚀on Windows #389* +- *added an interactive option to update the creation times of files in the output folder to match their last modified times at the end of the program (only on Windows) #371* +- *if a media is not in a year folder it establishes one from an album to move it to ALL_PHOTOS correctly. 
This will move the original media file directly from the album (or one of those albums) to ALL_PHOTOS and create a shortcut in the output album folder (if shortcut option is selected) #261* +- *added support for moving or copying files with the following extensions to the output folder: .MP, .MV, .DNG, and .CR2 #381 #324 #180 #271* +- *added an interactive option to convert Pixel Motion Photo files (.MP or .MV) to .mp4* +- *added an option to remove the "supplemental-metadata" suffix from JSON to prevent issues with metadata #353 #355* +- *fixed shortcut issue on Windows platforms #248* +- *added more options for date-based folders [year, month, day] #238 (based in this commit [`More granular date folders #299`](https://github.com/TheLastGimbus/GooglePhotosTakeoutHelper/pull/299/commits/d06fe73101845acd650bc025d2977b96bbd1bf1d))* +- *added reverse-shortcut option, now you can mantain original photo in album folders and create a shortcut in year albums* + +##### *Limitations:* +- *if album mode is set to duplicate-copy, it will move the album photos to the album folder (as usual), but ALL_PHOTOS will not contain them if the media is not in a year album.* +- *it does not fix issues related to reading JSON files (if necessary) for Motion Photo files; however, if the dates are included in the file name (as with Pixel Motion Photos), the correct dates will be established.* +- *No interactive unzipping* + +## 3.6.2-wacheee + +### Fork/Alternate version +#### macOS executables + +- added macOS executables supporting both ARM64 and Intel architectures https://github.com/TheLastGimbus/GooglePhotosTakeoutHelper/issues/310 https://github.com/TheLastGimbus/GooglePhotosTakeoutHelper/issues/396#issuecomment-2787459117 + +##### *Previous fixes and improvement (from 3.4.3-wacheee to 3.6.1-wacheee)* +- *fixed an exception when using GPTH with command-line arguments https://github.com/Wacheee/GooglePhotosTakeoutHelper/issues/5 
https://github.com/Wacheee/GooglePhotosTakeoutHelper/issues/8* +- *the "fix JSON metadata files" option can now be configured using command-line arguments https://github.com/Wacheee/GooglePhotosTakeoutHelper/issues/7 https://github.com/Wacheee/GooglePhotosTakeoutHelper/issues/9* +- *if `shortcut` option is selected, shortcut creation will be 10 times faster on Windows platforms (new creation method that avoids using PowerShell). For more details: [TheLastGimbus#390](https://github.com/TheLastGimbus/GooglePhotosTakeoutHelper/pull/390)* +- *fixed issues with folder names containing emojis 💖🤖🚀on Windows #389* +- *added an interactive option to update the creation times of files in the output folder to match their last modified times at the end of the program (only on Windows) #371* +- *if a media is not in a year folder it establishes one from an album to move it to ALL_PHOTOS correctly. This will move the original media file directly from the album (or one of those albums) to ALL_PHOTOS and create a shortcut in the output album folder (if shortcut option is selected) #261* +- *added support for moving or copying files with the following extensions to the output folder: .MP, .MV, .DNG, and .CR2 #381 #324 #180 #271* +- *added an interactive option to convert Pixel Motion Photo files (.MP or .MV) to .mp4* +- *added an option to remove the "supplemental-metadata" suffix from JSON to prevent issues with metadata #353 #355* +- *fixed shortcut issue on Windows platforms #248* +- *added more options for date-based folders [year, month, day] #238 (based in this commit [`More granular date folders #299`](https://github.com/TheLastGimbus/GooglePhotosTakeoutHelper/pull/299/commits/d06fe73101845acd650bc025d2977b96bbd1bf1d))* +- *added reverse-shortcut option, now you can mantain original photo in album folders and create a shortcut in year albums* + +##### *Limitations (previous fixes):* +- *if album mode is set to duplicate-copy, it will move the album photos to the album folder 
(as usual), but ALL_PHOTOS will not contain them if the media is not in a year album.* +- *it does not fix issues related to reading JSON files (if necessary) for Motion Photo files; however, if the dates are included in the file name (as with Pixel Motion Photos), the correct dates will be established.* + +## 3.6.1-wacheee + +### Fork/Alternate version +#### Fixes for Command-Line Arguments + +- fixed an exception when using GPTH with command-line arguments https://github.com/Wacheee/GooglePhotosTakeoutHelper/issues/5 https://github.com/Wacheee/GooglePhotosTakeoutHelper/issues/8 +- the "fix JSON metadata files" option can now be configured using command-line arguments https://github.com/Wacheee/GooglePhotosTakeoutHelper/issues/7 https://github.com/Wacheee/GooglePhotosTakeoutHelper/issues/9 + +##### *Previous fixes and improvement (from 3.4.3-wacheee to 3.6.0-wacheee)* +- *if `shortcut` option is selected, shortcut creation will be 10 times faster on Windows platforms (new creation method that avoids using PowerShell). For more details: [TheLastGimbus#390](https://github.com/TheLastGimbus/GooglePhotosTakeoutHelper/pull/390)* +- *fixed issues with folder names containing emojis 💖🤖🚀on Windows #389* +- *added an interactive option to update the creation times of files in the output folder to match their last modified times at the end of the program (only on Windows) #371* +- *if a media is not in a year folder it establishes one from an album to move it to ALL_PHOTOS correctly. 
This will move the original media file directly from the album (or one of those albums) to ALL_PHOTOS and create a shortcut in the output album folder (if shortcut option is selected) #261* +- *added support for moving or copying files with the following extensions to the output folder: .MP, .MV, .DNG, and .CR2 #381 #324 #180 #271* +- *added an interactive option to convert Pixel Motion Photo files (.MP or .MV) to .mp4* +- *added an option to remove the "supplemental-metadata" suffix from JSON to prevent issues with metadata #353 #355* +- *fixed shortcut issue on Windows platforms #248* +- *added more options for date-based folders [year, month, day] #238 (based in this commit [`More granular date folders #299`](https://github.com/TheLastGimbus/GooglePhotosTakeoutHelper/pull/299/commits/d06fe73101845acd650bc025d2977b96bbd1bf1d))* +- *added reverse-shortcut option, now you can mantain original photo in album folders and create a shortcut in year albums* + +##### *Limitations (previous fixes):* +- *if album mode is set to duplicate-copy, it will move the album photos to the album folder (as usual), but ALL_PHOTOS will not contain them if the media is not in a year album.* +- *it does not fix issues related to reading JSON files (if necessary) for Motion Photo files; however, if the dates are included in the file name (as with Pixel Motion Photos), the correct dates will be established.* + +### Fork/Alternate version +#### Windows: 10x faster shortcut creation and other fixes + +- if `shortcut` option is selected, shortcut creation will be 10 times faster on Windows platforms (new creation method that avoids using PowerShell). 
For more details: [TheLastGimbus#390](https://github.com/TheLastGimbus/GooglePhotosTakeoutHelper/pull/390) +- fixed issues with folder names containing emojis 💖🤖🚀on Windows #389 + +##### *Previous fixes and improvement (from 3.4.3-wacheee to 3.5.2-wacheee)* +- *added an interactive option to update the creation times of files in the output folder to match their last modified times at the end of the program (only on Windows) #371* +- *if a media is not in a year folder it establishes one from an album to move it to ALL_PHOTOS correctly. This will move the original media file directly from the album (or one of those albums) to ALL_PHOTOS and create a shortcut in the output album folder (if shortcut option is selected) #261* +- *added support for moving or copying files with the following extensions to the output folder: .MP, .MV, .DNG, and .CR2 #381 #324 #180 #271* +- *added an interactive option to convert Pixel Motion Photo files (.MP or .MV) to .mp4* +- *added an option to remove the "supplemental-metadata" suffix from JSON to prevent issues with metadata #353 #355* +- *fixed shortcut issue on Windows platforms #248* +- *added more options for date-based folders [year, month, day] #238 (based in this commit [`More granular date folders #299`](https://github.com/TheLastGimbus/GooglePhotosTakeoutHelper/pull/299/commits/d06fe73101845acd650bc025d2977b96bbd1bf1d))* +- *added reverse-shortcut option, now you can mantain original photo in album folders and create a shortcut in year albums* + +##### *Limitations (previous fixes):* +- *if album mode is set to duplicate-copy, it will move the album photos to the album folder (as usual), but ALL_PHOTOS will not contain them if the media is not in a year album.* +- *it does not fix issues related to reading JSON files (if necessary) for Motion Photo files; however, if the dates are included in the file name (as with Pixel Motion Photos), the correct dates will be established.* + +## 3.5.2-wacheee + +### Fork/Alternate version 
+#### New option to update creation time at the end of program - Windows only + +- added an interactive option to update the creation times of files in the output folder to match their last modified times at the end of the program #371 + +Limitations: +- only works for Windows right now + +##### *Previous fixes and improvement (from 3.4.3-wacheee to 3.5.1-wacheee)* +- *if a media is not in a year folder it establishes one from an album to move it to ALL_PHOTOS correctly. This will move the original media file directly from the album (or one of those albums) to ALL_PHOTOS and create a shortcut in the output album folder (if shortcut option is selected) #261* +- *added support for moving or copying files with the following extensions to the output folder: .MP, .MV, .DNG, and .CR2 #381 #324 #180 #271* +- *added an interactive option to convert Pixel Motion Photo files (.MP or .MV) to .mp4* +- *added an option to remove the "supplemental-metadata" suffix from JSON to prevent issues with metadata #353 #355* +- *fixed shortcut issue on Windows platforms #248* +- *added more options for date-based folders [year, month, day] #238 (based in this commit [`More granular date folders #299`](https://github.com/TheLastGimbus/GooglePhotosTakeoutHelper/pull/299/commits/d06fe73101845acd650bc025d2977b96bbd1bf1d))* +- *added reverse-shortcut option, now you can mantain original photo in album folders and create a shortcut in year albums* + +##### *Limitations (previous fixes):* +- *if album mode is set to duplicate-copy, it will move the album photos to the album folder (as usual), but ALL_PHOTOS will not contain them if the media is not in a year album.* +- *it does not fix issues related to reading JSON files (if necessary) for Motion Photo files; however, if the dates are included in the file name (as with Pixel Motion Photos), the correct dates will be established.* + +## 3.5.1-wacheee + +### Fork/Alternate version +#### Always move to ALL_PHOTOS even if it is not present in year 
album + +- if a media is not in a year folder it establishes one from an album to move it to ALL_PHOTOS correctly. This will move the original media file directly from the album (or one of those albums) to ALL_PHOTOS and create a shortcut in the output album folder (if shortcut option is selected) #261 + +Limitations: +- if album mode is set to duplicate-copy, it will move the album photos to the album folder (as usual), but ALL_PHOTOS will not contain them if the media is not in a year album. + +##### *Previous fixes (3.4.3-wacheee - 3.5.0-wacheee)* +- *added support for moving or copying files with the following extensions to the output folder: .MP, .MV, .DNG, and .CR2 #381 #324 #180 #271* +- *added an interactive option to convert Pixel Motion Photo files (.MP or .MV) to .mp4* +- *added an option to remove the "supplemental-metadata" suffix from JSON to prevent issues with metadata #353 #355* +- *fixed shortcut issue on Windows platforms #248* +- *added more options for date-based folders [year, month, day] #238 (based in this commit [`More granular date folders #299`](https://github.com/TheLastGimbus/GooglePhotosTakeoutHelper/pull/299/commits/d06fe73101845acd650bc025d2977b96bbd1bf1d))* +- *added reverse-shortcut option, now you can mantain original photo in album folders and create a shortcut in year albums* + +##### *Limitations (previous fixes):* +- *it does not fix issues related to reading JSON files (if necessary) for Motion Photo files; however, if the dates are included in the file name (as with Pixel Motion Photos), the correct dates will be established.* + +## 3.5.0-wacheee + +### Fork/Alternate version +#### Convert Pixel Motion Photo files Option - More extensions supported + +- added support for moving or copying files with the following extensions to the output folder: .MP, .MV, .DNG, and .CR2 #381 #324 #180 #271 +- added an interactive option to convert Pixel Motion Photo files (.MP or .MV) to .mp4 + +Limitations: +- it does not fix issues related 
to reading JSON files (if necessary) for Motion Photo files; however, if the dates are included in the file name (as with Pixel Motion Photos), the correct dates will be established. + +## 3.4.3-wacheee + +### Fork/Alternate version from original +#### Bug fixes + +- added an option to remove the "supplemental-metadata" suffix from JSON to prevent issues with metadata #353 #355 +- fixed shortcut issue on Windows platforms #248 +- added more options for date-based folders [year, month, day] #238 (based in this commit [`More granular date folders #299`](https://github.com/TheLastGimbus/GooglePhotosTakeoutHelper/pull/299/commits/d06fe73101845acd650bc025d2977b96bbd1bf1d)) +- added reverse-shortcut option, now you can mantain original photo in album folders and create a shortcut in year albums + ## 3.4.3 ### Just a few patches ❤️‍🩹 diff --git a/README.md b/README.md index 586fb3c6c..065fce494 100644 --- a/README.md +++ b/README.md @@ -33,7 +33,22 @@ If you want to run it on Synology, have problems with interactive, or just love ### 3. Download the executable for your system from [releases tab](https://github.com/TheLastGimbus/GooglePhotosTakeoutHelper/releases) 🛒 ([also available on AUR 😏](https://aur.archlinux.org/packages/gpth-bin)) -### 4. Run `gpth` +### 4. Install Exiftool + - On Windoza: download [exiftool for Windows](https://exiftool.org/) and put `exiftool(-k).exe` in the same folder as `gpth.exe`. Important! Rename it to `exiftool.exe`. + - Alternatively, you can install it with [Chocolatey](https://chocolatey.org/): + ```bash + choco install exiftool + ``` + - On Mac: install with Homebrew: + ```bash + brew install exiftool + ``` + - On Linux: install with your package manager, e.g. on Ubuntu: + ```bash + sudo apt install libimage-exiftool-perl + ``` + +### 5. 
Run `gpth` - On Windoza: just double-click the downloaded `.exe` 🎉 - tell windoza defender that it's safe, and follow prompted instructions 🧾 - On Mac/Linux: open terminal, `cd` to the folder with downloaded executable and run it: ```bash @@ -52,18 +67,6 @@ If you want to run it on Synology, have problems with interactive, or just love ``` **Fun fact:** `gpth` *moves* files around by default - so if anything goes wrong mid-way, re-export the zips again :pray: - -### 5. Most of your photos should have correct original EXIFs (metadata), but if you want, you can insert them everywhere with `exiftool`, so you won't lose their creation time - - Download Phil Harvey's exiftool: https://exiftool.sourceforge.net/ - - Open the cmd/terminal, and run - ```bash - # cd to folder where you downloaded exiftool - cd Downloads - # run it on your output folder: - # (the '-r' means "run on all files/in sub-folders" aka recursively) - # (Make sure you didn't change file modify dates aka didn't do anything with the files after running gpth - exiftool -overwrite_original -r -if 'not defined DateTimeOriginal' -P "-AllDates arguments) async { - final parser = ArgParser() +Future main(final List arguments) async { + final ArgParser parser = ArgParser() ..addFlag('help', abbr: 'h', negatable: false) ..addOption( 'fix', - help: 'Folder with any photos to fix dates. ' - 'This skips whole "GoogleTakeout" procedure.' - 'It is here because gpth has some cool heuristics to determine date ' - 'of a photo, and this can be handy in many situations :)', + help: + 'Folder with any photos to fix dates. \n' + 'This skips whole "GoogleTakeout" procedure. \n' + 'It is here because gpth has some cool heuristics to determine date \n' + 'of a photo, and this can be handy in many situations :)\n', + ) + ..addFlag( + 'interactive', + help: + 'Use interactive mode. 
Type this in case auto-detection fails, \n' + 'or you *really* want to combine advanced options with prompts\n', + ) + ..addFlag( + 'verbose', + abbr: 'v', + help: + 'Shows extensive output for debugging and analysis.\n' + 'This can help with troubleshooting\n', + ) + ..addOption( + 'input', + abbr: 'i', + help: + 'Input folder with *all* takeouts *extracted*.\n' + '(The folder your "Takeout" folder is within)\n', + ) + ..addOption( + 'output', + abbr: 'o', + help: 'Output folder where all photos will land\n', ) - ..addFlag('interactive', - help: 'Use interactive mode. Type this in case auto-detection fails, ' - 'or you *really* want to combine advanced options with prompts') - ..addOption('input', - abbr: 'i', help: 'Input folder with *all* takeouts *extracted*. ') - ..addOption('output', - abbr: 'o', help: 'Output folder where all photos will land') ..addOption( 'albums', help: 'What to do about albums?', @@ -49,42 +116,59 @@ void main(List arguments) async { allowedHelp: interactive.albumOptions, defaultsTo: 'shortcut', ) - ..addOption('divide-to-dates', - help: 'Divide output to folders by nothing/year/month/day', - allowed: ['0', '1', '2', '3'], - defaultsTo: '0',) - ..addFlag('skip-extras', help: 'Skip extra images (like -edited etc)') + ..addOption( + 'divide-to-dates', + help: 'Divide output to folders by nothing/year/month/day\n', + allowed: ['0', '1', '2', '3'], + defaultsTo: '0', + ) + ..addFlag('skip-extras', help: 'Skip extra images (like -edited etc)\n') ..addFlag( 'guess-from-name', - help: 'Try to guess file dates from their names', + help: 'Try to guess file dates from their names\n', defaultsTo: true, ) ..addFlag( 'copy', - help: "Copy files instead of moving them.\n" - "This is usually slower, and uses extra space, " - "but doesn't break your input folder", + help: + 'Copy files instead of moving them.\n' + 'This is usually slower, and uses extra space, \n' + "but doesn't break your input folder\n", ) ..addFlag( - 'modify-json', - help: 'Delete 
the "supplemental-metadata" suffix from ' - '.json files to ensure that script works correctly', + 'modify-json', + help: + 'Delete the "supplemental-metadata" suffix from \n' + '.json files to ensure that script works correctly\n', defaultsTo: true, ) ..addFlag( - 'transform-pixel-mp', - help: 'Transform Pixel .MP or .MV extensions to ".mp4"' + 'transform-pixel-mp', + help: 'Transform Pixel .MP or .MV extensions to ".mp4"\n', + ) + ..addFlag( + 'update-creation-time', + help: + 'Set creation time equal to the last \n' + 'modification date at the end of the program. \n' + 'Only Windows supported\n', + ) + ..addFlag( + 'write-exif', + help: + 'Writes geodata from json files and the extracted DateTime to EXIF. \n' + 'It always writes to original data, even if combined with --copy!', ) ..addFlag( - 'update-creation-time', - help: "Set creation time equal to the last " - 'modification date at the end of the program.' - 'Only Windows supported' + 'limit-filesize', + help: + 'Enforces a maximum size of 64MB per file for systems with low RAM (e.g. NAS).\n ' + 'DateTime will not be extracted from or written to larger files.', ); - final args = {}; + final Map args = {}; try { - final res = parser.parse(arguments); - for (final key in res.options) { + final ArgResults res = parser.parse(arguments); + for (final String key in res.options) { args[key] = res[key]; } interactive.indeed = @@ -105,24 +189,61 @@ void main(List arguments) async { return; } + // here we check if in debug profile or in verbose mode to activate logging. + bool isDebugMode = false; + // ignore: prefer_asserts_with_message + assert(() { + isDebugMode = true; + return true; + }()); + if (args['verbose'] || isDebugMode) { + isVerbose = true; + log('Verbose mode active!'); + } + // set the enforceMaxFileSize variable through argument + if (args['limit-filesize']) { + enforceMaxFileSize = true; + } + + //checking if Exiftool is installed + if (await initExiftool()) { + print( + '[INFO] Exiftool was found! 
Continuing with support for reading and writing EXIF data...', + ); + } else { + print( + '[INFO] Exiftool was not found! Continuing without support for reading and writing EXIF data...', + ); + } + sleep(const Duration(seconds: 3)); + + /// ############################################################## + /// Here the Script asks interactively to fill all arguments + if (interactive.indeed) { // greet user await interactive.greet(); print(''); - // ask for everything // @Deprecated('Interactive unzipping is suspended for now!') // final zips = await interactive.getZips(); + //TODO: Add functionality to unzip files again late Directory inDir; try { inDir = await interactive.getInputDir(); } catch (e) { - print("Hmm, interactive selecting input dir crashed... \n" - "it looks like you're running in headless/on Synology/NAS...\n" - "If so, you have to use cli options - run 'gpth --help' to see them"); + print( + 'Hmm, interactive selecting input dir crashed... \n' + "it looks like you're running in headless/on Synology/NAS...\n" + "If so, you have to use cli options - run 'gpth --help' to see them", + ); exit(69); } print(''); - final out = await interactive.getOutput(); + final Directory out = await interactive.getOutput(); + print(''); + args['write-exif'] = await interactive.askIfWriteExif(); + print(''); + args['limit-filesize'] = await interactive.askIfLimitFileSize(); print(''); args['divide-to-dates'] = await interactive.askDivideDates(); print(''); @@ -132,7 +253,8 @@ void main(List arguments) async { print(''); args['transform-pixel-mp'] = await interactive.askTransformPixelMP(); print(''); - if (Platform.isWindows){ //Only in windows is going to ask + if (Platform.isWindows) { + //Only in windows is going to ask args['update-creation-time'] = await interactive.askChangeCreationTime(); print(''); } @@ -156,32 +278,38 @@ void main(List arguments) async { // elastic list of extractors - can add/remove with cli flags // those are in order of reliability - // if 
one fails, only then later ones will be used - final dateExtractors = [ - jsonExtractor, - exifExtractor, + final List dateExtractors = [ + jsonDateTimeExtractor, + exifDateTimeExtractor, if (args['guess-from-name']) guessExtractor, // this is potentially *dangerous* - see: // https://github.com/TheLastGimbus/GooglePhotosTakeoutHelper/issues/175 - (f) => jsonExtractor(f, tryhard: true), + (final File f) => jsonDateTimeExtractor(f, tryhard: true), ]; - /// ##### Occasional Fix mode ##### + /// ############################################################## + /// ######################## Occasional Fix mode ################# + /// This is a special mode that will go through all files in the given folder + /// and try to set each file to correct lastModified value. + /// This is useful for files that have been moved or copied and have lost their original lastModified value. + /// This is not a part of the main functionality of the script, but it can be accessed by using the --fix flag. + /// It is not recommended to use this mode unless you know what you are doing. if (args['fix'] != null) { // i was thing if not to move this to outside file, but let's leave for now print('========== FIX MODE =========='); print('I will go through all files in folder that you gave me'); print('and try to set each file to correct lastModified value'); - final dir = Directory(args['fix']); + final Directory dir = Directory(args['fix']); if (!await dir.exists()) { error("directory to fix doesn't exist :/"); quit(11); } - var set = 0; - var notSet = 0; - await for (final file in dir.list(recursive: true).wherePhotoVideo()) { + int set = 0; + int notSet = 0; + await for (final File file in dir.list(recursive: true).wherePhotoVideo()) { DateTime? 
date; - for (final extractor in dateExtractors) { + for (final DateTimeExtractor extractor in dateExtractors) { date = await extractor(file); if (date != null) { await file.setLastModified(date); @@ -197,22 +325,22 @@ void main(List arguments) async { return; } - /// ############################### - - /// ##### Parse all options and check if alright ##### + /// ################# Fix mode END ############################### + /// ############################################################## + /// ##### Parse all options and check if alright ################# if (args['input'] == null) { - error("No --input folder specified :/"); + error('No --input folder specified :/'); quit(10); } if (args['output'] == null) { - error("No --output folder specified :/"); + error('No --output folder specified :/'); quit(10); } - final input = Directory(args['input']); - final output = Directory(args['output']); + final Directory input = Directory(args['input']); + final Directory output = Directory(args['output']); if (!await input.exists()) { - error("Input folder does not exist :/"); + error('Input folder does not exist :/'); quit(11); } // all of this logic is to prevent user easily blowing output folder @@ -221,89 +349,83 @@ void main(List arguments) async { !await output .list() // allow input folder to be inside output - .where((e) => p.absolute(e.path) != p.absolute(args['input'])) + .where( + (final FileSystemEntity e) => + p.absolute(e.path) != p.absolute(args['input']), + ) .isEmpty) { if (await interactive.askForCleanOutput()) { - await for (final file in output - .list() + await for (final FileSystemEntity file + in output.list() // delete everything except input folder if there - .where((e) => p.absolute(e.path) != p.absolute(args['input']))) { + .where( + (final FileSystemEntity e) => + p.absolute(e.path) != p.absolute(args['input']), + )) { await file.delete(recursive: true); } } } await output.create(recursive: true); - if (args['modify-json']) { - print('Fixing 
JSON files. Removing suffix (this may take some time)...'); - await renameIncorrectJsonFiles(input); - } - - /// ################################################## + /// ############################################################## + // ##### Really important global variables ####################### - // Okay, time to explain the structure of things here - // We create a list of Media objects, and fill it with everything we find - // in "year folders". Then, we play *mutably* with this list - fill Media's - // with guess DateTime's, remove duplicates from this list. - // - // No shitheads, you did not overhear - we *mutate* the whole list and objects - // inside it. This is not Flutter-ish, but it's not Flutter - it's a small - // simple script, and this the best solution 😎💯 - - // Okay, more details on what will happen here: - // 1. We find *all* media in either year folders or album folders. - // Every single file will be a separate [Media] object. - // If given [Media] was found in album folder, it will have it noted - // 2. We [removeDuplicates] - if two files in same/null album have same hash, - // one will be removed. Note that there are still duplicates from different - // albums left. This is intentional - // 3. We guess their dates. Functions in [dateExtractors] are used in order - // from most to least accurate - // 4. Now we [findAlbums]. This will analyze [Media] that have same hashes, - // and leave just one with all [albums] filled. 
- // final exampleMedia = [ - // Media('lonePhoto.jpg'), - // Media('photo1.jpg, albums=null), - // Media('photo1.jpg, albums={Vacation}), - // Media('photo1.jpg, albums={Friends}), - // ]; - // findAlbums(exampleMedia); - // exampleMedia == [ - // Media('lonePhoto.jpg'), - // Media('photo1.jpg, albums={Vacation, Friends}), - // ]; - // + // Big global media list that we'll work on + final List media = []; - /// Big global media list that we'll work on - final media = []; + // All "year folders" that we found + final List yearFolders = []; - /// All "year folders" that we found - final yearFolders = []; + // All album folders - that is, folders that were aside yearFolders and were + // not matching "Photos from ...." name + final List albumFolders = []; - /// All album folders - that is, folders that were aside yearFolders and were - /// not matching "Photos from ...." name - final albumFolders = []; + /// ############################################################## + /// #### Here we start the actual work ########################### + /// ############################################################## + /// ################# STEP 1 ##################################### + /// ##### Fixing JSON files (if needed) ########################## + final Stopwatch sw1 = Stopwatch() + ..start(); //Creation of our debugging stopwatch for each step. + if (args['modify-json']) { + print( + '[Step 1/8] Fixing JSON files. Removing suffix... (this may take some time)', + ); + await renameIncorrectJsonFiles(input); + } + sw1.stop(); + print( + '[Step 1/8] Step 1 took ${sw1.elapsed.inMinutes} minutes or ${sw1.elapsed.inSeconds} seconds to complete.', + ); + /// ############################################################## + /// ################# STEP 2 ##################################### /// ##### Find literally *all* photos/videos and add to list ##### - - print('Okay, running... 
searching for everything in input folder...'); + final Stopwatch sw2 = Stopwatch() + ..start(); //Creation of our debugging stopwatch for each step. + print('[Step 2/8] Searching for everything in input folder...'); // recursive=true makes it find everything nicely even if user id dumb 😋 - await for (final d in input.list(recursive: true).whereType()) { + await for (final Directory d + in input.list(recursive: true).whereType()) { if (isYearFolder(d)) { yearFolders.add(d); } else if (await isAlbumFolder(d)) { albumFolders.add(d); } } - for (final f in yearFolders) { - await for (final file in f.list().wherePhotoVideo()) { - media.add(Media({null: file})); + for (final Directory f in yearFolders) { + await for (final File file in f.list().wherePhotoVideo()) { + media.add(Media({null: file})); } } - for (final a in albumFolders) { - await for (final file in a.list().wherePhotoVideo()) { - media.add(Media({albumName(a): file})); + for (final Directory a in albumFolders) { + final String cleanedAlbumName = encodeAndRenameAlbumIfEmoji( + a, + ); //Here we check if there are emojis in the album names and if yes, we hex encode them so there are no problems later! + await for (final File file in a.list().wherePhotoVideo()) { + media.add(Media({cleanedAlbumName: file})); } } @@ -316,23 +438,35 @@ void main(List arguments) async { // } quit(13); } + sw2.stop(); + print( + '[Step 2/8] Step 2 took ${sw2.elapsed.inMinutes} minutes or ${sw2.elapsed.inSeconds} seconds to complete.', + ); - /// ################################################## - - /// ##### Find duplicates ##### - - print('Finding duplicates...'); - - final countDuplicates = removeDuplicates(media); + /// ############################################################## + /// ################# STEP 3 ##################################### + /// ##### Finding and removing duplicates ######################## + final Stopwatch sw3 = Stopwatch() + ..start(); //Creation of our debugging stopwatch for each step. 
+ print('[Step 3/8] Finding duplicates... (This may take some time)'); + final int countDuplicates = removeDuplicates(media); - /// ########################### + /// ############################################################## /// ##### Potentially skip extras ##### - if (args['skip-extras']) print('Finding "extra" photos (-edited etc)'); - final countExtras = args['skip-extras'] ? removeExtras(media) : 0; + if (args['skip-extras']) { + print('[Step 3/8] Finding "extra" photos (-edited etc)'); + } + final int countExtras = args['skip-extras'] ? removeExtras(media) : 0; + sw3.stop(); + print( + '[Step 3/8] Step 3 took ${sw3.elapsed.inMinutes} minutes or ${sw3.elapsed.inSeconds} seconds to complete.', + ); - /// ################################### + /// ############################################################## + /// ################# STEP 4 ##################################### + /// ##### Extracting DateTime through Extractors ################# // NOTE FOR MYSELF/whatever: // I placed extracting dates *after* removing duplicates. @@ -351,18 +485,28 @@ void main(List arguments) async { /// ##### Extracting/predicting dates using given extractors ##### - final barExtract = FillingBar( + final Stopwatch sw4 = Stopwatch() + ..start(); //Creation of our debugging stopwatch for each step. + + final FillingBar barExtract = FillingBar( total: media.length, - desc: "Guessing dates from files", + desc: '[Step 4/8] Extracting dates from files', width: barWidth, ); - for (var i = 0; i < media.length; i++) { - var q = 0; - for (final extractor in dateExtractors) { - final date = await extractor(media[i].firstFile); + + // Collect statistics for reporting + final Map extractionStats = {}; + + for (int i = 0; i < media.length; i++) { + int q = 0; + DateTimeExtractionMethod? extractionMethod; + for (final DateTimeExtractor extractor in dateExtractors) { + final DateTime? 
date = await extractor(media[i].firstFile); if (date != null) { media[i].dateTaken = date; media[i].dateTakenAccuracy = q; + extractionMethod = DateTimeExtractionMethod + .values[q]; //This assigns to extractionMethod the enum value corresponding to the current extractor's index. barExtract.increment(); break; } @@ -370,33 +514,114 @@ void main(List arguments) async { q++; } if (media[i].dateTaken == null) { - print("\nCan't get date on ${media[i].firstFile.path}"); + extractionMethod = DateTimeExtractionMethod.none; //For statistics + media[i].dateTimeExtractionMethod = DateTimeExtractionMethod + .none; //Writing in media object that no extraction method worked. :( + log( + "[Step 4/8] Couldn't get date with any extractor on ${media[i].firstFile.path}", + level: 'warning', + forcePrint: true, + ); + } else { + media[i].dateTimeExtractionMethod = + extractionMethod; //Writing used extraction method to this media object. } + extractionStats[extractionMethod!] = + (extractionStats[extractionMethod] ?? 0) + 1; //Update statistics. } print(''); + sw4.stop(); + print( + '[Step 4/8] Step 4 took ${sw4.elapsed.inMinutes} minutes or ${sw4.elapsed.inSeconds} seconds to complete.', + ); + /// ############################################################## + /// ################# STEP 5 ##################################### + /// ##### Json Coordinates and extracted DateTime to EXIF ######## + + // In this part, we will write coordinates and dates to EXIF data of the files. + // This is done after the dates of files have been defined, because here we have to write the files to disk again and before + // the files are moved to the output folder, to avoid shortcuts/symlinks problems. + + final Stopwatch sw5 = Stopwatch() + ..start(); //Creation of our debugging stopwatch for each step. 
+ + int exifccounter = 0; //Counter for coordinates set in EXIF + int exifdtcounter = 0; //Counter for DateTime set in EXIF + if (args['write-exif']) { + final FillingBar barJsonToExifExtractor = FillingBar( + total: media.length, + desc: '[Step 5/8] Getting EXIF data from JSONs and applying it to media', + width: barWidth, + ); + + for (int i = 0; i < media.length; i++) { + final File currentFile = media[i].firstFile; + + final DMSCoordinates? coords = await jsonCoordinatesExtractor( + currentFile, + ); + if (coords != null) { + //If coordinates were found in json, write them to exif + if (await writeGpsToExif(coords, currentFile)) { + exifccounter++; + } + } + if (media[i].dateTimeExtractionMethod != + DateTimeExtractionMethod + .exif && //Already got it through ExifExtractor + media[i].dateTimeExtractionMethod != DateTimeExtractionMethod.none) { + //Has no dateTime at all, so nothing to write. + //If date was found before through any extractor, except through exif extractor (cause then it's already in exif, duh!) write it to exif + if (await writeDateTimeToExif(media[i].dateTaken!, currentFile)) { + exifdtcounter++; + } + } + + barJsonToExifExtractor.increment(); + } + } else { + print('[Step 5/8] Skipping writing data to EXIF.'); + } + sw5.stop(); + print( + '[Step 5/8] Step 5 took ${sw5.elapsed.inMinutes} minutes or ${sw5.elapsed.inSeconds} seconds to complete.', + ); - /// ##### Find albums ##### + /// ############################################################## + /// ################# STEP 6 ##################################### + /// ##### Find albums and rename .MP and .MV extensions ########## // I'm placing merging duplicate Media into albums after guessing date for // each one individually, because they are in different folder. // I wish that, thanks to this, we may find some jsons in albums that would // be broken in shithole of big-ass year folders + final Stopwatch sw6 = Stopwatch() + ..start(); //Creation of our debugging stopwatch for each step. 
+ final FillingBar barFindAlbums = FillingBar( + total: outputFileCount(media, args['albums']), + desc: '[Step 6/8] Finding albums', + width: barWidth, + ); + findAlbums(media, barFindAlbums); - print('Finding albums (this may take some time, dont worry :) ...'); - findAlbums(media); + /// ############################################################## // Change Pixel Motion Photos extension to .mp4 using a list of Medias. // This is done after the dates of files have been defined, and before // the files are moved to the output folder, to avoid shortcuts/symlinks problems if (args['transform-pixel-mp']) { - print('Changing .MP or .MV extensions to .mp4 (this may take some time) ...'); - await changeMPExtensions(media, ".mp4"); + print( + '[Step 6/8] Changing .MP or .MV extensions to .mp4... (this may take some time)', + ); + await changeMPExtensions(media, '.mp4'); + } else { + print('[Step 6/8] Skipped changing .MP or .MV extensions to .mp4'); } print(''); - /// ####################### + /// ############################################################## // https://github.com/TheLastGimbus/GooglePhotosTakeoutHelper/issues/261 // If a media is not in a year album (there is no null key) it establishes @@ -404,35 +629,43 @@ void main(List arguments) async { // This will move the album file to ALL_PHOTOS and create the shortcut to // the output album folder (if shortcut option is selected). // (The inverse will happen if the inverse-shortcut option is selected). - // If album mode is set to *duplicate-copy* it will not proceed + // If album mode is set to *duplicate-copy* it will not proceed // to avoid moving the same file twice (which would throw an exception) - if (args['albums'] != 'duplicate-copy'){ - for (final m in media){ - final fileWithKey1 = m.files[null]; + if (args['albums'] != 'duplicate-copy') { + for (final Media m in media) { + final File? 
fileWithKey1 = m.files[null]; if (fileWithKey1 == null) { m.files[null] = m.files.values.first; } } } - /// ####################### - /// ##### Copy/move files to actual output folder ##### + sw6.stop(); + print( + '[Step 6/8] Step 6 took ${sw6.elapsed.inMinutes} minutes or ${sw6.elapsed.inSeconds} seconds to complete.', + ); - final barCopy = FillingBar( + /// ############################################################## + /// ################# STEP 7 ##################################### + /// ##### Copy/move files to actual output folder ################ + final Stopwatch sw7 = Stopwatch() + ..start(); //Creation of our debugging stopwatch for each step. + final FillingBar barCopy = FillingBar( total: outputFileCount(media, args['albums']), - desc: "${args['copy'] ? 'Copying' : 'Moving'} photos to output folder", + desc: + "[Step 7/8] ${args['copy'] ? 'Copying' : 'Moving'} media to output folder", width: barWidth, ); await moveFiles( media, output, copy: args['copy'], - divideToDates: args['divide-to-dates'] is num - ? args['divide-to-dates'] - : num.parse(args['divide-to-dates']), + divideToDates: args['divide-to-dates'] is num + ? 
args['divide-to-dates'] + : num.parse(args['divide-to-dates']), albumBehavior: args['albums'], - ).listen((_) => barCopy.increment()).asFuture(); - print(''); + ).listen((final _) => barCopy.increment()).asFuture(); + print('[Step 7/8] Done moving/copying media!'); // @Deprecated('Interactive unzipping is suspended for now!') // // remove unzipped folder if was created @@ -440,30 +673,96 @@ void main(List arguments) async { // print('Removing unzipped folder...'); // await input.delete(recursive: true); // } + sw7.stop(); + print( + '[Step 7/8] Step 7 took ${sw7.elapsed.inMinutes} minutes or ${sw7.elapsed.inSeconds} seconds to complete.', + ); - /// ################################################### + /// ############################################################## + /// ################# STEP 8 ##################################### + /// ##### Update creation time (Windows only) #################### + final Stopwatch sw8 = Stopwatch() + ..start(); //Creation of our debugging stopwatch for each step. + int updatedCreationTimeCounter = 0; + if (args['update-creation-time']) { + print( + '[Step 8/8] Updating creation time of media files to match their modified time in output folder ...', + ); + updatedCreationTimeCounter = await updateCreationTimeRecursively(output); + print(''); + print('=' * barWidth); + } else { + print('[Step 8/8] Skipping: Updating creation time (Windows only)'); + } + print(''); + sw8.stop(); + log( + '[Step 8/8] Step 8 took ${sw8.elapsed.inMinutes} minutes or ${sw8.elapsed.inSeconds} seconds to complete.', + ); + + // After all processing steps, before program exit we encode the emojis in album paths again. 
+ final outputDirs = output.listSync().whereType(); + final FillingBar barEmojiEncode = FillingBar( + total: outputFileCount(media, args['albums']), + desc: '[Step 8/8] Looking for folders with emojis and renaming them back.', + width: barWidth, + ); + for (final dir in outputDirs) { + final String decodedPath = decodeAndRestoreAlbumEmoji(dir.path); + barEmojiEncode.increment(); + if (decodedPath != dir.path) { + dir.renameSync(decodedPath); + } + } + + /// ############################################################## + /// ################# END ######################################## + /// Now just the last message of the program, just displaying some stats so you have an overview of what happened. + /// Also helps with testing because you can run a diverse and large dataset with the same options through a new version and expect the same (or better) stats. + /// If they got worse, you did smth wrong. print('=' * barWidth); print('DONE! FREEEEEDOOOOM!!!'); - if (countDuplicates > 0) print('Skipped $countDuplicates duplicates'); - if (args['skip-extras']) print('Skipped $countExtras extras'); - final countPoop = media.where((e) => e.dateTaken == null).length; - if (countPoop > 0) { - print("Couldn't find date for $countPoop photos/videos :/"); + print('Some statistics for the achievement hunters:'); + //This check will print an error if no stats are available. + if (countDuplicates == 0 && + updatedCreationTimeCounter == 0 && + exifccounter == 0 && + exifdtcounter == 0 && + !args['skip-extras']) { + print('Error! 
No stats available (This is weird!)'); } - print(''); - if (args['update-creation-time']) { - print('Updating creation time of files to match their modified time in output folder ...'); - await updateCreationTimeRecursively(output); - print(''); - print('=' * barWidth); + if (updatedCreationTimeCounter > 0) { + print('$updatedCreationTimeCounter files had their CreationDate updated'); + } + if (countDuplicates > 0) { + print('$countDuplicates duplicates were found and skipped'); } + if (exifccounter > 0) { + print( + '$exifccounter files got their coordinates set in EXIF data (from json)', + ); + } + if (exifdtcounter > 0) { + print('$exifdtcounter got their DateTime set in EXIF data'); + } + if (args['skip-extras']) print('$countExtras extras were skipped'); + + // Print datetime extraction method statistics + print('DateTime extraction method statistics:'); + for (final entry in extractionStats.entries) { + final String extractiopnMethodString = entry.key.name.toString(); + print('$extractiopnMethodString: ${entry.value} files'); + } + print( + 'In total the script took ${(sw1.elapsed + sw2.elapsed + sw3.elapsed + sw4.elapsed + sw5.elapsed + sw6.elapsed + sw7.elapsed + sw8.elapsed).inMinutes} minutes to complete', + ); print( "Last thing - I've spent *a ton* of time on this script - \n" - "if I saved your time and you want to say thanks, you can send me a tip:\n" - "https://www.paypal.me/TheLastGimbus\n" - "https://ko-fi.com/thelastgimbus\n" - "Thank you ❤", + 'if I saved your time and you want to say thanks, you can send me a tip:\n' + 'https://www.paypal.me/TheLastGimbus\n' + 'https://ko-fi.com/thelastgimbus\n' + 'Thank you ❤', ); print('=' * barWidth); quit(0); diff --git a/lib/date_extractor.dart b/lib/date_extractors/date_extractor.dart similarity index 52% rename from lib/date_extractor.dart rename to lib/date_extractors/date_extractor.dart index c75368c9f..46055c0b7 100644 --- a/lib/date_extractor.dart +++ b/lib/date_extractors/date_extractor.dart @@ -1,8 
+1,8 @@ import 'dart:io'; -export 'date_extractors/exif_extractor.dart'; -export 'date_extractors/guess_extractor.dart'; -export 'date_extractors/json_extractor.dart'; +export 'exif_extractor.dart'; +export 'guess_extractor.dart'; +export 'json_extractor.dart'; /// Function that can take a file and potentially extract DateTime of it typedef DateTimeExtractor = Future Function(File); diff --git a/lib/date_extractors/exif_extractor.dart b/lib/date_extractors/exif_extractor.dart index 44ccda8b5..c1010f632 100644 --- a/lib/date_extractors/exif_extractor.dart +++ b/lib/date_extractors/exif_extractor.dart @@ -1,22 +1,166 @@ -import 'dart:io'; -import 'dart:math'; +// ignore_for_file: non_constant_identifier_names -import 'package:exif/exif.dart'; -import 'package:gpth/utils.dart'; +import 'dart:io'; +import 'dart:math' as math; +import 'package:exif_reader/exif_reader.dart'; import 'package:mime/mime.dart'; +import '../exiftoolInterface.dart'; +import '../utils.dart'; /// DateTime from exif data *potentially* hidden within a [file] /// /// You can try this with *any* file, it either works or not 🤷 -Future exifExtractor(File file) async { - // if file is not image or >32MiB - DO NOT crash :D - if (!(lookupMimeType(file.path)?.startsWith('image/') ?? false) || - await file.length() > maxFileSize) { +Future exifDateTimeExtractor(final File file) async { + //If file is >maxFileSize - return null. https://github.com/brendan-duncan/image/issues/457#issue-1549020643 + if (await file.length() > maxFileSize && enforceMaxFileSize) { + log( + '[Step 4/8] The file is larger than the maximum supported file size of ${maxFileSize.toString()} bytes. File: ${file.path}', + level: 'error', + ); + return null; + } + + //Let me give a high level overview of what is happening here: + //1. Try to get mimetype with lookupMimeType(file.path) by + // 1. 
checking if the magic number (referring to https://en.wikipedia.org/wiki/List_of_file_signatures and https://github.com/dart-lang/tools/blob/main/pkgs/mime/lib/src/magic_number.dart) is well known. We do this because google takeout sometimes changed the file extension (e.g. mimeType is HEIC but it exports a .png) + // Now we do or don't have a mimeType. We continue with: + // 1. If the mimeType is supported by exif_reader, we use the exif_reader library to read exif. If that fails, exiftool is still used as a fallback, cause it's worth a try. + // 2. If the mimeType is not supported by exif_reader or null, we try exiftool and don't even attempt exif_reader, because it would be pointless. + + //We only read the first 4096 bytes as that's sufficient for MIME type detection + final List headerBytes = await File(file.path).openRead(0, 4096).first; + + //Getting mimeType. + final String? mimeType = lookupMimeType(file.path, headerBytes: headerBytes); + //lookupMimeType might return null e.g. for raw files. Even if Exiftool would be installed, using it to read the mimeType just to then decide if we use exiftool to read exif data or not + //would completely defeat the purpose and actually compromise speed as we'd have to do 2 reads in some situations. In others we would still just do one read but have the additional native read. + + //We use the native way for all supported mimeTypes of exif_reader for speed and performance. We trust the list at https://pub.dev/packages/exif_reader + //We also know that the mimeTypes for RAW can never happen because the lookupMimeType() does not support them. However, leaving them in here for now cause they don't hurt. 
+ final supportedNativeMimeTypes = { + 'image/jpeg', + 'image/tiff', + 'image/heic', + 'image/png', + 'image/webp', + 'image/jxl', + 'image/x-sony-arw', + 'image/x-canon-cr2', + 'image/x-canon-cr3', + 'image/x-canon-crw', + 'image/x-nikon-nef', + 'image/x-nikon-nrw', + 'image/x-fuji-raf', + 'image/x-adobe-dng', + 'image/x-raw', + 'image/tiff-fx', + 'image/x-portable-anymap', + }; + DateTime? + result; //this variable should be filled. That's the goal from here on. + if (supportedNativeMimeTypes.contains(mimeType)) { + result = await _nativeExif_readerExtractor(file); + if (result != null) { + return result; + } else { + //If we end up here, we have a mimeType which should be supported by exif_reader, but the read failed regardless. + //Most probably the file does not contain any DateTime in exif. So we return null. + return null; + } + } + //At this point either we didn't do anything because the mimeType is unknown (null) or not supported by the native method. + //Anyway, there is nothing else to do than to try it with exiftool now. exiftool is the last resort *sing* in any case due to performance. + if ((mimeType == null || !supportedNativeMimeTypes.contains(mimeType)) && + exifToolInstalled) { + result = await _exifToolExtractor(file); + if (result != null) { + return result; //We did get a DateTime from Exiftool and return it. It's being logged in _exifToolExtractor(). We are happy. + } + } + + //This logic below is only to give a tailored error message because if you get here, sorry, then result stayed empty and we just don't support the file type. + if (mimeType == 'image/jpeg') { + log( + '${file.path} has a mimeType of $mimeType. However, could not read it with exif_reader. This means, the file is probably corrupt', + level: 'warning', + ); + } else if (exifToolInstalled) { + log( + "$mimeType is either a weird mime type! 
Please create an issue if you get this error message, as we currently can't handle it.", + level: 'error', + ); + } else { + log( + 'Reading exif from ${file.path} with mimeType $mimeType skipped. Reading from this kind of file is probably only supported with exiftool.', + level: 'warning', + ); + } + return result; //If we can't get mimeType, result will be null as there is probably no point in moving forward to read other metadata. +} + +///Extracts DateTime from File through ExifTool library +Future _exifToolExtractor(final File file) async { + try { + final tags = await exiftool!.readExifBatch(file, [ + 'DateTimeOriginal', + 'MediaCreateDate', + 'CreationDate', + 'TrackCreateDate', + 'CreateDate', + 'DateTimeDigitized', + 'GPSDateStamp', + 'DateTime', + ]); + //The order is in order of reliability and important + String? datetime = + tags['DateTimeOriginal'] ?? //EXIF + tags['MediaCreateDate'] ?? //QuickTime/XMP + tags['CreationDate'] ?? //XMP + tags['TrackCreateDate']; //?? //QuickTime + //tags['CreateDate'] ?? // can be overwritten by editing software + //tags['DateTimeDigitized'] ?? //may reflect scanning or import time + //tags['DateTime']; //generic and editable + if (datetime == null) { + log( + "Exiftool was not able to extract an acceptable DateTime for ${file.path}.\n\tThose Tags are accepted: 'DateTimeOriginal', 'MediaCreateDate', 'CreationDate','TrackCreateDate','. The file has those Tags: ${tags.toString()}", + level: 'warning', + ); + return null; + } + // Normalize separators and parse + datetime = datetime + .replaceAll('-', ':') + .replaceAll('/', ':') + .replaceAll('.', ':') + .replaceAll('\\', ':') + .replaceAll(': ', ':0') + .substring(0, math.min(datetime.length, 19)) + .replaceFirst(':', '-') + .replaceFirst(':', '-'); + + final DateTime? 
parsedDateTime = DateTime.tryParse(datetime); + + if (parsedDateTime == DateTime.parse('2036-01-01T23:59:59.000000Z')) { + //we keep this for safety for this edge case: https://ffmpeg.org/pipermail/ffmpeg-user/2023-April/056265.html + log( + '[Step 4/8] Extracted DateTime before January 1st 1970 from EXIF for ${file.path}. Therefore the DateTime from other extractors is not being changed.', + level: 'warning', + ); + return null; + } else { + log( + '[Step 4/8] Sucessfully extracted DateTime from EXIF through Exiftool for ${file.path}', + ); + return parsedDateTime; + } + } catch (e) { + log('[Step 4/8] exiftool read failed: ${e.toString()}', level: 'error'); return null; } - // NOTE: reading whole file may seem slower than using readExifFromFile - // but while testing it was actually 2x faster on my pc 0_o - // i have nvme + btrfs, but still, will leave as is +} + +///Extracts DateTime from File through Exif_reader library +Future _nativeExif_readerExtractor(final File file) async { final bytes = await file.readAsBytes(); // this returns empty {} if file doesn't have exif so don't worry final tags = await readExifFromBytes(bytes); @@ -25,17 +169,31 @@ Future exifExtractor(File file) async { datetime ??= tags['Image DateTime']?.printable; datetime ??= tags['EXIF DateTimeOriginal']?.printable; datetime ??= tags['EXIF DateTimeDigitized']?.printable; - if (datetime == null) return null; - // replace all shitty separators that are sometimes met + if (datetime == null || datetime.isEmpty) return null; + // Normalize separators and parse datetime = datetime .replaceAll('-', ':') .replaceAll('/', ':') .replaceAll('.', ':') .replaceAll('\\', ':') .replaceAll(': ', ':0') - .substring(0, min(datetime.length, 19)) - .replaceFirst(':', '-') // replace two : year/month to comply with iso + .substring(0, math.min(datetime.length, 19)) + .replaceFirst(':', '-') .replaceFirst(':', '-'); - // now date is like: "1999-06-23 23:55" - return DateTime.tryParse(datetime); + + final 
DateTime? parsedDateTime = DateTime.tryParse(datetime); + + if (parsedDateTime == DateTime.parse('2036-01-01T23:59:59.000000Z')) { + //we keep this for safety for this edge case: https://ffmpeg.org/pipermail/ffmpeg-user/2023-April/056265.html + log( + '[Step 4/8] Extracted DateTime before January 1st 1970 from EXIF for ${file.path}. Therefore the DateTime from other extractors is not being changed.', + level: 'warning', + ); + return null; + } else { + log( + '[Step 4/8] Sucessfully extracted DateTime from EXIF through native library for ${file.path}', + ); + return parsedDateTime; + } } diff --git a/lib/date_extractors/guess_extractor.dart b/lib/date_extractors/guess_extractor.dart index 5522d9391..00415063f 100644 --- a/lib/date_extractors/guess_extractor.dart +++ b/lib/date_extractors/guess_extractor.dart @@ -4,60 +4,70 @@ import 'package:convert/convert.dart'; import 'package:path/path.dart' as p; // These are thanks to @hheimbuerger <3 -final _commonDatetimePatterns = [ +final List> _commonDatetimePatterns = >[ // example: Screenshot_20190919-053857_Camera-edited.jpg - [ + [ RegExp( - r'(?(20|19|18)\d{2}(01|02|03|04|05|06|07|08|09|10|11|12)[0-3]\d-\d{6})'), - 'YYYYMMDD-hhmmss' + r'(?(20|19|18)\d{2}(01|02|03|04|05|06|07|08|09|10|11|12)[0-3]\d-\d{6})', + ), + 'YYYYMMDD-hhmmss', ], // example: IMG_20190509_154733-edited.jpg, MVIMG_20190215_193501.MP4, IMG_20190221_112112042_BURST000_COVER_TOP.MP4 - [ + [ RegExp( - r'(?(20|19|18)\d{2}(01|02|03|04|05|06|07|08|09|10|11|12)[0-3]\d_\d{6})'), + r'(?(20|19|18)\d{2}(01|02|03|04|05|06|07|08|09|10|11|12)[0-3]\d_\d{6})', + ), 'YYYYMMDD_hhmmss', ], // example: Screenshot_2019-04-16-11-19-37-232_com.google.a.jpg - [ + [ RegExp( - r'(?(20|19|18)\d{2}-(01|02|03|04|05|06|07|08|09|10|11|12)-[0-3]\d-\d{2}-\d{2}-\d{2})'), + r'(?(20|19|18)\d{2}-(01|02|03|04|05|06|07|08|09|10|11|12)-[0-3]\d-\d{2}-\d{2}-\d{2})', + ), 'YYYY-MM-DD-hh-mm-ss', ], // example: signal-2020-10-26-163832.jpg - [ + [ RegExp( - 
r'(?(20|19|18)\d{2}-(01|02|03|04|05|06|07|08|09|10|11|12)-[0-3]\d-\d{6})'), + r'(?(20|19|18)\d{2}-(01|02|03|04|05|06|07|08|09|10|11|12)-[0-3]\d-\d{6})', + ), 'YYYY-MM-DD-hhmmss', ], // Those two are thanks to @matt-boris <3 // https://github.com/TheLastGimbus/GooglePhotosTakeoutHelper/commit/e0d9ee3e71def69d74eba7cf5ec204672924726d // example: 00004XTR_00004_BURST20190216172030.jpg, 201801261147521000.jpg, IMG_1_BURST20160520195318.jpg - [ + [ RegExp( - r'(?(20|19|18)\d{2}(01|02|03|04|05|06|07|08|09|10|11|12)[0-3]\d{7})'), + r'(?(20|19|18)\d{2}(01|02|03|04|05|06|07|08|09|10|11|12)[0-3]\d{7})', + ), 'YYYYMMDDhhmmss', ], // example: 2016_01_30_11_49_15.mp4 - [ + [ RegExp( - r'(?(20|19|18)\d{2}_(01|02|03|04|05|06|07|08|09|10|11|12)_[0-3]\d_\d{2}_\d{2}_\d{2})'), + r'(?(20|19|18)\d{2}_(01|02|03|04|05|06|07|08|09|10|11|12)_[0-3]\d_\d{2}_\d{2}_\d{2})', + ), 'YYYY_MM_DD_hh_mm_ss', ], ]; /// Guesses DateTime from [file]s name /// - for example Screenshot_20190919-053857.jpg - we can guess this 😎 -Future guessExtractor(File file) async { - for (final pat in _commonDatetimePatterns) { +Future guessExtractor(final File file) async { + for (final List pat in _commonDatetimePatterns) { // extract date str with regex - final match = (pat.first as RegExp).firstMatch(p.basename(file.path)); - final dateStr = match?.group(0); + final RegExpMatch? match = (pat.first as RegExp).firstMatch( + p.basename(file.path), + ); + final String? dateStr = match?.group(0); if (dateStr == null) continue; // parse it with given pattern DateTime? date; try { - date = FixedDateTimeFormatter(pat.last as String, isUtc: false) - .tryDecode(dateStr); + date = FixedDateTimeFormatter( + pat.last as String, + isUtc: false, + ).tryDecode(dateStr); } on RangeError catch (_) {} if (date == null) continue; return date; // success! 
diff --git a/lib/date_extractors/json_extractor.dart b/lib/date_extractors/json_extractor.dart index e791e3ff8..0e8885ae6 100644 --- a/lib/date_extractors/json_extractor.dart +++ b/lib/date_extractors/json_extractor.dart @@ -1,19 +1,23 @@ import 'dart:convert'; import 'dart:io'; - import 'package:collection/collection.dart'; -import 'package:gpth/extras.dart' as extras; -import 'package:gpth/utils.dart'; +import 'package:coordinate_converter/coordinate_converter.dart'; import 'package:path/path.dart' as p; import 'package:unorm_dart/unorm_dart.dart' as unorm; +import '../extras.dart' as extras; +import '../extras.dart' show extraFormats; +import '../utils.dart'; -/// Finds corresponding json file with info and gets 'photoTakenTime' from it -Future jsonExtractor(File file, {bool tryhard = false}) async { - final jsonFile = await _jsonForFile(file, tryhard: tryhard); +/// Finds corresponding json file with info from media file and gets 'photoTakenTime' from it +Future jsonDateTimeExtractor( + final File file, { + final bool tryhard = false, +}) async { + final File? 
jsonFile = await _jsonForFile(file, tryhard: tryhard); if (jsonFile == null) return null; try { - final data = jsonDecode(await jsonFile.readAsString()); - final epoch = int.parse(data['photoTakenTime']['timestamp'].toString()); + final dynamic data = jsonDecode(await jsonFile.readAsString()); + final int epoch = int.parse(data['photoTakenTime']['timestamp'].toString()); return DateTime.fromMillisecondsSinceEpoch(epoch * 1000); } on FormatException catch (_) { // this is when json is bad @@ -29,13 +33,17 @@ Future jsonExtractor(File file, {bool tryhard = false}) async { } } -Future _jsonForFile(File file, {required bool tryhard}) async { - final dir = Directory(p.dirname(file.path)); - var name = p.basename(file.path); +///Tries to find json for media file +Future _jsonForFile( + final File file, { + required final bool tryhard, +}) async { + final Directory dir = Directory(p.dirname(file.path)); + final String name = p.basename(file.path); // will try all methods to strip name to find json - for (final method in [ + for (final String Function(String s) method in [ // none - (String s) => s, + (final String s) => s, _shortenName, // test: combining this with _shortenName?? which way around? _bracketSwap, @@ -44,12 +52,12 @@ Future _jsonForFile(File file, {required bool tryhard}) async { // use those two only with tryhard // look at https://github.com/TheLastGimbus/GooglePhotosTakeoutHelper/issues/175 // thanks @denouche for reporting this! 
- if (tryhard) ...[ + if (tryhard) ...[ _removeExtraRegex, _removeDigit, // most files with '(digit)' have jsons, so it's last - ] + ], ]) { - final jsonFile = File(p.join(dir.path, '${method(name)}.json')); + final File jsonFile = File(p.join(dir.path, '${method(name)}.json')); if (await jsonFile.exists()) return jsonFile; } return null; @@ -59,24 +67,24 @@ Future _jsonForFile(File file, {required bool tryhard}) async { // (for example, "20030616" (jpg but without ext)) // it's json won't have the extension ("20030616.json"), but the image // itself (after google proccessed it) - will ("20030616.jpg" tadam) -String _noExtension(String filename) => +String _noExtension(final String filename) => p.basenameWithoutExtension(File(filename).path); -String _removeDigit(String filename) => +String _removeDigit(final String filename) => filename.replaceAll(RegExp(r'\(\d\)\.'), '.'); /// This removes only strings defined in [extraFormats] list from `extras.dart`, /// so it's pretty safe -String _removeExtra(String filename) { +String _removeExtra(final String filename) { // MacOS uses NFD that doesn't work with our accents 🙃🙃 // https://github.com/TheLastGimbus/GooglePhotosTakeoutHelper/pull/247 - filename = unorm.nfc(filename); - for (final extra in extras.extraFormats) { - if (filename.contains(extra)) { - return filename.replaceLast(extra, ''); + final String normalizedFilename = unorm.nfc(filename); + for (final String extra in extras.extraFormats) { + if (normalizedFilename.contains(extra)) { + return normalizedFilename.replaceLast(extra, ''); } } - return filename; + return normalizedFilename; } /// this will match: @@ -88,22 +96,26 @@ String _removeExtra(String filename) { /// Result: something.jpg /// ``` /// so it's *kinda* safe -String _removeExtraRegex(String filename) { +String _removeExtraRegex(final String filename) { // MacOS uses NFD that doesn't work with our accents 🙃🙃 // https://github.com/TheLastGimbus/GooglePhotosTakeoutHelper/pull/247 - filename = 
unorm.nfc(filename); + final String normalizedFilename = unorm.nfc(filename); // include all characters, also with accents - final matches = RegExp(r'(?-[A-Za-zÀ-ÖØ-öø-ÿ]+(\(\d\))?)\.\w+$') - .allMatches(filename); + final Iterable matches = RegExp( + r'(?-[A-Za-zÀ-ÖØ-öø-ÿ]+(\(\d\))?)\.\w+$', + ).allMatches(normalizedFilename); if (matches.length == 1) { - return filename.replaceAll(matches.first.namedGroup('extra')!, ''); + return normalizedFilename.replaceAll( + matches.first.namedGroup('extra')!, + '', + ); } - return filename; + return normalizedFilename; } // this resolves years of bugs and head-scratches 😆 // f.e: https://github.com/TheLastGimbus/GooglePhotosTakeoutHelper/issues/8#issuecomment-736539592 -String _shortenName(String filename) => '$filename.json'.length > 51 +String _shortenName(final String filename) => '$filename.json'.length > 51 ? filename.substring(0, 51 - '.json'.length) : filename; @@ -118,13 +130,51 @@ String _shortenName(String filename) => '$filename.json'.length > 51 /// This function does just that, and by my current intuition tells me it's /// pretty safe to use so I'll put it without the tryHard flag // note: would be nice if we had some tougher tests for this -String _bracketSwap(String filename) { +String _bracketSwap(final String filename) { // this is with the dot - more probable that it's just before the extension - final match = RegExp(r'\(\d+\)\.').allMatches(filename).lastOrNull; + final RegExpMatch? match = RegExp( + r'\(\d+\)\.', + ).allMatches(filename).lastOrNull; if (match == null) return filename; - final bracket = match.group(0)!.replaceAll('.', ''); // remove dot + final String bracket = match.group(0)!.replaceAll('.', ''); // remove dot // remove only last to avoid errors with filenames like: // 'image(3).(2)(3).jpg' <- "(3)." 
repeats twice - final withoutBracket = filename.replaceLast(bracket, ''); + final String withoutBracket = filename.replaceLast(bracket, ''); return '$withoutBracket$bracket'; } + +/// This is to get coordinates from the json file. Expects media file and finds json. +Future jsonCoordinatesExtractor( + final File file, { + final bool tryhard = false, +}) async { + final File? jsonFile = await _jsonForFile(file, tryhard: tryhard); + if (jsonFile == null) return null; + try { + final Map data = jsonDecode(await jsonFile.readAsString()); + final double lat = data['geoData']['latitude'] as double; + final double long = data['geoData']['longitude'] as double; + //var alt = double.tryParse(data['geoData']['altitude']); //Info: Altitude is not used. + if (lat == 0.0 || long == 0.0) { + return null; + } else { + final DDCoordinates ddcoords = DDCoordinates( + latitude: lat, + longitude: long, + ); + final DMSCoordinates dmscoords = DMSCoordinates.fromDD(ddcoords); + return dmscoords; + } + } on FormatException catch (_) { + // this is when json is bad + return null; + } on FileSystemException catch (_) { + // this happens for issue #143 + // "Failed to decode data using encoding 'utf-8'" + // maybe this will self-fix when dart itself support more encodings + return null; + } on NoSuchMethodError catch (_) { + // this is when tags like photoTakenTime aren't there + return null; + } +} diff --git a/lib/emojicleaner.dart b/lib/emojicleaner.dart new file mode 100644 index 000000000..8e9366ab1 --- /dev/null +++ b/lib/emojicleaner.dart @@ -0,0 +1,90 @@ +import 'dart:io'; +import 'package:path/path.dart' as p; + +import 'utils.dart'; + +/// Internal helper function to check if a single text component contains emoji characters. 
+/// +/// [text] The text string to check for emoji characters +/// Returns true if the text contains any emoji (Unicode surrogate pairs) +bool _hasUnicodeSurrogatesInText(final String text) { + for (int i = 0; i < text.length; i++) { + final int codeUnit = text.codeUnitAt(i); + if (codeUnit >= 0xD800 && codeUnit <= 0xDBFF) { + if (i + 1 < text.length) { + final int nextCodeUnit = text.codeUnitAt(i + 1); + if (nextCodeUnit >= 0xDC00 && nextCodeUnit <= 0xDFFF) { + return true; + } + } + } + } + return false; +} + +/// Encodes emoji characters in the album (parent) directory name to hex representation and renames the folder on disk if needed. +/// +/// [albumDir] The Directory whose name may contain emoji characters. +/// Returns the new (possibly hex-encoded) directory name as a String. +String encodeAndRenameAlbumIfEmoji(final Directory albumDir) { + final String originalName = p.basename(albumDir.path); + // Return early if no emoji in the album directory name + if (!_hasUnicodeSurrogatesInText(originalName)) { + return originalName; + } + log('Found an emoji in ${albumDir.path}. 
Encoding it to hex.'); + final String parentPath = albumDir.parent.path; + final StringBuffer cleanName = StringBuffer(); + for (int i = 0; i < originalName.length; i++) { + final int codeUnit = originalName.codeUnitAt(i); + if (codeUnit >= 0xD800 && + codeUnit <= 0xDBFF && + i + 1 < originalName.length) { + final int nextCodeUnit = originalName.codeUnitAt(i + 1); + if (nextCodeUnit >= 0xDC00 && nextCodeUnit <= 0xDFFF) { + final int emoji = + ((codeUnit - 0xD800) << 10) + (nextCodeUnit - 0xDC00) + 0x10000; + cleanName.write('_0x${emoji.toRadixString(16)}_'); + i++; // Skip low surrogate + continue; + } + } + cleanName.write(String.fromCharCode(codeUnit)); + } + final String newPath = p.join(parentPath, cleanName.toString()); + if (albumDir.path != newPath) { + albumDir.renameSync(newPath); + } + return cleanName.toString(); +} + +/// Decodes hex-encoded emoji sequences (e.g., _0x1f60a_) in the last segment of the path back to emoji characters, only if such encoding is present. +/// +/// [encodedPath] The path with hex-encoded emojis in the last segment. +/// Returns the path with emojis restored in the last segment, or the original path if no encoding is present. +String decodeAndRestoreAlbumEmoji(final String encodedPath) { + final String separator = Platform.pathSeparator; + final List parts = encodedPath.split(separator); + if (parts.isEmpty) return encodedPath; + // Only decode if hex-encoded emoji is present in the last segment + if (RegExp(r'_0x[0-9a-fA-F]+_').hasMatch(parts.last)) { + log( + 'Found a hex encoded emoji in $encodedPath. Decoding it back to emoji.', + ); + parts[parts.length - 1] = _decodeEmojiComponent(parts.last); + return parts.join(separator); + } + return encodedPath; +} + +/// Internal helper function to decode hex-encoded emoji characters back to UTF-8. 
+/// +/// [component] A string potentially containing hex-encoded emojis (e.g., "_0x1f60a_") +/// Returns the string with all hex-encoded emojis converted back to UTF-8 characters +String _decodeEmojiComponent(final String component) { + final RegExp emojiPattern = RegExp(r'_0x([0-9a-fA-F]+)_'); + return component.replaceAllMapped(emojiPattern, (final Match match) { + final int codePoint = int.parse(match.group(1)!, radix: 16); + return String.fromCharCode(codePoint); + }); +} diff --git a/lib/exif_writer.dart b/lib/exif_writer.dart new file mode 100644 index 000000000..d13531d87 --- /dev/null +++ b/lib/exif_writer.dart @@ -0,0 +1,184 @@ +import 'dart:io'; +import 'dart:typed_data'; +import 'package:coordinate_converter/coordinate_converter.dart'; +import 'package:image/image.dart'; +import 'package:intl/intl.dart'; +import 'package:mime/mime.dart'; +import 'date_extractors/exif_extractor.dart'; +import 'exiftoolInterface.dart'; +import 'utils.dart'; + +Future writeDateTimeToExif( + final DateTime dateTime, + final File file, +) async { + //Check if the file already has a dateTime in its EXIF data. If function returns a DateTime, there is no need to write it again. Skip. + if (await exifDateTimeExtractor(file) != null) { + return false; + } + //When exiftool is installed + if (exifToolInstalled) { + //Even if exifTool is installed, try to use native way for speed first and if it works keep going. If not, use exiftool. + if (_noExifToolDateTimeWriter(file, dateTime)) { + return true; //If native way was able to write exif data: exit. If not, try exifTool. 
+ } + final exifFormat = DateFormat('yyyy:MM:dd HH:mm:ss'); + final String dt = exifFormat.format(dateTime); + final success = await exiftool!.writeExifBatch(file, { + 'DateTimeOriginal': '"$dt"', + 'DateTimeDigitized': '"$dt"', + 'DateTime': '"$dt"', + }); + if (success) { + log( + '[Step 5/8] New DateTime $dt written to EXIF (exiftool): ${file.path}', + ); + return true; + } else { + log( + '[Step 5/8] DateTime $dt could not be written to EXIF: ${file.path}', + level: 'error', + ); + return false; + } + } else { + //When exiftool is not installed + return _noExifToolDateTimeWriter(file, dateTime); + } +} + +Future writeGpsToExif( + final DMSCoordinates coordinates, + final File file, +) async { + if (exifToolInstalled) { + //When exiftool is installed + //Even if exifTool is installed, try to use native way for speed first and if it works keep going. If not, use exiftool. + if (_noExifGPSWriter(file, coordinates)) { + return true; + } + //Check if the file already has EXIF data and if yes, skip. 
+ final Map coordinatesMap = await exiftool!.readExifBatch(file, [ + 'GPSLatitude', + 'GPSLongitude', + ]); + final bool filehasExifCoordinates = coordinatesMap.values.isNotEmpty; + if (!filehasExifCoordinates) { + log( + '[Step 5/8] Found coordinates ${coordinates.toString()} in json, but missing in EXIF for file: ${file.path}', + ); + + final success = await exiftool!.writeExifBatch(file, { + 'GPSLatitude': coordinates.toDD().latitude.toString(), + 'GPSLongitude': coordinates.toDD().longitude.toString(), + 'GPSLatitudeRef': coordinates.latDirection.abbreviation.toString(), + 'GPSLongitudeRef': coordinates.longDirection.abbreviation.toString(), + }); + if (success) { + log('[Step 5/8] New coordinates written to EXIF: ${file.path}'); + return true; + } else { + log( + '[Step 5/8] Coordinates ${coordinates.toString()} could not be written to EXIF: ${file.path}', + level: 'error', + forcePrint: true, + ); + return false; + } + } + //Found coords in json but already present in exif. Skip. + return false; + } else { + //If exiftool is not installed + return _noExifGPSWriter(file, coordinates); + } +} + +bool _noExifToolDateTimeWriter(final File file, final DateTime dateTime) { + final exifFormat = DateFormat('yyyy:MM:dd HH:mm:ss'); + final String? mimeType = lookupMimeType(file.path); + if (mimeType == 'image/jpeg') { + //when it's a jpg and the image library can handle it + ExifData? exifData; + final Uint8List origbytes = file.readAsBytesSync(); + try { + exifData = decodeJpgExif(origbytes); //Decode the exif data of the jpg. + } catch (e) { + log( + '[Step 5/8] Found DateTime in json, but missing in EXIF for file: ${file.path}. 
Failed to write because of error during decoding: $e', + level: 'error', + ); + return false; // Ignoring errors during image decoding as it may not be a valid image file + } + if (exifData != null && !exifData.isEmpty) { + exifData.imageIfd['DateTime'] = exifFormat.format(dateTime); + exifData.exifIfd['DateTimeOriginal'] = exifFormat.format(dateTime); + exifData.exifIfd['DateTimeDigitized'] = exifFormat.format(dateTime); + final Uint8List? newbytes = injectJpgExif( + origbytes, + exifData, + ); //This overwrites the original exif data of the image with the altered exif data. + if (newbytes != null) { + file.writeAsBytesSync(newbytes); + log( + '[Step 5/8] New DateTime ${dateTime.toString()} written to EXIF (natively): ${file.path}', + ); + return true; + } + } + } + if (!exifToolInstalled) { + log( + '[Step 5/8] Found DateTime in json, but missing in EXIF. Writing to $mimeType is not supported without exiftool.', + level: 'warning', + forcePrint: true, + ); + } + return false; +} + +bool _noExifGPSWriter(final File file, final DMSCoordinates coordinates) { + final String? mimeType = lookupMimeType(file.path); + if (mimeType == 'image/jpeg') { + //when it's a jpg and the image library can handle it + ExifData? exifData; + final Uint8List origbytes = file.readAsBytesSync(); + try { + exifData = decodeJpgExif(origbytes); //Decode only the exif data + } catch (e) { + log( + '[Step 5/8] Found Coordinates in json, but missing in EXIF for file: ${file.path}. Failed to write because of error during decoding: $e', + level: 'error', + ); + return false; // Ignoring errors during image decoding as it may not be a valid image file + } + if (exifData != null) { + exifData.gpsIfd.gpsLatitude = coordinates.toDD().latitude; + exifData.gpsIfd.gpsLongitude = coordinates.toDD().longitude; + exifData.gpsIfd.gpsLatitudeRef = coordinates.latDirection.abbreviation; + exifData.gpsIfd.gpsLongitudeRef = coordinates.longDirection.abbreviation; + final Uint8List? 
newbytes = injectJpgExif( + origbytes, + exifData, + ); //This overwrites the original exif data of the image with the altered exif data. + if (newbytes != null) { + file.writeAsBytesSync(newbytes); + log('[Step 5/8] New coordinates written to EXIF: ${file.path}'); + return true; + } + } + } + if (!exifToolInstalled) { + if (isVerbose) { + log( + '[Step 5/8] Found Coordinates in json, but missing in EXIF. Writing to $mimeType is not supported without exiftool.', + level: 'warning', + ); + } else { + print( + '[Step 5/8] [WARNING] Found Coordinates in json, but missing in EXIF. Writing to $mimeType is not supported without exiftool.', + ); + } + } + return false; +} diff --git a/lib/exiftoolInterface.dart b/lib/exiftoolInterface.dart new file mode 100644 index 000000000..ec1e8780f --- /dev/null +++ b/lib/exiftoolInterface.dart @@ -0,0 +1,162 @@ +// ignore_for_file: file_names + +import 'dart:convert'; +import 'dart:io'; +import 'package:path/path.dart' as p; +import 'utils.dart'; + +ExiftoolInterface? exiftool; + +Future initExiftool() async { + exiftool = await ExiftoolInterface.find(); + if (exiftool != null) { + exifToolInstalled = true; + return true; + } else { + return false; + } +} + +/// Cross-platform interface for exiftool (read/write EXIF data) +class ExiftoolInterface { + ExiftoolInterface._(this.exiftoolPath); + + final String exiftoolPath; + + /// Attempts to find exiftool in PATH and returns an instance, or null if not found + static Future find() async { + final String exe = Platform.isWindows ? 'exiftool.exe' : 'exiftool'; + final String? path = await _which(exe); + if (path != null) { + return ExiftoolInterface._(path); + } + // Not found in PATH, check same directory as running binary + String? 
binDir; + try { + binDir = File(Platform.resolvedExecutable).parent.path; + } catch (_) { + binDir = null; + } + if (binDir != null) { + final exiftoolFile = File(p.join(binDir, exe)); + if (await exiftoolFile.exists()) { + return ExiftoolInterface._(exiftoolFile.path); + } + final exiftoolSubdirFile = File( + p.join(binDir, 'gpth_tool', 'exif_tool', exe), + ); + if (await exiftoolSubdirFile.exists()) { + return ExiftoolInterface._(exiftoolSubdirFile.path); + } + } + return null; + } + + /// Reads all EXIF data from [file] and returns as a Map + Future> readExif(final File file) async { + final result = await Process.run(exiftoolPath, ['-j', '-n', file.path]); + if (result.exitCode != 0) { + log( + 'exiftool returned a non 0 code for reading ${file.path} with error: ${result.stderr}', + level: 'error', + ); + } + try { + final List jsonList = jsonDecode(result.stdout); + if (jsonList.isEmpty) return {}; + final map = Map.from(jsonList.first); + map.remove('SourceFile'); + return map; + } on FormatException catch (_) { + // this is when json is bad + return {}; + } on FileSystemException catch (_) { + // this happens for issue #143 + // "Failed to decode data using encoding 'utf-8'" + // maybe this will self-fix when dart itself support more encodings + return {}; + } on NoSuchMethodError catch (_) { + // this is when tags like photoTakenTime aren't there + return {}; + } + } + + /// Reads only the specified EXIF tags from [file] and returns as a Map + Future> readExifBatch( + final File file, + final List tags, + ) async { + final String filepath = file.path; + + if (tags.isEmpty) { + return {}; + } + final args = ['-j', '-n']; + args.addAll(tags.map((final tag) => '-$tag')); + args.add(filepath); + final result = await Process.run(exiftoolPath, args); + if (result.exitCode != 0) { + log( + 'exiftool returned a non 0 code for reading ${file.path} with error: ${result.stderr}', + level: 'error', + ); + } + try { + final List jsonList = jsonDecode(result.stdout); + 
if (jsonList.isEmpty) return {}; + final map = Map.from(jsonList.first); + map.remove('SourceFile'); + return map; + } on FormatException catch (_) { + // this is when json is bad + return {}; + } on FileSystemException catch (_) { + // this happens for issue #143 + // "Failed to decode data using encoding 'utf-8'" + // maybe this will self-fix when dart itself support more encodings + return {}; + } on NoSuchMethodError catch (_) { + // this is when tags like photoTakenTime aren't there + return {}; + } + } + + /// Writes multiple EXIF tags to [file]. [tags] is a map of tag name to value. + Future writeExifBatch( + final File file, + final Map tags, + ) async { + final String filepath = file.path; + + final args = ['-overwrite_original']; + tags.forEach((final tag, final value) => args.add('-$tag=$value')); + args.add(filepath); + final result = await Process.run(exiftoolPath, args); + if (result.exitCode != 0) { + log( + '[Step 5/8] Writing exif to file ${file.path} failed. ${result.stderr}', + level: 'error', + forcePrint: true, + ); + } + if (result.exitCode != 0) { + return false; + } else { + return true; + } + } +} + +/// Helper to find an executable in PATH (like 'which' or 'where') +Future _which(final String bin) async { + final result = await Process.run(Platform.isWindows ? 'where' : 'which', [ + bin, + ]); + if (result.exitCode != 0) return null; + final output = result.stdout.toString(); + final lines = output + .split(RegExp(r'[\r\n]+')) + .where((final l) => l.trim().isNotEmpty) + .toList(); + return lines.isEmpty ? 
null : lines.first.trim(); +} diff --git a/lib/extras.dart b/lib/extras.dart index d3ae6e70b..40ce16902 100644 --- a/lib/extras.dart +++ b/lib/extras.dart @@ -3,7 +3,7 @@ import 'package:unorm_dart/unorm_dart.dart' as unorm; import 'media.dart'; -const extraFormats = [ +const List extraFormats = [ // EN/US - thanks @DalenW '-edited', '-effects', @@ -31,12 +31,14 @@ const extraFormats = [ /// Removes any media that match any of "extra" formats /// Returns count of removed -int removeExtras(List media) { - final copy = media.toList(); - var count = 0; - for (final m in copy) { - final name = p.withoutExtension(p.basename(m.firstFile.path)).toLowerCase(); - for (final extra in extraFormats) { +int removeExtras(final List media) { + final List copy = media.toList(); + int count = 0; + for (final Media m in copy) { + final String name = p + .withoutExtension(p.basename(m.firstFile.path)) + .toLowerCase(); + for (final String extra in extraFormats) { // MacOS uses NFD that doesn't work with our accents 🙃🙃 // https://github.com/TheLastGimbus/GooglePhotosTakeoutHelper/pull/247 if (unorm.nfc(name).endsWith(extra)) { diff --git a/lib/folder_classify.dart b/lib/folder_classify.dart index 66cb5d0ab..2f0ca1cd2 100644 --- a/lib/folder_classify.dart +++ b/lib/folder_classify.dart @@ -1,12 +1,15 @@ /// This file contains utils for determining type of a folder /// Whether it's a legendary "year folder", album, trash, etc +library; + import 'dart:io'; -import 'package:gpth/utils.dart'; import 'package:path/path.dart' as p; -bool isYearFolder(Directory dir) => +import 'utils.dart'; + +bool isYearFolder(final Directory dir) => RegExp(r'^Photos from (20|19|18)\d{2}$').hasMatch(p.basename(dir.path)); -Future isAlbumFolder(Directory dir) => - dir.parent.list().whereType().any((e) => isYearFolder(e)); +Future isAlbumFolder(final Directory dir) => + dir.parent.list().whereType().any(isYearFolder); diff --git a/lib/grouping.dart b/lib/grouping.dart index 3610d40ca..78a8ba672 100644 --- 
a/lib/grouping.dart +++ b/lib/grouping.dart @@ -1,12 +1,15 @@ /// This files contains functions for removing duplicates and detecting albums /// /// That's because their logic looks very similar and they share code +library; import 'dart:io'; - import 'package:collection/collection.dart'; -import 'package:gpth/media.dart'; +import 'package:console_bars/console_bars.dart'; import 'package:path/path.dart' as p; +import 'media.dart' show Media; +import 'media.dart'; +import 'utils.dart'; extension Group on Iterable { /// This groups your media into map where key is something that they share @@ -18,16 +21,20 @@ extension Group on Iterable { /// Groups may be 1-lenght, where element was unique, or n-lenght where there /// were duplicates Map> groupIdentical() { - final output = >{}; + final Map> output = >{}; // group files by size - can't have same hash with diff size // ignore: unnecessary_this - for (final sameSize in this.groupListsBy((e) => e.size).entries) { + for (final MapEntry> sameSize in groupListsBy( + (final Media e) => e.size, + ).entries) { // just add with "...bytes" key if just one if (sameSize.value.length <= 1) { output['${sameSize.key}bytes'] = sameSize.value; } else { // ...calculate their full hashes and group by them - output.addAll(sameSize.value.groupListsBy((e) => e.hash.toString())); + output.addAll( + sameSize.value.groupListsBy((final Media e) => e.hash.toString()), + ); } } return output; @@ -43,59 +50,74 @@ extension Group on Iterable { /// Uses file size, then sha256 hash to distinct /// /// Returns count of removed -int removeDuplicates(List media) { - var count = 0; - final byAlbum = media +int removeDuplicates(final List media) { + int count = 0; + + final Iterable>> byAlbum = media // group by albums as we will merge those later // (to *not* compare hashes between albums) - .groupListsBy((e) => e.files.keys.first) + .groupListsBy((final Media e) => e.files.keys.first) .values // group by hash - .map((albumGroup) => 
albumGroup.groupIdentical().values); + .map( + (final List albumGroup) => albumGroup.groupIdentical().values, + ); // we don't care about album organization now - flatten final Iterable> hashGroups = byAlbum.flattened; - for (final group in hashGroups) { + + for (final List group in hashGroups) { // sort by best date extraction, then file name length // using strings to sort by two values is a sneaky trick i learned at // https://stackoverflow.com/questions/55920677/how-to-sort-a-list-based-on-two-values // note: we are comparing accuracy here tho we do know that *all* // of them have it null - i'm leaving this just for sake - group.sort((a, b) => - '${a.dateTakenAccuracy ?? 999}${p.basename(a.firstFile.path).length}' - .compareTo( - '${b.dateTakenAccuracy ?? 999}${p.basename(b.firstFile.path).length}')); + group.sort( + ( + final Media a, + final Media b, + ) => '${a.dateTakenAccuracy ?? 999}${p.basename(a.firstFile.path).length}' + .compareTo( + '${b.dateTakenAccuracy ?? 999}${p.basename(b.firstFile.path).length}', + ), + ); // get list of all except first - for (final e in group.sublist(1)) { + for (final Media e in group.sublist(1)) { // remove them from media media.remove(e); + log('[Step 3/8] Skipping duplicate: ${e.firstFile.path}'); count++; } } - return count; } -String albumName(Directory albumDir) => p.basename(albumDir.path); +String albumName(final Directory albumDir) => + p.basename(p.normalize(albumDir.path)); /// This will analyze [allMedia], find which files are hash-same, and merge /// all of them into single [Media] object with all album names they had -void findAlbums(List allMedia) { - for (final group in allMedia.groupIdentical().values) { +void findAlbums(final List allMedia, [final FillingBar? 
barFindAlbums]) { + for (final List group in allMedia.groupIdentical().values) { + if (barFindAlbums != null) { + barFindAlbums.increment(); + } + if (group.length <= 1) continue; // then this isn't a group // now, we have [group] list that contains actual sauce: - final allFiles = group.fold( + final Map allFiles = group.fold( {}, - (allFiles, e) => allFiles..addAll(e.files), + (final Map allFiles, final Media e) => + allFiles..addAll(e.files), ); // sort by best date extraction - group.sort((a, b) => - (a.dateTakenAccuracy ?? 999).compareTo((b.dateTakenAccuracy ?? 999))); + group.sort( + (final Media a, final Media b) => + (a.dateTakenAccuracy ?? 999).compareTo(b.dateTakenAccuracy ?? 999), + ); // remove original dirty ones - for (final e in group) { - allMedia.remove(e); - } + allMedia.removeWhere(group.contains); // set the first (best) one complete album list group.first.files = allFiles; // add our one, precious ✨perfect✨ one diff --git a/lib/interactive.dart b/lib/interactive.dart index e3853abac..c4991291b 100644 --- a/lib/interactive.dart +++ b/lib/interactive.dart @@ -11,40 +11,48 @@ /// - extra \n are added in main file /// - ...detect when something is wrong (f.e. disk space) and quit whole program /// - ...are as single-job as it's appropriate - main file calls them one by one +library; + import 'dart:async'; import 'dart:io'; // @Deprecated('Interactive unzipping is suspended for now!') // import 'package:archive/archive_io.dart'; import 'package:file_picker_desktop/file_picker_desktop.dart'; -import 'package:gpth/utils.dart'; import 'package:path/path.dart' as p; -const albumOptions = { - 'shortcut': '[Recommended] Album folders with shortcuts/symlinks to ' - 'original photos. Recommended as it will take the least space, but ' - 'may not be portable when moving across systems/computes/phones etc', - 'duplicate-copy': 'Album folders with photos copied into them. 
' - 'This will work across all systems, but may take wayyy more space!!', - 'json': "Put ALL photos (including Archive and Trash) in one folder and " - "make a .json file with info about albums. " - "Use if you're a programmer, or just want to get everything, " - "ignoring lack of year-folders etc.", - 'nothing': 'Just ignore them and put year-photos into one folder. ' - 'WARNING: This ignores Archive/Trash !!!', - 'reverse-shortcut': 'Album folders with ORIGINAL photos. "ALL_PHOTOS" folder ' - 'with shortcuts/symlinks to albums. If a photo is not in an album, ' - 'the original is saved. CAUTION: If a photo is in multiple albums, it will ' - 'be duplicated in the other albums, and the shortcuts/symlinks in ' - '"ALL_PHOTOS" will point only to one album.', +import 'utils.dart'; + +const Map albumOptions = { + 'shortcut': + '[Recommended] Album folders with shortcuts/symlinks to \n' + 'original photos. \nRecommended as it will take the least space, but \n' + 'may not be portable when moving across systems/computes/phones etc\n', + 'duplicate-copy': + 'Album folders with photos copied into them. \n' + 'This will work across all systems, but may take wayyy more space!!\n', + 'json': + 'Put ALL photos (including Archive and Trash) in one folder and \n' + 'make a .json file with info about albums. \n' + "Use if you're a programmer, or just want to get everything, \n" + 'ignoring lack of year-folders etc.\n', + 'nothing': + 'Just ignore them and put year-photos into one folder. \n' + 'WARNING: This ignores Archive/Trash !!!\n', + 'reverse-shortcut': + 'Album folders with ORIGINAL photos. "ALL_PHOTOS" folder \n' + 'with shortcuts/symlinks to albums. If a photo is not in an album, \n' + 'the original is saved. 
CAUTION: If a photo is in multiple albums, it will \n' + 'be duplicated in the other albums, and the shortcuts/symlinks in \n' + '"ALL_PHOTOS" will point only to one album.\n', }; /// Whether we are, indeed, running interactive (or not) -var indeed = false; +bool indeed = false; /// Shorthand for Future.delayed -Future sleep(num seconds) => - Future.delayed(Duration(milliseconds: (seconds * 1000).toInt())); +Future sleep(final num seconds) => + Future.delayed(Duration(milliseconds: (seconds * 1000).toInt())); void pressEnterToContinue() { print('[press enter to continue]'); @@ -63,11 +71,15 @@ Future askForInt() async => stdin Future greet() async { print('GooglePhotosTakeoutHelper v$version'); await sleep(1); - print('Hi there! This tool will help you to get all of your photos from ' - 'Google Takeout to one nice tidy folder\n'); + print( + 'Hi there! This tool will help you to get all of your photos from ' + 'Google Takeout to one nice tidy folder\n', + ); await sleep(3); - print('(If any part confuses you, read the guide on:\n' - 'https://github.com/TheLastGimbus/GooglePhotosTakeoutHelper )'); + print( + '(If any part confuses you, read the guide on:\n' + 'https://github.com/TheLastGimbus/GooglePhotosTakeoutHelper )', + ); await sleep(3); } @@ -75,17 +87,17 @@ Future greet() async { Future nothingFoundMessage() async { print('...oh :('); print('...'); - print("I couldn't find any D: reasons for this may be:"); + print("8 I couldn't find any D: reasons for this may be:"); if (indeed) { print( " - you've already ran gpth and it moved all photos to output -\n" - " delete the input folder and re-extract the zip", + ' delete the input folder and re-extract the zip', ); } print( " - your Takeout doesn't have any \"year folders\" -\n" - " visit https://github.com/TheLastGimbus/GooglePhotosTakeoutHelper\n" - " again and request new, correct Takeout", + ' visit https://github.com/TheLastGimbus/GooglePhotosTakeoutHelper\n' + ' again and request new, correct Takeout', ); 
print('After fixing this, go ahead and try again :)'); } @@ -95,28 +107,32 @@ Future getInputDir() async { print('(Make sure they are merged => there is only one "Takeout" folder!)'); await sleep(1); pressEnterToContinue(); - final dir = await getDirectoryPath(dialogTitle: 'Select unzipped folder:'); + final String? dir = await getDirectoryPath( + dialogTitle: 'Select unzipped folder:', + ); await sleep(1); if (dir == null) { error('Duh, something went wrong with selecting - try again!'); return getOutput(); } print('Cool!'); - sleep(1); + await sleep(1); return Directory(dir); } /// Asks user for zip files with ui dialogs @Deprecated('Interactive unzipping is suspended for now!') Future> getZips() async { - print('First, select all .zips from Google Takeout ' - '(use Ctrl to select multiple)'); + print( + 'First, select all .zips from Google Takeout ' + '(use Ctrl to select multiple)', + ); await sleep(2); pressEnterToContinue(); - final files = await pickFiles( + final FilePickerResult? 
files = await pickFiles( dialogTitle: 'Select all Takeout zips:', type: FileType.custom, - allowedExtensions: ['zip', 'tgz'], + allowedExtensions: ['zip', 'tgz'], allowMultiple: true, ); await sleep(1); @@ -129,56 +145,66 @@ Future> getZips() async { quit(6969); } if (files.count == 1) { - print("You selected only one zip - if that's only one you have, it's cool, " - "but if you have multiple, Ctrl-C to exit gpth, and select them " - "*all* again (with Ctrl)"); + print( + "You selected only one zip - if that's only one you have, it's cool, " + 'but if you have multiple, Ctrl-C to exit gpth, and select them ' + '*all* again (with Ctrl)', + ); await sleep(5); pressEnterToContinue(); } - if (!files.files.every((e) => - File(e.path!).statSync().type == FileSystemEntityType.file && - RegExp(r'\.(zip|tgz)$').hasMatch(e.path!))) { - print('Files: [${files.files.map((e) => p.basename(e.path!)).join(', ')}]'); + if (!files.files.every( + (final PlatformFile e) => + File(e.path!).statSync().type == FileSystemEntityType.file && + RegExp(r'\.(zip|tgz)$').hasMatch(e.path!), + )) { + print( + 'Files: [${files.files.map((final PlatformFile e) => p.basename(e.path!)).join(', ')}]', + ); error('Not all files you selected are zips :/ please do this again'); quit(6969); } // potentially shows user they selected too little ? - print('Cool! Selected ${files.count} zips => ' - '${filesize( - files.files - .map((e) => File(e.path!).statSync().size) - .reduce((a, b) => a + b), - )}'); + print( + 'Cool! 
Selected ${files.count} zips => ' + '${filesize(files.files.map((final PlatformFile e) => File(e.path!).statSync().size).reduce((final int a, final int b) => a + b))}', + ); await sleep(1); - return files.files.map((e) => File(e.path!)).toList(); + return files.files.map((final PlatformFile e) => File(e.path!)).toList(); } /// Asks user for output folder with ui dialogs Future getOutput() async { - print('Now, select output folder - all photos will be moved there\n' - '(note: GPTH will *move* your photos - no extra space will be taken ;)'); + print( + 'Now, select output folder - all photos will be moved there\n' + '(note: GPTH will *move* your photos - no extra space will be taken ;)', + ); await sleep(1); pressEnterToContinue(); - final dir = await getDirectoryPath(dialogTitle: 'Select output folder:'); + final String? dir = await getDirectoryPath( + dialogTitle: 'Select output folder:', + ); await sleep(1); if (dir == null) { error('Duh, something went wrong with selecting - try again!'); return getOutput(); } print('Cool!'); - sleep(1); + await sleep(1); return Directory(dir); } Future askDivideDates() async { - print('Do you want your photos in one big chronological folder, ' - 'or divided to folders by year/month?'); + print( + 'Do you want your photos in one big chronological folder, ' + 'or divided to folders by year/month?', + ); print('[1] (default) - one big folder'); print('[2] - year folders'); print('[3] - year/month folders'); print('[3] - year/month/day folders'); print('(Type a number or press enter for default):'); - final answer = await askForInt(); + final String answer = await askForInt(); switch (answer) { case '1': case '': @@ -201,15 +227,17 @@ Future askDivideDates() async { Future askModifyJson() async { print( - 'Check if your .json files of your photos contains "supplemental-metadata" ' - 'between the original extension and .json. 
If this suffix is present, ' - 'the script will not detect the corresponding JSON file'); + 'Check if your .json files of your photos contains "supplemental-metadata" ' + 'between the original extension and .json. If this suffix is present, ' + 'the script will not detect the corresponding JSON file', + ); print('For example: myImageName.jpg.supplemental-metadata.json'); print( - '[1] (Erase suffix) - [Recommended] Yes, the photos have the suffix "supplemental-metadata"'); + '[1] (Erase suffix) - [Recommended] Yes, the photos have the suffix "supplemental-metadata"', + ); print('[2] (Dont Erease suffix) - No'); print('(Type a number or press enter for default):'); - final answer = await askForInt(); + final String answer = await askForInt(); switch (answer) { case '1': case '': @@ -226,16 +254,16 @@ Future askModifyJson() async { Future askAlbums() async { print('What should be done with albums?'); - var i = 0; - for (final entry in albumOptions.entries) { + int i = 0; + for (final MapEntry entry in albumOptions.entries) { print('[${i++}] ${entry.key}: ${entry.value}'); } - final answer = int.tryParse(await askForInt()); + final int? answer = int.tryParse(await askForInt()); if (answer == null || answer < 0 || answer >= albumOptions.length) { error('Invalid answer - try again'); return askAlbums(); } - final choice = albumOptions.keys.elementAt(answer); + final String choice = albumOptions.keys.elementAt(answer); print('Okay, doing: $choice'); return choice; } @@ -246,7 +274,7 @@ Future askForCleanOutput() async { print('[1] - delete *all* files inside output folder and continue'); print('[2] - continue as usual - put output files alongside existing'); print('[3] - exit program to examine situation yourself'); - final answer = stdin + final String answer = stdin .readLineSync()! 
.replaceAll('[', '') .replaceAll(']', '') @@ -269,13 +297,15 @@ Future askForCleanOutput() async { } Future askTransformPixelMP() async { - print('Pixel Motion Pictures are saved with the .MP or .MV ' - 'extensions. Do you want to change them to .mp4 ' - 'for better compatibility?'); + print( + 'Pixel Motion Pictures are saved with the .MP or .MV ' + 'extensions. Do you want to change them to .mp4 ' + 'for better compatibility?', + ); print('[1] (default) - no, keep original extension'); print('[2] - yes, change extension to .mp4'); print('(Type 1 or 2 or press enter for default):'); - final answer = await askForInt(); + final String answer = await askForInt(); switch (answer) { case '1': case '': @@ -291,15 +321,17 @@ Future askTransformPixelMP() async { } Future askChangeCreationTime() async { - print('This program fixes file "modified times". ' - 'Due to language limitations, creation times remain unchanged. ' - 'Would you like to run a separate script at the end to sync ' - 'creation times with modified times?' - '\nNote: ONLY ON WINDOWS'); + print( + 'This program fixes file "modified times". ' + 'Due to language limitations, creation times remain unchanged. ' + 'Would you like to run a separate script at the end to sync ' + 'creation times with modified times?' + '\nNote: ONLY ON WINDOWS', + ); print('[1] (Default) - No, don\'t update creation time'); print('[2] - Yes, update creation time to match modified time'); print('(Type 1 or 2, or press enter for default):'); - final answer = await askForInt(); + final String answer = await askForInt(); switch (answer) { case '1': case '': @@ -316,8 +348,8 @@ Future askChangeCreationTime() async { /// Checks free space on disk and notifies user accordingly @Deprecated('Interactive unzipping is suspended for now!') -Future freeSpaceNotice(int required, Directory dir) async { - final freeSpace = await getDiskFree(dir.path); +Future freeSpaceNotice(final int required, final Directory dir) async { + final int? 
freeSpace = await getDiskFree(dir.path); if (freeSpace == null) { print( 'Note: everything will take ~${filesize(required)} of disk space - ' @@ -346,7 +378,7 @@ Future freeSpaceNotice(int required, Directory dir) async { /// Unzips all zips to given folder (creates it if needed) @Deprecated('Interactive unzipping is suspended for now!') -Future unzip(List zips, Directory dir) async { +Future unzip(final List zips, final Directory dir) async { throw UnimplementedError(); // if (await dir.exists()) await dir.delete(recursive: true); // await dir.create(recursive: true); @@ -375,3 +407,67 @@ Future unzip(List zips, Directory dir) async { // } // } } + +Future askIfWriteExif() async { + if (exifToolInstalled) { + print( + 'This mode will write Exif data (dates/times/coordinates) back to your files. ' + 'To achieve the best results, download Exiftool and place it next to this executable or in your \$PATH.' + 'If you haven\'t done so yet, close this program and come back. ' + 'creation times with modified times?' + '\nNote: ONLY ON WINDOWS', + ); + } else { + print( + 'This mode will write Exif data (dates/times/coordinates) back to your files. ' + 'We detected that ExifTool is NOT available! ' + 'To achieve the best results, we strongly recomend to download Exiftool and place it next to this executable or in your \$PATH.' + 'You can download ExifTool here: https://exiftool.org ' + 'Note that this mode will alter your original files, regardless of the "copy" mode.' 
+ 'Do you want to continue with writing exif data enabled?', + ); + } + + print('[1] (Default) - Yes, write exif'); + print('[2] - No, don\'t write to exif'); + print('(Type 1 or 2, or press enter for default):'); + final String answer = await askForInt(); + switch (answer) { + case '1': + case '': + print('Okay, will write to exif'); + return true; + case '2': + print('Okay, will not touch the exif of your files!'); + return false; + default: + error('Invalid answer - try again'); + return askIfWriteExif(); + } +} + +Future askIfLimitFileSize() async { + print( + 'By default we will process all your files.' + 'However, if you have large video files and run this script on a low ram system (e.g. a NAS or your vacuum cleaning robot), you might want to ' + 'limit the maximum file size to 64 MB not run out of memory. ' + 'We recommend to only activate this if you run into problems.', + ); + + print('[1] (Default) - Don\'t limit me! Process everything!'); + print('[2] - I operate a Toaster. Limit supported media size to 64 MB'); + print('(Type 1 or 2, or press enter for default):'); + final String answer = await askForInt(); + switch (answer) { + case '1': + case '': + print('Alrighty! Will process everything!'); + return false; + case '2': + print('Okay! Limiting files to a size of 64 MB'); + return true; + default: + error('Invalid answer - try again'); + return askIfLimitFileSize(); + } +} diff --git a/lib/media.dart b/lib/media.dart index 8b6abb6b4..15db07f43 100644 --- a/lib/media.dart +++ b/lib/media.dart @@ -1,7 +1,11 @@ import 'dart:io'; import 'package:crypto/crypto.dart'; -import 'package:gpth/utils.dart'; +import 'utils.dart'; + +//Order is important! +///This is the extraction method through which a Media got its dateTime. 
+enum DateTimeExtractionMethod { json, exif, guess, jsonTryHard, none } /// Abstract of a *media* - a photo or video /// Main thing is the [file] - this should not change @@ -12,6 +16,13 @@ import 'package:gpth/utils.dart'; /// you find a duplicate, use one that has lower [dateTakenAccuracy] number. /// this and [dateTaken] should either both be null or both filled class Media { + Media( + this.files, { + this.dateTaken, + this.dateTakenAccuracy, + this.dateTimeExtractionMethod, + }); + /// First file with media, used in early stage when albums are not merged /// /// BE AWARE OF HOW YOU USE IT @@ -44,23 +55,22 @@ class Media { /// higher the worse int? dateTakenAccuracy; + /// The method/extractor that produced the DateTime ('json', 'exif', 'guess', 'jsonTryHard', 'none') + DateTimeExtractionMethod? dateTimeExtractionMethod; + //cache Digest? _hash; /// will be used for finding duplicates/albums /// WARNING: Returns same value for files > [maxFileSize] - Digest get hash => _hash ??= firstFile.lengthSync() > maxFileSize - ? Digest([0]) + Digest get hash => + _hash ??= ((firstFile.lengthSync() > maxFileSize) && enforceMaxFileSize) + ? Digest([0]) : sha256.convert(firstFile.readAsBytesSync()); - Media( - this.files, { - this.dateTaken, - this.dateTakenAccuracy, - }); - @override - String toString() => 'Media(' + String toString() => + 'Media(' '$firstFile, ' 'dateTaken: $dateTaken' '${files.keys.length > 1 ? 
', albums: ${files.keys}' : ''}' diff --git a/lib/moving.dart b/lib/moving.dart index d04671353..cdfd0ac5a 100644 --- a/lib/moving.dart +++ b/lib/moving.dart @@ -1,20 +1,23 @@ /// This file contains logic/utils for final act of moving actual files once /// we have everything grouped, de-duplicated and sorted +// ignore_for_file: prefer_single_quotes + +library; import 'dart:convert'; import 'dart:io'; import 'package:collection/collection.dart'; -import 'package:gpth/interactive.dart' as interactive; -import 'package:gpth/utils.dart'; import 'package:path/path.dart' as p; +import 'interactive.dart' as interactive; import 'media.dart'; +import 'utils.dart'; /// This will add (1) add end of file name over and over until file with such /// name doesn't exist yet. Will leave without "(1)" if is free already -File findNotExistingName(File initialFile) { - var file = initialFile; +File findNotExistingName(final File initialFile) { + File file = initialFile; while (file.existsSync()) { file = File('${p.withoutExtension(file.path)}(1)${p.extension(file.path)}'); } @@ -26,39 +29,42 @@ File findNotExistingName(File initialFile) { /// Uses [findNotExistingName] for safety /// /// WARN: Crashes with non-ascii names :( -Future createShortcut(Directory location, File target) async { - final name = '${p.basename(target.path)}${Platform.isWindows ? '.lnk' : ''}'; - final link = findNotExistingName(File(p.join(location.path, name))); +Future createShortcut(final Directory location, final File target) async { + final String name = + '${p.basename(target.path)}${Platform.isWindows ? 
'.lnk' : ''}'; + final File link = findNotExistingName(File(p.join(location.path, name))); // this must be relative to not break when user moves whole folder around: // https://github.com/TheLastGimbus/GooglePhotosTakeoutHelper/issues/232 - final targetRelativePath = p.relative(target.path, from: link.parent.path); - final targetPath = target.absolute.path; + final String targetRelativePath = p.relative( + target.path, + from: link.parent.path, + ); + final String targetPath = target.absolute.path; if (Platform.isWindows) { try { - createShortcutWin(link.path, targetPath); - }catch (e) { - final res = await Process.run( - 'powershell.exe', - [ - '-ExecutionPolicy', - 'Bypass', - '-NoLogo', - '-NonInteractive', - '-NoProfile', - '-Command', - '\$ws = New-Object -ComObject WScript.Shell; ' - '\$s = \$ws.CreateShortcut(\'${link.path}\'); ' - '\$s.TargetPath = \'$targetPath\'; ' - '\$s.Save()', - ], - ); + await createShortcutWin(link.path, targetPath); + } catch (e) { + final ProcessResult res = await Process.run('powershell.exe', [ + '-ExecutionPolicy', + 'Bypass', + '-NoLogo', + '-NonInteractive', + '-NoProfile', + '-Command', + "\$ws = New-Object -ComObject WScript.Shell; ", + "\$s = \$ws.CreateShortcut(\"${link.path}\"); ", + "\$s.TargetPath = \"$targetPath\"; ", + "\$s.Save()", + ]); if (res.exitCode != 0) { - throw 'PowerShell doesnt work :( - ' - 'report that to @TheLastGimbus on GitHub:\n\n' - 'https://github.com/TheLastGimbus/GooglePhotosTakeoutHelper/issues\n\n' - '...or try other album solution\n' - 'sorry for inconvenience :(' - '\nshortcut exc -> $e'; + throw Exception( + 'PowerShell doesnt work :( - \n\n' + 'report that to @TheLastGimbus on GitHub:\n\n' + 'https://github.com/TheLastGimbus/GooglePhotosTakeoutHelper/issues\n\n' + '...or try other album solution\n' + 'sorry for inconvenience :(' + '\nshortcut exc -> $e', + ); } } return File(link.path); @@ -68,13 +74,16 @@ Future createShortcut(Directory location, File target) async { } Future 
moveFileAndCreateShortcut( - Directory newLocation, File target) async { - final newPath = p.join(newLocation.path, p.basename(target.path)); - final movedFile = await target.rename( - newPath); // Move the file from year folder to album (new location) + final Directory newLocation, + final File target, +) async { + final String newPath = p.join(newLocation.path, p.basename(target.path)); + final File movedFile = await target.rename( + newPath, + ); // Move the file from year folder to album (new location) // Create shortcut in the original path (year folder) - return await createShortcut(target.parent, movedFile); + return createShortcut(target.parent, movedFile); } /// Big-ass logic of moving files from input to output @@ -90,38 +99,48 @@ Future moveFileAndCreateShortcut( /// Emits number of files that it copied/created/whatever (starting from 1) - /// use [outputFileCount] function for progress measurement Stream moveFiles( - List allMediaFinal, - Directory output, { - required bool copy, - required num divideToDates, - required String albumBehavior, + final List allMediaFinal, + final Directory output, { + required final bool copy, + required final num divideToDates, + required final String albumBehavior, }) async* { - assert(interactive.albumOptions.keys.contains(albumBehavior)); + assert( + interactive.albumOptions.keys.contains(albumBehavior), + 'Invalid albumBehavior: $albumBehavior. Must be one of ${interactive.albumOptions.keys}', + ); /// used only in 'json' behavior /// key = name of main outputted file | value = list of albums it belongs to - final infoJson = >{}; - var i = 0; - for (final m in allMediaFinal) { + final Map> infoJson = >{}; + int i = 0; + for (final Media m in allMediaFinal) { // main file shortcuts will link to File? mainFile; - final nullFirst = albumBehavior == 'json' + final List> nullFirst = albumBehavior == 'json' // in 'json' case, we want to copy ALL files (like Archive) as normals - ? [MapEntry(null, m.files.values.first)] + ? 
>[ + MapEntry(null, m.files.values.first), + ] // this will put null media first so album shortcuts can link to it - : m.files.entries - .sorted((a, b) => (a.key ?? '').compareTo(b.key ?? '')); + : m.files.entries.sorted( + ( + final MapEntry a, + final MapEntry b, + ) => (a.key ?? '').compareTo(b.key ?? ''), + ); // iterate over all media of file to do something about them // ignore non-nulls with 'ignore', copy with 'duplicate-copy', // symlink with 'shortcut' etc - for (final file in nullFirst) { + for (final MapEntry file in nullFirst) { // if it's not from year folder and we're doing nothing/json, skip - if (file.key != null && ['nothing', 'json'].contains(albumBehavior)) { + if (file.key != null && + ['nothing', 'json'].contains(albumBehavior)) { continue; } // now on, logic is shared for nothing+null/shortcut/copy cases - final date = m.dateTaken; + final DateTime? date = m.dateTaken; String folderName; if (file.key != null) { folderName = file.key!.trim(); @@ -151,12 +170,8 @@ Stream moveFiles( } } - final folder = Directory( - p.join( - output.path, - folderName, - dateFolder, - ), + final Directory folder = Directory( + p.join(output.path, folderName, dateFolder), ); // now folder logic is so complex i'll just create it every time 🤷 await folder.create(recursive: true); @@ -166,21 +181,22 @@ Stream moveFiles( /// moves/copies file with safe name // it's here because we do this for two cases - moveFile() async { - final freeFile = findNotExistingName( - File(p.join(folder.path, p.basename(file.value.path)))); + Future moveFile() async { + final File freeFile = findNotExistingName( + File(p.join(folder.path, p.basename(file.value.path))), + ); try { return copy ? await file.value.copy(freeFile.path) : await file.value.rename(freeFile.path); } on FileSystemException { print( - "Uh-uh, it looks like you selected other output drive than\n" - "input one - gpth can't move files between them. 
But, you don't have\n" + '[Step 7/8] [Error] Uh-uh, it looks like you selected another output drive than\n' + "your input drive - gpth can't move files between them. But, you don't have\n" "to do this! Gpth *moves* files, so this doesn't take any extra space!\n" - "Please run again and select different output location <3", + 'Please run again and select different output location <3', ); - quit(1); + quit(); } } @@ -193,9 +209,11 @@ Stream moveFiles( result = await createShortcut(folder, mainFile); } catch (e) { // in case powershell fails/whatever - print('Creating shortcut for ' - '${p.basename(mainFile.path)} in ${p.basename(folder.path)} ' - 'failed :(\n$e\n - copying normal file instead'); + print( + '[Step 7/8] [Error] Creating shortcut for ' + '${p.basename(mainFile.path)} in ${p.basename(folder.path)} ' + 'failed :(\n$e\n - copying normal file instead', + ); result = await moveFile(); } } else if (albumBehavior == 'reverse-shortcut' && mainFile != null) { @@ -208,9 +226,11 @@ Stream moveFiles( result = await moveFile(); } else { // in case of other exception, print details - print('Creating shortcut for ' - '${p.basename(mainFile.path)} in ${p.basename(folder.path)} ' - 'failed :(\n$e\n - copying normal file instead'); + print( + '[Step 7/8] [Error] Creating shortcut for ' + '${p.basename(mainFile.path)} in ${p.basename(folder.path)} ' + 'failed :(\n$e\n - copying normal file instead', + ); result = await moveFile(); } } @@ -223,11 +243,12 @@ Stream moveFiles( // Done! Now, set the date: - var time = m.dateTaken ?? DateTime.now(); + DateTime time = m.dateTaken ?? 
DateTime.now(); if (Platform.isWindows && time.isBefore(DateTime(1970))) { print( - 'WARNING: ${m.firstFile.path} has date $time, which is before 1970 ' - '(not supported on Windows) - will be set to 1970-01-01'); + '[Step 7/8] [Info]: ${m.firstFile.path} has date $time, which is before 1970 ' + '(not supported on Windows) - will be set to 1970-01-01', + ); time = DateTime(1970); } try { @@ -237,24 +258,29 @@ Stream moveFiles( // https://github.com/TheLastGimbus/GooglePhotosTakeoutHelper/issues/229#issuecomment-1685085899 // That's why this is here if (e.errorCode != 0) { - print("WARNING: Can't set modification time on $result: $e"); + print( + "[Step 7/8] [Error]: Can't set modification time on $result: $e", + ); } } catch (e) { - print("WARNING: Can't set modification time on $result: $e"); + log( + "[Step 7/8]: Can't set modification time on $result: $e. This happens on Windows sometimes. Can be ignored.", + level: 'warning', + ); //If error code 0, no need to notify user. Only log. } // one copy/move/whatever - one yield yield ++i; if (albumBehavior == 'json') { - infoJson[p.basename(result.path)] = - m.files.keys.whereNotNull().toList(); + infoJson[p.basename(result.path)] = m.files.keys.nonNulls.toList(); } } // done with this media - next! 
} if (albumBehavior == 'json') { - await File(p.join(output.path, 'albums-info.json')) - .writeAsString(jsonEncode(infoJson)); + await File( + p.join(output.path, 'albums-info.json'), + ).writeAsString(jsonEncode(infoJson)); } } diff --git a/lib/utils.dart b/lib/utils.dart index 2ee5eb96b..c69fc5200 100644 --- a/lib/utils.dart +++ b/lib/utils.dart @@ -1,30 +1,34 @@ -import 'dart:ffi'; import 'dart:io'; - import 'package:collection/collection.dart'; -import 'package:gpth/interactive.dart' as interactive; import 'package:mime/mime.dart'; import 'package:path/path.dart' as p; import 'package:proper_filesize/proper_filesize.dart'; import 'package:unorm_dart/unorm_dart.dart' as unorm; -import 'package:ffi/ffi.dart'; -import 'package:win32/win32.dart'; - +import 'interactive.dart' as interactive; import 'media.dart'; // remember to bump this -const version = '3.4.3'; +const String version = '4.0.0'; /// max file size to read for exif/hash/anything -const maxFileSize = 64 * 1024 * 1024; +const int maxFileSize = 64 * 1024 * 1024; + +//initialising some global variables +bool isVerbose = false; + +bool enforceMaxFileSize = false; + +bool exifToolInstalled = false; /// convenient print for errors -void error(Object? object) => stderr.write('$object\n'); +void error(final Object? object) => stderr.write('$object\n'); -Never quit([int code = 1]) { +Never quit([final int code = 1]) { if (interactive.indeed) { - print('[gpth ${code != 0 ? 'quitted :(' : 'finished :)'} (code $code) - ' - 'press enter to close]'); + print( + '[gpth ${code != 0 ? 'quitted :(' : 'finished :)'} (code $code) - ' + 'press enter to close]', + ); stdin.readLineSync(); } exit(code); @@ -32,39 +36,39 @@ Never quit([int code = 1]) { extension X on Iterable { /// Easy extension allowing you to filter for files that are photo or video - Iterable wherePhotoVideo() => whereType().where((e) { - final mime = lookupMimeType(e.path) ?? 
""; - final fileExtension = p.extension(e.path).toLowerCase(); - return mime.startsWith('image/') || - mime.startsWith('video/') || - // https://github.com/TheLastGimbus/GooglePhotosTakeoutHelper/issues/223 - // https://github.com/dart-lang/mime/issues/102 - // 🙃🙃 - mime == 'model/vnd.mts'|| - _moreExtensions.contains(fileExtension); - }); + Iterable wherePhotoVideo() => whereType().where((final File e) { + final String mime = lookupMimeType(e.path) ?? ''; + final String fileExtension = p.extension(e.path).toLowerCase(); + return mime.startsWith('image/') || + mime.startsWith('video/') || + // https://github.com/TheLastGimbus/GooglePhotosTakeoutHelper/issues/223 + // https://github.com/dart-lang/mime/issues/102 + // 🙃🙃 + mime == 'model/vnd.mts' || + _moreExtensions.contains(fileExtension); + }); } extension Y on Stream { /// Easy extension allowing you to filter for files that are photo or video - Stream wherePhotoVideo() => whereType().where((e) { - final mime = lookupMimeType(e.path) ?? ""; - final fileExtension = p.extension(e.path).toLowerCase(); - return mime.startsWith('image/') || - mime.startsWith('video/') || - // https://github.com/TheLastGimbus/GooglePhotosTakeoutHelper/issues/223 - // https://github.com/dart-lang/mime/issues/102 - // 🙃🙃 - mime == 'model/vnd.mts'|| - _moreExtensions.contains(fileExtension); - }); + Stream wherePhotoVideo() => whereType().where((final File e) { + final String mime = lookupMimeType(e.path) ?? 
''; + final String fileExtension = p.extension(e.path).toLowerCase(); + return mime.startsWith('image/') || + mime.startsWith('video/') || + // https://github.com/TheLastGimbus/GooglePhotosTakeoutHelper/issues/223 + // https://github.com/dart-lang/mime/issues/102 + // 🙃🙃 + mime == 'model/vnd.mts' || + _moreExtensions.contains(fileExtension); + }); } //Support raw formats (dng, cr2) and Pixel motion photos (mp, mv) -const _moreExtensions = ['.mp', '.mv', '.dng', '.cr2']; +const List _moreExtensions = ['.mp', '.mv', '.dng', '.cr2']; extension Util on Stream { - Stream whereType() => where((e) => e is T).cast(); + Stream whereType() => where((final e) => e is T).cast(); } Future getDiskFree([String? path]) async { @@ -80,8 +84,12 @@ Future getDiskFree([String? path]) async { } } -Future _dfLinux(String path) async { - final res = await Process.run('df', ['-B1', '--output=avail', path]); +Future _dfLinux(final String path) async { + final ProcessResult res = await Process.run('df', [ + '-B1', + '--output=avail', + path, + ]); return res.exitCode != 0 ? null : int.tryParse( @@ -90,48 +98,52 @@ Future _dfLinux(String path) async { ); } -Future _dfWindoza(String path) async { - final res = await Process.run('wmic', [ - 'LogicalDisk', - 'Where', - 'DeviceID="${p.rootPrefix(p.absolute(path)).replaceAll('\\', '')}"', - 'Get', - 'FreeSpace' +Future _dfWindoza(final String path) async { + final String driveLetter = p + .rootPrefix(p.absolute(path)) + .replaceAll('\\', '') + .replaceAll(':', ''); + final ProcessResult res = await Process.run('powershell', [ + '-Command', + 'Get-PSDrive -Name ${driveLetter[0]} | Select-Object -ExpandProperty Free', ]); - return res.exitCode != 0 - ? null - : int.tryParse( - res.stdout.toString().split('\n').elementAtOrNull(1) ?? '', - ); + final int? result = res.exitCode != 0 ? 
null : int.tryParse(res.stdout); + return result; } -Future _dfMcOS(String path) async { - final res = await Process.run('df', ['-k', path]); +Future _dfMcOS(final String path) async { + final ProcessResult res = await Process.run('df', ['-k', path]); if (res.exitCode != 0) return null; - final line2 = res.stdout.toString().split('\n').elementAtOrNull(1); + final String? line2 = res.stdout.toString().split('\n').elementAtOrNull(1); if (line2 == null) return null; - final elements = line2.split(' ')..removeWhere((e) => e.isEmpty); - final macSays = int.tryParse( + final List elements = line2.split(' ') + ..removeWhere((final String e) => e.isEmpty); + final int? macSays = int.tryParse( elements.elementAtOrNull(3) ?? '', radix: 10, // to be sure ); return macSays != null ? macSays * 1024 : null; } -String filesize(int bytes) => ProperFilesize.generateHumanReadableFilesize( - bytes, - base: Bases.Binary, - decimals: 2, +String filesize(final int bytes) => FileSize.fromBytes(bytes).toString( + unit: Unit.auto(size: bytes, baseType: BaseType.metric), + decimals: 2, +); + +int outputFileCount(final List media, final String albumOption) { + if ([ + 'shortcut', + 'duplicate-copy', + 'reverse-shortcut', + ].contains(albumOption)) { + return media.fold( + 0, + (final int prev, final Media e) => prev + e.files.length, ); - -int outputFileCount(List media, String albumOption) { - if (['shortcut', 'duplicate-copy', 'reverse-shortcut'] - .contains(albumOption)) { - return media.fold(0, (prev, e) => prev + e.files.length); } else if (albumOption == 'json') { return media.length; } else if (albumOption == 'nothing') { - return media.where((e) => e.files.containsKey(null)).length; + return media.where((final Media e) => e.files.containsKey(null)).length; } else { throw ArgumentError.value(albumOption, 'albumOption'); } @@ -139,193 +151,223 @@ int outputFileCount(List media, String albumOption) { extension Z on String { /// Returns same string if pattern not found - String 
replaceLast(String from, String to) { - final lastIndex = lastIndexOf(from); + String replaceLast(final String from, final String to) { + final int lastIndex = lastIndexOf(from); if (lastIndex == -1) return this; return replaceRange(lastIndex, lastIndex + from.length, to); } } -Future renameIncorrectJsonFiles(Directory directory) async { +Future renameIncorrectJsonFiles(final Directory directory) async { int renamedCount = 0; - await for (final entity in directory.list(recursive: true)) { + await for (final FileSystemEntity entity in directory.list(recursive: true)) { if (entity is File && p.extension(entity.path) == '.json') { - final originalName = p.basename(entity.path); + final String originalName = p.basename(entity.path); // Regex to dettect pattern - final regex = RegExp( + final RegExp regex = RegExp( r'^(.*\.[a-z0-9]{3,5})\..+\.json$', caseSensitive: false, ); - final match = regex.firstMatch(originalName); + final RegExpMatch? match = regex.firstMatch(originalName); if (match != null) { - final newName = '${match.group(1)}.json'; + final String newName = '${match.group(1)}.json'; if (newName != originalName) { - final newPath = p.join(p.dirname(entity.path), newName); - final newFile = File(newPath); + final String newPath = p.join(p.dirname(entity.path), newName); + final File newFile = File(newPath); // Verify if the file renamed already exists if (await newFile.exists()) { - print('[Renamed] Skipping: $newPath already exists'); + log( + '[Step 1/8] Skipped renaming of json because it already exists: $newPath', + ); } else { try { await entity.rename(newPath); renamedCount++; - //print('[Renamed] ${entity.path} -> $newPath'); + log('[Step 1/8] Renamed: ${entity.path} -> $newPath'); } on FileSystemException catch (e) { - print('[Error] Renaming ${entity.path}: ${e.message}'); + log( + '[Step 1/8] While renaming json ${entity.path}: ${e.message}', + level: 'error', + ); } } } } } } - print('Successfully renamed JSON files (suffix removed): 
$renamedCount'); + print( + '[Step 1/8] Successfully renamed JSON files (suffix removed): $renamedCount', + ); } -Future changeMPExtensions(List allMedias, String finalExtension) async { +Future changeMPExtensions( + final List allMedias, + final String finalExtension, +) async { int renamedCount = 0; - for (final m in allMedias) { - for (final entry in m.files.entries) { - final file = entry.value; - final ext = p.extension(file.path).toLowerCase(); + for (final Media m in allMedias) { + for (final MapEntry entry in m.files.entries) { + final File file = entry.value; + final String ext = p.extension(file.path).toLowerCase(); if (ext == '.mv' || ext == '.mp') { - final originalName = p.basenameWithoutExtension(file.path); - final normalizedName = unorm.nfc(originalName); - - final newName = '$normalizedName$finalExtension'; + final String originalName = p.basenameWithoutExtension(file.path); + final String normalizedName = unorm.nfc(originalName); + + final String newName = '$normalizedName$finalExtension'; if (newName != normalizedName) { - final newPath = p.join(p.dirname(file.path), newName); + final String newPath = p.join(p.dirname(file.path), newName); // Rename file and update reference in map try { - final newFile = await file.rename(newPath); + final File newFile = await file.rename(newPath); m.files[entry.key] = newFile; renamedCount++; } on FileSystemException catch (e) { - print('[Error] Error changing extension to $finalExtension -> ${file.path}: ${e.message}'); + print( + '[Step 6/8] [Error] Error changing extension to $finalExtension -> ${file.path}: ${e.message}', + ); } - } + } } } } - print('Successfully changed Pixel Motion Photos files extensions (change it to $finalExtension): $renamedCount'); + print( + '[Step 6/8] Successfully changed Pixel Motion Photos files extensions (change it to $finalExtension): $renamedCount', + ); } /// Recursively traverses the output [directory] and updates /// the creation time of files in batches. 
-/// For each file, attempts to set the creation date to match +/// For each file, attempts to set the creation date to match /// the last modification date. /// Only Windows support for now, using PowerShell. -/// In the future MacOS support is possible if the user has XCode installed -Future updateCreationTimeRecursively(Directory directory) async { +//TODO In the future MacOS support is possible if the user has XCode installed +Future updateCreationTimeRecursively(final Directory directory) async { if (!Platform.isWindows) { - print("Skipping: Updating creation time is only supported on Windows."); - return; + print( + '[Step 8/8] Skipping: Updating creation time is only supported on Windows.', + ); + return 0; } int changedFiles = 0; - int maxChunkSize = 32000; //Avoid 32768 char limit in command line with chunks - - String currentChunk = ""; - await for (final entity in directory.list(recursive: true, followLinks: false)) { + const int maxChunkSize = + 32000; //Avoid 32768 char limit in command line with chunks + + String currentChunk = ''; + await for (final FileSystemEntity entity in directory.list( + recursive: true, + followLinks: false, + )) { if (entity is File) { //Command for each file - final command ="(Get-Item '${entity.path}').CreationTime = (Get-Item '${entity.path}').LastWriteTime;"; + final String command = + "(Get-Item '${entity.path}').CreationTime = (Get-Item '${entity.path}').LastWriteTime;"; //If current command + chunk is larger than 32000, commands in currentChunk is executed and current comand is passed for the next execution if (currentChunk.length + command.length > maxChunkSize) { - bool success = await _executePShellCreationTimeCmd(currentChunk); - if (success) changedFiles += currentChunk.split(';').length-1; // -1 to ignore last ';' + final bool success = await _executePShellCreationTimeCmd(currentChunk); + if (success) { + changedFiles += + currentChunk.split(';').length - 1; // -1 to ignore last ';' + } currentChunk = command; } 
else { currentChunk += command; } } } - + //Leftover chunk is executed after the for if (currentChunk.isNotEmpty) { - bool success = await _executePShellCreationTimeCmd(currentChunk); - if (success) changedFiles += currentChunk.split(';').length-1; // -1 to ignore last ';' + final bool success = await _executePShellCreationTimeCmd(currentChunk); + if (success) { + changedFiles += + currentChunk.split(';').length - 1; // -1 to ignore last ';' + } } - print("Successfully updated creation time for $changedFiles files!"); + print( + '[Step 8/8] Successfully updated creation time for $changedFiles files!', + ); + return changedFiles; } //Execute a chunk of commands in PowerShell related with creation time -Future _executePShellCreationTimeCmd(String commandChunk) async { +Future _executePShellCreationTimeCmd(final String commandChunk) async { try { - final result = await Process.run('powershell', [ - '-ExecutionPolicy', 'Bypass', + final ProcessResult result = await Process.run('powershell', [ + '-ExecutionPolicy', + 'Bypass', '-NonInteractive', - '-Command', commandChunk + '-Command', + commandChunk, ]); if (result.exitCode != 0) { - print("Error updateing creation time in batch: ${result.stderr}"); + print( + '[Step 8/8] Error updateing creation time in batch: ${result.stderr}', + ); return false; } return true; } catch (e) { - print("Error updating creation time: $e"); + print('[Step 8/8] Error updating creation time: $e'); return false; } } -void createShortcutWin(String shortcutPath, String targetPath) { - Pointer? shellLink; - Pointer? persistFile; - Pointer? 
shortcutPathPtr; - try { - // Initialize the COM library on the current thread - final hrInit = CoInitializeEx(nullptr, COINIT_APARTMENTTHREADED); - if (FAILED(hrInit)) { - throw ('Error initializing COM: $hrInit'); - } - - shellLink = calloc(); - - // Create IShellLink instance - final hr = CoCreateInstance( - GUIDFromString(CLSID_ShellLink).cast(), - nullptr, - CLSCTX_INPROC_SERVER, - GUIDFromString(IID_IShellLink).cast(), - shellLink.cast()); - - if (FAILED(hr)) { - throw ('Error creating IShellLink instance: $hr'); - } - - final shellLinkPtr = IShellLink(shellLink); - shellLinkPtr.SetPath(targetPath.toNativeUtf16().cast()); - - // Saving shortcut - persistFile = calloc(); - final hrPersistFile = shellLinkPtr.QueryInterface( - GUIDFromString(IID_IPersistFile).cast(), - persistFile.cast()); - if (FAILED(hrPersistFile)) { - throw ('Error obtaining IPersistFile: $hrPersistFile'); - } - final persistFilePtr = IPersistFile(persistFile); - shortcutPathPtr = shortcutPath.toNativeUtf16(); - final hrSave = persistFilePtr.Save(shortcutPathPtr.cast(), TRUE); +Future createShortcutWin( + final String shortcutPath, + final String targetPath, +) async { + // Make sure parent directory exists + final Directory parentDir = Directory(p.dirname(shortcutPath)); + if (!parentDir.existsSync()) { + parentDir.createSync(recursive: true); + } + // Use PowerShell for reliable shortcut creation + final ProcessResult res = await Process.run('powershell.exe', [ + '-ExecutionPolicy', + 'Bypass', + '-NoLogo', + '-NonInteractive', + '-NoProfile', + '-Command', + // ignore: no_adjacent_strings_in_list + '\$ws = New-Object -ComObject WScript.Shell; ' + '\$s = \$ws.CreateShortcut("$shortcutPath"); ' + '\$s.TargetPath = "$targetPath"; ' + '\$s.Save()', + ]); + if (res.exitCode != 0) { + throw Exception('PowerShell failed to create shortcut: ${res.stderr}'); + } +} - if (FAILED(hrSave)) { - throw ('Error trying to save shortcut: $hrSave'); - } - } finally { - // Free memory - if (shortcutPathPtr 
!= null) { - free(shortcutPathPtr); - } - if (persistFile != null) { - IPersistFile(persistFile).Release(); - free(persistFile); - } - if (shellLink != null) { - IShellLink(shellLink).Release(); - free(shellLink); +///This little helper function replaces the default log function, so it can be used with compiled code +///Default log level is 'info'. Possible values for 'level' are: 'error', 'warning' and 'info' +///forcePrint makes the output even when verbose mode is not enabled +void log( + final String message, { + final String level = 'info', + final bool forcePrint = false, +}) { + if (isVerbose || forcePrint == true) { + final String color; + switch (level.toLowerCase()) { + case 'error': + color = '\x1B[31m'; // Red for errors + break; + case 'warning': + color = '\x1B[33m'; // Yellow for warnings + break; + case 'info': + default: + color = '\x1B[32m'; // Green for info + break; } - CoUninitialize(); + print( + '\r$color[${level.toUpperCase()}] $message\x1B[0m', + ); // Reset color after the message } } diff --git a/pubspec.lock b/pubspec.lock index 5543ac367..f981079df 100644 --- a/pubspec.lock +++ b/pubspec.lock @@ -5,66 +5,103 @@ packages: dependency: transitive description: name: _fe_analyzer_shared - sha256: eb376e9acf6938204f90eb3b1f00b578640d3188b4c8a8ec054f9f479af8d051 + sha256: "16e298750b6d0af7ce8a3ba7c18c69c3785d11b15ec83f6dcd0ad2a0009b3cab" url: "https://pub.dev" source: hosted - version: "64.0.0" + version: "76.0.0" + _macros: + dependency: transitive + description: dart + source: sdk + version: "0.3.3" analyzer: dependency: transitive description: name: analyzer - sha256: "69f54f967773f6c26c7dcb13e93d7ccee8b17a641689da39e878d5cf13b06893" + sha256: "1f14db053a8c23e260789e9b0980fa27f2680dd640932cae5e1137cce0e46e1e" url: "https://pub.dev" source: hosted - version: "6.2.0" + version: "6.11.0" + archive: + dependency: transitive + description: + name: archive + sha256: "2fde1607386ab523f7a36bb3e7edb43bd58e6edaf2ffb29d8a6d578b297fdbbd" + url: 
"https://pub.dev" + source: hosted + version: "4.0.7" args: dependency: "direct main" description: name: args - sha256: eef6c46b622e0494a36c5a12d10d77fb4e855501a91c1b9ef9339326e58f0596 + sha256: d0481093c50b1da8910eb0bb301626d4d8eb7284aa739614d2b394ee09e3ea04 url: "https://pub.dev" source: hosted - version: "2.4.2" + version: "2.7.0" async: dependency: transitive description: name: async - sha256: "947bfcf187f74dbc5e146c9eb9c0f10c9f8b30743e341481c1e2ed3ecc18c20c" + sha256: "758e6d74e971c3e5aceb4110bfd6698efc7f501675bcfe0c775459a8140750eb" url: "https://pub.dev" source: hosted - version: "2.11.0" + version: "2.13.0" async_extension: dependency: transitive description: name: async_extension - sha256: f5589e5e0611648f610b7ad00c40fbee4cb7398061ea73463bbeb8ec29fc8a28 + sha256: "362765ac14560ae9e958865068e58c16c9e4e17ee135b1491d3804e68446fa55" url: "https://pub.dev" source: hosted - version: "1.2.5" + version: "1.2.15" async_task: dependency: transitive description: name: async_task - sha256: e941339576c3b923ab9640b1f1e017483f5ec97e955aa851d96681ea6f3bc4c3 + sha256: a0effde412fdb5d0b08d4329a75967a26d67e4b20055f928fe6e7ad8697222cd url: "https://pub.dev" source: hosted - version: "1.0.18" + version: "1.1.1" boolean_selector: dependency: transitive description: name: boolean_selector - sha256: "6cfb5af12253eaf2b368f07bacc5a80d1301a071c73360d746b7f2e32d762c66" + sha256: "8aab1771e1243a5063b8b0ff68042d67334e3feab9e95b9490f9a6ebf73b42ea" + url: "https://pub.dev" + source: hosted + version: "2.1.2" + brotli: + dependency: transitive + description: + name: brotli + sha256: "7f891558ed779aab2bed874f0a36b8123f9ff3f19cf6efbee89e18ed294945ae" + url: "https://pub.dev" + source: hosted + version: "0.6.0" + cli_config: + dependency: transitive + description: + name: cli_config + sha256: ac20a183a07002b700f0c25e61b7ee46b23c309d76ab7b7640a028f18e4d99ec url: "https://pub.dev" source: hosted - version: "2.1.1" + version: "0.2.0" + clock: + dependency: transitive + description: + name: 
clock + sha256: fddb70d9b5277016c77a80201021d40a2247104d9f4aa7bab7157b7e3f05b84b + url: "https://pub.dev" + source: hosted + version: "1.1.2" collection: dependency: "direct main" description: name: collection - sha256: ee67cb0715911d28db6bf4af1026078bd6f0128b07a5f66fb2ed94ec6783c09a + sha256: "2f5709ae4d3d59dd8f7cd309b4e023046b57d8a6c82130785d2b0e5868084e76" url: "https://pub.dev" source: hosted - version: "1.18.0" + version: "1.19.1" console_bars: dependency: "direct main" description: @@ -77,66 +114,74 @@ packages: dependency: "direct main" description: name: convert - sha256: "0f08b14755d163f6e2134cb58222dd25ea2a2ee8a195e53983d57c075324d592" + sha256: b30acd5944035672bc15c6b7a8b47d773e41e2f17de064350988c5d02adb1c68 url: "https://pub.dev" source: hosted - version: "3.1.1" + version: "3.1.2" + coordinate_converter: + dependency: "direct main" + description: + name: coordinate_converter + sha256: ae14ad326551acbd6c384d717f508537340ed566a6e2412b7a908e8d3e7a1750 + url: "https://pub.dev" + source: hosted + version: "1.2.3" coverage: dependency: transitive description: name: coverage - sha256: "2fb815080e44a09b85e0f2ca8a820b15053982b2e714b59267719e8a9ff17097" + sha256: "802bd084fb82e55df091ec8ad1553a7331b61c08251eef19a508b6f3f3a9858d" url: "https://pub.dev" source: hosted - version: "1.6.3" + version: "1.13.1" crypto: dependency: "direct main" description: name: crypto - sha256: ff625774173754681d66daaf4a448684fb04b78f902da9cb3d308c19cc5e8bab + sha256: "1e445881f28f22d6140f181e07737b22f1e099a5e1ff94b0af2f9e4a463f4855" url: "https://pub.dev" source: hosted - version: "3.0.3" - exif: + version: "3.0.6" + exif_reader: dependency: "direct main" description: - name: exif - sha256: c154e074234eb6ac4a09831072b4783b55f5f9e84c4b344a472a6d6aa83a9982 + name: exif_reader + sha256: "548ccc25be936e508c6a20706ae5e2796687b55b9db851962db1cd20fe6abfd9" url: "https://pub.dev" source: hosted - version: "3.1.4" + version: "3.16.1" ffi: dependency: "direct main" description: name: ffi - 
sha256: "13a6ccf6a459a125b3fcdb6ec73bd5ff90822e071207c663bfd1f70062d51d18" + sha256: "289279317b4b16eb2bb7e271abccd4bf84ec9bdcbe999e278a94b804f5630418" url: "https://pub.dev" source: hosted - version: "1.2.1" + version: "2.1.4" file: dependency: transitive description: name: file - sha256: "5fc22d7c25582e38ad9a8515372cd9a93834027aacf1801cf01164dac0ffa08c" + sha256: "1b92bec4fc2a72f59a8e15af5f52cd441e4a7860b49499d69dfa817af20e925d" url: "https://pub.dev" source: hosted - version: "7.0.0" + version: "6.1.4" file_picker_desktop: dependency: "direct main" description: name: file_picker_desktop - sha256: bc802a0fff747071aed0ccdd9b3df827527e46643a18756b2c6bd4b4b4adce20 + sha256: "6e84f4cd3056fa189cea62d231b121fe7f5003744f3ab822e1cbb29d071df570" url: "https://pub.dev" source: hosted - version: "1.1.1" + version: "1.2.0" frontend_server_client: dependency: transitive description: name: frontend_server_client - sha256: "408e3ca148b31c20282ad6f37ebfa6f4bdc8fede5b74bc2f08d9d92b55db3612" + sha256: f64a0333a82f30b0cca061bc3d143813a486dc086b574bfb233b7c1372427694 url: "https://pub.dev" source: hosted - version: "3.2.0" + version: "4.0.0" fuzzysearch: dependency: "direct main" description: @@ -149,50 +194,66 @@ packages: dependency: transitive description: name: glob - sha256: "0e7014b3b7d4dac1ca4d6114f82bf1782ee86745b9b42a92c9289c23d8a0ab63" + sha256: c3f1ee72c96f8f78935e18aa8cecced9ab132419e8625dc187e1c2408efc20de url: "https://pub.dev" source: hosted - version: "2.1.2" + version: "2.1.3" http_multi_server: dependency: transitive description: name: http_multi_server - sha256: "97486f20f9c2f7be8f514851703d0119c3596d14ea63227af6f7a481ef2b2f8b" + sha256: aa6199f908078bb1c5efb8d8638d4ae191aac11b311132c3ef48ce352fb52ef8 url: "https://pub.dev" source: hosted - version: "3.2.1" + version: "3.2.2" http_parser: dependency: transitive description: name: http_parser - sha256: "2aa08ce0341cc9b354a498388e30986515406668dbcc4f7c950c3e715496693b" + sha256: 
"178d74305e7866013777bab2c3d8726205dc5a4dd935297175b19a23a2e66571" + url: "https://pub.dev" + source: hosted + version: "4.1.2" + image: + dependency: "direct main" + description: + name: image + sha256: "4e973fcf4caae1a4be2fa0a13157aa38a8f9cb049db6529aa00b4d71abc4d928" url: "https://pub.dev" source: hosted - version: "4.0.2" + version: "4.5.4" + intl: + dependency: "direct main" + description: + name: intl + sha256: "3df61194eb431efc39c4ceba583b95633a403f46c9fd341e550ce0bfa50e9aa5" + url: "https://pub.dev" + source: hosted + version: "0.20.2" io: dependency: transitive description: name: io - sha256: "2ec25704aba361659e10e3e5f5d672068d332fc8ac516421d483a11e5cbd061e" + sha256: dfd5a80599cf0165756e3181807ed3e77daf6dd4137caaad72d0b7931597650b url: "https://pub.dev" source: hosted - version: "1.0.4" - js: + version: "1.0.5" + iso_base_media: dependency: transitive description: - name: js - sha256: f2c445dce49627136094980615a031419f7f3eb393237e4ecd97ac15dea343f3 + name: iso_base_media + sha256: "0f5594feef1fba98179a2df95d1afbdda952de0c7a2e35e6815093f7c00aaf06" url: "https://pub.dev" source: hosted - version: "0.6.7" - json_annotation: + version: "4.5.2" + js: dependency: transitive description: - name: json_annotation - sha256: b10a7b2ff83d83c777edba3c6a0f97045ddadd56c944e1a23a3fdf43a1bf4467 + name: js + sha256: "53385261521cc4a0c4658fd0ad07a7d14591cf8fc33abbceae306ddb974888dc" url: "https://pub.dev" source: hosted - version: "4.8.1" + version: "0.7.2" latinize: dependency: transitive description: @@ -205,42 +266,66 @@ packages: dependency: "direct dev" description: name: lints - sha256: "0a217c6c989d21039f1498c3ed9f3ed71b354e69873f13a8dfc3c9fe76f1b452" + sha256: c35bb79562d980e9a453fc715854e1ed39e24e7d0297a880ef54e17f9874a9d7 + url: "https://pub.dev" + source: hosted + version: "5.1.1" + lists: + dependency: transitive + description: + name: lists + sha256: "4ca5c19ae4350de036a7e996cdd1ee39c93ac0a2b840f4915459b7d0a7d4ab27" url: "https://pub.dev" source: hosted - 
version: "2.1.1" + version: "1.0.1" logging: dependency: transitive description: name: logging - sha256: "623a88c9594aa774443aa3eb2d41807a48486b5613e67599fb4c41c0ad47c340" + sha256: c8245ada5f1717ed44271ed1c26b8ce85ca3228fd2ffdb75468ab01979309d61 url: "https://pub.dev" source: hosted - version: "1.2.0" + version: "1.3.0" + macros: + dependency: transitive + description: + name: macros + sha256: "1d9e801cd66f7ea3663c45fc708450db1fa57f988142c64289142c9b7ee80656" + url: "https://pub.dev" + source: hosted + version: "0.1.3-main.0" matcher: dependency: transitive description: name: matcher - sha256: "1803e76e6653768d64ed8ff2e1e67bea3ad4b923eb5c56a295c3e634bad5960e" + sha256: dc58c723c3c24bf8d3e2d3ad3f2f9d7bd9cf43ec6feaa64181775e60190153f2 url: "https://pub.dev" source: hosted - version: "0.12.16" + version: "0.12.17" meta: dependency: transitive description: name: meta - sha256: a6e590c838b18133bb482a2745ad77c5bb7715fb0451209e1a7567d416678b8e + sha256: "23f08335362185a5ea2ad3a4e597f1375e78bce8a040df5c600c8d3552ef2394" url: "https://pub.dev" source: hosted - version: "1.10.0" + version: "1.17.0" + mgrs_dart: + dependency: transitive + description: + name: mgrs_dart + sha256: fb89ae62f05fa0bb90f70c31fc870bcbcfd516c843fb554452ab3396f78586f7 + url: "https://pub.dev" + source: hosted + version: "2.0.0" mime: dependency: "direct main" description: name: mime - sha256: e4ff8e8564c03f255408decd16e7899da1733852a9110a58fe6d1b817684a63e + sha256: "41a20518f0cb1256669420fdba0cd90d21561e560ac240f26ef8322e45bb7ed6" url: "https://pub.dev" source: hosted - version: "1.0.4" + version: "2.0.0" node_preamble: dependency: transitive description: @@ -253,18 +338,26 @@ packages: dependency: transitive description: name: package_config - sha256: "1c5b77ccc91e4823a5af61ee74e6b972db1ef98c2ff5a18d3161c982a55448bd" + sha256: f096c55ebb7deb7e384101542bfba8c52696c1b56fca2eb62827989ef2353bbc url: "https://pub.dev" source: hosted - version: "2.1.0" + version: "2.2.0" path: dependency: "direct main" 
description: name: path - sha256: "8829d8a55c13fc0e37127c29fedf290c102f4e40ae94ada574091fe0ff96c917" + sha256: "75cca69d1490965be98c73ceaea117e8a04dd21217b37b292c9ddbec0d955bc5" + url: "https://pub.dev" + source: hosted + version: "1.9.1" + petitparser: + dependency: transitive + description: + name: petitparser + sha256: "07c8f0b1913bcde1ff0d26e57ace2f3012ccbf2b204e070290dad3bb22797646" url: "https://pub.dev" source: hosted - version: "1.8.3" + version: "6.1.0" pool: dependency: transitive description: @@ -273,30 +366,54 @@ packages: url: "https://pub.dev" source: hosted version: "1.5.1" + posix: + dependency: transitive + description: + name: posix + sha256: f0d7856b6ca1887cfa6d1d394056a296ae33489db914e365e2044fdada449e62 + url: "https://pub.dev" + source: hosted + version: "6.0.2" + proj4dart: + dependency: transitive + description: + name: proj4dart + sha256: c8a659ac9b6864aa47c171e78d41bbe6f5e1d7bd790a5814249e6b68bc44324e + url: "https://pub.dev" + source: hosted + version: "2.1.0" proper_filesize: dependency: "direct main" description: name: proper_filesize - sha256: bd7566ec10e0425b3b6f81691cce9a6d16c08e590ae15b2df0a1c22f432dc205 + sha256: "5426f6698aa6a189d5475bcf189020cf5a3661e3ff389bc46551e32b0279164e" url: "https://pub.dev" source: hosted - version: "0.0.2" + version: "1.0.2" pub_semver: dependency: transitive description: name: pub_semver - sha256: "40d3ab1bbd474c4c2328c91e3a7df8c6dd629b79ece4c4bd04bee496a224fb0c" + sha256: "5bfcf68ca79ef689f8990d1160781b4bad40a3bd5e5218ad4076ddb7f4081585" url: "https://pub.dev" source: hosted - version: "2.1.4" + version: "2.2.0" + random_access_source: + dependency: transitive + description: + name: random_access_source + sha256: dc86934da2cc4777334f43916234410f232032738c519c0c3452147c5d4fec89 + url: "https://pub.dev" + source: hosted + version: "2.1.0" shelf: dependency: transitive description: name: shelf - sha256: ad29c505aee705f41a4d8963641f91ac4cee3c8fad5947e033390a7bd8180fa4 + sha256: 
e7dd780a7ffb623c57850b33f43309312fc863fb6aa3d276a754bb299839ef12 url: "https://pub.dev" source: hosted - version: "1.4.1" + version: "1.4.2" shelf_packages_handler: dependency: transitive description: @@ -309,42 +426,42 @@ packages: dependency: transitive description: name: shelf_static - sha256: a41d3f53c4adf0f57480578c1d61d90342cd617de7fc8077b1304643c2d85c1e + sha256: c87c3875f91262785dade62d135760c2c69cb217ac759485334c5857ad89f6e3 url: "https://pub.dev" source: hosted - version: "1.1.2" + version: "1.1.3" shelf_web_socket: dependency: transitive description: name: shelf_web_socket - sha256: "9ca081be41c60190ebcb4766b2486a7d50261db7bd0f5d9615f2d653637a84c1" + sha256: "3632775c8e90d6c9712f883e633716432a27758216dfb61bd86a8321c0580925" url: "https://pub.dev" source: hosted - version: "1.0.4" + version: "3.0.0" source_map_stack_trace: dependency: transitive description: name: source_map_stack_trace - sha256: "84cf769ad83aa6bb61e0aa5a18e53aea683395f196a6f39c4c881fb90ed4f7ae" + sha256: c0713a43e323c3302c2abe2a1cc89aa057a387101ebd280371d6a6c9fa68516b url: "https://pub.dev" source: hosted - version: "2.1.1" + version: "2.1.2" source_maps: dependency: transitive description: name: source_maps - sha256: "708b3f6b97248e5781f493b765c3337db11c5d2c81c3094f10904bfa8004c703" + sha256: "190222579a448b03896e0ca6eca5998fa810fda630c1d65e2f78b3f638f54812" url: "https://pub.dev" source: hosted - version: "0.10.12" + version: "0.10.13" source_span: dependency: transitive description: name: source_span - sha256: "53e943d4206a5e30df338fd4c6e7a077e02254531b138a15aec3bd143c1a8b3c" + sha256: "254ee5351d6cb365c859e20ee823c3bb479bf4a293c22d17a9f1bf144ce86f7c" url: "https://pub.dev" source: hosted - version: "1.10.0" + version: "1.10.1" sprintf: dependency: transitive description: @@ -357,98 +474,122 @@ packages: dependency: transitive description: name: stack_trace - sha256: "73713990125a6d93122541237550ee3352a2d84baad52d375a4cad2eb9b7ce0b" + sha256: 
"8b27215b45d22309b5cddda1aa2b19bdfec9df0e765f2de506401c071d38d1b1" url: "https://pub.dev" source: hosted - version: "1.11.1" + version: "1.12.1" stream_channel: dependency: transitive description: name: stream_channel - sha256: ba2aa5d8cc609d96bbb2899c28934f9e1af5cddbd60a827822ea467161eb54e7 + sha256: "969e04c80b8bcdf826f8f16579c7b14d780458bd97f56d107d3950fdbeef059d" url: "https://pub.dev" source: hosted - version: "2.1.2" + version: "2.1.4" string_scanner: dependency: transitive description: name: string_scanner - sha256: "556692adab6cfa87322a115640c11f13cb77b3f076ddcc5d6ae3c20242bedcde" + sha256: "921cd31725b72fe181906c6a94d987c78e3b98c2e205b397ea399d4054872b43" url: "https://pub.dev" source: hosted - version: "1.2.0" + version: "1.4.1" term_glyph: dependency: transitive description: name: term_glyph - sha256: a29248a84fbb7c79282b40b8c72a1209db169a2e0542bce341da992fe1bc7e84 + sha256: "7f554798625ea768a7518313e58f83891c7f5024f88e46e7182a4558850a4b8e" url: "https://pub.dev" source: hosted - version: "1.2.1" + version: "1.2.2" test: dependency: "direct dev" description: name: test - sha256: "9b0dd8e36af4a5b1569029949d50a52cb2a2a2fdaa20cebb96e6603b9ae241f9" + sha256: "65e29d831719be0591f7b3b1a32a3cda258ec98c58c7b25f7b84241bc31215bb" url: "https://pub.dev" source: hosted - version: "1.24.6" + version: "1.26.2" test_api: dependency: transitive description: name: test_api - sha256: "5c2f730018264d276c20e4f1503fd1308dfbbae39ec8ee63c5236311ac06954b" + sha256: "522f00f556e73044315fa4585ec3270f1808a4b186c936e612cab0b565ff1e00" url: "https://pub.dev" source: hosted - version: "0.6.1" + version: "0.7.6" test_core: dependency: transitive description: name: test_core - sha256: "4bef837e56375537055fdbbbf6dd458b1859881f4c7e6da936158f77d61ab265" + sha256: "80bf5a02b60af04b09e14f6fe68b921aad119493e26e490deaca5993fef1b05a" url: "https://pub.dev" source: hosted - version: "0.5.6" + version: "0.6.11" typed_data: dependency: transitive description: name: typed_data - sha256: 
facc8d6582f16042dd49f2463ff1bd6e2c9ef9f3d5da3d9b087e244a7b564b3c + sha256: f9049c039ebfeb4cf7a7104a675823cd72dba8297f264b6637062516699fa006 url: "https://pub.dev" source: hosted - version: "1.3.2" + version: "1.4.0" + unicode: + dependency: transitive + description: + name: unicode + sha256: "0f69e46593d65245774d4f17125c6084d2c20b4e473a983f6e21b7d7762218f1" + url: "https://pub.dev" + source: hosted + version: "0.3.1" unorm_dart: dependency: "direct main" description: name: unorm_dart - sha256: "5b35bff83fce4d76467641438f9e867dc9bcfdb8c1694854f230579d68cd8f4b" + sha256: "23d8bf65605401a6a32cff99435fed66ef3dab3ddcad3454059165df46496a3b" url: "https://pub.dev" source: hosted - version: "0.2.0" + version: "0.3.0" vm_service: dependency: transitive description: name: vm_service - sha256: c538be99af830f478718b51630ec1b6bee5e74e52c8a802d328d9e71d35d2583 + sha256: ddfa8d30d89985b96407efce8acbdd124701f96741f2d981ca860662f1c0dc02 url: "https://pub.dev" source: hosted - version: "11.10.0" + version: "15.0.0" watcher: dependency: transitive description: name: watcher - sha256: "3d2ad6751b3c16cf07c7fca317a1413b3f26530319181b37e3b9039b84fc01d8" + sha256: "69da27e49efa56a15f8afe8f4438c4ec02eff0a117df1b22ea4aad194fe1c104" url: "https://pub.dev" source: hosted - version: "1.1.0" + version: "1.1.1" + web: + dependency: transitive + description: + name: web + sha256: "868d88a33d8a87b18ffc05f9f030ba328ffefba92d6c127917a2ba740f9cfe4a" + url: "https://pub.dev" + source: hosted + version: "1.1.1" + web_socket: + dependency: transitive + description: + name: web_socket + sha256: "34d64019aa8e36bf9842ac014bb5d2f5586ca73df5e4d9bf5c936975cae6982c" + url: "https://pub.dev" + source: hosted + version: "1.0.1" web_socket_channel: dependency: transitive description: name: web_socket_channel - sha256: d88238e5eac9a42bb43ca4e721edba3c08c6354d4a53063afaa568516217621b + sha256: d645757fb0f4773d602444000a8131ff5d48c9e47adfe9772652dd1a4f2d45c8 url: "https://pub.dev" source: hosted - version: "2.4.0" + 
version: "3.0.3" webkit_inspection_protocol: dependency: transitive description: @@ -461,17 +602,33 @@ packages: dependency: "direct main" description: name: win32 - sha256: c0e3a4f7be7dae51d8f152230b86627e3397c1ba8c3fa58e63d44a9f3edc9cef + sha256: "329edf97fdd893e0f1e3b9e88d6a0e627128cc17cc316a8d67fda8f1451178ba" url: "https://pub.dev" source: hosted - version: "2.6.1" + version: "5.13.0" + wkt_parser: + dependency: transitive + description: + name: wkt_parser + sha256: "8a555fc60de3116c00aad67891bcab20f81a958e4219cc106e3c037aa3937f13" + url: "https://pub.dev" + source: hosted + version: "2.0.0" + xml: + dependency: transitive + description: + name: xml + sha256: b015a8ad1c488f66851d762d3090a21c600e479dc75e68328c52774040cf9226 + url: "https://pub.dev" + source: hosted + version: "6.5.0" yaml: dependency: transitive description: name: yaml - sha256: "75769501ea3489fca56601ff33454fe45507ea3bfb014161abc3b43ae25989d5" + sha256: b9da305ac7c39faa3f030eccd175340f968459dae4af175130b3fc47e40d76ce url: "https://pub.dev" source: hosted - version: "3.1.2" + version: "3.1.3" sdks: - dart: ">=3.1.0 <4.0.0" + dart: ">=3.8.0 <4.0.0" diff --git a/pubspec.yaml b/pubspec.yaml index b548b8558..b3fe864c7 100644 --- a/pubspec.yaml +++ b/pubspec.yaml @@ -1,32 +1,36 @@ name: gpth description: 'Tool to help you with exporting stuff from Google Photos' -version: 3.4.3 +version: 4.0.0 homepage: 'https://github.com/TheLastGimbus/GooglePhotosTakeoutHelper' publish_to: 'none' environment: - sdk: '>=3.1.0 <4.0.0' + sdk: '>=3.8.0 <4.0.0' dependencies: - args: ^2.4.2 - path: ^1.8.3 - mime: ^1.0.4 + args: ^2.7.0 + path: ^1.9.1 + mime: ^2.0.0 collection: ^1.18.0 convert: ^3.1.1 fuzzysearch: ^0.1.3 crypto: ^3.0.3 - exif: ^3.1.4 console_bars: ^1.2.0 - file_picker_desktop: ^1.1.1 + intl: ^0.20.2 + file_picker_desktop: ^1.2.0 + exif_reader: ^3.16.1 + image: ^4.5.4 + # archive: # git: # url: https://github.com/TheLastGimbus/archive.git # ref: fix-windoza-extract-errors - proper_filesize: ^0.0.2 - 
unorm_dart: ^0.2.0 - win32: ^2.0.0 - ffi: ^1.1.3 + proper_filesize: ^1.0.2 + unorm_dart: ^0.3.0 + win32: ^5.13.0 + ffi: ^2.1.4 + coordinate_converter: ^1.2.3 dev_dependencies: - lints: ^2.1.1 - test: ^1.24.6 + lints: ^5.1.1 + test: ^1.25.15 diff --git a/test/gpth_test.dart b/test/gpth_test.dart index d72fd95f3..6c23d5f3e 100644 --- a/test/gpth_test.dart +++ b/test/gpth_test.dart @@ -2,20 +2,29 @@ import 'dart:convert'; import 'dart:io'; import 'package:collection/collection.dart'; -import 'package:gpth/date_extractor.dart'; +import 'package:coordinate_converter/coordinate_converter.dart'; +import 'package:exif_reader/exif_reader.dart'; +import 'package:gpth/date_extractors/date_extractor.dart'; +import 'package:gpth/emojicleaner.dart'; +import 'package:gpth/exif_writer.dart' as exif_writer; +import 'package:gpth/exiftoolInterface.dart'; import 'package:gpth/extras.dart'; import 'package:gpth/folder_classify.dart'; import 'package:gpth/grouping.dart'; import 'package:gpth/media.dart'; import 'package:gpth/moving.dart'; import 'package:gpth/utils.dart'; +import 'package:intl/intl.dart'; +import 'package:path/path.dart' as p; import 'package:path/path.dart'; import 'package:test/test.dart'; -void main() { +void main() async { + await initExiftool(); + /// this is 1x1 green jg image, with exif: /// DateTime Original: 2022:12:16 16:06:47 - const greenImgBase64 = """ + const String greenImgBase64 = ''' /9j/4AAQSkZJRgABAQAAAQABAAD/4QC4RXhpZgAATU0AKgAAAAgABQEaAAUAAAABAAAASgEbAAUA AAABAAAAUgEoAAMAAAABAAEAAAITAAMAAAABAAEAAIdpAAQAAAABAAAAWgAAAAAAAAABAAAAAQAA AAEAAAABAAWQAAAHAAAABDAyMzKQAwACAAAAFAAAAJyRAQAHAAAABAECAwCgAAAHAAAABDAxMDCg @@ -24,42 +33,97 @@ BAQEBAgGBgUGCQgKCgkICQkKDA8MCgsOCwkJDRENDg8QEBEQCgwSExIQEw8QEBD/2wBDAQMDAwQD BAgEBAgQCwkLEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQ EBD/wAARCAABAAEDAREAAhEBAxEB/8QAFAABAAAAAAAAAAAAAAAAAAAAA//EABQQAQAAAAAAAAAA AAAAAAAAAAD/xAAUAQEAAAAAAAAAAAAAAAAAAAAI/8QAFBEBAAAAAAAAAAAAAAAAAAAAAP/aAAwD 
-AQACEQMRAD8AIcgXf//Z"""; +AQACEQMRAD8AIcgXf//Z'''; + + /// Same as above just without the DateTime. + const String greenImgNoMetaDataBase64 = ''' +/9j/4AAQSkZJRgABAQAAAQABAAD/4QCYRXhpZgAATU0AKgAAAAgABQEaAAUAAAABAAA +ASgEbAAUAAAABAAAAUgEoAAMAAAABAAEAAAITAAMAAAABAAEAAIdpAAQAAAABAAAAWgAAA +AAAAAABAAAAAQAAAAEAAAABAASQAAAHAAAABDAyMzKRAQAHAAAABAECAwCgAAAHAAAABDA +xMDCgAQADAAAAAf//AAAAAAAA/9sAQwADAgICAgIDAgICAwMDAwQGBAQEBAQIBgYFBgkIC +goJCAkJCgwPDAoLDgsJCQ0RDQ4PEBAREAoMEhMSEBMPEBAQ/9sAQwEDAwMEAwQIBAQIEAs +JCxAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQ/ +8AAEQgAAQABAwERAAIRAQMRAf/EABQAAQAAAAAAAAAAAAAAAAAAAAP/xAAUEAEAAAAAAAA +AAAAAAAAAAAAA/8QAFAEBAAAAAAAAAAAAAAAAAAAACP/EABQRAQAAAAAAAAAAAAAAAAAAA +AD/2gAMAwEAAhEDEQA/ACHIF3//2Q=='''; - final albumDir = Directory('Vacation'); - final imgFileGreen = File('green.jpg'); - final imgFile1 = File('image-edited.jpg'); - final jsonFile1 = File('image-edited.jpg.json'); + final String current = Directory.current.path; + final String basepath = + p.join(current, 'test', 'generated') + + p.separator; //Where the test files are created + + final Directory albumDir = Directory('${basepath}Vacation'); + final File imgFileGreen = File('${basepath}green.jpg'); + final File imgFile1 = File('${basepath}image-edited.jpg'); + final File jsonFile1 = File('${basepath}image-edited.jpg.json'); // these names are from good old #8 issue... 
- final imgFile2 = File('Urlaub in Knaufspesch in der Schneifel (38).JPG'); - final jsonFile2 = File('Urlaub in Knaufspesch in der Schneifel (38).JP.json'); - final imgFile3 = File('Screenshot_2022-10-28-09-31-43-118_com.snapchat.jpg'); - final jsonFile3 = File('Screenshot_2022-10-28-09-31-43-118_com.snapcha.json'); - final imgFile4 = File('simple_file_20200101-edited.jpg'); - final imgFile4_1 = File('simple_file_20200101-edited(1).jpg'); - final jsonFile4 = File('simple_file_20200101.jpg.json'); - final imgFile5 = File('img_(87).(vacation stuff).lol(87).jpg'); - final jsonFile5 = File('img_(87).(vacation stuff).lol.jpg(87).json'); - final imgFile6 = File('IMG-20150125-WA0003-modifié.jpg'); - final imgFile6_1 = File('IMG-20150125-WA0003-modifié(1).jpg'); - final jsonFile6 = File('IMG-20150125-WA0003.jpg.json'); - final media = [ - Media({null: imgFile1}, - dateTaken: DateTime(2020, 9, 1), dateTakenAccuracy: 1), + final File imgFile2 = File( + '${basepath}Urlaub in Knaufspesch in der Schneifel (38).JPG', + ); + final File jsonFile2 = File( + '${basepath}Urlaub in Knaufspesch in der Schneifel (38).JPG.json', + ); + final File imgFile3 = File( + '${basepath}Screenshot_2022-10-28-09-31-43-118_com.snapchat.jpg', + ); + final File jsonFile3 = File( + '${basepath}Screenshot_2022-10-28-09-31-43-118_com.snapchat.json', + ); + final File imgFile4 = File('${basepath}simple_file_20200101-edited.jpg'); + final File imgFile4_1 = File('${basepath}simple_file_20200101-edited(1).jpg'); + final File jsonFile4 = File('${basepath}simple_file_20200101.jpg.json'); + final File imgFile5 = File( + '${basepath}img_(87).(vacation stuff).lol(87).jpg', + ); + final File jsonFile5 = File( + '${basepath}img_(87).(vacation stuff).lol.jpg(87).json', + ); + final File imgFile6 = File('${basepath}IMG-20150125-WA0003-modifié.jpg'); + final File imgFile6_1 = File('${basepath}IMG-20150125-WA0003-modifié(1).jpg'); + final File jsonFile6 = File('${basepath}IMG-20150125-WA0003.jpg.json'); + final List 
media = [ + Media( + {null: imgFile1}, + dateTaken: DateTime(2020, 9), + dateTakenAccuracy: 1, + ), + Media( + {albumName(albumDir): imgFile1}, + dateTaken: DateTime(2022, 9), + dateTakenAccuracy: 2, + ), Media( - {albumName(albumDir): imgFile1}, - dateTaken: DateTime(2022, 9, 1), + {null: imgFile2}, + dateTaken: DateTime(2020), dateTakenAccuracy: 2, ), - Media({null: imgFile2}, dateTaken: DateTime(2020), dateTakenAccuracy: 2), - Media({null: imgFile3}, - dateTaken: DateTime(2022, 10, 28), dateTakenAccuracy: 1), - Media({null: imgFile4}), // these two... + Media( + {null: imgFile3}, + dateTaken: DateTime(2022, 10, 28), + dateTakenAccuracy: 1, + ), + Media({null: imgFile4}), // these two... // ...are duplicates - Media({null: imgFile4_1}, dateTaken: DateTime(2019), dateTakenAccuracy: 3), - Media({null: imgFile5}, dateTaken: DateTime(2020), dateTakenAccuracy: 1), - Media({null: imgFile6}, dateTaken: DateTime(2015), dateTakenAccuracy: 1), - Media({null: imgFile6_1}, dateTaken: DateTime(2015), dateTakenAccuracy: 1), + Media( + {null: imgFile4_1}, + dateTaken: DateTime(2019), + dateTakenAccuracy: 3, + ), + Media( + {null: imgFile5}, + dateTaken: DateTime(2020), + dateTakenAccuracy: 1, + ), + Media( + {null: imgFile6}, + dateTaken: DateTime(2015), + dateTakenAccuracy: 1, + ), + Media( + {null: imgFile6_1}, + dateTaken: DateTime(2015), + dateTakenAccuracy: 1, + ), ]; /// Set up test stuff - create test shitty files in wherever pwd is @@ -71,17 +135,53 @@ AQACEQMRAD8AIcgXf//Z"""; base64.decode(greenImgBase64.replaceAll('\n', '')), ); // apparently you don't need to .create() before writing 👍 - imgFile1.writeAsBytesSync([0, 1, 2]); + imgFile1.writeAsBytesSync([0, 1, 2]); imgFile1.copySync('${albumDir.path}/${basename(imgFile1.path)}'); - imgFile2.writeAsBytesSync([3, 4, 5]); - imgFile3.writeAsBytesSync([6, 7, 8]); - imgFile4.writeAsBytesSync([9, 10, 11]); // these two... 
- imgFile4_1.writeAsBytesSync([9, 10, 11]); // ...are duplicates - imgFile5.writeAsBytesSync([12, 13, 14]); - imgFile6.writeAsBytesSync([15, 16, 17]); - imgFile6_1.writeAsBytesSync([18, 19, 20]); - writeJson(File file, int time) => - file.writeAsStringSync('{"photoTakenTime": {"timestamp": "$time"}}'); + imgFile2.writeAsBytesSync([3, 4, 5]); + imgFile3.writeAsBytesSync([6, 7, 8]); + imgFile4.writeAsBytesSync([9, 10, 11]); // these two... + imgFile4_1.writeAsBytesSync([9, 10, 11]); // ...are duplicates + imgFile5.writeAsBytesSync([12, 13, 14]); + imgFile6.writeAsBytesSync([15, 16, 17]); + imgFile6_1.writeAsBytesSync([18, 19, 20]); + void writeJson(final File file, final int time) { + file.createSync(recursive: true); + file.writeAsStringSync( + jsonEncode({ + 'title': 'test.jpg', + 'description': '', + 'imageViews': '1', + 'creationTime': { + 'timestamp': '1702198242', + 'formatted': '10.12.2023, 08:50:42 UTC', + }, + 'photoTakenTime': { + 'timestamp': '$time', + 'formatted': '01.05.2023, 14:32:37 UTC', + }, + 'geoData': { + 'latitude': 41.3221611, + 'longitude': 19.8149139, + 'altitude': 143.09, + 'latitudeSpan': 0.0, + 'longitudeSpan': 0.0, + }, + 'geoDataExif': { + 'latitude': 41.3221611, + 'longitude': 19.8149139, + 'altitude': 143.09, + 'latitudeSpan': 0.0, + 'longitudeSpan': 0.0, + }, + 'archived': true, + 'url': 'https://photos.google.com/photo/xyz', + 'googlePhotosOrigin': >{ + 'mobileUpload': {'deviceType': 'IOS_PHONE'}, + }, + }), + ); + } + writeJson(jsonFile1, 1599078832); writeJson(jsonFile2, 1683078832); writeJson(jsonFile3, 1666942303); @@ -92,144 +192,228 @@ AQACEQMRAD8AIcgXf//Z"""; group('DateTime extractors', () { test('json', () async { - expect((await jsonExtractor(imgFile1))?.millisecondsSinceEpoch, - 1599078832 * 1000); - expect((await jsonExtractor(imgFile2))?.millisecondsSinceEpoch, - 1683078832 * 1000); - expect((await jsonExtractor(imgFile3))?.millisecondsSinceEpoch, - 1666942303 * 1000); + expect( + (await 
jsonDateTimeExtractor(imgFile1))?.millisecondsSinceEpoch, + 1599078832 * 1000, + ); + expect( + (await jsonDateTimeExtractor(imgFile2))?.millisecondsSinceEpoch, + 1683078832 * 1000, + ); + expect( + (await jsonDateTimeExtractor(imgFile3))?.millisecondsSinceEpoch, + 1666942303 * 1000, + ); // They *should* fail without tryhard // See b38efb5d / #175 expect( - (await jsonExtractor(imgFile4))?.millisecondsSinceEpoch, + (await jsonDateTimeExtractor(imgFile4))?.millisecondsSinceEpoch, 1683074444 * 1000, ); - expect((await jsonExtractor(imgFile4_1))?.millisecondsSinceEpoch, null); + expect( + (await jsonDateTimeExtractor(imgFile4_1))?.millisecondsSinceEpoch, + null, + ); // Should work *with* tryhard expect( - (await jsonExtractor(imgFile4, tryhard: true))?.millisecondsSinceEpoch, + (await jsonDateTimeExtractor( + imgFile4, + tryhard: true, + ))?.millisecondsSinceEpoch, 1683074444 * 1000, ); expect( - (await jsonExtractor(imgFile4_1, tryhard: true)) - ?.millisecondsSinceEpoch, + (await jsonDateTimeExtractor( + imgFile4_1, + tryhard: true, + ))?.millisecondsSinceEpoch, 1683074444 * 1000, ); expect( - (await jsonExtractor(imgFile5, tryhard: false))?.millisecondsSinceEpoch, + (await jsonDateTimeExtractor(imgFile5))?.millisecondsSinceEpoch, 1680289442 * 1000, ); expect( - (await jsonExtractor(imgFile6, tryhard: false))?.millisecondsSinceEpoch, + (await jsonDateTimeExtractor(imgFile6))?.millisecondsSinceEpoch, 1422183600 * 1000, ); expect( - (await jsonExtractor(imgFile6_1, tryhard: false)) - ?.millisecondsSinceEpoch, + (await jsonDateTimeExtractor(imgFile6_1))?.millisecondsSinceEpoch, null, ); expect( - (await jsonExtractor(imgFile6_1, tryhard: true)) - ?.millisecondsSinceEpoch, + (await jsonDateTimeExtractor( + imgFile6_1, + tryhard: true, + ))?.millisecondsSinceEpoch, 1422183600 * 1000, ); }); test('exif', () async { expect( - (await exifExtractor(imgFileGreen)), + await exifDateTimeExtractor(imgFileGreen), DateTime.parse('2022-12-16 16:06:47'), ); }); test('guess', () 
async { - final files = [ - ['Screenshot_20190919-053857_Camera-edited.jpg', '2019-09-19 05:38:57'], - ['MVIMG_20190215_193501.MP4', '2019-02-15 19:35:01'], - ['Screenshot_2019-04-16-11-19-37-232_com.jpg', '2019-04-16 11:19:37'], - ['signal-2020-10-26-163832.jpg', '2020-10-26 16:38:32'], - ['VID_20220107_113306.mp4', '2022-01-07 11:33:06'], - ['00004XTR_00004_BURST20190216172030.jpg', '2019-02-16 17:20:30'], - ['00055IMG_00055_BURST20190216172030_COVER.jpg', '2019-02-16 17:20:30'], - ['2016_01_30_11_49_15.mp4', '2016-01-30 11:49:15'], - ['201801261147521000.jpg', '2018-01-26 11:47:52'], - ['IMG_1_BURST20160623205107_COVER.jpg', '2016-06-23 20:51:07'], - ['IMG_1_BURST20160520195318.jpg', '2016-05-20 19:53:18'], - ['1990_06_16_07_30_00.jpg', '1990-06-16 07:30:00'], - ['1869_12_30_16_59_57.jpg', '1869-12-30 16:59:57'], + final List> files = >[ + [ + 'Screenshot_20190919-053857_Camera-edited.jpg', + '2019-09-19 05:38:57', + ], + ['MVIMG_20190215_193501.MP4', '2019-02-15 19:35:01'], + [ + 'Screenshot_2019-04-16-11-19-37-232_com.jpg', + '2019-04-16 11:19:37', + ], + ['signal-2020-10-26-163832.jpg', '2020-10-26 16:38:32'], + ['VID_20220107_113306.mp4', '2022-01-07 11:33:06'], + [ + '00004XTR_00004_BURST20190216172030.jpg', + '2019-02-16 17:20:30', + ], + [ + '00055IMG_00055_BURST20190216172030_COVER.jpg', + '2019-02-16 17:20:30', + ], + ['2016_01_30_11_49_15.mp4', '2016-01-30 11:49:15'], + ['201801261147521000.jpg', '2018-01-26 11:47:52'], + ['IMG_1_BURST20160623205107_COVER.jpg', '2016-06-23 20:51:07'], + ['IMG_1_BURST20160520195318.jpg', '2016-05-20 19:53:18'], + ['1990_06_16_07_30_00.jpg', '1990-06-16 07:30:00'], + ['1869_12_30_16_59_57.jpg', '1869-12-30 16:59:57'], ]; - for (final f in files) { - expect((await guessExtractor(File(f.first))), DateTime.parse(f.last)); + for (final List f in files) { + expect(await guessExtractor(File(f.first)), DateTime.parse(f.last)); } }); }); test('Duplicate removal', () { expect(removeDuplicates(media), 1); expect(media.length, 8); 
- expect(media.firstWhereOrNull((e) => e.firstFile == imgFile4), null); + expect( + media.firstWhereOrNull((final Media e) => e.firstFile == imgFile4), + null, + ); }); test('Extras removal', () { - final m = [ - Media({null: imgFile1}), - Media({null: imgFile2}), + final List m = [ + Media({null: imgFile1}), + Media({null: imgFile2}), ]; expect(removeExtras(m), 1); expect(m.length, 1); }); test('Album finding', () { // sadly, this will still modify [media] some, but won't delete anything - final copy = media.toList(); + final List copy = media.toList(); removeDuplicates(copy); - final countBefore = copy.length; + final int countBefore = copy.length; findAlbums(copy); expect(countBefore - copy.length, 1); - final albumed = copy.firstWhere((e) => e.files.length > 1); - expect(albumed.files.keys, [null, 'Vacation']); + final Media albumed = copy.firstWhere( + (final Media e) => e.files.length > 1, + ); + expect(albumed.files.keys.last, 'Vacation'); expect(albumed.dateTaken, media[0].dateTaken); expect(albumed.dateTaken == media[1].dateTaken, false); // be sure - expect(copy.where((e) => e.files.length > 1).length, 1); + expect(copy.where((final Media e) => e.files.length > 1).length, 1); // fails because Dart is no Rust :/ // expect(media.where((e) => e.albums != null).length, 1); }); group('Utils', () { test('Stream.whereType()', () { - final stream = Stream.fromIterable([1, 'a', 2, 'b', 3, 'c']); - expect(stream.whereType(), emitsInOrder([1, 2, 3, emitsDone])); + final Stream stream = Stream.fromIterable([ + 1, + 'a', + 2, + 'b', + 3, + 'c', + ]); + expect( + stream.whereType(), + emitsInOrder([1, 2, 3, emitsDone]), + ); }); test('Stream.wherePhotoVideo()', () { // check if stream with random list of files is emitting only photos and videos // use standard formats as jpg and mp4 but also rare ones like 3gp and eps - final stream = Stream.fromIterable([ - File('a.jpg'), - File('lol.json'), - File('b.mp4'), - File('c.3gp'), - File('e.png'), - File('f.txt'), - ]); + 
final Stream stream = + Stream.fromIterable([ + File('a.jpg'), + File('lol.json'), + File('b.mp4'), + File('c.3gp'), + File('e.png'), + File('f.txt'), + ]); expect( // looked like File()'s couldn't compare correctly :/ - stream.wherePhotoVideo().map((event) => event.path), - emitsInOrder(['a.jpg', 'b.mp4', 'c.3gp', 'e.png', emitsDone]), + stream.wherePhotoVideo().map((final File event) => event.path), + emitsInOrder(['a.jpg', 'b.mp4', 'c.3gp', 'e.png', emitsDone]), ); }); test('findNotExistingName()', () { - expect(findNotExistingName(imgFileGreen).path, 'green(1).jpg'); - expect(findNotExistingName(File('not-here.jpg')).path, 'not-here.jpg'); + expect(findNotExistingName(imgFileGreen).path, '${basepath}green(1).jpg'); + expect( + findNotExistingName(File('${basepath}not-here.jpg')).path, + '${basepath}not-here.jpg', + ); }); test('getDiskFree()', () async { expect(await getDiskFree('.'), isNotNull); }); + test('Create win shortcut', () async { + if (Platform.isWindows) { + const shortcutPath = r'C:\Temp\MyShortcut.lnk'; + const targetPath = r'C:\Windows\System32\notepad.exe'; + + // Ensure target exists + if (!File(targetPath).existsSync()) { + print('Target file does not exist: $targetPath'); + exit(1); + } + + // Create folder if needed + final shortcutDir = p.dirname(shortcutPath); + if (!Directory(shortcutDir).existsSync()) { + Directory(shortcutDir).createSync(recursive: true); + } + + try { + await createShortcutWin(shortcutPath, targetPath); + } catch (e, stack) { + print('❌ Failed to create shortcut:\n$e\n$stack'); + } + // Verify that shortcut file now exists + expect(File(shortcutPath).existsSync(), true); + File(shortcutPath).deleteSync(); + } + }); }); group('folder_classify', () { - final dirs = [ - Directory('./Photos from 2025'), - Directory('./Photos from 1969'), - Directory('./Photos from vacation'), - Directory('/tmp/very-random-omg'), - ]; + List tmpdirs; + if (Platform.isWindows) { + tmpdirs = [ + Directory('./Photos from 2025'), + 
Directory('./Photos from 1969'), + Directory('./Photos from vacation'), + Directory('C:/Windows/Temp/very-random-omg'), + ]; + } else { + tmpdirs = [ + Directory('./Photos from 2025'), + Directory('./Photos from 1969'), + Directory('./Photos from vacation'), + Directory('/tmp/very-random-omg'), + ]; + } + final List dirs = tmpdirs; setUpAll(() async { - for (var d in dirs) { + for (Directory d in dirs) { await d.create(); } }); @@ -241,7 +425,7 @@ AQACEQMRAD8AIcgXf//Z"""; expect(await isAlbumFolder(dirs[3]), false); }); tearDownAll(() async { - for (var d in dirs) { + for (Directory d in dirs) { await d.delete(); } }); @@ -249,7 +433,9 @@ AQACEQMRAD8AIcgXf//Z"""; /// This is complicated, thus those test are not bullet-proof group('Moving logic', () { - final output = Directory(join(Directory.systemTemp.path, 'testy-output')); + final Directory output = Directory( + join(Directory.systemTemp.path, '${basepath}testy-output'), + ); setUp(() async { await output.create(); removeDuplicates(media); @@ -263,14 +449,29 @@ AQACEQMRAD8AIcgXf//Z"""; divideToDates: 0, albumBehavior: 'shortcut', ).toList(); - final outputted = - await output.list(recursive: true, followLinks: false).toSet(); + final Set outputted = await output + .list(recursive: true, followLinks: false) + .toSet(); // 2 folders + media + 1 album-ed shortcut expect(outputted.length, 2 + media.length + 1); - expect(outputted.whereType().length, 1); + if (Platform.isWindows) { + expect( + outputted + .whereType() + .where((final File file) => file.path.endsWith('.lnk')) + .length, + 1, + ); + } else { + expect(outputted.whereType().length, 1); + } + expect( - outputted.whereType().map((e) => basename(e.path)).toSet(), - {'ALL_PHOTOS', 'Vacation'}, + outputted + .whereType() + .map((final Directory e) => basename(e.path)) + .toSet(), + {'ALL_PHOTOS', 'Vacation'}, ); }); test('nothing', () async { @@ -281,15 +482,19 @@ AQACEQMRAD8AIcgXf//Z"""; divideToDates: 0, albumBehavior: 'nothing', ).toList(); - final 
outputted = - await output.list(recursive: true, followLinks: false).toSet(); + final Set outputted = await output + .list(recursive: true, followLinks: false) + .toSet(); // 1 folder + media expect(outputted.length, 1 + media.length); expect(outputted.whereType().length, 0); expect(outputted.whereType().length, 1); expect( - outputted.whereType().map((e) => basename(e.path)).toSet(), - {'ALL_PHOTOS'}, + outputted + .whereType() + .map((final Directory e) => basename(e.path)) + .toSet(), + {'ALL_PHOTOS'}, ); }); test('duplicate-copy', () async { @@ -300,34 +505,39 @@ AQACEQMRAD8AIcgXf//Z"""; divideToDates: 0, albumBehavior: 'duplicate-copy', ).toList(); - final outputted = - await output.list(recursive: true, followLinks: false).toSet(); + final Set outputted = await output + .list(recursive: true, followLinks: false) + .toSet(); // 2 folders + media + 1 album-ed copy expect(outputted.length, 2 + media.length + 1); expect(outputted.whereType().length, 0); expect(outputted.whereType().length, 2); expect(outputted.whereType().length, media.length + 1); expect( - UnorderedIterableEquality().equals( - outputted.whereType().map((e) => basename(e.path)), - [ - "image-edited.jpg", - "image-edited.jpg", // two times - "Screenshot_2022-10-28-09-31-43-118_com.snapchat.jpg", - "simple_file_20200101-edited(1).jpg", - "Urlaub in Knaufspesch in der Schneifel (38).JPG", - "img_(87).(vacation stuff).lol(87).jpg", - "IMG-20150125-WA0003-modifié.jpg", - "IMG-20150125-WA0003-modifié(1).jpg", + const UnorderedIterableEquality().equals( + outputted.whereType().map((final File e) => basename(e.path)), + [ + 'image-edited.jpg', + 'image-edited.jpg', // two times + 'Screenshot_2022-10-28-09-31-43-118_com.snapchat.jpg', + 'simple_file_20200101-edited(1).jpg', + 'Urlaub in Knaufspesch in der Schneifel (38).JPG', + 'img_(87).(vacation stuff).lol(87).jpg', + 'IMG-20150125-WA0003-modifié.jpg', + 'IMG-20150125-WA0003-modifié(1).jpg', ], ), true, ); expect( - outputted.whereType().map((e) => 
basename(e.path)).toSet(), - {'ALL_PHOTOS', 'Vacation'}, + outputted + .whereType() + .map((final Directory e) => basename(e.path)) + .toSet(), + {'ALL_PHOTOS', 'Vacation'}, ); }); + test('json', () async { await moveFiles( media, @@ -336,39 +546,363 @@ AQACEQMRAD8AIcgXf//Z"""; divideToDates: 0, albumBehavior: 'json', ).toList(); - final outputted = - await output.list(recursive: true, followLinks: false).toSet(); + final Set outputted = await output + .list(recursive: true, followLinks: false) + .toSet(); // 1 folder + media + 1 json expect(outputted.length, 1 + media.length + 1); expect(outputted.whereType().length, 0); expect(outputted.whereType().length, 1); expect(outputted.whereType().length, media.length + 1); expect( - UnorderedIterableEquality().equals( - outputted.whereType().map((e) => basename(e.path)), - [ - "image-edited.jpg", - "Screenshot_2022-10-28-09-31-43-118_com.snapchat.jpg", - "simple_file_20200101-edited(1).jpg", - "Urlaub in Knaufspesch in der Schneifel (38).JPG", - "albums-info.json", - "img_(87).(vacation stuff).lol(87).jpg", - "IMG-20150125-WA0003-modifié.jpg", - "IMG-20150125-WA0003-modifié(1).jpg", + const UnorderedIterableEquality().equals( + outputted.whereType().map((final File e) => basename(e.path)), + [ + 'image-edited.jpg', + 'Screenshot_2022-10-28-09-31-43-118_com.snapchat.jpg', + 'simple_file_20200101-edited(1).jpg', + 'Urlaub in Knaufspesch in der Schneifel (38).JPG', + 'albums-info.json', + 'img_(87).(vacation stuff).lol(87).jpg', + 'IMG-20150125-WA0003-modifié.jpg', + 'IMG-20150125-WA0003-modifié(1).jpg', ], ), true, ); expect( - outputted.whereType().map((e) => basename(e.path)).toSet(), - {'ALL_PHOTOS'}, + outputted + .whereType() + .map((final Directory e) => basename(e.path)) + .toSet(), + {'ALL_PHOTOS'}, ); }); - tearDown(() async => await output.delete(recursive: true)); + tearDown(() async => output.delete(recursive: true)); + }); + + group('writeGpsToExif', () { + late File testImage; + late DMSCoordinates 
testCoordinates; + + setUp(() { + // Create a temporary test image file with metadata + testImage = File('${basepath}test_image.jpg'); + testImage.createSync(); + testImage.writeAsBytesSync( + base64.decode(greenImgBase64.replaceAll('\n', '')), + ); + + // Define test GPS coordinates + testCoordinates = DMSCoordinates( + latDegrees: 41, + latMinutes: 19, + latSeconds: 22.1611, + longDegrees: 19, + longMinutes: 48, + longSeconds: 14.9139, + latDirection: DirectionY.north, + longDirection: DirectionX.east, + ); + }); + + tearDown(() { + if (testImage.existsSync()) { + testImage.deleteSync(); + } + }); + + test('writes GPS coordinates to EXIF metadata', () async { + final bool result = await exif_writer.writeGpsToExif( + testCoordinates, + testImage, + ); + expect(result, isTrue); + final tags = await exiftool!.readExif(testImage); + expect(tags['GPSLatitude'], isNotNull); + expect(tags['GPSLongitude'], isNotNull); + expect(tags['GPSLatitudeRef'], 'N'); + expect(tags['GPSLongitudeRef'], 'E'); + }); + + test('returns false for unsupported file formats', () async { + final File unsupportedFile = File('${basepath}test_file.txt'); + unsupportedFile.writeAsStringSync('This is a test file.'); + final bool result = await exif_writer.writeGpsToExif( + testCoordinates, + unsupportedFile, + ); + expect(result, isFalse); + unsupportedFile.deleteSync(); + }); + }); + + group('writeDateTimeToExif', () { + late File testImage; + late File testImage2; + late DateTime testDateTime; + + setUp(() { + // Create a temporary test image file with metadata + testImage = File('${basepath}test_image.jpg'); + testImage.createSync(); + testImage.writeAsBytesSync( + base64.decode(greenImgBase64.replaceAll('\n', '')), + ); + testDateTime = DateTime(2023, 12, 25, 15, 30, 45); + + // Create a temporary test image file without metadata + testImage2 = File('${basepath}test_image2.jpg'); + testImage2.createSync(); + testImage2.writeAsBytesSync( + base64.decode(greenImgNoMetaDataBase64.replaceAll('\n', 
'')), + ); + }); + + tearDown(() { + if (testImage.existsSync()) { + testImage.deleteSync(); + } + if (testImage2.existsSync()) { + testImage2.deleteSync(); + } + }); + + test( + 'writes DateTime to EXIF metadata when original has no metadata', + () async { + final bool result = await exif_writer.writeDateTimeToExif( + testDateTime, + testImage2, + ); + expect(result, isTrue); + final tags = await readExifFromBytes(await testImage2.readAsBytes()); + final DateFormat exifFormat = DateFormat('yyyy:MM:dd HH:mm:ss'); + final String expectedDateTime = exifFormat.format(testDateTime); + + expect(tags['Image DateTime']?.printable, expectedDateTime); + expect(tags['EXIF DateTimeOriginal']?.printable, expectedDateTime); + expect(tags['EXIF DateTimeDigitized']?.printable, expectedDateTime); + }, + ); + + test( + 'does not write DateTime to EXIF metadata if file already has EXIF datetime', + () async { + final bool result = await exif_writer.writeDateTimeToExif( + testDateTime, + testImage, + ); + expect(result, isFalse); + }, + ); + + test('returns false for unsupported file formats', () async { + final File unsupportedFile = File('${basepath}test_file.txt'); + unsupportedFile.writeAsStringSync('This is a test file.'); + final bool result = await exif_writer.writeDateTimeToExif( + testDateTime, + unsupportedFile, + ); + expect(result, isFalse); + unsupportedFile.deleteSync(); + }); + }); + + group('ExiftoolInterface', () { + late File testImage; + late File testImage2; + + setUp(() async { + await initExiftool(); + testImage = File('${basepath}test_image.jpg'); + testImage.createSync(); + testImage.writeAsBytesSync( + base64.decode(greenImgBase64.replaceAll('\n', '')), + ); + testImage2 = File('${basepath}test_exiftool.jpg'); + testImage2.createSync(); + testImage2.writeAsBytesSync( + base64.decode(greenImgNoMetaDataBase64.replaceAll('\n', '')), + ); + }); + tearDown(() { + if (testImage.existsSync()) testImage.deleteSync(); + if (testImage2.existsSync()) 
testImage2.deleteSync(); + }); + test( + 'readExifBatch returns only requested tags and no SourceFile', + () async { + final tags = await exiftool!.readExifBatch(testImage, [ + 'DateTimeOriginal', + 'DateTimeDigitized', + ]); + expect(tags.containsKey('SourceFile'), isFalse); + expect(tags.containsKey('DateTimeOriginal'), isTrue); + expect(tags.containsKey('DateTimeDigitized'), isFalse); + }, + ); + test('readExifBatch returns empty map for empty tag list', () async { + final tags = await exiftool!.readExifBatch(testImage, []); + expect(tags, isEmpty); + }); + test('writeExif writes a single tag', () async { + final Map map = {}; + map['Artist'] = 'TestArtist'; + final result = await exiftool!.writeExifBatch(testImage, map); + expect(result, isTrue); + final tags = await exiftool!.readExifBatch(testImage, ['Artist']); + expect(tags['Artist'], 'TestArtist'); + }); + test('readExifBatch returns empty map for unsupported file', () async { + final file = File('${basepath}unsupported.txt'); + file.writeAsStringSync('not an image'); + final tags = await exiftool!.readExifBatch(file, ['DateTimeOriginal']); + expect(tags, isEmpty); + file.deleteSync(); + }); + test('writeExif returns false for unsupported file', () async { + final Map map = {}; + map['Artist'] = 'Nobody'; + final file = File('${basepath}unsupported2.txt'); + file.writeAsStringSync('not an image'); + final result = await exiftool!.writeExifBatch(file, map); + expect(result, isFalse); + file.deleteSync(); + }); + }); + + group('Emoji handling', () { + late File emojiFile; + late Directory emojiDir; + late File testImage3; + + setUp(() async { + emojiFile = File('${basepath}test_😊.jpg'); + emojiFile.writeAsBytesSync( + base64.decode(greenImgNoMetaDataBase64.replaceAll('\n', '')), + ); + emojiDir = Directory('${basepath}test_folder_😀'); + await emojiDir.create(); + testImage3 = File('${emojiDir.path}exiftoolEmojiTest.jpg'); + testImage3.writeAsBytesSync( + 
base64.decode(greenImgNoMetaDataBase64.replaceAll('\n', '')), + ); + }); + + test( + 'encodeAndRenameAlbumIfEmoji renames folder with emoji and returns hex-encoded name', + () { + if (!emojiDir.existsSync()) emojiDir.createSync(); + final String newName = encodeAndRenameAlbumIfEmoji(emojiDir); + expect(newName.contains('_0x1f600_'), isTrue); + final Directory renamedDir = Directory( + emojiDir.parent.path + Platform.pathSeparator + newName, + ); + expect(renamedDir.existsSync(), isTrue); + // Cleanup + renamedDir.deleteSync(); + }, + ); + + test('encodeAndRenameAlbumIfEmoji returns original name if no emoji', () { + final Directory noEmojiDir = Directory('${basepath}test_album_noemoji'); + if (!noEmojiDir.existsSync()) noEmojiDir.createSync(); + final String newName = encodeAndRenameAlbumIfEmoji(noEmojiDir); + expect(newName, 'test_album_noemoji'); + expect(noEmojiDir.existsSync(), isTrue); + // Cleanup + noEmojiDir.deleteSync(); + }); + + group('decodeAndRestoreAlbumEmoji', () { + test('decodes hex-encoded emoji in last segment to emoji', () { + final Directory emojiDir = Directory('${basepath}test_album_❤❤❤'); + if (!emojiDir.existsSync()) emojiDir.createSync(); + final String encodedName = encodeAndRenameAlbumIfEmoji(emojiDir); + final String encodedPath = + emojiDir.parent.path + Platform.pathSeparator + encodedName; + final String decodedPath = decodeAndRestoreAlbumEmoji(encodedPath); + expect(decodedPath.contains('❤❤❤'), isTrue); + // Cleanup + final Directory renamedDir = Directory(encodedPath); + if (renamedDir.existsSync()) renamedDir.deleteSync(); + }); + + test('returns original path if no hex-encoded emoji present', () { + final Directory noEmojiDir = Directory('${basepath}test_album_noemoji'); + if (!noEmojiDir.existsSync()) noEmojiDir.createSync(); + final String path = noEmojiDir.path; + final String decodedPath = decodeAndRestoreAlbumEmoji(path); + expect(decodedPath, path); + // Cleanup + noEmojiDir.deleteSync(); + }); + }); + tearDown(() { + if 
(testImage3.existsSync()) testImage3.deleteSync(); + if (emojiFile.existsSync()) emojiFile.deleteSync(); + if (emojiDir.existsSync()) emojiDir.deleteSync(recursive: true); + }); + }); + + group('Emoji folder end-to-end', () { + test( + 'process file in emoji folder: hex encode, exif read, shortcut/symlink, decode', + () async { + const String emojiFolderName = 'test_💖'; + final Directory emojiDir = Directory(p.join(basepath, emojiFolderName)); + if (!emojiDir.existsSync()) emojiDir.createSync(recursive: true); + final File img = File(p.join(emojiDir.path, 'img.jpg')); + img.writeAsBytesSync( + base64.decode(greenImgBase64.replaceAll('\n', '')), + ); + + // 1. Encode and rename folder + final String hexName = encodeAndRenameAlbumIfEmoji(emojiDir); + expect(hexName.contains('_0x1f496_'), isTrue); + final Directory hexDir = Directory( + p.join(emojiDir.parent.path, hexName), + ); + expect(hexDir.existsSync(), isTrue); + final File hexImg = File(p.join(hexDir.path, 'img.jpg')); + expect(hexImg.existsSync(), isTrue); + + // 2. Read EXIF from image in hex folder + final DateTime? exifDate = await exifDateTimeExtractor(hexImg); + expect(exifDate, DateTime.parse('2022-12-16 16:06:47')); + + // 3. Create shortcut (Windows) or symlink (other platforms) to image + final String symlinkPath = p.join(basepath, 'symlink-to-emoji-img.lnk'); + if (Platform.isWindows) { + await createShortcutWin(symlinkPath, hexImg.path); + } else { + Link(symlinkPath).createSync(hexImg.path, recursive: true); + } + expect(File(symlinkPath).existsSync(), isTrue); + + // 4. 
Decode and restore folder name + final String decodedPath = decodeAndRestoreAlbumEmoji(hexDir.path); + if (decodedPath != hexDir.path) { + hexDir.renameSync(decodedPath); + } + final Directory restoredDir = Directory(decodedPath); + expect(restoredDir.existsSync(), isTrue); + expect(p.basename(restoredDir.path), emojiFolderName); + // Symlink/shortcut should still point to the file (unless moved) + // Clean up + if (Platform.isWindows) { + File(symlinkPath).deleteSync(); + } else { + Link(symlinkPath).deleteSync(); + } + restoredDir.deleteSync(recursive: true); + }, + ); }); /// Delete all shitty files as we promised - tearDownAll(() { + tearDownAll(() async { albumDir.deleteSync(recursive: true); imgFileGreen.deleteSync(); imgFile1.deleteSync(); @@ -385,5 +919,6 @@ AQACEQMRAD8AIcgXf//Z"""; jsonFile4.deleteSync(); jsonFile5.deleteSync(); jsonFile6.deleteSync(); + //Directory(basepath).deleteSync(recursive: true); }); }