diff --git a/conan/api/subapi/cache.py b/conan/api/subapi/cache.py index 862530b324a..5855f72eeb8 100644 --- a/conan/api/subapi/cache.py +++ b/conan/api/subapi/cache.py @@ -1,7 +1,6 @@ import json import os import shutil -import tarfile import tempfile from conan.api.model import PackagesList @@ -12,12 +11,13 @@ METADATA, DOWNLOAD_EXPORT_FOLDER from conan.internal.cache.home_paths import HomePaths from conan.internal.cache.integrity_check import IntegrityChecker +from conan.internal.loader import load_python_file from conan.internal.rest.download_cache import DownloadCache from conan.errors import ConanException from conan.api.model import PkgReference from conan.api.model import RecipeReference from conan.internal.util.dates import revision_timestamp_now -from conan.internal.util.files import rmdir, mkdir, remove, save +from conan.internal.util.files import rmdir, mkdir, remove, save, tar_extract class CacheAPI: @@ -25,6 +25,7 @@ class CacheAPI: def __init__(self, conan_api, api_helpers): self._conan_api = conan_api self._api_helpers = api_helpers + self._compression_plugin = None def export_path(self, ref: RecipeReference): cache = PkgCache(self._conan_api.cache_folder, self._api_helpers.global_conf) @@ -132,7 +133,6 @@ def save(self, package_list, tgz_path, no_source=False): cache_folder = cache.store # Note, this is not the home, but the actual package cache out = ConanOutput() mkdir(os.path.dirname(tgz_path)) - compresslevel = global_conf.get("core.gzip:compresslevel", check_type=int) tar_files: dict[str, str] = {} # {path_in_tar: abs_path} for ref, ref_bundle in package_list.refs().items(): @@ -173,7 +173,8 @@ def save(self, package_list, tgz_path, no_source=False): pkglist_path = os.path.join(tempfile.gettempdir(), "pkglist.json") save(pkglist_path, serialized) tar_files["pkglist.json"] = pkglist_path - compress_files(tar_files, os.path.basename(tgz_path), os.path.dirname(tgz_path), compresslevel, recursive=True) + compress_files(tar_files, 
os.path.basename(tgz_path), os.path.dirname(tgz_path), conf=self._conan_api.config, + recursive=True, ref=None, compression_plugin=self.compression_plugin) remove(pkglist_path) def restore(self, path): @@ -183,13 +184,20 @@ def restore(self, path): cache = PkgCache(self._conan_api.cache_folder, self._api_helpers.global_conf) cache_folder = cache.store # Note, this is not the home, but the actual package cache - with open(path, mode='rb') as file_handler: - the_tar = tarfile.open(fileobj=file_handler) - fileobj = the_tar.extractfile("pkglist.json") - pkglist = fileobj.read() - the_tar.extraction_filter = (lambda member, _: member) # fully_trusted (Py 3.14) - the_tar.extractall(path=cache_folder) - the_tar.close() + with open(path, mode="rb") as file_handler: + tar_extract( + fileobj=file_handler, + destination_dir=cache_folder, + compression_plugin=self.compression_plugin, + conf=self._conan_api.config._helpers.global_conf, + ) + + # Retrieve the package list from the already extracted archive + pkglist_path = os.path.join(cache_folder, "pkglist.json") + with open(pkglist_path) as file_handler: + pkglist = file_handler.read() + # Delete the pkglist.json file to keep cache clean + remove(pkglist_path) # After unzipping the files, we need to update the DB that references these files out = ConanOutput() @@ -268,6 +276,19 @@ def path_to_ref(self, path): result = cache.path_to_ref(base) return result + @property + def compression_plugin(self): + if self._compression_plugin is None: + compression_plugin_path = HomePaths(self._conan_api.home_folder).compression_plugin_path + if not os.path.exists(compression_plugin_path): + self._compression_plugin = False # Avoid FS re-check + return None + mod, _ = load_python_file(compression_plugin_path) + if not hasattr(mod, "tar_extract") or not hasattr(mod, "tar_compress"): + raise ConanException("The 'compression.py' plugin does not contain required `tar_extract` or `tar_compress` functions") + self._compression_plugin = mod + 
return self._compression_plugin or None + def _resolve_latest_ref(cache, ref): if ref.revision is None or ref.revision == "latest": diff --git a/conan/internal/api/uploader.py b/conan/internal/api/uploader.py index 44681eebb45..ee0ea15f769 100644 --- a/conan/internal/api/uploader.py +++ b/conan/internal/api/uploader.py @@ -12,7 +12,7 @@ from conan.errors import ConanException from conan.internal.paths import (CONAN_MANIFEST, CONANFILE, EXPORT_SOURCES_TGZ_NAME, EXPORT_TGZ_NAME, PACKAGE_TGZ_NAME, CONANINFO) -from conan.internal.util.files import (clean_dirty, is_dirty, gather_files, +from conan.internal.util.files import (COMPRESSED_PLUGIN_TAR_NAME, clean_dirty, is_dirty, gather_files, remove, set_dirty_context_manager, mkdir, human_size) UPLOAD_POLICY_FORCE = "force-upload" @@ -155,9 +155,9 @@ def add_tgz(tgz_name, tgz_files): if os.path.isfile(tgz): result[tgz_name] = tgz elif tgz_files: - compresslevel = self._global_conf.get("core.gzip:compresslevel", check_type=int) tgz = compress_files(tgz_files, tgz_name, download_export_folder, - compresslevel=compresslevel, ref=ref) + conf=self._global_conf, ref=ref, + compression_plugin=self._app.conan_api.cache.compression_plugin) result[tgz_name] = tgz add_tgz(EXPORT_TGZ_NAME, files) @@ -202,9 +202,9 @@ def _compress_package_files(self, layout, pref): if not os.path.isfile(package_tgz): tgz_files = {f: path for f, path in files.items()} - compresslevel = self._global_conf.get("core.gzip:compresslevel", check_type=int) tgz_path = compress_files(tgz_files, PACKAGE_TGZ_NAME, download_pkg_folder, - compresslevel=compresslevel, ref=pref) + conf=self._global_conf, ref=pref, + compression_plugin=self._app.conan_api.cache.compression_plugin) assert tgz_path == package_tgz assert os.path.exists(package_tgz) @@ -271,13 +271,16 @@ def gzopen_without_timestamps(name, fileobj, compresslevel=None): return t -def compress_files(files, name, dest_dir, compresslevel=None, ref=None, recursive=False): +def compress_files(files, name, 
dest_dir, conf=None, ref=None, recursive=False, compression_plugin=None): + if compression_plugin: + return _compress_files_with_plugin(files, name, dest_dir, conf, ref, recursive, compression_plugin) + + tgz_path = os.path.join(dest_dir, name) t1 = time.time() # FIXME, better write to disk sequentially and not keep tgz contents in memory - tgz_path = os.path.join(dest_dir, name) - if ref: - ConanOutput(scope=str(ref) if ref else None).info(f"Compressing {name}") + ConanOutput(scope=str(ref or "")).info(f"Compressing {name}") with set_dirty_context_manager(tgz_path), open(tgz_path, "wb") as tgz_handle: + compresslevel = conf.get("core.gzip:compresslevel", check_type=int) if conf else None tgz = gzopen_without_timestamps(name, fileobj=tgz_handle, compresslevel=compresslevel) for filename, abs_path in sorted(files.items()): # recursive is False by default in case it is a symlink to a folder @@ -288,6 +291,32 @@ def compress_files(files, name, dest_dir, compresslevel=None, ref=None, recursiv ConanOutput().debug(f"{name} compressed in {duration} time") return tgz_path +def _compress_files_with_plugin(files, name, dest_dir, conf, ref, recursive, compression_plugin): + t1 = time.time() + abs_path_without_extension = os.path.join(dest_dir, COMPRESSED_PLUGIN_TAR_NAME) + ConanOutput(scope=str(ref or "")).info(f"Compressing {name} using compression plugin") + compressed_extension = compression_plugin.tar_compress( + archive_path=abs_path_without_extension, + files=files, + recursive=recursive, + conf=conf, + ref=ref, + ) + ConanOutput().debug(f"Compressed in {time.time() - t1} time") + + if not compressed_extension or not compressed_extension.startswith("."): + raise ConanException("The 'compression.py' did not return the compressed extension.") + + compressed_path = abs_path_without_extension + compressed_extension + t1 = time.time() + tgz_path = os.path.join(dest_dir, name) + with set_dirty_context_manager(tgz_path), 
open(tgz_path, "wb") as tgz_handle: + tgz = gzopen_without_timestamps(name, fileobj=tgz_handle, compresslevel=0) + tgz.add(compressed_path, arcname=os.path.basename(compressed_path), recursive=recursive) + tgz.close() + ConanOutput().debug(f"{name} wrapped in {time.time() - t1} time") + remove(compressed_path) + return tgz_path def _total_size(cache_files): total_size = 0 diff --git a/conan/internal/cache/home_paths.py b/conan/internal/cache/home_paths.py index 6256f0503ef..d7b81820d61 100644 --- a/conan/internal/cache/home_paths.py +++ b/conan/internal/cache/home_paths.py @@ -88,3 +88,7 @@ def settings_path_user(self): @property def config_version_path(self): return os.path.join(self._home, "config_version.json") + + @property + def compression_plugin_path(self): + return os.path.join(self._home, _EXTENSIONS_FOLDER, _PLUGINS, "compression.py") diff --git a/conan/internal/conan_app.py b/conan/internal/conan_app.py index ecac376905c..48b514babb0 100644 --- a/conan/internal/conan_app.py +++ b/conan/internal/conan_app.py @@ -54,7 +54,7 @@ def __init__(self, conan_api): auth_manager = ConanApiAuthManager(conan_api.remotes.requester, cache_folder, localdb, global_conf) # Handle remote connections - self.remote_manager = RemoteManager(self.cache, auth_manager, cache_folder) + self.remote_manager = RemoteManager(self.cache, auth_manager, cache_folder, self.conan_api) global_editables = conan_api.local.editable_packages ws_editables = conan_api.workspace.packages() self.editable_packages = global_editables.update_copy(ws_editables) @@ -84,10 +84,10 @@ class LocalRecipesIndexApp: - loader (for the export phase of local-recipes-index) The others are internally use by other collaborators """ - def __init__(self, cache_folder): + def __init__(self, cache_folder, conan_api): self.global_conf = ConfDefinition() self.cache = PkgCache(cache_folder, self.global_conf) - self.remote_manager = RemoteManager(self.cache, auth_manager=None, home_folder=cache_folder) + self.remote_manager 
= RemoteManager(self.cache, auth_manager=None, home_folder=cache_folder, conan_api=conan_api) editable_packages = EditablePackages() self.proxy = ConanProxy(self, editable_packages) self.range_resolver = RangeResolver(self, self.global_conf, editable_packages) diff --git a/conan/internal/rest/remote_manager.py b/conan/internal/rest/remote_manager.py index ef664d4da3e..418f6a2ddcc 100644 --- a/conan/internal/rest/remote_manager.py +++ b/conan/internal/rest/remote_manager.py @@ -26,15 +26,16 @@ class RemoteManager: _ErrorMsg = namedtuple("ErrorMsg", ["message"]) - def __init__(self, cache, auth_manager, home_folder): + def __init__(self, cache, auth_manager, home_folder, conan_api): self._cache = cache self._auth_manager = auth_manager self._signer = PkgSignaturesPlugin(cache, home_folder) self._home_folder = home_folder + self._conan_api = conan_api def _local_folder_remote(self, remote): if remote.remote_type == LOCAL_RECIPES_INDEX: - return RestApiClientLocalRecipesIndex(remote, self._home_folder) + return RestApiClientLocalRecipesIndex(remote, self._home_folder, self._conan_api) def check_credentials(self, remote, force_auth=False): self._call_remote(remote, "check_credentials", force_auth) @@ -87,7 +88,7 @@ def get_recipe(self, ref, remote, metadata=None): tgz_file = zipped_files.pop(EXPORT_TGZ_NAME, None) if tgz_file: - uncompress_file(tgz_file, export_folder, scope=str(ref)) + uncompress_file(tgz_file, export_folder, scope=str(ref), conan_api=self._conan_api) mkdir(export_folder) for file_name, file_path in zipped_files.items(): # copy CONANFILE shutil.move(file_path, os.path.join(export_folder, file_name)) @@ -129,7 +130,7 @@ def get_recipe_sources(self, ref, layout, remote): self._signer.verify(ref, download_folder, files=zipped_files) tgz_file = zipped_files[EXPORT_SOURCES_TGZ_NAME] - uncompress_file(tgz_file, export_sources_folder, scope=str(ref)) + uncompress_file(tgz_file, export_sources_folder, scope=str(ref), conan_api=self._conan_api) def 
get_package(self, pref, remote, metadata=None): output = ConanOutput(scope=str(pref.ref)) @@ -177,7 +178,7 @@ def _get_package(self, layout, pref, remote, scoped_output, metadata): tgz_file = zipped_files.pop(PACKAGE_TGZ_NAME, None) package_folder = layout.package() - uncompress_file(tgz_file, package_folder, scope=str(pref.ref)) + uncompress_file(tgz_file, package_folder, scope=str(pref.ref), conan_api=self._conan_api) mkdir(package_folder) # Just in case it doesn't exist, because uncompress did nothing for file_name, file_path in zipped_files.items(): # copy CONANINFO and CONANMANIFEST shutil.move(file_path, os.path.join(package_folder, file_name)) @@ -303,15 +304,18 @@ def _call_remote(self, remote, method, *args, **kwargs): raise ConanException(exc, remote=remote) -def uncompress_file(src_path, dest_folder, scope=None): +def uncompress_file(src_path, dest_folder, scope="", conan_api=None): try: filesize = os.path.getsize(src_path) big_file = filesize > 10000000 # 10 MB if big_file: hs = human_size(filesize) ConanOutput(scope=scope).info(f"Decompressing {hs} {os.path.basename(src_path)}") + + compression_plugin=conan_api.cache.compression_plugin if conan_api and conan_api.cache.compression_plugin else None + conf=conan_api.config._helpers.global_conf if conan_api else None with open(src_path, mode='rb') as file_handler: - tar_extract(file_handler, dest_folder) + tar_extract(fileobj=file_handler, destination_dir=dest_folder, compression_plugin=compression_plugin, conf=conf) except Exception as e: error_msg = "Error while extracting downloaded file '%s' to %s\n%s\n"\ % (src_path, dest_folder, str(e)) diff --git a/conan/internal/rest/rest_client_local_recipe_index.py b/conan/internal/rest/rest_client_local_recipe_index.py index bde8e2ac434..5b37dceb81c 100644 --- a/conan/internal/rest/rest_client_local_recipe_index.py +++ b/conan/internal/rest/rest_client_local_recipe_index.py @@ -58,14 +58,14 @@ class RestApiClientLocalRecipesIndex: a local folder assuming the 
conan-center-index repo layout """ - def __init__(self, remote, home_folder): + def __init__(self, remote, home_folder, conan_api): self._remote = remote local_recipes_index_path = HomePaths(home_folder).local_recipes_index_path local_recipes_index_path = os.path.join(local_recipes_index_path, remote.name, ".conan") repo_folder = self._remote.url from conan.internal.conan_app import LocalRecipesIndexApp - self._app = LocalRecipesIndexApp(local_recipes_index_path) + self._app = LocalRecipesIndexApp(local_recipes_index_path, conan_api) self._hook_manager = HookManager(HomePaths(local_recipes_index_path).hooks_path) self._layout = _LocalRecipesIndexLayout(repo_folder) diff --git a/conan/internal/util/files.py b/conan/internal/util/files.py index 98746479ae0..40a989cc5e7 100644 --- a/conan/internal/util/files.py +++ b/conan/internal/util/files.py @@ -1,4 +1,6 @@ import errno +from pathlib import Path +import tempfile import gzip import hashlib import os @@ -11,10 +13,13 @@ from contextlib import contextmanager +from conan.api.output import ConanOutput from conan.errors import ConanException _DIRTY_FOLDER = ".dirty" +# Name (without extension) of the tar file to be created by the compression plugin +COMPRESSED_PLUGIN_TAR_NAME = "__conan_plugin_compressed_contents__" def set_dirty(folder): dirty_file = os.path.normpath(folder) + _DIRTY_FOLDER @@ -256,7 +261,11 @@ def mkdir(path): os.makedirs(path) -def tar_extract(fileobj, destination_dir): +def tar_extract(fileobj, destination_dir, compression_plugin=None, conf=None): + if compression_plugin: + _tar_extract_with_plugin(fileobj, destination_dir, compression_plugin, conf) + return + the_tar = tarfile.open(fileobj=fileobj) # NOTE: The errorlevel=2 has been removed because it was failing in Win10, it didn't allow to # "could not change modification time", with time=0 @@ -264,8 +273,39 @@ def tar_extract(fileobj, destination_dir): the_tar.extraction_filter = (lambda member, path: member) # fully_trusted, avoid Py3.14 break 
the_tar.extractall(path=destination_dir) the_tar.close() + if list(Path(destination_dir).glob(f"{COMPRESSED_PLUGIN_TAR_NAME}.*")): + raise ConanException(f"Error while extracting {os.path.basename(fileobj.name)}.\n" + "This file has been compressed using a `compression` plugin.\n" + "If your organization uses this plugin, ensure it is correctly installed on your environment.") +def _tar_extract_with_plugin(fileobj, destination_dir, compression_plugin, conf): + """First remove tar.gz wrapper and then call the plugin to extract""" + t1 = time.time() + the_tar = tarfile.open(fileobj=fileobj) + the_tar.extraction_filter = (lambda member, path: member) # fully_trusted, avoid Py3.14 break + the_tar.extractall(path=destination_dir) + extracted_files = the_tar.getnames() + the_tar.close() + # Check if the tar was compressed with the compression plugin by checking the existence of + # our constant COMPRESSED_PLUGIN_TAR_NAME (without extension as extension is added by the plugin) + + for path in extracted_files: + if os.path.basename(path).startswith(COMPRESSED_PLUGIN_TAR_NAME): + # Extract the actual contents from the plugin tar (ignore other files present). 
+ ConanOutput().debug(f"Unwrapped in {time.time() - t1}") + t1 = time.time() + compression_plugin.tar_extract( + archive_path=os.path.join(destination_dir, path), + dest_dir=destination_dir, + conf=conf, + ) + # Remove extracted files from tar + for f in extracted_files: + remove(os.path.join(destination_dir, f)) + break + ConanOutput().debug(f"Extracted in {time.time() - t1}") + def merge_directories(src, dst): from conan.tools.files import copy copy(None, pattern="*", src=src, dst=dst) diff --git a/test/integration/command/cache/test_cache_save_restore.py b/test/integration/command/cache/test_cache_save_restore.py index a6bc91b495d..113a0dd0ac5 100644 --- a/test/integration/command/cache/test_cache_save_restore.py +++ b/test/integration/command/cache/test_cache_save_restore.py @@ -158,9 +158,13 @@ def test_cache_save_excluded_folders(): # exclude source c.run("cache save * --no-source") + # Check default compression function is being used and not compression.py plugin one + assert "Compressing conan_cache_save.tgz\n" in c.out c3 = TestClient() shutil.copy2(cache_path, c3.current_folder) c3.run("cache restore conan_cache_save.tgz") + # Default decompress does not have any output + assert "Decompressing conan_cache_save.tgz" not in c3.out ref_layout = c3.get_latest_ref_layout(ref) assert not os.path.exists(os.path.join(ref_layout.source(), "mysrc.c")) diff --git a/test/integration/extensions/test_compression_plugin.py b/test/integration/extensions/test_compression_plugin.py new file mode 100644 index 00000000000..0090d03fd34 --- /dev/null +++ b/test/integration/extensions/test_compression_plugin.py @@ -0,0 +1,269 @@ +import os +import textwrap +import tarfile + +from conan.internal.util.files import COMPRESSED_PLUGIN_TAR_NAME, mkdir +from conan.test.assets.genconanfile import GenConanfile +from conan.test.utils.tools import TestClient + + +def test_compression_plugin_not_valid(): + """Test an error is raised if the compression plugin is not valid""" + + c = 
TestClient() + compression_plugin = textwrap.dedent( + """ + def tar_compress(archive_path, files, recursive, conf=None, *args, **kwargs): + pass + """ + ) + + c.save( + { + os.path.join( + c.cache_folder, "extensions", "plugins", "compression.py" + ): compression_plugin, + "conanfile.py": GenConanfile("pkg", "1.0"), + } + ) + c.run("create .") + c.run("cache save 'pkg/*:*'", assert_error=True) + assert ( + "ERROR: The 'compression.py' plugin does not contain required `tar_extract` or `tar_compress` functions" + in c.out + ) + +def test_compression_plugin_returning_invalid_path(): + """Test an error is raised if the compression plugin does not return expected path""" + + c = TestClient() + compression_plugin = textwrap.dedent( + """ + def tar_compress(archive_path, files, recursive, conf=None, *args, **kwargs): + return + def tar_extract(archive_path, dest_dir, conf=None, *args, **kwargs): + pass + """ + ) + + c.save( + { + os.path.join( + c.cache_folder, "extensions", "plugins", "compression.py" + ): compression_plugin, + "conanfile.py": GenConanfile("pkg", "1.0"), + } + ) + c.run("create .") + c.run("cache save 'pkg/*:*'", assert_error=True) + assert ( + "ERROR: The 'compression.py' did not return the compressed extension." 
+ in c.out + ) + + +def test_compression_plugin_correctly_load(): + """Test that the compression plugin is correctly loaded and used on: + - cache save/restore + - remote upload/download + """ + c = TestClient(default_server_user=True) + + compression_plugin = textwrap.dedent( + """ + import os + import tarfile + from conan.api.output import ConanOutput + + # xz compression + def tar_compress(archive_path, files, recursive, conf=None, ref=None, *args, **kwargs): + extension = ".xz" + archive_path += extension + name = os.path.basename(archive_path) + ConanOutput(scope=ref).info(f"Compressing {name} using compression plugin (xz)") + compresslevel = conf.get("core.gzip:compresslevel", check_type=int) if conf else None + kwargs = {"preset": compresslevel} if compresslevel else {} + with tarfile.open(archive_path, f"w:xz", **kwargs) as tgz: + for filename, abs_path in sorted(files.items()): + tgz.add(abs_path, filename, recursive=True) + return extension + + def tar_extract(archive_path, dest_dir, conf=None, *args, **kwargs): + ConanOutput().info(f"Decompressing {os.path.basename(archive_path)} using compression plugin (xz)") + with open(archive_path, mode='rb') as file_handler: + the_tar = tarfile.open(fileobj=file_handler) + the_tar.extraction_filter = (lambda member, path: member) + the_tar.extractall(path=dest_dir) + the_tar.close() + """ + ) + + c.save( + { + os.path.join( + c.cache_folder, "extensions", "plugins", "compression.py" + ): compression_plugin, + "conanfile.py": GenConanfile("pkg", "1.0"), + } + ) + c.run("create .") + c.run("cache save 'pkg/*:*'") + assert f"Compressing {COMPRESSED_PLUGIN_TAR_NAME}.xz using compression plugin (xz)" in c.out + c.run("remove pkg/* -c") + c.run("cache restore conan_cache_save.tgz") + assert f"Decompressing {COMPRESSED_PLUGIN_TAR_NAME}.xz using compression plugin (xz)" in c.out + c.run("list pkg/1.0") + assert "Found 1 pkg/version recipes matching pkg/1.0 in local cache" in c.out + + # Remove pre existing tgz to force a 
recompression + c.run("remove pkg/* -c") + c.run("create .") + # Check the plugin is also used on remote interactions + c.run("upload *:* -r=default -c") + assert f"Compressing {COMPRESSED_PLUGIN_TAR_NAME}.xz using compression plugin (xz)" in c.out + assert "pkg/1.0: Uploading recipe" in c.out + c.run("remove pkg/* -c") + c.run("download 'pkg/*' -r=default") + assert f"Decompressing {COMPRESSED_PLUGIN_TAR_NAME}.xz using compression plugin (xz)" in c.out + + +def test_compression_plugin_tar_not_compatible_with_builtin(): + """ + Test that built in tar_extract function fails when uncompressing a non compatible file (a file + which has been compressed using the compression plugin with a different algorithm than the built-in one). + """ + c = TestClient(default_server_user=True) + + compression_plugin = textwrap.dedent( + """ + import os + import zipfile + from conan.api.output import ConanOutput + + # zip compression + def tar_compress(archive_path, files, recursive, conf=None, *args, **kwargs): + # compress files using zipfile library taking into account recursive + extension = ".zip" + archive_path += extension + name = os.path.basename(archive_path) + compresslevel = conf.get("core.gzip:compresslevel", check_type=int) if conf else None + ConanOutput().info(f"Compressing {name} using compression plugin (zip)") + with zipfile.ZipFile(archive_path, 'w', zipfile.ZIP_DEFLATED, compresslevel=compresslevel) as zipf: + for filename, abs_path in sorted(files.items()): + if recursive: + arcname = os.path.relpath(abs_path, start=os.path.dirname(abs_path)) + zipf.write(abs_path, arcname) + else: + zipf.write(abs_path, filename) + return extension + + def tar_extract(archive_path, dest_dir, conf=None, *args, **kwargs): + # extract tar using zipfile library + ConanOutput().info(f"Decompressing {os.path.basename(archive_path)} using compression plugin (zip)") + with zipfile.ZipFile(archive_path, 'r') as zip_ref: + zip_ref.extractall(dest_dir) + """ + ) + + c.save( + { + 
os.path.join( + c.cache_folder, "extensions", "plugins", "compression.py" + ): compression_plugin, + "conanfile.py": GenConanfile("pkg", "1.0"), + } + ) + c.run("create .") + c.run("cache save 'pkg/*:*'") + c.run("remove pkg/* -c") + os.unlink(os.path.join(c.cache_folder, "extensions", "plugins", "compression.py")) + c.run("cache restore conan_cache_save.tgz", assert_error=True) + assert ( + "Error while extracting conan_cache_save.tgz.\n" + "This file has been compressed using a `compression` plugin.\n" + "If your organization uses this plugin, ensure it is correctly installed on your environment." + ) in c.out + + +# https://github.com/conan-io/conan/issues/18259 +def test_compress_in_subdirectory(): + c = TestClient(default_server_user=True) + compression_plugin = textwrap.dedent( + """ + import os + import tarfile + from conan.api.output import ConanOutput + def tar_compress(archive_path, files, recursive, *args, **kwargs): + # compress files using tarfile putting all content in a `conan/` subfolder + extension = ".tgz" + archive_path += extension + name = os.path.basename(archive_path) + ConanOutput().info(f"Compressing {os.path.basename(name)} in conan subfolder") + with open(archive_path, "wb") as tgz_handle: + tgz = tarfile.open(name, "w", fileobj=tgz_handle) + for filename, abs_path in sorted(files.items()): + tgz.add(abs_path, os.path.join("conan", filename), recursive=recursive) + tgz.close() + return extension + + def tar_extract(archive_path, dest_dir, *args, **kwargs): + ConanOutput().info(f"Decompressing {archive_path} in conan subfolder") + with open(archive_path, mode="rb") as file_handler: + the_tar = tarfile.open(fileobj=file_handler) + the_tar.extraction_filter = (lambda member, path: member) + for member in the_tar.getmembers(): + if member.name.startswith("conan/"): + member.name = member.name[len("conan/"):] # Strip 'conan/' prefix + the_tar.extract(member, path=dest_dir) + the_tar.close() + """ + ) + c.save( + { + os.path.join( + 
c.cache_folder, "extensions", "plugins", "compression.py" + ): compression_plugin, + "conanfile.py": GenConanfile("pkg", "1.0"), + } + ) + c.run("create .") + c.run("cache save 'pkg/*:*'") + tgz = os.path.join(c.current_folder, "conan_cache_save.tgz") + assert os.path.exists(tgz) + + mkdir("extract_folder") + destination_dir = os.path.join(c.current_folder, "extract_folder") + extracted_files = _tar_extract(tgz, destination_dir) + assert extracted_files == [COMPRESSED_PLUGIN_TAR_NAME + ".tgz"] + # Create example files + c.save({os.path.join(destination_dir, "README.md"): "This is a readme file.", + os.path.join(destination_dir, "Cache-contents-graph-report.html"): "Cache contents graph report", + os.path.join(destination_dir, "Cache-contents-graph-report.exe"): "executable file...", + os.path.join(destination_dir, "conan_cache_save.tgz"): "this should also be ignored", + }) + # Recompress the file with metadata + _tar_compress(os.path.join(c.current_folder, "conan_cache_save_rearchived.tgz"), destination_dir) + + c.run("remove pkg/* -c") + c.run("cache restore conan_cache_save_rearchived.tgz") + + # Check any of the metadata are present in the cache + assert any(item not in os.listdir(os.path.join(c.cache_folder, "p")) for item in ("README.md", + "Cache-contents-graph-report.html", + "Cache-contents-graph-report.exe")) + + +def _tar_extract(tgz_path, destination_dir): + with open(tgz_path, "rb") as fileobj: + the_tar = tarfile.open(fileobj=fileobj) + the_tar.extraction_filter = (lambda member, path: member) + the_tar.extractall(path=destination_dir) + return the_tar.getnames() + + +def _tar_compress(archive_path, folder): + with open(archive_path, "wb") as tgz_handle: + tgz = tarfile.open(os.path.basename(archive_path), "w", fileobj=tgz_handle) + for filename in os.listdir(folder): + tgz.add(os.path.join(folder, filename), filename, recursive=True) + tgz.close()