diff --git a/chia/cmds/plots.py b/chia/cmds/plots.py
index 3a75b3f0f3bf..72ba5f916bbe 100644
--- a/chia/cmds/plots.py
+++ b/chia/cmds/plots.py
@@ -228,3 +228,34 @@ def remove_cmd(ctx: click.Context, final_dir: str) -> None:
 @click.pass_context
 def show_cmd(ctx: click.Context) -> None:
     show_plots(ChiaCliContext.set_default(ctx).root_path)
+
+
+@plots_cmd.command("refresh", help="Refresh the plot list")
+@click.option("--hard", help="Clear the plot cache and do a full refresh", is_flag=True, default=False)
+@click.pass_context
+def refresh_cmd(ctx: click.Context, hard: bool) -> None:
+    """
+    Refreshes the plot list, optionally clearing the cache for a full refresh
+    """
+    root_path = ChiaCliContext.set_default(ctx).root_path
+    asyncio.run(refresh_plots(root_path, hard))
+
+
+async def refresh_plots(root_path: Path, hard: bool = False) -> None:
+    """
+    Refreshes the plot list, optionally clearing the cache for a full refresh
+    """
+    from chia.cmds.cmds_util import get_any_service_client
+    from chia.rpc.harvester_rpc_client import HarvesterRpcClient
+
+    try:
+        async with get_any_service_client(HarvesterRpcClient, root_path, None) as (harvester_client, _):
+            if hard:
+                print("Performing hard refresh (clearing cache)...")
+                await harvester_client.hard_refresh_plots()
+            else:
+                print("Refreshing plots...")
+                await harvester_client.refresh_plots()
+            print("Plot refresh initiated. Check logs for progress.")
+    except Exception as e:
+        print(f"Failed to refresh plots: {e}")
diff --git a/chia/plotting/cache.py b/chia/plotting/cache.py
index c70504d4c31f..19b8ab9b1750 100644
--- a/chia/plotting/cache.py
+++ b/chia/plotting/cache.py
@@ -211,3 +211,9 @@ def changed(self) -> bool:
 
     def path(self) -> Path:
         return self._path
+
+    def clear(self) -> None:
+        """Clear all entries from the cache and mark it as changed."""
+        self._data = {}
+        self._changed = True
+        log.info(f"Cleared plot cache at {self._path}")
diff --git a/chia/plotting/util.py b/chia/plotting/util.py
index c2ad2a136e05..5d680ebe844e 100644
--- a/chia/plotting/util.py
+++ b/chia/plotting/util.py
@@ -5,6 +5,7 @@
 from enum import Enum, IntEnum
 from pathlib import Path
 from typing import Any, Optional, Union
+import os
 
 from chia_rs import G1Element, PrivateKey
 from chia_rs.sized_bytes import bytes32
@@ -111,13 +112,27 @@ def get_plot_filenames(root_path: Path) -> dict[Path, list[Path]]:
     config = load_config(root_path, "config.yaml")
     recursive_scan: bool = config["harvester"].get("recursive_plot_scan", DEFAULT_RECURSIVE_PLOT_SCAN)
     recursive_follow_links: bool = config["harvester"].get("recursive_follow_links", False)
-    for directory_name in get_plot_directories(root_path, config):
+
+    # Get all plot directories from config
+    plot_directories = get_plot_directories(root_path, config)
+
+    # Process each directory
+    for directory_name in plot_directories:
         try:
             directory = Path(directory_name).resolve()
-        except (OSError, RuntimeError):
-            log.exception(f"Failed to resolve {directory_name}")
+        except (OSError, RuntimeError) as e:
+            log.exception(f"Failed to resolve {directory_name}: {e}")
+            continue
+
+        try:
+            # Get all plot files in this directory
+            plot_files = get_filenames(directory, recursive_scan, recursive_follow_links)
+            all_files[directory] = plot_files
+        except Exception as e:
+            # If there's an error processing this directory, log it and continue with other directories
+            log.error(f"Error processing directory {directory}: {e}")
+            all_files[directory] = []
             continue
-        all_files[directory] = get_filenames(directory, recursive_scan, recursive_follow_links)
     return all_files
@@ -233,19 +248,68 @@ def get_filenames(directory: Path, recursive: bool, follow_links: bool) -> list[Path]:
         if follow_links and recursive:
             import glob
 
-            files = glob.glob(str(directory / "**" / "*.plot"), recursive=True)
-            for file in files:
-                filepath = Path(file).resolve()
-                if filepath.is_file() and not filepath.name.startswith("._"):
-                    all_files.append(filepath)
+            try:
+                files = glob.glob(str(directory / "**" / "*.plot"), recursive=True)
+                for file in files:
+                    try:
+                        filepath = Path(file).resolve()
+                        if filepath.is_file() and not filepath.name.startswith("._"):
+                            all_files.append(filepath)
+                    except Exception as e:
+                        # If we can't process a specific file, log and continue
+                        log.warning(f"Error processing file {file}: {e}")
+                        continue
+            except Exception as e:
+                log.warning(f"Error during glob in directory {directory}: {e}")
+                # Fall back to manual recursive scanning if glob fails
+                try:
+                    # Manually walk the directory tree to handle errors more gracefully
+                    for root, _, files in os.walk(directory, followlinks=follow_links, onerror=lambda err: log.warning(f"Error walking directory \"{directory}\": {err}")):
+                        for file in files:
+                            if file.endswith(".plot") and not file.startswith("._"):
+                                try:
+                                    filepath = Path(os.path.join(root, file)).resolve()
+                                    if filepath.is_file():
+                                        all_files.append(filepath)
+                                except Exception as e:
+                                    log.exception(f"Error processing file {os.path.join(root, file)}")
+                                    continue
+                except Exception as e:
+                    log.warning(f"Error during manual directory walk of {directory}: {e}")
         else:
-            glob_function = directory.rglob if recursive else directory.glob
-            all_files = [
-                child for child in glob_function("*.plot") if child.is_file() and not child.name.startswith("._")
-            ]
+            try:
+                if recursive:
+                    # Use os.walk for recursive scanning to handle errors better
+                    for root, _, files in os.walk(directory, followlinks=follow_links, onerror=lambda err: log.warning(f"Error walking directory: {err}")):
+                        for file in files:
+                            if file.endswith(".plot") and not file.startswith("._"):
+                                try:
+                                    filepath = Path(os.path.join(root, file)).resolve()
+                                    if filepath.is_file():
+                                        all_files.append(filepath)
+                                except Exception as e:
+                                    log.exception(f"Error processing file {os.path.join(root, file)}")
+                                    continue
+                else:
+                    # Non-recursive case - just use glob
+                    glob_function = directory.glob
+                    for child in glob_function("*.plot"):
+                        try:
+                            if child.is_file() and not child.name.startswith("._"):
+                                all_files.append(child)
+                        except Exception as e:
+                            # If we can't process a specific file, log and continue
+                            log.exception(f"Error processing file {child}")
+                            continue
+            except Exception as e:
+                log.exception(f"Error during directory scanning in {directory}")
+                # Continue rather than returning empty
+
+        log.debug(f"get_filenames: {len(all_files)} files found in {directory}, recursive: {recursive}")
     except Exception as e:
-        log.warning(f"Error reading directory {directory} {e}")
+        log.exception(f"Error reading directory {directory}")
+        # We still return whatever files we found before the error
+
     return all_files
diff --git a/chia/rpc/harvester_rpc_api.py b/chia/rpc/harvester_rpc_api.py
index f2dab2db5442..1f399c4eff1f 100644
--- a/chia/rpc/harvester_rpc_api.py
+++ b/chia/rpc/harvester_rpc_api.py
@@ -23,6 +23,7 @@ def get_routes(self) -> dict[str, Endpoint]:
         return {
             "/get_plots": self.get_plots,
             "/refresh_plots": self.refresh_plots,
+            "/hard_refresh_plots": self.hard_refresh_plots,
             "/delete_plot": self.delete_plot,
             "/add_plot_directory": self.add_plot_directory,
             "/get_plot_directories": self.get_plot_directories,
@@ -66,6 +67,13 @@ async def refresh_plots(self, _: dict[str, Any]) -> EndpointResult:
         self.service.plot_manager.trigger_refresh()
         return {}
 
+    async def hard_refresh_plots(self, _: dict[str, Any]) -> EndpointResult:
+        # Clear the plot cache
+        self.service.plot_manager.cache.clear()
+        # Trigger a refresh
+        self.service.plot_manager.trigger_refresh()
+        return {}
+
     async def delete_plot(self, request: dict[str, Any]) -> EndpointResult:
         filename = request["filename"]
         if self.service.delete_plot(filename):
diff --git a/chia/rpc/harvester_rpc_client.py b/chia/rpc/harvester_rpc_client.py
index 5831d8641032..c38a4b0cd2ca 100644
--- a/chia/rpc/harvester_rpc_client.py
+++ b/chia/rpc/harvester_rpc_client.py
@@ -20,6 +20,9 @@ async def get_plots(self) -> dict[str, Any]:
     async def refresh_plots(self) -> None:
         await self.fetch("refresh_plots", {})
 
+    async def hard_refresh_plots(self) -> None:
+        await self.fetch("hard_refresh_plots", {})
+
     async def delete_plot(self, filename: str) -> bool:
         response = await self.fetch("delete_plot", {"filename": filename})
         # TODO: casting due to lack of type checked deserialization