diff --git a/draftsman/data/entities.py b/draftsman/data/entities.py
index d86d6e43..db6cc102 100644
--- a/draftsman/data/entities.py
+++ b/draftsman/data/entities.py
@@ -11,7 +11,7 @@

 try:
-    with pkg_resources.open_binary(data, "entities.pkl") as inp:
+    with pkg_resources.files(data).joinpath("entities.pkl").open("rb") as inp:
         _data: dict = pickle.load(inp)

     # Aggregation of all the the entity dicts from data.raw collected in one
diff --git a/draftsman/data/fluids.py b/draftsman/data/fluids.py
index 515c89b5..fda554f4 100644
--- a/draftsman/data/fluids.py
+++ b/draftsman/data/fluids.py
@@ -7,7 +7,7 @@
 from draftsman import data
 from draftsman.error import InvalidFluidError

-with pkg_resources.open_binary(data, "fluids.pkl") as inp:
+with pkg_resources.files(data).joinpath("fluids.pkl").open("rb") as inp:
     _data = pickle.load(inp)

 raw: dict[str, dict] = _data[0]
diff --git a/draftsman/data/instruments.py b/draftsman/data/instruments.py
index a7c73727..6b916bbc 100644
--- a/draftsman/data/instruments.py
+++ b/draftsman/data/instruments.py
@@ -10,7 +10,7 @@
 # from draftsman.data.entities import programmable_speakers

-with pkg_resources.open_binary(data, "instruments.pkl") as inp:
+with pkg_resources.files(data).joinpath("instruments.pkl").open("rb") as inp:
     _data: list = pickle.load(inp)

 raw: dict[str, list[dict]] = _data[0]
 index_of: dict[str, dict[str, dict[str, int]]] = _data[1]
diff --git a/draftsman/data/items.py b/draftsman/data/items.py
index 1a3f7826..24ce4449 100644
--- a/draftsman/data/items.py
+++ b/draftsman/data/items.py
@@ -7,7 +7,7 @@
 from draftsman import data

-with pkg_resources.open_binary(data, "items.pkl") as inp:
+with pkg_resources.files(data).joinpath("items.pkl").open("rb") as inp:
     _data = pickle.load(inp)

 raw: dict[str, dict] = _data[0]
 subgroups: dict[str, dict] = _data[1]
diff --git a/draftsman/data/modules.py b/draftsman/data/modules.py
index fdf1bf35..b27f088c 100644
--- a/draftsman/data/modules.py
+++ b/draftsman/data/modules.py
@@ -7,7 +7,7 @@
 from draftsman import data

-with pkg_resources.open_binary(data, "modules.pkl") as inp:
+with pkg_resources.files(data).joinpath("modules.pkl").open("rb") as inp:
     _data = pickle.load(inp)

 raw: dict[str, dict] = _data[0]
 categories: dict[str, list[str]] = _data[1]
diff --git a/draftsman/data/planets.py b/draftsman/data/planets.py
index 8f80d69b..b863b37f 100644
--- a/draftsman/data/planets.py
+++ b/draftsman/data/planets.py
@@ -11,7 +11,7 @@

 try:
-    with pkg_resources.open_binary(data, "planets.pkl") as inp:
+    with pkg_resources.files(data).joinpath("planets.pkl").open("rb") as inp:
         _data: dict = pickle.load(inp)

     raw: dict[str, dict] = _data[0]
diff --git a/draftsman/data/recipes.py b/draftsman/data/recipes.py
index 72b9c3a5..bb52f44c 100644
--- a/draftsman/data/recipes.py
+++ b/draftsman/data/recipes.py
@@ -12,7 +12,7 @@
 from draftsman.utils import passes_surface_conditions

-with pkg_resources.open_binary(data, "recipes.pkl") as inp:
+with pkg_resources.files(data).joinpath("recipes.pkl").open("rb") as inp:
     _data = pickle.load(inp)

 raw: dict[str, dict] = _data[0]
 categories: dict[str, list[str]] = _data[1]
diff --git a/draftsman/data/signals.py b/draftsman/data/signals.py
index 7b7c7f85..8c932881 100644
--- a/draftsman/data/signals.py
+++ b/draftsman/data/signals.py
@@ -9,7 +9,7 @@
 import importlib.resources as pkg_resources

-with pkg_resources.open_binary(data, "signals.pkl") as inp:
+with pkg_resources.files(data).joinpath("signals.pkl").open("rb") as inp:
     _data = pickle.load(inp)

 raw: dict[str, dict] = _data["raw"]
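All of the data-module hunks above (and the tiles.py hunk that follows) make the same substitution: `importlib.resources.open_binary()`, deprecated since Python 3.11 and removed in 3.13, is replaced by the `files()` traversable API. A minimal sketch of the new pattern; `example.pkl` is a placeholder name, not an actual draftsman resource:

```python
import importlib.resources as pkg_resources
import pickle

from draftsman import data

# files() returns a Traversable for the package's resource directory;
# joinpath(...).open("rb") replaces the removed open_binary() helper.
with pkg_resources.files(data).joinpath("example.pkl").open("rb") as inp:
    _data = pickle.load(inp)
```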
diff --git a/draftsman/data/tiles.py b/draftsman/data/tiles.py
index 171517c9..25c35e3a 100644
--- a/draftsman/data/tiles.py
+++ b/draftsman/data/tiles.py
@@ -7,7 +7,7 @@
 from draftsman import data

-with pkg_resources.open_binary(data, "tiles.pkl") as inp:
+with pkg_resources.files(data).joinpath("tiles.pkl").open("rb") as inp:
     raw: dict[str, dict] = pickle.load(inp)
diff --git a/draftsman/prototypes/decider_combinator.py b/draftsman/prototypes/decider_combinator.py
index 52aaf817..f7e030fc 100644
--- a/draftsman/prototypes/decider_combinator.py
+++ b/draftsman/prototypes/decider_combinator.py
@@ -87,6 +87,7 @@ def validate_from_dict(value: dict):
         )

+
 class DeciderCondition(Temp):
     class Format(Condition):
         compare_type: Optional[Literal["or", "and"]] = Field(
@@ -116,7 +117,6 @@ def __init__(
         # self.second_signal = second_signal
         # self.second_signal_networks = second_signal_networks
         # self.compare_type = compare_type
-
         self._root = self.Format.model_validate(
             {
                 "first_signal": first_signal,
@@ -131,6 +131,47 @@ def __init__(
             context={"construction": True, "mode": ValidationMode.NONE},
         )

+    @classmethod
+    def from_dict(cls, d):
+        # Accepts a dict from deserialization and returns a DeciderCondition
+        return cls(
+            first_signal=d.get("first_signal"),
+            first_signal_networks=d.get("first_signal_networks", {"red", "green"}),
+            comparator=d.get("comparator", ">"),
+            constant=d.get("constant"),
+            second_signal=d.get("second_signal"),
+            second_signal_networks=d.get("second_signal_networks", {"red", "green"}),
+            compare_type=d.get("compare_type", "or"),
+        )
+
+    @property
+    def first_signal(self):
+        return self._root.first_signal
+
+    @property
+    def first_signal_networks(self):
+        return self._root.first_signal_networks
+
+    @property
+    def comparator(self):
+        return self._root.comparator
+
+    @property
+    def constant(self):
+        return self._root.constant
+
+    @property
+    def second_signal(self):
+        return self._root.second_signal
+
+    @property
+    def second_signal_networks(self):
+        return self._root.second_signal_networks
+
+    @property
+    def compare_type(self):
+        return self._root.compare_type
+
     def __or__(self, other):
         if isinstance(other, DeciderCondition):
             other._root.compare_type = "or"
@@ -258,6 +299,15 @@ def __init__(
             context={"construction": True, "mode": ValidationMode.NONE},
         )

+    @classmethod
+    def from_dict(cls, d):
+        return cls(
+            signal=d.get("signal"),
+            copy_count_from_input=d.get("copy_count_from_input", True),
+            constant=d.get("constant", 1),
+            networks=d.get("networks", {"red", "green"}),
+        )
+
     @property
     def signal(self):
         return self._root.signal
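The `from_dict()` classmethods give `DeciderCondition` and the output class a direct path back from the plain dicts produced when a blueprint string is decoded. A small sketch, assuming the second class is `DeciderOutput` (as the validator below looks it up) and that both are importable from `draftsman.prototypes.decider_combinator`; signal names and values are illustrative only:

```python
from draftsman.prototypes.decider_combinator import DeciderCondition, DeciderOutput

# Dicts shaped like the entries found under decider_conditions in a decoded blueprint.
cond = DeciderCondition.from_dict(
    {"first_signal": "signal-A", "comparator": ">", "constant": 100}
)
out = DeciderOutput.from_dict({"signal": "signal-A", "copy_count_from_input": False})
print(cond.comparator, cond.constant, out.signal)
```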
@@ -406,6 +456,7 @@ class DeciderConditions(DraftsmanBaseModel):
     #     What wires to pull values from if 'copy_count_from_input'
     #     is true.""",
     # )
+    from pydantic import model_validator

     conditions: list = Field(
         [],
@@ -422,6 +473,103 @@ class DeciderConditions(DraftsmanBaseModel):
         """,
     )

+    @model_validator(mode="after")
+    def convert_conditions_and_outputs(cls, values):
+        # Resolve the condition/output classes on this module at validation
+        # time to avoid a circular import at class-definition time.
+        module = __import__(cls.__module__, fromlist=["DeciderCondition", "DeciderOutput"])
+        condition_type = getattr(module, "DeciderCondition")
+        output_type = getattr(module, "DeciderOutput")
+
+        def is_valid_condition_dict(d):
+            return isinstance(d, dict) and "first_signal" in d
+
+        def is_valid_output_dict(d):
+            return isinstance(d, dict) and "signal" in d
+
+        # Convert plain dicts to DeciderCondition/DeciderOutput objects
+        def ensure_condition(obj):
+            if isinstance(obj, condition_type):
+                return obj
+            if is_valid_condition_dict(obj):
+                if hasattr(condition_type, "from_dict"):
+                    return condition_type.from_dict(obj)
+                return condition_type(**obj)
+            return obj
+
+        def ensure_output(obj):
+            if isinstance(obj, output_type):
+                return obj
+            if is_valid_output_dict(obj):
+                if hasattr(output_type, "from_dict"):
+                    return output_type.from_dict(obj)
+                return output_type(**obj)
+            return obj
+
+        values["conditions"] = [ensure_condition(c) for c in values["conditions"]]
+        values["outputs"] = [ensure_output(o) for o in values["outputs"]]
+
+        return values
+
+
     # @model_validator(mode="after")
     # def ensure_proper_signal_configuration(self, info: ValidationInfo):
     #     """
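The `convert_conditions_and_outputs` validator is what lets imported blueprints expose `DeciderCondition`/`DeciderOutput` objects instead of raw dicts. The same coercion idea in isolation, as a standalone pydantic v2 sketch; `Point` and `Container` are stand-ins, not draftsman classes:

```python
from pydantic import BaseModel, model_validator


class Point:
    """Plain stand-in for DeciderCondition/DeciderOutput."""

    def __init__(self, x: int = 0, y: int = 0):
        self.x, self.y = x, y


class Container(BaseModel):
    items: list = []

    @model_validator(mode="after")
    def coerce_items(self):
        # After normal field validation, promote raw dicts to Point objects.
        self.items = [Point(**i) if isinstance(i, dict) else i for i in self.items]
        return self


c = Container(items=[{"x": 1, "y": 2}, Point(3, 4)])
print(all(isinstance(i, Point) for i in c.items))  # True
```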
@@ -831,3 +979,40 @@ def remove_decider_conditions(self):
     # =========================================================================

     __hash__ = Entity.__hash__
+
+    def to_dict(self, exclude_none: bool = True, exclude_defaults: bool = True) -> dict:
+        """
+        Robustly serialize the DeciderCombinator to a dictionary, ensuring all
+        control_behavior, conditions, and outputs are included and properly typed.
+        Returns a dict suitable for blueprint export/import.
+        """
+        result = super().to_dict(exclude_none=exclude_none, exclude_defaults=exclude_defaults)
+
+        # Ensure control_behavior is included and properly serialized
+        if hasattr(self, 'control_behavior') and self.control_behavior:
+            cb = self.control_behavior
+            if hasattr(cb, 'decider_conditions') and cb.decider_conditions:
+                dc = cb.decider_conditions
+                # Serialize conditions and outputs under decider_conditions
+                decider_dict = {}
+                if hasattr(dc, 'conditions'):
+                    decider_dict['conditions'] = [
+                        c._root.model_dump(by_alias=True, exclude_none=exclude_none, exclude_defaults=exclude_defaults)
+                        if hasattr(c, '_root') else c
+                        for c in getattr(dc, 'conditions', [])
+                    ]
+                if hasattr(dc, 'outputs'):
+                    decider_dict['outputs'] = [
+                        o._root.model_dump(by_alias=True, exclude_none=exclude_none, exclude_defaults=exclude_defaults)
+                        if hasattr(o, '_root') else o
+                        for o in getattr(dc, 'outputs', [])
+                    ]
+                # Only add decider_conditions if not both empty
+                if decider_dict.get('conditions', []) or decider_dict.get('outputs', []):
+                    result.setdefault('control_behavior', {})['decider_conditions'] = decider_dict
+                # Remove old keys if present
+                if 'control_behavior' in result:
+                    result['control_behavior'].pop('conditions', None)
+                    result['control_behavior'].pop('outputs', None)
+        # Remove control_behavior if now empty
+        if 'control_behavior' in result and not result['control_behavior']:
+            del result['control_behavior']
+        return result
diff --git a/draftsman/utils.py b/draftsman/utils.py
index c67ac6fb..607e6477 100644
--- a/draftsman/utils.py
+++ b/draftsman/utils.py
@@ -12,6 +12,7 @@
 from abc import ABCMeta, abstractmethod
 import base64
 import json
+from enum import Enum
 import math
 from functools import wraps
@@ -398,6 +420,28 @@ def string_to_JSON(string: str) -> dict:
         raise MalformedBlueprintStringError(e)


+def serialize(obj):
+    """
+    Recursively serializes custom objects using model_dump if available.
+    Falls back to __dict__ or __slots__ if present, otherwise returns the object.
+    Handles Enum values by returning their .value (e.g. integer for Direction).
+    """
+    if hasattr(obj, "model_dump"):
+        return serialize(obj.model_dump())
+    elif isinstance(obj, list):
+        return [serialize(item) for item in obj]
+    elif isinstance(obj, dict):
+        return {k: serialize(v) for k, v in obj.items()}
+    elif isinstance(obj, Enum):
+        return obj.value
+    elif hasattr(obj, "__dict__"):
+        return {k: serialize(v) for k, v in obj.__dict__.items() if not k.startswith("_")}
+    elif hasattr(obj, "__slots__"):
+        return {k: serialize(getattr(obj, k)) for k in obj.__slots__ if hasattr(obj, k)}
+    else:
+        return obj
+
+
 def JSON_to_string(JSON: dict) -> str:
     """
     Encodes a JSON dict to a Factorio-readable blueprint string.
@@ -417,6 +461,7 @@ def JSON_to_string(JSON: dict) -> str:

     :returns: A ``str`` which can be imported into Factorio.
     """
+    JSON = serialize(JSON)
     return "0" + base64.b64encode(
         zlib.compress(json.dumps(JSON, separators=(",", ":")).encode("utf-8"), 9)
     ).decode("utf-8")
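With `JSON_to_string()` now passing its input through `serialize()`, enum members such as `Direction` and any lingering model objects are reduced to plain JSON-friendly values before `json.dumps()` runs. A small sketch of the effect; the dict below is illustrative, not a complete blueprint:

```python
import json

from draftsman.constants import Direction
from draftsman.utils import serialize

# serialize() walks the structure and replaces Direction.EAST with its plain .value.
payload = {"entities": [{"name": "decider-combinator", "direction": Direction.EAST}]}
print(json.dumps(serialize(payload), separators=(",", ":")))
```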
diff --git a/test/test_blueprint.py b/test/test_blueprint.py
index 36caa276..200c96a6 100644
--- a/test/test_blueprint.py
+++ b/test/test_blueprint.py
@@ -1899,7 +1899,8 @@ def test_remove_circuit_connections(self):
             "green", "a", "b", side_1="input", side_2="output"
         )
         self.maxDiff = None
-        assert blueprint.to_dict() == {
+        actual = blueprint.to_dict()
+        expected = {
             "blueprint": {
                 "item": "blueprint",
                 "entities": [
@@ -1928,6 +1929,17 @@
                 "version": encode_version(*__factorio_version_info__),
             }
         }
+        import difflib
+        import pprint
+        if actual != expected:
+            actual_str = pprint.pformat(actual, width=120)
+            expected_str = pprint.pformat(expected, width=120)
+            diff = difflib.unified_diff(
+                expected_str.splitlines(), actual_str.splitlines(),
+                fromfile='expected', tofile='actual', lineterm=''
+            )
+            print('\n'.join(diff))
+        assert actual == expected
         blueprint.remove_circuit_connections()
         assert blueprint.to_dict() == {
             "blueprint": {
diff --git a/test/test_decider_combinator_serialization.py b/test/test_decider_combinator_serialization.py
new file mode 100644
index 00000000..5bf8d37c
--- /dev/null
+++ b/test/test_decider_combinator_serialization.py
@@ -0,0 +1,43 @@
+import pytest
+from draftsman.blueprintable import Blueprint
+from draftsman.entity import DeciderCombinator
+from draftsman.constants import Direction
+
+@pytest.mark.parametrize("direction", [Direction.EAST])
+def test_decider_combinator_conditions_and_outputs(direction):
+    blueprint = Blueprint()
+    dc = DeciderCombinator("decider-combinator", direction=direction)
+    Input = DeciderCombinator.Input
+    Output = DeciderCombinator.Output
+    dc.conditions = (
+        (Input("signal-A") > Input("signal-B", {"red"}))
+        & (Input("signal-C", {"green"}) == Input("signal-D"))
+        | (Input("signal-each", {"green"}) <= Input("signal-each", {"red", "green"}))
+    )
+    dc.conditions |= (Input("transport-belt", networks={"green"}) != -2**31)
+    dc.outputs = [
+        Output("signal-A", copy_count_from_input=False),
+        Output("signal-each", networks={"green"}),
+        Output("signal-B", copy_count_from_input=False, constant=101)
+    ]
+    blueprint.entities.append(dc)
+    exported = blueprint.to_string()
+    print("\n--- SERIALIZED BLUEPRINT STRING ---\n", exported, "\n--- END ---\n")
+    imported = Blueprint(exported)
+    # Find the decider combinator entity
+    imported_dc = None
+    for ent in imported.entities:
+        if isinstance(ent, DeciderCombinator):
+            imported_dc = ent
+            break
+    assert imported_dc is not None, "DeciderCombinator not found after import"
+    # Check that conditions and outputs are present and non-empty
+    assert hasattr(imported_dc, "conditions"), "No 'conditions' attribute after import"
+    print("Imported DeciderCombinator conditions:", imported_dc.conditions)
+    assert imported_dc.conditions, "Conditions list is empty after import"
+    assert hasattr(imported_dc, "outputs"), "No 'outputs' attribute after import"
+    print("Imported DeciderCombinator outputs:", imported_dc.outputs)
+    assert imported_dc.outputs, "Outputs list is empty after import"
+    # Optionally: check specific signals or structure
+    signals = [getattr(c, 'first_signal', None) for c in imported_dc.conditions]
+    assert any(s is not None for s in signals), "No signals found in imported conditions"