Skip to content

(#164) Serde improvements, open_binary -> files changes #165

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Open
wants to merge 4 commits into
base: main
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion draftsman/data/entities.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@


try:
with pkg_resources.open_binary(data, "entities.pkl") as inp:
with pkg_resources.files(data).joinpath("entities.pkl").open("rb") as inp:
_data: dict = pickle.load(inp)

# Aggregation of all the entity dicts from data.raw collected in one
Expand Down
2 changes: 1 addition & 1 deletion draftsman/data/fluids.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@
from draftsman import data
from draftsman.error import InvalidFluidError

with pkg_resources.open_binary(data, "fluids.pkl") as inp:
with pkg_resources.files(data).joinpath("fluids.pkl").open("rb") as inp:
_data = pickle.load(inp)
raw: dict[str, dict] = _data[0]

Expand Down
2 changes: 1 addition & 1 deletion draftsman/data/instruments.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@
# from draftsman.data.entities import programmable_speakers


with pkg_resources.open_binary(data, "instruments.pkl") as inp:
with pkg_resources.files(data).joinpath("instruments.pkl").open("rb") as inp:
_data: list = pickle.load(inp)
raw: dict[str, list[dict]] = _data[0]
index_of: dict[str, dict[str, dict[str, int]]] = _data[1]
Expand Down
2 changes: 1 addition & 1 deletion draftsman/data/items.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@
from draftsman import data


with pkg_resources.open_binary(data, "items.pkl") as inp:
with pkg_resources.files(data).joinpath("items.pkl").open("rb") as inp:
_data = pickle.load(inp)
raw: dict[str, dict] = _data[0]
subgroups: dict[str, dict] = _data[1]
Expand Down
2 changes: 1 addition & 1 deletion draftsman/data/modules.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@
from draftsman import data


with pkg_resources.open_binary(data, "modules.pkl") as inp:
with pkg_resources.files(data).joinpath("modules.pkl").open("rb") as inp:
_data = pickle.load(inp)
raw: dict[str, dict] = _data[0]
categories: dict[str, list[str]] = _data[1]
Expand Down
2 changes: 1 addition & 1 deletion draftsman/data/planets.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@


try:
with pkg_resources.open_binary(data, "planets.pkl") as inp:
with pkg_resources.files(data).joinpath("planets.pkl").open("rb") as inp:
_data: dict = pickle.load(inp)

raw: dict[str, dict] = _data[0]
Expand Down
2 changes: 1 addition & 1 deletion draftsman/data/recipes.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@
from draftsman.utils import passes_surface_conditions


with pkg_resources.open_binary(data, "recipes.pkl") as inp:
with pkg_resources.files(data).joinpath("recipes.pkl").open("rb") as inp:
_data = pickle.load(inp)
raw: dict[str, dict] = _data[0]
categories: dict[str, list[str]] = _data[1]
Expand Down
2 changes: 1 addition & 1 deletion draftsman/data/signals.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@
import importlib.resources as pkg_resources


with pkg_resources.open_binary(data, "signals.pkl") as inp:
with pkg_resources.files(data).joinpath("signals.pkl").open("rb") as inp:
_data = pickle.load(inp)

raw: dict[str, dict] = _data["raw"]
Expand Down
2 changes: 1 addition & 1 deletion draftsman/data/tiles.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@
from draftsman import data


with pkg_resources.open_binary(data, "tiles.pkl") as inp:
with pkg_resources.files(data).joinpath("tiles.pkl").open("rb") as inp:
raw: dict[str, dict] = pickle.load(inp)


Expand Down
187 changes: 186 additions & 1 deletion draftsman/prototypes/decider_combinator.py
Original file line number Diff line number Diff line change
Expand Up @@ -87,6 +87,7 @@ def validate_from_dict(value: dict):
)



class DeciderCondition(Temp):
class Format(Condition):
compare_type: Optional[Literal["or", "and"]] = Field(
Expand Down Expand Up @@ -116,7 +117,6 @@ def __init__(
# self.second_signal = second_signal
# self.second_signal_networks = second_signal_networks
# self.compare_type = compare_type

self._root = self.Format.model_validate(
{
"first_signal": first_signal,
Expand All @@ -131,6 +131,47 @@ def __init__(
context={"construction": True, "mode": ValidationMode.NONE},
)

@classmethod
def from_dict(cls, d):
# Accepts a dict from deserialization and returns a DeciderCondition
return cls(
first_signal=d.get("first_signal"),
first_signal_networks=d.get("first_signal_networks", {"red", "green"}),
comparator=d.get("comparator", ">"),
constant=d.get("constant"),
second_signal=d.get("second_signal"),
second_signal_networks=d.get("second_signal_networks", {"red", "green"}),
compare_type=d.get("compare_type", "or"),
)

    @property
    def first_signal(self):
        """First (left-hand) operand signal, read from the backing model."""
        return self._root.first_signal

    @property
    def first_signal_networks(self):
        """Wire networks ("red"/"green") sampled for the first operand."""
        return self._root.first_signal_networks

    @property
    def comparator(self):
        """Comparison operator applied between the two operands (e.g. ">")."""
        return self._root.comparator

    @property
    def constant(self):
        """Constant right-hand value, when comparing against a constant."""
        return self._root.constant

    @property
    def second_signal(self):
        """Second (right-hand) operand signal, when not using a constant."""
        return self._root.second_signal

    @property
    def second_signal_networks(self):
        """Wire networks ("red"/"green") sampled for the second operand."""
        return self._root.second_signal_networks

    @property
    def compare_type(self):
        """How this condition chains with its predecessor ("and"/"or")."""
        return self._root.compare_type

def __or__(self, other):
if isinstance(other, DeciderCondition):
other._root.compare_type = "or"
Expand Down Expand Up @@ -258,6 +299,15 @@ def __init__(
context={"construction": True, "mode": ValidationMode.NONE},
)

@classmethod
def from_dict(cls, d):
return cls(
signal=d.get("signal"),
copy_count_from_input=d.get("copy_count_from_input", True),
constant=d.get("constant", 1),
networks=d.get("networks", {"red", "green"}),
)

@property
def signal(self):
return self._root.signal
Expand Down Expand Up @@ -406,6 +456,7 @@ class DeciderConditions(DraftsmanBaseModel):
# What wires to pull values from if 'copy_count_from_input'
# is true.""",
# )
from pydantic import model_validator

conditions: list = Field(
[],
Expand All @@ -422,6 +473,103 @@ class DeciderConditions(DraftsmanBaseModel):
""",
)

@model_validator(mode="after")
def convert_conditions_and_outputs(cls, values):
#
# @model_validator(mode="after")
# def ensure_proper_signal_configuration(self, info: ValidationInfo):
# """
# The first signal and output signals can be pure virtual
# signals, but only in a certain configuration as determined
# by `_signal_blacklist`. If the input signal is not a pure
# virtual signal, then the output signal cannot be
# `"signal-anything"` or `"signal-each"`.
# """
# if not info.context or self.output_signal is None:
# return self
# if info.context["mode"] <= ValidationMode.MINIMUM:
# return self
#
# warning_list: list = info.context["warning_list"]
#
# if self.first_signal is None:
# first_signal_name = None
# else:
# first_signal_name = self.first_signal.name
#
# current_blacklist = _signal_blacklist.get(
# first_signal_name, {"signal-anything", "signal-each"}
# )
# if self.output_signal.name in current_blacklist:
# warning_list.append(
# PureVirtualDisallowedWarning(
# "'{}' cannot be an output_signal when '{}' is the first operand; 'output_signal' will be removed when imported".format(
# self.output_signal.name, first_signal_name
# ),
# )
# )
#
# return self
#
# @model_validator(mode="after")
# def ensure_second_signal_is_not_pure_virtual(
# self, info: ValidationInfo
# ):
# if not info.context or self.second_signal is None:
# return self
# if info.context["mode"] <= ValidationMode.MINIMUM:
# return self
#
# warning_list: list = info.context["warning_list"]
#
# if self.second_signal.name in signals.pure_virtual:
# warning_list.append(
# PureVirtualDisallowedWarning(
# "'second_signal' cannot be set to pure virtual signal '{}'; will be removed when imported".format(
# self.second_signal.name
# )
# )
# )
#
# return self

# Use class attributes to avoid circular import
Condition = cls.__module__.split('.')[0] + '.prototypes.decider_combinator.DeciderCondition'
Output = cls.__module__.split('.')[0] + '.prototypes.decider_combinator.DeciderOutput'
# Actually use the directly available classes
condition_type = getattr(__import__(cls.__module__, fromlist=['Condition']), 'DeciderCondition')
output_type = getattr(__import__(cls.__module__, fromlist=['Output']), 'DeciderOutput')
def is_valid_condition_dict(d):
return isinstance(d, dict) and "first_signal" in d

def is_valid_output_dict(d):
return isinstance(d, dict) and "signal" in d

# Convert dicts to DeciderCondition/DeciderOutput objects
def ensure_condition(obj):
if isinstance(obj, condition_type):
return obj
if isinstance(obj, dict) and is_valid_condition_dict(obj):
if hasattr(condition_type, 'from_dict'):
return condition_type.from_dict(obj)
return condition_type(**obj)
return obj

def ensure_output(obj):
if isinstance(obj, output_type):
return obj
if isinstance(obj, dict) and is_valid_output_dict(obj):
if hasattr(output_type, 'from_dict'):
return output_type.from_dict(obj)
return output_type(**obj)
return obj

values["conditions"] = [ensure_condition(c) for c in values["conditions"]]
values["outputs"] = [ensure_output(o) for o in values["outputs"]]

return values


# @model_validator(mode="after")
# def ensure_proper_signal_configuration(self, info: ValidationInfo):
# """
Expand Down Expand Up @@ -831,3 +979,40 @@ def remove_decider_conditions(self):
# =========================================================================

__hash__ = Entity.__hash__

    def to_dict(self, exclude_none: bool = True, exclude_defaults: bool = True) -> dict:
        """
        Robustly serialize the DeciderCombinator to a dictionary, ensuring all
        control_behavior, conditions, and outputs are included and properly typed.
        Returns a dict suitable for blueprint export/import.

        :param exclude_none: Forwarded to the parent serializer and to each
            wrapper's ``model_dump``; omits ``None``-valued fields.
        :param exclude_defaults: Likewise; omits fields still at their default.
        :returns: A plain ``dict`` representation of this combinator.

        NOTE(review): assumes the parent class ``to_dict`` accepts the
        ``exclude_none``/``exclude_defaults`` keyword arguments — confirm
        against Entity's signature.
        """
        result = super().to_dict(exclude_none=exclude_none, exclude_defaults=exclude_defaults)

        # Ensure control_behavior is included and properly serialized
        if hasattr(self, 'control_behavior') and self.control_behavior:
            cb = self.control_behavior
            if hasattr(cb, 'decider_conditions') and cb.decider_conditions:
                dc = cb.decider_conditions
                # Serialize conditions and outputs under decider_conditions.
                # Entries wrapping a pydantic model expose it as `_root`;
                # plain entries are passed through as-is.
                decider_dict = {}
                if hasattr(dc, 'conditions'):
                    decider_dict['conditions'] = [
                        c._root.model_dump(by_alias=True, exclude_none=exclude_none, exclude_defaults=exclude_defaults) if hasattr(c, '_root') else c
                        for c in getattr(dc, 'conditions', [])
                    ]
                if hasattr(dc, 'outputs'):
                    decider_dict['outputs'] = [
                        o._root.model_dump(by_alias=True, exclude_none=exclude_none, exclude_defaults=exclude_defaults) if hasattr(o, '_root') else o
                        for o in getattr(dc, 'outputs', [])
                    ]
                # Only add decider_conditions if not both empty
                if decider_dict.get('conditions', []) or decider_dict.get('outputs', []):
                    result.setdefault('control_behavior', {})['decider_conditions'] = decider_dict
                # Remove old keys if present (stale flat 'conditions'/'outputs'
                # left behind by the parent serializer)
                if 'control_behavior' in result:
                    result['control_behavior'].pop('conditions', None)
                    result['control_behavior'].pop('outputs', None)
        # Remove control_behavior if now empty
        if 'control_behavior' in result and not result['control_behavior']:
            del result['control_behavior']
        return result
45 changes: 45 additions & 0 deletions draftsman/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,7 @@
from abc import ABCMeta, abstractmethod
import base64
import json
from enum import Enum
import math
from functools import wraps

Expand All @@ -21,6 +22,27 @@
import warnings
import zlib

def serialize(obj):
    """
    Recursively serializes custom objects using model_dump if available.
    Falls back to __dict__ or __slots__ if present, otherwise returns the object.
    Handles Enum values by returning their .value (e.g. integer for Direction).

    NOTE(review): an identical ``serialize`` is defined again later in this
    module (just above ``JSON_to_string``); that later definition shadows
    this one at import time, making this copy dead code — one of the two
    should be removed.
    """
    # pydantic-style objects: dump to plain data first, then recurse
    if hasattr(obj, "model_dump"):
        return serialize(obj.model_dump())
    elif isinstance(obj, list):
        return [serialize(item) for item in obj]
    elif isinstance(obj, dict):
        return {k: serialize(v) for k, v in obj.items()}
    elif isinstance(obj, Enum):
        # enum members collapse to their underlying value
        return obj.value
    elif hasattr(obj, "__dict__"):
        # generic object: keep only public attributes
        return {k: serialize(v) for k, v in obj.__dict__.items() if not k.startswith("_")}
    elif hasattr(obj, "__slots__"):
        return {k: serialize(getattr(obj, k)) for k in obj.__slots__ if hasattr(obj, k)}
    else:
        # primitives (str, int, float, bool, None) and anything unrecognized
        return obj

if TYPE_CHECKING: # pragma: no coverage
from draftsman.classes.entity_like import EntityLike
from draftsman.entity import Entity
Expand Down Expand Up @@ -398,6 +420,28 @@ def string_to_JSON(string: str) -> dict:
raise MalformedBlueprintStringError(e)


def serialize(obj):
    """
    Recursively convert ``obj`` into JSON-serializable primitives.

    Objects exposing ``model_dump`` (pydantic models) are dumped first and
    the result recursed; lists/tuples become lists, sets become sorted
    lists, dicts are recursed value-wise, Enum members collapse to their
    ``.value`` (e.g. integer for Direction), and generic objects fall back
    to their public ``__dict__`` or ``__slots__`` attributes. Anything else
    (str, int, float, bool, None) is returned unchanged.

    :param obj: Arbitrary object graph to flatten.
    :returns: A structure composed only of JSON-serializable primitives.
    """
    if hasattr(obj, "model_dump"):
        # pydantic-style objects: dump to plain data first, then recurse
        return serialize(obj.model_dump())
    elif isinstance(obj, (list, tuple)):
        return [serialize(item) for item in obj]
    elif isinstance(obj, (set, frozenset)):
        # Sets (e.g. wire-network values like {"red", "green"}) are not JSON
        # serializable; emit a sorted list for deterministic output, falling
        # back to arbitrary order when elements are not comparable.
        items = [serialize(item) for item in obj]
        try:
            return sorted(items)
        except TypeError:
            return items
    elif isinstance(obj, dict):
        return {k: serialize(v) for k, v in obj.items()}
    elif isinstance(obj, Enum):
        return obj.value
    elif hasattr(obj, "__dict__"):
        # generic object: keep only public attributes
        return {k: serialize(v) for k, v in obj.__dict__.items() if not k.startswith("_")}
    elif hasattr(obj, "__slots__"):
        return {k: serialize(getattr(obj, k)) for k in obj.__slots__ if hasattr(obj, k)}
    else:
        # primitives and anything unrecognized pass through unchanged
        return obj


def JSON_to_string(JSON: dict) -> str:
"""
Encodes a JSON dict to a Factorio-readable blueprint string.
Expand All @@ -417,6 +461,7 @@ def JSON_to_string(JSON: dict) -> str:

:returns: A ``str`` which can be imported into Factorio.
"""
JSON = serialize(JSON)
return "0" + base64.b64encode(
zlib.compress(json.dumps(JSON, separators=(",", ":")).encode("utf-8"), 9)
).decode("utf-8")
Expand Down
Loading