Description
Hello, I have Python code using the Step Functions SDK that was created a while ago with version 2.3.0. Without making any changes to that code itself, I suddenly started receiving this error: `NameError: Field name "json" shadows a BaseModel attribute; use a different field name with "alias='json'".` My only guess is that it is related to other dependencies I added to the project that use pydantic classes; they were added to the same Poetry environment, not to the code that uses the SFN SDK. The error already happens at import time.
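For context, the same NameError can be reproduced with pydantic 1.x alone, without the SDK. Here is a minimal sketch, assuming pydantic 1.x is installed (the `Demo` model is hypothetical); it trips the same `pydantic.utils.validate_field_name` check that appears at the bottom of the traceback below:

```python
from typing import Optional

from pydantic import BaseModel  # assumes pydantic 1.x

try:
    # Hypothetical model, only to demonstrate the check that fires.
    class Demo(BaseModel):
        # pydantic v1 forbids a field named "json" because BaseModel
        # defines a .json() method.
        json: Optional[dict] = None
except NameError as err:
    print(err)
    # NameError: Field name "json" shadows a BaseModel attribute;
    # use a different field name with "alias='json'".
```

So the clash seems to be between the pydantic version resolved in my Poetry environment and the `json` fields declared by `sagemaker_core.main.shapes`.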
Reproduction Steps
```python
import json
from typing import Union, List, Tuple, Any, Iterable

from stepfunctions.workflow import Workflow
from stepfunctions.steps import Chain
from stepfunctions.steps.states import State


def generate_asl_json_file_for_workflow(
    workflow: Workflow, output_file_path: str
) -> str:
    workflow_json = json.loads(workflow.definition.to_json())
    # Serialize the workflow definition to pretty-printed JSON
    pretty_workflow_json = json.dumps(workflow_json, indent=4)
    # Write it to <output_file_path>.asl.json
    with open(f"{output_file_path}.asl.json", "w") as outfile:
        outfile.write(pretty_workflow_json)
    return pretty_workflow_json
```
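For completeness, this is how the function is called; a minimal, hypothetical sketch (workflow name, state names, and role ARN are placeholders, and the client is passed explicitly so no default AWS region is required):

```python
import boto3
from stepfunctions.steps import Chain, Pass
from stepfunctions.workflow import Workflow

# Build a trivial two-state definition; nothing is created in AWS
# unless workflow.create() is called.
definition = Chain([Pass("Start"), Pass("Done")])
workflow = Workflow(
    name="demo-workflow",
    definition=definition,
    role="arn:aws:iam::123456789012:role/StepFunctionsExecutionRole",
    client=boto3.client("stepfunctions", region_name="us-east-1"),
)
generate_asl_json_file_for_workflow(workflow, "demo-workflow")
```

None of this is ever reached, though: the `from stepfunctions.workflow import Workflow` line at the top of the module already raises.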
What did you expect to happen?
To keep creating Step Functions workflows and generating their ASL definitions with the SFN Python SDK, as it worked before.
What actually happened?
I receive the following error at import time:
```
╭─────────────────────── Traceback (most recent call last) ────────────────────╮
│ /Users/zied/PycharmProjects/ada-datasets-creation-service/ada_datasets_pipel │
│ ine/src/step_functions_scripts/main.py:2 in <module> │
│ │
│ 1 from ada_datasets_pipeline.src.step_functions_scripts.parameters import │
│ ❱ 2 from ada_datasets_pipeline.src.step_functions_scripts.src.create_ada_da │
│ 3 │ generate_ada_pipeline_state_machine, │
│ 4 ) │
│ 5 │
│ │
│ /Users/zied/PycharmProjects/ada-datasets-creation-service/ada_datasets_pipel │
│ ine/src/step_functions_scripts/src/create_ada_dataset.py:1 in <module> │
│ │
│ ❱ 1 from ada_datasets_pipeline.src.step_functions_scripts.src.utils import │
│ 2 │ states_format, │
│ 3 │ generate_asl_json_file_for_workflow, │
│ 4 │ chain_events_list, │
│ │
│ /Users/zied/PycharmProjects/ada-datasets-creation-service/ada_datasets_pipel │
│ ine/src/step_functions_scripts/src/utils.py:4 in <module> │
│ │
│ 1 import json │
│ 2 from typing import Union, List, Tuple, Any, Iterable │
│ 3 │
│ ❱ 4 from stepfunctions.workflow import Workflow │
│ 5 from stepfunctions.steps import Chain │
│ 6 from stepfunctions.steps.states import State │
│ 7 │
│ │
│ /Users/zied/Library/Caches/pypoetry/virtualenvs/ada-datasets-pipeline-gB2QjA │
│ dD-py3.10/lib/python3.10/site-packages/stepfunctions/__init__.py:25 in │
│ <module> │
│ │
│ 22 # disable logging.warning() from import packages │
│ 23 logging.getLogger().setLevel(logging.ERROR) │
│ 24 │
│ ❱ 25 from stepfunctions import steps │
│ 26 from stepfunctions import workflow │
│ 27 from stepfunctions import template │
│ 28 from stepfunctions.workflow.utils import CustomColorFormatter │
│ │
│ /Users/zied/Library/Caches/pypoetry/virtualenvs/ada-datasets-pipeline-gB2QjA │
│ dD-py3.10/lib/python3.10/site-packages/stepfunctions/steps/__init__.py:19 in │
│ <module> │
│ │
│ 16 │
│ 17 from stepfunctions.steps.states import Pass, Succeed, Fail, Wait, Choic │
│ 18 from stepfunctions.steps.states import Graph, FrozenGraph │
│ ❱ 19 from stepfunctions.steps.sagemaker import TrainingStep, TransformStep, │
│ 20 from stepfunctions.steps.compute import LambdaStep, BatchSubmitJobStep, │
│ 21 from stepfunctions.steps.service import DynamoDBGetItemStep, DynamoDBPu │
│ 22 │
│ │
│ /Users/zied/Library/Caches/pypoetry/virtualenvs/ada-datasets-pipeline-gB2QjA │
│ dD-py3.10/lib/python3.10/site-packages/stepfunctions/steps/sagemaker.py:25 │
│ in <module> │
│ │
│ 22 from stepfunctions.steps.utils import merge_dicts, tags_dict_to_kv_lis │
│ 23 from stepfunctions.steps.integration_resources import IntegrationPatte │
│ 24 │
│ ❱ 25 from sagemaker.workflow.airflow import training_config, transform_conf │
│ 26 from sagemaker.model import Model, FrameworkModel │
│ 27 from sagemaker.model_monitor import DataCaptureConfig │
│ 28 │
│ │
│ /Users/zied/Library/Caches/pypoetry/virtualenvs/ada-datasets-pipeline-gB2QjA │
│ dD-py3.10/lib/python3.10/site-packages/sagemaker/__init__.py:18 in <module> │
│ │
│ 15 │
│ 16 import importlib_metadata │
│ 17 │
│ ❱ 18 from sagemaker import estimator, parameter, tuner # noqa: F401 │
│ 19 from sagemaker.amazon.kmeans import KMeans, KMeansModel, KMeansPredicto │
│ 20 from sagemaker.amazon.pca import PCA, PCAModel, PCAPredictor # noqa: F │
│ 21 from sagemaker.amazon.lda import LDA, LDAModel, LDAPredictor # noqa: F │
│ │
│ /Users/zied/Library/Caches/pypoetry/virtualenvs/ada-datasets-pipeline-gB2QjA │
│ dD-py3.10/lib/python3.10/site-packages/sagemaker/estimator.py:30 in <module> │
│ │
│ 27 from six.moves.urllib.parse import urlparse │
│ 28 │
│ 29 import sagemaker │
│ ❱ 30 from sagemaker import git_utils, image_uris, vpc_utils, s3 │
│ 31 from sagemaker.analytics import TrainingJobAnalytics │
│ 32 from sagemaker.config import ( │
│ 33 │ ESTIMATOR_DEBUG_HOOK_CONFIG_PATH, │
│ │
│ /Users/zied/Library/Caches/pypoetry/virtualenvs/ada-datasets-pipeline-gB2QjA │
│ dD-py3.10/lib/python3.10/site-packages/sagemaker/image_uris.py:24 in │
│ <module> │
│ │
│ 21 from packaging.version import Version │
│ 22 │
│ 23 from sagemaker import utils │
│ ❱ 24 from sagemaker.jumpstart.constants import DEFAULT_JUMPSTART_SAGEMAKER_ │
│ 25 from sagemaker.jumpstart.enums import JumpStartModelType │
│ 26 from sagemaker.jumpstart.utils import is_jumpstart_model_input │
│ 27 from sagemaker.spark import defaults │
│ │
│ /Users/zied/Library/Caches/pypoetry/virtualenvs/ada-datasets-pipeline-gB2QjA │
│ dD-py3.10/lib/python3.10/site-packages/sagemaker/jumpstart/constants.py:27 │
│ in <module> │
│ │
│ 24 │ MIMEType, │
│ 25 │ JumpStartModelType, │
│ 26 ) │
│ ❱ 27 from sagemaker.jumpstart.types import JumpStartLaunchedRegionInfo, Jum │
│ 28 from sagemaker.base_serializers import ( │
│ 29 │ BaseSerializer, │
│ 30 │ CSVSerializer, │
│ │
│ /Users/zied/Library/Caches/pypoetry/virtualenvs/ada-datasets-pipeline-gB2QjA │
│ dD-py3.10/lib/python3.10/site-packages/sagemaker/jumpstart/types.py:20 in │
│ <module> │
│ │
│ 17 from copy import deepcopy │
│ 18 from enum import Enum │
│ 19 from typing import Any, Dict, List, Optional, Set, Union │
│ ❱ 20 from sagemaker_core.shapes import ModelAccessConfig as CoreModelAcces │
│ 21 from sagemaker.model_card.model_card import ModelCard, ModelPackageMo │
│ 22 from sagemaker.utils import ( │
│ 23 │ S3_PREFIX, │
│ │
│ /Users/zied/Library/Caches/pypoetry/virtualenvs/ada-datasets-pipeline-gB2QjA │
│ dD-py3.10/lib/python3.10/site-packages/sagemaker_core/shapes/__init__.py:1 │
│ in <module> │
│ │
│ ❱ 1 from ..main.shapes import * │
│ 2 │
│ │
│ /Users/zied/Library/Caches/pypoetry/virtualenvs/ada-datasets-pipeline-gB2QjA │
│ dD-py3.10/lib/python3.10/site-packages/sagemaker_core/main/shapes.py:2509 in │
│ <module> │
│ │
│ 2506 │ """ │
│ 2507 │
│ 2508 │
│ ❱ 2509 class MonitoringDatasetFormat(Base): │
│ 2510 │ """ │
│ 2511 │ MonitoringDatasetFormat │
│ 2512 │ Represents the dataset format used when running a monitoring j │
│ │
│ in pydantic.main.ModelMetaclass.__new__:186 │
│ │
│ in pydantic.utils.validate_field_name:168 │
╰──────────────────────────────────────────────────────────────────────────────╯
NameError: Field name "json" shadows a BaseModel attribute; use a different
field name with "alias='json'".
```
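The two frames at the bottom (`pydantic.main.ModelMetaclass.__new__`, `pydantic.utils.validate_field_name`) only exist in pydantic 1.x, so checking which major version Poetry resolved seems to confirm the clash (`pydantic.VERSION` is a standard attribute; the reading that `sagemaker_core` expects pydantic 2.x is my assumption):

```python
import pydantic

# Prints e.g. "1.10.x" in the failing environment; sagemaker_core's
# shapes declare fields named "json", which pydantic 2.x allows with
# a warning but pydantic 1.x rejects with the NameError above.
print(pydantic.VERSION)
```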
Environment
- **AWS Step Functions Data Science Python SDK version**: 2.3.0
- **Python version**: 3.10.15
This is a 🐛 Bug Report