diff --git a/interactive_ai/libs/iai_core_py/iai_core/factories/project_validator.py b/interactive_ai/libs/iai_core_py/iai_core/factories/project_validator.py index 712e43b27..25e2f9c48 100644 --- a/interactive_ai/libs/iai_core_py/iai_core/factories/project_validator.py +++ b/interactive_ai/libs/iai_core_py/iai_core/factories/project_validator.py @@ -146,6 +146,7 @@ def _validate_empty_labels(cls, parser: ParserT) -> None: raise ReservedLabelNameException(label_name=empty_label_name) @classmethod + @abstractmethod def _validate_keypoint_structure(cls, parser: ParserT) -> None: """ Validates that a user defined label graph edge has exactly 2 nodes, node names match with existing labels, @@ -160,29 +161,6 @@ def _validate_keypoint_structure(cls, parser: ParserT) -> None: :raises NodeNameNotInLabelsException: if a node name does not match any of the label names :raises NodePositionIsOutOfBoundsException: if a node is out of bounds (not in the range [0.0, 1.0]) """ - duplicate_list = [] - for task_name in parser.get_tasks_names(): - keypoint_structure = parser.get_keypoint_structure_data(task_name=task_name) - if not keypoint_structure: - continue - label_names = parser.get_custom_labels_names_by_task(task_name=task_name) - edges = keypoint_structure["edges"] - for edge in edges: - nodes = edge["nodes"] - if len(nodes) != 2: - raise WrongNumberOfNodesException - if nodes[0] not in label_names or nodes[1] not in label_names: - raise IncorrectNodeNameInGraphException - if set(nodes) in duplicate_list: - raise DuplicateEdgeInGraphException - duplicate_list.append(set(nodes)) - - positions = keypoint_structure["positions"] - for position in positions: - if position["label"] not in label_names: - raise NodeNameNotInLabelsException - if not 0 <= position["x"] <= 1 or not 0 <= position["y"] <= 1: - raise NodePositionIsOutOfBoundsException @classmethod @abstractmethod @@ -417,6 +395,45 @@ def __validate_parent_labels_in_parent_task( ), ) + @classmethod + def _validate_keypoint_structure(cls, parser: ProjectParser) -> None: + """ + Validates that a user defined label graph edge has exactly 2 nodes, node names match with existing labels, + and has no duplicate edges + + This method must be run after labels validation since it assumes that its labels param is valid. 
+ + :param parser: A parser instance that can read and decode the information necessary to create a project + :raises WrongNumberOfNodesException: if an edge does not have 2 vertices + :raises IncorrectNodeNameInGraphException: if an edge has an incorrect name + :raises DuplicateEdgeInGraphException: if the graph contains a duplicate edge + :raises NodeNameNotInLabelsException: if a node name does not match any of the label names + :raises NodePositionIsOutOfBoundsException: if a node is out of bounds (not in the range [0.0, 1.0]) + """ + duplicate_list = [] + for task_name in parser.get_tasks_names(): + keypoint_structure = parser.get_keypoint_structure_data(task_name=task_name) + if not keypoint_structure: + continue + label_names = parser.get_custom_labels_names_by_task(task_name=task_name) + edges = keypoint_structure["edges"] + for edge in edges: + nodes = edge["nodes"] + if len(nodes) != 2: + raise WrongNumberOfNodesException + if nodes[0] not in label_names or nodes[1] not in label_names: + raise IncorrectNodeNameInGraphException + if set(nodes) in duplicate_list: + raise DuplicateEdgeInGraphException + duplicate_list.append(set(nodes)) + + positions = keypoint_structure["positions"] + for position in positions: + if position["label"] not in label_names: + raise NodeNameNotInLabelsException + if not 0 <= position["x"] <= 1 or not 0 <= position["y"] <= 1: + raise NodePositionIsOutOfBoundsException + class ProjectUpdateValidator(ProjectValidator[ProjectUpdateParser]): def validate(self, parser: ProjectUpdateParser) -> None: @@ -726,3 +743,47 @@ def __validate_parent_labels_in_parent_task( if not is_found ), ) + + @classmethod + def _validate_keypoint_structure(cls, parser: ProjectUpdateParser) -> None: + """ + Validates that a user defined label graph edge has exactly 2 nodes, node names match with existing labels, + and has no duplicate edges + + This method must be run after labels validation since it assumes that its labels param is valid. 
+ + :param parser: A parser instance that can read and decode the information necessary to create a project + :raises WrongNumberOfNodesException: if an edge does not have 2 vertices + :raises IncorrectNodeNameInGraphException: if an edge has an incorrect name + :raises DuplicateEdgeInGraphException: if the graph contains a duplicate edge + :raises NodeNameNotInLabelsException: if a node name does not match any of the label names + :raises NodePositionIsOutOfBoundsException: if a node is out of bounds (not in the range [0.0, 1.0]) + """ + duplicate_list = [] + for task_name in parser.get_tasks_names(): + keypoint_structure = parser.get_keypoint_structure_data(task_name=task_name) + if not keypoint_structure: + continue + label_names = list(parser.get_custom_labels_names_by_task(task_name=task_name)) + label_ids = [ + str(parser.get_label_id_by_name(task_name=task_name, label_name=label_name)) + for label_name in label_names + ] + labels = label_names + label_ids + edges = keypoint_structure["edges"] + for edge in edges: + nodes = edge["nodes"] + if len(nodes) != 2: + raise WrongNumberOfNodesException + if nodes[0] not in labels or nodes[1] not in labels: + raise IncorrectNodeNameInGraphException + if set(nodes) in duplicate_list: + raise DuplicateEdgeInGraphException + duplicate_list.append(set(nodes)) + + positions = keypoint_structure["positions"] + for position in positions: + if position["label"] not in labels: + raise NodeNameNotInLabelsException + if not 0 <= position["x"] <= 1 or not 0 <= position["y"] <= 1: + raise NodePositionIsOutOfBoundsException diff --git a/interactive_ai/libs/iai_core_py/iai_core/utils/project_builder.py b/interactive_ai/libs/iai_core_py/iai_core/utils/project_builder.py index b1c3304d3..737250ae2 100644 --- a/interactive_ai/libs/iai_core_py/iai_core/utils/project_builder.py +++ b/interactive_ai/libs/iai_core_py/iai_core/utils/project_builder.py @@ -827,15 +827,16 @@ def _build_keypoint_structure( :return: the KeypointStructure """ label_name_to_id: dict[str, ID] = {label.name: label.id_ for label in labels} + label_ids = [label.id_ for label in labels] edges = [] for edge in keypoint_structure_data["edges"]: - node_1 = label_name_to_id[edge["nodes"][0]] - node_2 = label_name_to_id[edge["nodes"][1]] + node_1 = ID(edge["nodes"][0]) if ID(edge["nodes"][0]) in label_ids else label_name_to_id[edge["nodes"][0]] + node_2 = ID(edge["nodes"][1]) if ID(edge["nodes"][1]) in label_ids else label_name_to_id[edge["nodes"][1]] edges.append(KeypointEdge(node_1=node_1, node_2=node_2)) positions = [] for position in keypoint_structure_data["positions"]: - node = label_name_to_id[position["label"]] + node = ID(position["label"]) if ID(position["label"]) in label_ids else label_name_to_id[position["label"]] x = position["x"] y = position["y"] positions.append(KeypointPosition(node=node, x=x, y=y)) diff --git a/interactive_ai/libs/iai_core_py/iai_core/utils/project_factory.py b/interactive_ai/libs/iai_core_py/iai_core/utils/project_factory.py index 47dd8caae..687b780e8 100644 --- a/interactive_ai/libs/iai_core_py/iai_core/utils/project_factory.py +++ b/interactive_ai/libs/iai_core_py/iai_core/utils/project_factory.py @@ -22,7 +22,7 @@ from iai_core.entities.model_template import ModelTemplate, NullModelTemplate from iai_core.entities.project import Project from iai_core.entities.task_graph import TaskEdge, TaskGraph -from iai_core.entities.task_node import TaskNode, TaskProperties +from iai_core.entities.task_node import TaskNode, TaskProperties, TaskType from iai_core.repos 
import ( ActiveModelStateRepo, ConfigurableParametersRepo, @@ -109,6 +109,7 @@ def create_project_with_task_graph( # noqa: PLR0913 project_id: ID | None = None, user_names: list[str] | None = None, hidden: bool = False, + keypoint_structure: KeypointStructure | None = None, ) -> Project: """ Create a project given a task graph @@ -122,6 +123,7 @@ def create_project_with_task_graph( # noqa: PLR0913 :param model_templates: List of model templates to create the model storages for each task :param user_names: User names to assign to the project :param hidden: Whether to keep the project as hidden after creation + :param keypoint_structure: Keypoint structure to assign to the project, only for Keypoint Detection projects :return: created project """ if project_id is None: @@ -146,8 +148,8 @@ def create_project_with_task_graph( # noqa: PLR0913 _id=DatasetStorageRepo.generate_id(), ) DatasetStorageRepo(project_identifier).save(dataset_storage) - keypoint_structure = None - if FeatureFlagProvider.is_enabled(FEATURE_FLAG_KEYPOINT_DETECTION): + + if FeatureFlagProvider.is_enabled(FEATURE_FLAG_KEYPOINT_DETECTION) and keypoint_structure is None: keypoint_structure = KeypointStructure( edges=[KeypointEdge(node_1=ID("node_1"), node_2=ID("node_2"))], positions=[ @@ -155,6 +157,7 @@ def create_project_with_task_graph( # noqa: PLR0913 KeypointPosition(node=ID("node_2"), x=1, y=1), ], ) + # Create graph with one task project = Project( id=project_id, @@ -252,6 +255,7 @@ def create_project_single_task( # noqa: PLR0915, PLR0913 empty_label_name: str | None = None, is_multi_label_classification: bool | None = False, hidden: bool = False, + keypoint_structure: KeypointStructure | None = None, ) -> Project: """ Create a project with one task in the pipeline. @@ -267,19 +271,19 @@ def create_project_single_task( # noqa: PLR0915, PLR0913 This attribute is ignored when label_schema is provided. :param model_template_id: Model template for the project (either the model template ID or the model template itself) - :param user_names: User names to assign to the project - :param configurable_parameters: Optional, configurable parameters to assign - to the task node in the Project. - :param workspace: Optional, workspace + :param user_names: Usernames to assign to the project :param label_schema: Optional, label schema relative to the project. If provided, then label_names is ignored If unspecified, the default workspace is used. + :param label_groups: Optional. label group metadata + :param labelname_to_parent: Optional. label tree structure + :param configurable_parameters: Optional, configurable parameters to assign + to the task node in the Project. :param empty_label_name: Optional. If an empty label needs to be created, this parameter is used to customize its name. :param is_multi_label_classification: Optional. True if created project is multi-label classification :param hidden: Whether to keep the project as hidden after creation. - :param label_groups: Optional. label group metadata - :param labelname_to_parent: Optional. 
label tree structure + :param keypoint_structure: Keypoint structure to assign to the project, only for Keypoint Detection projects :return: Created project """ logger.warning("Method `create_project_single_task` is deprecated.") @@ -293,7 +297,6 @@ def create_project_single_task( # noqa: PLR0915, PLR0913 if isinstance(model_template, NullModelTemplate): raise ModelTemplateError("A NullModelTemplate was created.") - CTX_SESSION_VAR.get().workspace_id project_id = ProjectRepo.generate_id() dataset_template = ModelTemplateList().get_by_id("dataset") task_node_id = TaskNodeRepo.generate_id() @@ -323,6 +326,9 @@ def create_project_single_task( # noqa: PLR0915, PLR0913 task_edge = TaskEdge(from_task=image_dataset_task_node, to_task=task_node) task_graph.add_task_edge(task_edge) + if task_node.task_properties.task_type == TaskType.KEYPOINT_DETECTION and not keypoint_structure: + raise ValueError("Please provide a keypoint structure for keypoint detection projects.") + project = ProjectFactory.create_project_with_task_graph( project_id=project_id, name=name, @@ -332,6 +338,7 @@ def create_project_single_task( # noqa: PLR0915, PLR0913 task_graph=task_graph, model_templates=model_templates, hidden=hidden, + keypoint_structure=keypoint_structure, ) project_labels: list[Label] @@ -374,7 +381,7 @@ def create_project_single_task( # noqa: PLR0915, PLR0913 label_groups=label_groups, labelname_to_label=labelname_to_label ) - # labels not have an explicite grouping should be included to an exclusive_group + # labels not have an explicit grouping should be included to an exclusive_group ungrouped_label_names = [label for label in project_labels if label.name not in grouped_label_names] exclusive_group = LabelGroup( name="labels", diff --git a/interactive_ai/services/api/schemas/projects/requests/put/keypoint_edge.yaml b/interactive_ai/services/api/schemas/projects/requests/put/keypoint_edge.yaml index 124a27413..8b3452369 100644 --- a/interactive_ai/services/api/schemas/projects/requests/put/keypoint_edge.yaml +++ b/interactive_ai/services/api/schemas/projects/requests/put/keypoint_edge.yaml @@ -5,4 +5,6 @@ properties: nodes: type: array items: - $ref: '../../../mongo_id.yaml' \ No newline at end of file + anyOf: + - type: string + - $ref: '../../../mongo_id.yaml' \ No newline at end of file diff --git a/interactive_ai/services/api/schemas/projects/requests/put/keypoint_position.yaml b/interactive_ai/services/api/schemas/projects/requests/put/keypoint_position.yaml index b01f69e34..dd18a35a2 100644 --- a/interactive_ai/services/api/schemas/projects/requests/put/keypoint_position.yaml +++ b/interactive_ai/services/api/schemas/projects/requests/put/keypoint_position.yaml @@ -5,7 +5,9 @@ required: - y properties: label: - $ref: '../../../mongo_id.yaml' + anyOf: + - type: string + - $ref: '../../../mongo_id.yaml' x: type: number format: float diff --git a/interactive_ai/services/dataset_ie/app/communication/helpers/import_utils.py b/interactive_ai/services/dataset_ie/app/communication/helpers/import_utils.py index 2e5cc7d64..6a06c9d55 100644 --- a/interactive_ai/services/dataset_ie/app/communication/helpers/import_utils.py +++ b/interactive_ai/services/dataset_ie/app/communication/helpers/import_utils.py @@ -28,6 +28,7 @@ Domain.ANOMALY_DETECTION, Domain.ANOMALY_SEGMENTATION, Domain.ROTATED_DETECTION, + Domain.KEYPOINT_DETECTION, ] @@ -142,6 +143,7 @@ def get_validated_task_type(cls, project: Project) -> TaskType: TaskType.ANOMALY_DETECTION, TaskType.ANOMALY_SEGMENTATION, TaskType.ROTATED_DETECTION, + 
TaskType.KEYPOINT_DETECTION, ] trainable_tasks = project.get_trainable_task_nodes() diff --git a/interactive_ai/services/director/app/coordination/dataset_manager/dataset_counter_config.py b/interactive_ai/services/director/app/coordination/dataset_manager/dataset_counter_config.py index 430c1ce28..c9ac97d7d 100644 --- a/interactive_ai/services/director/app/coordination/dataset_manager/dataset_counter_config.py +++ b/interactive_ai/services/director/app/coordination/dataset_manager/dataset_counter_config.py @@ -127,5 +127,5 @@ class KeypointDetectionCounterConfig(DatasetCounterConfig): description="The minimum number of new annotations required " "before auto-train is triggered. Auto-training will start every time " "that this number of annotations is created.", - visible_in_ui=False, + visible_in_ui=True, ) diff --git a/interactive_ai/services/resource/app/communication/rest_data_validator/project_rest_validator.py b/interactive_ai/services/resource/app/communication/rest_data_validator/project_rest_validator.py index c3cc05f01..65f5e376b 100644 --- a/interactive_ai/services/resource/app/communication/rest_data_validator/project_rest_validator.py +++ b/interactive_ai/services/resource/app/communication/rest_data_validator/project_rest_validator.py @@ -670,7 +670,7 @@ def _validate_keypoint_structure(data: dict[str, Any], labels: list[LabelPropert :raises NodeNameNotInLabelsException: if a node name does not match any of the label names :raises NodePositionIsOutOfBoundsException: if a node is out of bounds (not in the range [0.0, 1.0]) """ - existing_labels = [label.name for label in labels] + [label.id for label in labels] + existing_labels = [label.name for label in labels] + [str(label.id) for label in labels] pipeline_data = data[PIPELINE] duplicate_list = [] is_anomaly_reduced = FeatureFlagProvider.is_enabled(FeatureFlag.FEATURE_FLAG_ANOMALY_REDUCTION) diff --git a/interactive_ai/workflows/geti_domain/common/jobs_common_extras/datumaro_conversion/convert_utils.py b/interactive_ai/workflows/geti_domain/common/jobs_common_extras/datumaro_conversion/convert_utils.py index 371c697f0..d19c05125 100644 --- a/interactive_ai/workflows/geti_domain/common/jobs_common_extras/datumaro_conversion/convert_utils.py +++ b/interactive_ai/workflows/geti_domain/common/jobs_common_extras/datumaro_conversion/convert_utils.py @@ -424,7 +424,7 @@ def get_label_annotation( # noqa: C901 # e.g.) 1 <- a0, 1 <- a1, 2 <- b0, 2 <- b1, [1, 2], [a0, a1], [b0, b1] # if an item is annotated to 'a0' and 'b0', # it disobeys the multi-label rule after including ancestors of 'a0' and 'b0' - # becaus '1' and '2' are grouped together. + # because '1' and '2' are grouped together. # In this case, we remove 'b0' and all ancestors of 'b0' multi_label_error = False for sc_label in sc_labels_for_dm_label: @@ -649,7 +649,7 @@ def get_labels_mapper_from_dataset( :param dm_categories: Categories of datumaro dataset to map. :param dm_infos: Infos of datumaro dataset to map. :param sc_labels: SC labels. - :param domain: Lable domain. We should provide special mapping for anomaly labels. + :param domain: Label domain. We should provide special mapping for anomaly labels. :return: Method that return sc label corresponding to datumaro label id. 
""" dm_label_mapper = ConvertUtils.get_dm_label_mapper(dm_categories, domain) @@ -699,10 +699,14 @@ def get_labels_mapper_from_labels_map(dm_categories: dm.CategoriesInfo, labels_m :return: method mapping dm label id to sc label """ dm_labels: dm.LabelCategories = dm_categories[dm.AnnotationType.label] + dm_points: dm.PointsCategories | None = dm_categories.get(dm.AnnotationType.points, None) def _get_sc_label(dm_label_id: int | str) -> Label | None: try: - dm_label_name = dm_labels[dm_label_id].name if isinstance(dm_label_id, int) else dm_label_id + if dm_points: + dm_label_name = dm_points.items[0].labels[dm_label_id] + else: + dm_label_name = dm_labels[dm_label_id].name if isinstance(dm_label_id, int) else dm_label_id return labels_map[dm_label_name] except KeyError: return NullLabel() @@ -715,7 +719,7 @@ def get_sc_label_to_group_id(label_schema: LabelSchema) -> dict[Label, ID]: Mapping of sc_label to group id :param label_schema: label_schema of target sc project - :return: mapping of sc_label to all ancester labels + :return: mapping of sc_label to all ancestor labels """ sc_label_to_group_id = {} for label_group in label_schema.get_groups(include_empty=False): @@ -726,11 +730,11 @@ def get_sc_label_to_group_id(label_schema: LabelSchema) -> dict[Label, ID]: @staticmethod def get_sc_label_to_all_parents(sc_labels: list[Label], label_schema: LabelSchema) -> dict[Label, list[Label]]: """ - Mapping of sc_label to all ancester labels + Mapping of sc_label to all ancestor labels :param sc_labels: list of sc_label :param label_schema: label_schema of target sc project - :return: mapping of sc_label to all ancester labels + :return: mapping of sc_label to all ancestor labels """ sc_label_to_all_parents = defaultdict(list) for sc_label in sc_labels: @@ -782,10 +786,10 @@ def get_label_metadata( # noqa: C901, PLR0912 :param selected_labels: labels selected by a user for being imported :param project_type: Geti project type :param include_all_labels: if True, ignore selected_labels then include all possible labels for each task_type - :return: label_groups metadata, hierachical labeling tree, list of label names + :return: label_groups metadata, hierarchical labeling tree, list of label names """ # Collect the label names from the categories() - # that are utilized to construct a hierachical labeling structure or LabelGroup + # that are utilized to construct a hierarchical labeling structure or LabelGroup label_groups: list[dict[str, Any]] = [] labelname_to_parent: dict[str, str] = {} @@ -903,6 +907,9 @@ def get_keypoint_structure( if not include_all_labels and not any(label in selected_labels for label in joint_names): continue structure["edges"].append({"nodes": list(joint_names)}) - # structure["positions"].append(???) 
TODO CVS-156570 + for i in range(0, len(cat.positions), 2): + structure["positions"].append( + {"label": cat.labels[i // 2], "x": cat.positions[i], "y": cat.positions[i + 1]} + ) return structure diff --git a/interactive_ai/workflows/geti_domain/common/jobs_common_extras/datumaro_conversion/import_utils.py b/interactive_ai/workflows/geti_domain/common/jobs_common_extras/datumaro_conversion/import_utils.py index 597e06997..6d678f731 100644 --- a/interactive_ai/workflows/geti_domain/common/jobs_common_extras/datumaro_conversion/import_utils.py +++ b/interactive_ai/workflows/geti_domain/common/jobs_common_extras/datumaro_conversion/import_utils.py @@ -70,6 +70,7 @@ class ImportErrorDetail: Domain.ANOMALY_DETECTION, Domain.ANOMALY_SEGMENTATION, Domain.ROTATED_DETECTION, + Domain.KEYPOINT_DETECTION, ] @@ -124,8 +125,8 @@ def is_dataset_from_multi_label_classification( """ Check if given dm_dataset is exported from multi-label classification - :param dm_categories: Categories of Datumaro dataset obejct - :param dm_infos: Infos of Datumaro dataset obejct + :param dm_categories: Categories of Datumaro dataset object + :param dm_infos: Infos of Datumaro dataset object :return: True if dm_dataset is exported from multi-label classification project """ project_type = ImportUtils.get_exported_project_type(dm_infos) @@ -149,7 +150,7 @@ def get_exported_project_type(dm_infos: dict[str, Any]) -> GetiProjectType: """ Find the Geti project type where dm_dataset is exported from - :param dm_infos: Infos of Datumaro dataset obejct + :param dm_infos: Infos of Datumaro dataset object :return: Geti project type """ project_task_type: str = dm_infos.get("GetiProjectTask", "NONE") @@ -170,8 +171,8 @@ def is_dataset_from_hierarchical_classification( """ Check if given dm_dataset is exported from hierarchical classification - :param dm_categories: Categories of Datumaro dataset obejct - :param dm_infos: Infos of Datumaro dataset obejct + :param dm_categories: Categories of Datumaro dataset object + :param dm_infos: Infos of Datumaro dataset object :return: True if dm_dataset is exported from hierarchical classification project """ project_type = ImportUtils.get_exported_project_type(dm_infos) @@ -251,7 +252,7 @@ def _validate_annotations( :param item: dataset item :param label_categories: label categories within a dataset :param label_name_to_group: mapping between label names and label group names - :return: number of label, bbox, mask annotations within a item and multi-label flag + :return: number of label, bbox, mask annotations within an item and multi-label flag """ number_of_labels: dict[str, int] = defaultdict(lambda: 0) multi_label_flag: bool = False @@ -272,6 +273,8 @@ def _validate_annotations( dm.AnnotationType.ellipse, ]: number_of_labels["mask"] += 1 + elif ann.type == dm.AnnotationType.points: + number_of_labels["points"] += 1 return number_of_labels, multi_label_flag @@ -318,6 +321,8 @@ def _validate_dataset( reports["items_missing_annotation_det"].add((item.id, item.subset)) if number_of_labels["mask"] == 0: reports["items_missing_annotation_seg"].add((item.id, item.subset)) + if number_of_labels["points"] == 0: + reports["items_missing_annotation_key"].add((item.id, item.subset)) return reports @@ -331,7 +336,7 @@ def collect_validation_warnings( ) -> ImportErrorDetail: """ Collect number of non-actionable errors (warnings) in dataset using different validators - for supported task types in dataset, skip classification since classificaiton errors + for supported task types in dataset, skip 
classification since classification errors were collected earlier :param dm_dataset: datumaro dataset to validate for warnings @@ -435,8 +440,7 @@ def get_label_to_ann_types( :return: dictionary mapping label names to a set of dm annotation type """ label_cat: dm.LabelCategories = dm_dataset.categories()[dm.AnnotationType.label] - - # label_names = [label_item.name for label_item in label_cat] + label_points: dm.PointsCategories | None = dm_dataset.categories().get(dm.AnnotationType.points, None) label_idx_to_ann_types = defaultdict(set) total = len(dm_dataset) @@ -447,7 +451,10 @@ def get_label_to_ann_types( continue for dm_ann in dm_item.annotations: label_id = getattr(dm_ann, "label", None) - if label_id is not None: + if label_points: + # for keypoint annotations, we force the label_id to be 0 which is the first label in the dm dataset + label_idx_to_ann_types[0].add(dm_ann.type) + elif label_id: label_idx_to_ann_types[label_id].add(dm_ann.type) # TODO: need to check if this is needed. # for dm_attr_key in dm_ann.attributes: @@ -472,7 +479,7 @@ def get_valid_project_labels( # noqa: C901 :param project_type: Geti project type :param dm_infos: infos of datumaro dataset - :param dm_categories: Categories of Datumaro dataset obejct + :param dm_categories: Categories of Datumaro dataset object :param label_to_ann_types: A mapping of label names in dataset to ann_types with which they appear :param selected_labels: label_names that a user selected to include in the created project :param include_all_labels: If True, ignore selected_labels then include all possible labels for each task_type @@ -776,7 +783,7 @@ def get_project_type(project: Project) -> GetiProjectType: @staticmethod def project_type_to_label_domain(project_type: GetiProjectType) -> Domain: """ - Get an label domain of interest by project_type + Get a label domain of interest by project_type :param project_type: project_type :return: label domain related to the project_type @@ -792,7 +799,7 @@ def project_type_to_label_domain(project_type: GetiProjectType) -> Domain: GetiProjectType.ANOMALY_SEGMENTATION: Domain.ANOMALY_SEGMENTATION, GetiProjectType.INSTANCE_SEGMENTATION: Domain.INSTANCE_SEGMENTATION, GetiProjectType.ROTATED_DETECTION: Domain.ROTATED_DETECTION, - # For CHAINED_DETECTION_CLSSIFICATION project, we interest in bbox annotations in dm_dataset + # For CHAINED_DETECTION_CLASSIFICATION project, we interest in bbox annotations in dm_dataset GetiProjectType.CHAINED_DETECTION_CLASSIFICATION: Domain.DETECTION, # For CHAINED_DETECTION_SEGMENTATION project, we interest in bbox, polygon, and ellipse annotations # Note that Domain.SEGMENTATION includes all of them. 
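For context on the positions loop added in `get_keypoint_structure` (see the convert_utils.py hunk above): Datumaro's `PointsCategories` stores default keypoint coordinates flattened as `[x0, y0, x1, y1, ...]`, one (x, y) pair per label. Below is a minimal standalone sketch of that unpacking, using hypothetical `labels`/`positions` data in place of the real `cat` object; it is an illustration of the indexing, not part of the diff.

```python
# Sketch only: mirrors the interleaved-coordinate unpacking used in
# get_keypoint_structure(); `labels` and `positions` stand in for the
# fields of a datumaro PointsCategories item.
labels = ["head", "neck", "tail"]
positions = [0.5, 0.1, 0.5, 0.4, 0.5, 0.9]  # x0, y0, x1, y1, x2, y2

structure: dict[str, list] = {"edges": [], "positions": []}
for i in range(0, len(positions), 2):
    structure["positions"].append(
        {"label": labels[i // 2], "x": positions[i], "y": positions[i + 1]}
    )

# structure["positions"] ->
# [{'label': 'head', 'x': 0.5, 'y': 0.1},
#  {'label': 'neck', 'x': 0.5, 'y': 0.4},
#  {'label': 'tail', 'x': 0.5, 'y': 0.9}]
```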
@@ -839,6 +846,7 @@ def get_validated_task_type(cls, project: Project) -> TaskType: TaskType.ANOMALY_DETECTION, TaskType.ANOMALY_SEGMENTATION, TaskType.ROTATED_DETECTION, + TaskType.KEYPOINT_DETECTION, ] trainable_tasks = project.get_trainable_task_nodes() diff --git a/interactive_ai/workflows/geti_domain/common/jobs_common_extras/datumaro_conversion/mappers/annotation_scene_mapper.py b/interactive_ai/workflows/geti_domain/common/jobs_common_extras/datumaro_conversion/mappers/annotation_scene_mapper.py index bb796b1ae..a1addaa41 100644 --- a/interactive_ai/workflows/geti_domain/common/jobs_common_extras/datumaro_conversion/mappers/annotation_scene_mapper.py +++ b/interactive_ai/workflows/geti_domain/common/jobs_common_extras/datumaro_conversion/mappers/annotation_scene_mapper.py @@ -1,6 +1,6 @@ # Copyright (C) 2022-2025 Intel Corporation # LIMITED EDGE SOFTWARE DISTRIBUTION LICENSE -"""This module implements annotation scene mapper for conversion between SC AnnotationScene and Datumaro annotations""" +"""This module implements a mapper for conversion between Geti AnnotationScene and Datumaro annotations""" from itertools import chain @@ -13,7 +13,7 @@ class LabelMap: - """Map Datumaro label index (integer) to SC Label""" + """Map Datumaro label index (integer) to Geti Label""" def __init__( self, @@ -35,7 +35,7 @@ def __getitem__(self, key: int) -> Label: return self.label_map[key] def get_dm_label_by_sc_label_id(self, label_id: ID) -> int | None: - """Get Datumaro label id (int) from the SC label id (ID). If cannot find, return None.""" + """Get Datumaro label id (int) from the Geti label id (ID). If cannot find, return None.""" dm_label, _ = self.label_categories.find(str(label_id)) return dm_label @@ -64,13 +64,8 @@ def forward(self, instance: AnnotationScene) -> list[dm.Annotation]: for sc_ann in instance.annotations: if not isinstance(sc_ann.shape, Keypoint): # For task-chain, annotations that only contain labels from other tasks will return None and are removed - if ( - dm_ann := self._forward_ann( - annotation=sc_ann, - width=instance.media_width, - height=instance.media_height, - ) - ) is not None: + dm_ann = self._forward_ann(annotation=sc_ann, width=instance.media_width, height=instance.media_height) + if dm_ann: dm_anns.append(dm_ann) else: sc_keypoint_anns.append(sc_ann) diff --git a/interactive_ai/workflows/geti_domain/common/jobs_common_extras/datumaro_conversion/mappers/label_mapper.py b/interactive_ai/workflows/geti_domain/common/jobs_common_extras/datumaro_conversion/mappers/label_mapper.py index a670a95e3..5236f316b 100644 --- a/interactive_ai/workflows/geti_domain/common/jobs_common_extras/datumaro_conversion/mappers/label_mapper.py +++ b/interactive_ai/workflows/geti_domain/common/jobs_common_extras/datumaro_conversion/mappers/label_mapper.py @@ -8,6 +8,7 @@ from datumaro.components.annotation import GroupType as DmGroupType from geti_types import ID from iai_core.entities.color import Color +from iai_core.entities.keypoint_structure import KeypointStructure from iai_core.entities.label import Label from iai_core.entities.label_schema import LabelGroupType, LabelSchema @@ -59,8 +60,8 @@ class LabelTreeMapper: color_key = "__color__" @classmethod - def forward( - cls, label_schema: LabelSchema, include_empty: bool = False + def forward( # noqa: C901 + cls, label_schema: LabelSchema, include_empty: bool = False, keypoint_structure: KeypointStructure | None = None ) -> tuple[dm.LabelCategories, dm.PointsCategories]: """Convert SC label list and their tree info to Datumaro 
LabelCategories""" # labels = label_schema.get_labels(include_empty=include_empty) @@ -78,7 +79,17 @@ def forward( label_cat = dm.LabelCategories() point_cat = dm.PointsCategories() - point_cat.add(label_id=0, labels=[str(label.id_) for label in labels], joints=[]) + if keypoint_structure: + label_id_to_idx = {label.id_: idx for idx, label in enumerate(labels)} + joints = [] + for edge in keypoint_structure._edges: + node_1 = label_id_to_idx[edge.node_1] + node_2 = label_id_to_idx[edge.node_2] + joints.append([node_1, node_2]) + positions = [] + for position in keypoint_structure._positions: + positions.extend([position.x, position.y]) + point_cat.add(label_id=0, labels=[str(label.id_) for label in labels], joints=joints, positions=positions) for label in labels: parent = parent_map.get(label) @@ -125,11 +136,13 @@ class LabelSchemaMapper: """Mapping LabelSchema between Datumaro and SC""" @staticmethod - def forward(label_schema: LabelSchema, include_empty: bool = False) -> DmLabelSchemaInfo: + def forward( + label_schema: LabelSchema, include_empty: bool = False, keypoint_structure: KeypointStructure | None = None + ) -> DmLabelSchemaInfo: """Convert SC LabelSchema to DmLabelSchemaInfo""" label_schema_id = label_schema.id_ - label_cat, point_cat = LabelTreeMapper.forward(label_schema, include_empty) + label_cat, point_cat = LabelTreeMapper.forward(label_schema, include_empty, keypoint_structure) label_cat = LabelGroupsMapper.update(label_cat, label_schema, include_empty) mask_cat = dm.MaskCategories( diff --git a/interactive_ai/workflows/geti_domain/common/jobs_common_extras/datumaro_conversion/sc_extractor.py b/interactive_ai/workflows/geti_domain/common/jobs_common_extras/datumaro_conversion/sc_extractor.py index d114efd1c..163ba0f51 100644 --- a/interactive_ai/workflows/geti_domain/common/jobs_common_extras/datumaro_conversion/sc_extractor.py +++ b/interactive_ai/workflows/geti_domain/common/jobs_common_extras/datumaro_conversion/sc_extractor.py @@ -1,6 +1,6 @@ # Copyright (C) 2022-2025 Intel Corporation # LIMITED EDGE SOFTWARE DISTRIBUTION LICENSE -"""This module implements conversion from SC Dataset to Datumaro dataset.""" +"""This module implements conversion from Geti Dataset to Datumaro dataset.""" import logging import os @@ -25,6 +25,7 @@ from datumaro import LabelCategories as dm_LabelCategories from datumaro import MaskCategories as dm_MaskCategories from datumaro import Points as dm_Points +from datumaro import PointsCategories as dm_PointCategories from datumaro import Polygon as dm_Polygon from datumaro import Video as dm_Video from datumaro import VideoFrame as dm_VideoFrame @@ -43,12 +44,21 @@ from iai_core.entities.dataset_storage import DatasetStorage from iai_core.entities.datasets import Dataset, NullDataset from iai_core.entities.image import Image +from iai_core.entities.keypoint_structure import KeypointStructure from iai_core.entities.label import Domain from iai_core.entities.label_schema import LabelGroupType, LabelSchema from iai_core.entities.shapes import Ellipse, Keypoint, Polygon, Rectangle from iai_core.entities.subset import Subset from iai_core.entities.video import Video, VideoFrame -from iai_core.repos import AnnotationSceneRepo, DatasetRepo, ImageRepo, LabelRepo, VideoAnnotationRangeRepo, VideoRepo +from iai_core.repos import ( + AnnotationSceneRepo, + DatasetRepo, + ImageRepo, + LabelRepo, + ProjectRepo, + VideoAnnotationRangeRepo, + VideoRepo, +) from jobs_common_extras.datumaro_conversion.mappers.annotation_scene_mapper import 
AnnotationSceneMapper, LabelMap from jobs_common_extras.datumaro_conversion.mappers.dataset_item_mapper import DatasetItemMapper @@ -69,7 +79,7 @@ class ScExtractor(dm_DatasetBase): """ - Represents the SC dataset as a lazy dataset for Datumaro. + Represents the Geti dataset as a lazy dataset for Datumaro. """ VERSION = "1.0" @@ -96,6 +106,7 @@ def __init__( workspace_id=dataset_storage_identifier.workspace_id, project_id=dataset_storage_identifier.project_id, ) + project = ProjectRepo().get_by_id(dataset_storage_identifier.project_id) label_repo = LabelRepo(project_identifier=project_identifier) self._label_id_to_label = {label.id_: label for label in label_repo.get_all()} @@ -109,7 +120,9 @@ def __init__( self._init_infos() self._categories: dm_CategoriesInfo = {} - self._init_categories(sc_dataset=self._dataset, label_schema=label_schema) + self._init_categories( + sc_dataset=self._dataset, label_schema=label_schema, keypoint_structure=project.keypoint_structure + ) self._use_subset = use_subset self._set_name_mapper() @@ -125,16 +138,22 @@ def _init_infos(self) -> None: """ self._infos = {"ScExtractorVersion": ScExtractor.VERSION} - def _init_categories(self, sc_dataset: Dataset, label_schema: LabelSchema) -> None: # noqa: ARG002 + def _init_categories( + self, + sc_dataset: Dataset, # noqa: ARG002 + label_schema: LabelSchema, + keypoint_structure: KeypointStructure | None, + ) -> None: label_cat = dm_LabelCategories() mask_cat = dm_MaskCategories() - sc_labels = label_schema.get_labels(include_empty=False) + point_cat = dm_PointCategories() + geti_labels = label_schema.get_labels(include_empty=False) # Collect label names that are necessary to be exported valid_labelnames: set[str] = set() - valid_labelnames.update([sc_label.name for sc_label in sc_labels]) + valid_labelnames.update([sc_label.name for sc_label in geti_labels]) - for i, sc_label in enumerate(sc_labels): + for i, sc_label in enumerate(geti_labels): if label_schema is not None: sc_parent = label_schema.label_tree.get_parent(sc_label) if sc_parent is not None: @@ -147,8 +166,19 @@ def _init_categories(self, sc_dataset: Dataset, label_schema: LabelSchema) -> No label_cat.add(sc_label.name) mask_cat.colormap[i] = sc_label.color.rgb_tuple + if keypoint_structure: + joints = [] + for edge in keypoint_structure._edges: + node_1 = self._label_id_to_idx[edge.node_1] + 1 + node_2 = self._label_id_to_idx[edge.node_2] + 1 + joints.append([node_1, node_2]) # Joints start at 1 index + positions = [] + for position in keypoint_structure._positions: + positions.extend([position.x, position.y]) + point_cat.add(label_id=0, labels=[label.name for label in geti_labels], joints=joints, positions=positions) + # Old version of datumaro doesn't have 'add_label_group' function - # We thus check the existance of function before the function call + # We thus check the existence of function before the function call if label_schema is not None: label_groups = label_schema.get_groups() for label_group in label_groups: @@ -171,6 +201,7 @@ def _init_categories(self, sc_dataset: Dataset, label_schema: LabelSchema) -> No self._categories = { dm_AnnotationType.label: label_cat, dm_AnnotationType.mask: mask_cat, + dm_AnnotationType.points: point_cat, } self._label_name_to_all_parent = self.get_label_to_all_parents() @@ -251,7 +282,7 @@ def _convert_annotations(self, annotations: list[Annotation], width: int, height ) ) elif isinstance(shape, Rectangle): - # Classification is represented as full boxes in SC + # Classification is represented as full boxes 
in Geti if Rectangle.is_full_box(shape): # hierarchical classification task may store multiple-labels # e.g.) in "rectangle" -> "square" structure, @@ -300,15 +331,15 @@ def _convert_annotations(self, annotations: list[Annotation], width: int, height else: raise NotImplementedError(f"Unsupported conversion to DM of {sc_ann.__class__.__name__} items") - if keypoints and visibilities: - dm_anns.append( - dm_Points( - points=keypoints, - visibility=visibilities, - label=primary_label_id, - attributes=keypoint_labels, - ) + if keypoints and visibilities: + dm_anns.append( + dm_Points( + points=keypoints, + visibility=visibilities, + label=0, + attributes=keypoint_labels, ) + ) return dm_anns @@ -318,9 +349,9 @@ def _convert_item( video_root: str | None = None, ) -> dm_DatasetItem: """ - Convert SC dataset item to DM dataset item. + Convert Geti dataset item to DM dataset item. - :param sc_item: SC dataset item + :param sc_item: Geti dataset item :param video_root: If this value is None, save the video as individual frame images. Otherwise, save the video in its original format. :return: Datumaro dataset item @@ -396,7 +427,7 @@ class DatasetItemWithFuture: class ScExtractorForFlyteJob(ScExtractor): """ - Represents the SC dataset as a lazy dataset for Datumaro. + Represents the Geti dataset as a lazy dataset for Datumaro. It is used for Flyte job. """ @@ -418,16 +449,20 @@ def __init__( def _set_name_mapper(self): self._name_mapper = IDMapper - def _init_categories(self, sc_dataset: Dataset, label_schema: LabelSchema | None) -> None: + def _init_categories( + self, sc_dataset: Dataset, label_schema: LabelSchema | None, keypoint_structure: KeypointStructure | None = None + ) -> None: if label_schema is None: raise RuntimeError("label_schema=None is not allowed.") - dm_label_schema_info = LabelSchemaMapper.forward(label_schema=label_schema, include_empty=True) + dm_label_schema_info = LabelSchemaMapper.forward( + label_schema=label_schema, include_empty=True, keypoint_structure=keypoint_structure + ) self._categories = { - dm_AnnotationType.label: dm_label_schema_info.label_cat, - dm_AnnotationType.mask: dm_label_schema_info.mask_cat, - dm_AnnotationType.points: dm_label_schema_info.point_cat, + dm_AnnotationType.label: dm_label_schema_info.label_cat, # label names: name, parent, attributes + dm_AnnotationType.mask: dm_label_schema_info.mask_cat, # label colour: rgb + dm_AnnotationType.points: dm_label_schema_info.point_cat, # keypoint label: position, visibility } self.dataset_item_mapper = DatasetItemMapper( @@ -521,7 +556,7 @@ class ProgressConfig(NamedTuple): class ScExtractorFromDatasetStorage(ScExtractor): """ - Represents the SC dataset storage as a lazy dataset for Datumaro. + Represents the Geti dataset storage as a lazy dataset for Datumaro. """ def __init__( @@ -534,10 +569,10 @@ def __init__( progress_config: ProgressConfig | None = None, ) -> None: """ - Represents the SC dataset storage as a lazy dataset for Datumaro. + Represents the Geti dataset storage as a lazy dataset for Datumaro. - :param dataset_storage: SC dataset storage - :param label_schema: SC label schema + :param dataset_storage: Geti dataset storage + :param label_schema: Geti label schema :param use_subset: Whether to set subset name from sc item or not :param include_unannotated: Whether to include unannotated media or not :param video_export_config: If this is given, save video as its original format. 
@@ -599,7 +634,7 @@ def __iter__(self) -> Iterator[dm_DatasetItem]: for i, identifier in enumerate(self._identifiers): if self._progress_config: # Datumaro's export iterates the dataset several times while exporting. - # This is trickey, but we need to consider the implementation detail of datumaro here. + # This is tricky, but we need to consider the implementation detail of datumaro here. if i == 0: self._iter_count += 1 logger.info( diff --git a/interactive_ai/workflows/geti_domain/common/pyproject.toml b/interactive_ai/workflows/geti_domain/common/pyproject.toml index a971826ed..6f47ec2e4 100644 --- a/interactive_ai/workflows/geti_domain/common/pyproject.toml +++ b/interactive_ai/workflows/geti_domain/common/pyproject.toml @@ -27,12 +27,12 @@ evaluation = [ ] shard-dataset = [ - "datumaro==1.8.0", + "datumaro==1.10.0", ] datumaro-conversion = [ "media-utils", - "datumaro==1.8.0", + "datumaro==1.10.0", ] diff --git a/interactive_ai/workflows/geti_domain/common/tests/fixtures/dataset.py b/interactive_ai/workflows/geti_domain/common/tests/fixtures/dataset.py index 968d8740f..5aea67c45 100644 --- a/interactive_ai/workflows/geti_domain/common/tests/fixtures/dataset.py +++ b/interactive_ai/workflows/geti_domain/common/tests/fixtures/dataset.py @@ -11,6 +11,7 @@ from iai_core.entities.dataset_item import DatasetItem from iai_core.entities.datasets import Dataset from iai_core.entities.image import Image +from iai_core.entities.keypoint_structure import KeypointEdge, KeypointPosition, KeypointStructure from iai_core.entities.label_schema import LabelSchema from iai_core.entities.subset import Subset from iai_core.entities.video import VideoFrame @@ -203,6 +204,15 @@ def fxt_dataset_and_label_schema( request: pytest.FixtureRequest, ) -> tuple[DatasetStorageIdentifier, Dataset, LabelSchema]: model_template_id = request.param + keypoint_structure = None + if model_template_id == "keypoint_detection": + keypoint_structure = KeypointStructure( + edges=[KeypointEdge(node_1=ID("node_1"), node_2=ID("node_2"))], + positions=[ + KeypointPosition(node=ID("node_1"), x=0.123, y=0.123), + KeypointPosition(node=ID("node_2"), x=1, y=1), + ], + ) project, label_schema = generate_random_annotated_project( request, name="__Test dataset entities (1)", @@ -211,13 +221,14 @@ def fxt_dataset_and_label_schema( number_of_images=12, number_of_videos=1, # This is required by OTX training classification task code - # We need to compare `Label` equivalance + # We need to compare `Label` equivalence # `color (rgba)` and `hotkey` are used to the comparison. 
label_configs=[ {"name": "rectangle", "color": "#00ff00ee", "hotkey": "ctrl-1"}, {"name": "ellipse", "color": "#0000ff99", "hotkey": "ctrl-2"}, {"name": "triangle", "color": "#ff000011", "hotkey": "ctrl-3"}, ], + keypoint_structure=keypoint_structure, ) ( diff --git a/interactive_ai/workflows/geti_domain/common/tests/test_helpers.py b/interactive_ai/workflows/geti_domain/common/tests/test_helpers.py index a7f233d01..50796463b 100644 --- a/interactive_ai/workflows/geti_domain/common/tests/test_helpers.py +++ b/interactive_ai/workflows/geti_domain/common/tests/test_helpers.py @@ -273,6 +273,7 @@ def generate_random_annotated_project( max_size_shape: int = 100, configurable_parameters: HyperParameters[ConfigurableParameters] | None = None, label_configs=None, + keypoint_structure=None, ) -> tuple[Project, LabelSchema]: """ Will create a randomly annotated project in the default workspace with images @@ -295,6 +296,7 @@ def generate_random_annotated_project( :param configurable_parameters: [Optionally] Set the configurable parameters for the newly created task in the project :param label_configs: List of label configuration which is a dictionary of label attributes + :param keypoint_structure: Keypoint structure to assign to the project, only for Keypoint Detection projects :return: Generated project """ @@ -324,6 +326,7 @@ def generate_random_annotated_project( labels=label_configs, model_template_id=model_template_id, configurable_parameters=parameter_data, + keypoint_structure=keypoint_structure, ) dataset_storage = project_input.get_training_dataset_storage() dataset_storage_identifier = dataset_storage.identifier diff --git a/interactive_ai/workflows/geti_domain/common/tests/unit/extras/datumaro_conversion/test_mapper.py b/interactive_ai/workflows/geti_domain/common/tests/unit/extras/datumaro_conversion/test_mapper.py index b24faa4fc..07ea1bb76 100644 --- a/interactive_ai/workflows/geti_domain/common/tests/unit/extras/datumaro_conversion/test_mapper.py +++ b/interactive_ai/workflows/geti_domain/common/tests/unit/extras/datumaro_conversion/test_mapper.py @@ -13,11 +13,12 @@ from iai_core.entities.label import Label from iai_core.entities.label_schema import LabelSchema from iai_core.entities.scored_label import ScoredLabel -from iai_core.entities.shapes import Ellipse, Point, Polygon, Rectangle +from iai_core.entities.shapes import Ellipse, Keypoint, Point, Polygon, Rectangle from jobs_common_extras.datumaro_conversion.mappers.annotation_scene_mapper import AnnotationSceneMapper, LabelMap from jobs_common_extras.datumaro_conversion.mappers.dataset_item_mapper import ( EllipseMapper, + KeypointMapper, LabelMapper, PolygonMapper, RectangleMapper, @@ -120,6 +121,22 @@ def test_forward(self) -> None: assert mapped_polygon == expected_mapped_polygon +class TestKeypointMapper: + def test_forward(self) -> None: + modification_date = datetime.datetime.now(tz=datetime.timezone.utc) + keypoint = Keypoint(x=0.5, y=0.6, is_visible=True, modification_date=modification_date) + expected_mapped_keypoint = { + "type": "KEYPOINT", + "points": [0.5, 0.6], + "modification_date": modification_date.strftime("%Y-%m-%dT%H:%M:%S.%f"), + "visibility": [dm.Points.Visibility.visible], + } + + mapped_keypoint = KeypointMapper.forward(keypoint) + + assert mapped_keypoint == expected_mapped_keypoint + + class TestShapeMapper: @pytest.mark.parametrize( "shape", diff --git a/interactive_ai/workflows/geti_domain/common/uv.lock b/interactive_ai/workflows/geti_domain/common/uv.lock index b7520e7d8..d1ab6ae3f 100644 --- 
a/interactive_ai/workflows/geti_domain/common/uv.lock +++ b/interactive_ai/workflows/geti_domain/common/uv.lock @@ -551,7 +551,7 @@ wheels = [ [[package]] name = "datumaro" -version = "1.8.0" +version = "1.10.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "attrs" }, @@ -572,6 +572,7 @@ dependencies = [ { name = "ovmsclient" }, { name = "pandas" }, { name = "pillow" }, + { name = "portalocker" }, { name = "protobuf" }, { name = "pyarrow" }, { name = "pycocotools" }, @@ -589,11 +590,11 @@ dependencies = [ { name = "tritonclient", extra = ["all"] }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/12/7c/0d29f21a68e9e1543f87cb78213e5870f499cf28bee850fa519ba491e31a/datumaro-1.8.0.tar.gz", hash = "sha256:2a8ef4bd36f968ded6b6f4f56fe974431cd7bd4d1e3d9f75e447b1c928fa1dbc", size = 564692 } +sdist = { url = "https://files.pythonhosted.org/packages/50/62/a915845b2d650ec2e2dc9f07716872228f898ff4ee7c45a729304295b3b7/datumaro-1.10.0.tar.gz", hash = "sha256:408a07fb4c74a2d832d4493c9c8001283cc0f40b02fdddc26b9e05ffb6ee8477", size = 567726 } wheels = [ - { url = "https://files.pythonhosted.org/packages/04/a0/ada181dd1cb3d05f88ef24e6eb39c30e0065cfc27758f2c404fdcb715b9c/datumaro-1.8.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:74a4f3f5a0acecb331c91a4eac2127bf28fea90ece75fc98a51f81eaa65d8a4a", size = 1137441 }, - { url = "https://files.pythonhosted.org/packages/77/54/e1d97265dfbd6024b5166bda01289a4f630b6bae727d1dcb408b2ae4c1b2/datumaro-1.8.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f3fe0af8d503e36479e97a53295571008d38826d2a6483fb3747ae1ff24dde96", size = 1654376 }, - { url = "https://files.pythonhosted.org/packages/d4/25/5ad4d1f77ee40860b8e78265f128c69b4aa28c9011f4d8b12968c91ec342/datumaro-1.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:673556ebc0090956029a67ef29c0fe76681c4414d2c3a3ffb5890851946c4b0d", size = 954511 }, + { url = "https://files.pythonhosted.org/packages/46/62/3bb463635ebac05b2603416e7c79e56ac6af7c401056246ab448dc09c8f8/datumaro-1.10.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1e7cee5d990cf85c246e33b8dfe21a67298fbb2a8886aa5607d97924087a0b97", size = 1167120 }, + { url = "https://files.pythonhosted.org/packages/fc/a1/3aa0975be54a51480f83a6f7315e2d6dc31b1b446847faa4cc34589db2f8/datumaro-1.10.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d2f41647ed54bb09ca306614203ae90f69e65e89e18d7c36ed0f75c69278ee6a", size = 1682767 }, + { url = "https://files.pythonhosted.org/packages/84/46/035835142298a8eacea7bdc19a7b0949a7d5d33a480814666741c39a63ba/datumaro-1.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:2823e930c1e4ca2c75046354d6214401498eff5f3c27c63d7a62d84958be56d5", size = 981121 }, ] [[package]] @@ -2237,19 +2238,20 @@ wheels = [ [[package]] name = "orjson" -version = "3.10.5" +version = "3.10.7" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/f9/ba/a506ace6d9e4cb96cb4bed678fddc2605b8befe7fbbbecc309af1364b7c4/orjson-3.10.5.tar.gz", hash = "sha256:7a5baef8a4284405d96c90c7c62b755e9ef1ada84c2406c24a9ebec86b89f46d", size = 5249974 } +sdist = { url = "https://files.pythonhosted.org/packages/9e/03/821c8197d0515e46ea19439f5c5d5fd9a9889f76800613cfac947b5d7845/orjson-3.10.7.tar.gz", hash = "sha256:75ef0640403f945f3a1f9f6400686560dbfb0fb5b16589ad62cd477043c4eee3", size = 5056450 } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/b3/7e/df5ca5a663af11e1b825c7909c31ed7db2dad8aaf0cced81ac03d1dabc2b/orjson-3.10.5-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:545d493c1f560d5ccfc134803ceb8955a14c3fcb47bbb4b2fee0232646d0b932", size = 258729 }, - { url = "https://files.pythonhosted.org/packages/7b/6d/7aafbe1f7e3c6e314c34e2201d7a0c09670753446351edbd939aa5efcd4e/orjson-3.10.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f4324929c2dd917598212bfd554757feca3e5e0fa60da08be11b4aa8b90013c1", size = 152635 }, - { url = "https://files.pythonhosted.org/packages/88/e3/adfce728f25dd40afaa54f90d193b3cd9892d8b0037fe8af057aa00c0957/orjson-3.10.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8c13ca5e2ddded0ce6a927ea5a9f27cae77eee4c75547b4297252cb20c4d30e6", size = 155820 }, - { url = "https://files.pythonhosted.org/packages/12/6c/69cd85db2486a4372525b5174a274ef9bbc0c997c0b74520e1b8d895a69e/orjson-3.10.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b6c8e30adfa52c025f042a87f450a6b9ea29649d828e0fec4858ed5e6caecf63", size = 168867 }, - { url = "https://files.pythonhosted.org/packages/6c/4d/313dcbecc7ab7d9d3590dba4be613e3e8b7df821d6ab77a6cb17e58fc7ae/orjson-3.10.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:338fd4f071b242f26e9ca802f443edc588fa4ab60bfa81f38beaedf42eda226c", size = 144984 }, - { url = "https://files.pythonhosted.org/packages/19/2f/66c02f10bdf5989d2ffb6e0b65975ae2eea286547b01115cffb552bd81dd/orjson-3.10.5-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:6970ed7a3126cfed873c5d21ece1cd5d6f83ca6c9afb71bbae21a0b034588d96", size = 176029 }, - { url = "https://files.pythonhosted.org/packages/cf/c7/b150a843e7aa30e3bd5fb5f00377a720ae0aaba4231db636c0726777d2da/orjson-3.10.5-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:235dadefb793ad12f7fa11e98a480db1f7c6469ff9e3da5e73c7809c700d746b", size = 171438 }, - { url = "https://files.pythonhosted.org/packages/28/89/243a52595958f62a0dae47450c8bb1e87945e71a1ac3af88f7f99ca73e16/orjson-3.10.5-cp310-none-win32.whl", hash = "sha256:be79e2393679eda6a590638abda16d167754393f5d0850dcbca2d0c3735cebe2", size = 144390 }, - { url = "https://files.pythonhosted.org/packages/a5/38/9f6537686f78de066688d6f2629963a645f0cbe6e79375b3f6b6b176f4e7/orjson-3.10.5-cp310-none-win_amd64.whl", hash = "sha256:c4a65310ccb5c9910c47b078ba78e2787cb3878cdded1702ac3d0da71ddc5228", size = 141289 }, + { url = "https://files.pythonhosted.org/packages/49/12/60931cf808b9334f26210ab496442f4a7a3d66e29d1cf12e0a01857e756f/orjson-3.10.7-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:74f4544f5a6405b90da8ea724d15ac9c36da4d72a738c64685003337401f5c12", size = 251312 }, + { url = "https://files.pythonhosted.org/packages/fe/0e/efbd0a2d25f8e82b230eb20b6b8424be6dd95b6811b669be9af16234b6db/orjson-3.10.7-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:34a566f22c28222b08875b18b0dfbf8a947e69df21a9ed5c51a6bf91cfb944ac", size = 148124 }, + { url = "https://files.pythonhosted.org/packages/dd/47/1ddff6e23fe5f4aeaaed996a3cde422b3eaac4558c03751723e106184c68/orjson-3.10.7-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bf6ba8ebc8ef5792e2337fb0419f8009729335bb400ece005606336b7fd7bab7", size = 147277 }, + { url = 
"https://files.pythonhosted.org/packages/04/da/d03d72b54bdd60d05de372114abfbd9f05050946895140c6ff5f27ab8f49/orjson-3.10.7-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ac7cf6222b29fbda9e3a472b41e6a5538b48f2c8f99261eecd60aafbdb60690c", size = 152955 }, + { url = "https://files.pythonhosted.org/packages/7f/7e/ef8522dbba112af6cc52227dcc746dd3447c7d53ea8cea35740239b547ee/orjson-3.10.7-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:de817e2f5fc75a9e7dd350c4b0f54617b280e26d1631811a43e7e968fa71e3e9", size = 163955 }, + { url = "https://files.pythonhosted.org/packages/b6/bc/fbd345d771a73cacc5b0e774d034cd081590b336754c511f4ead9fdc4cf1/orjson-3.10.7-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:348bdd16b32556cf8d7257b17cf2bdb7ab7976af4af41ebe79f9796c218f7e91", size = 141896 }, + { url = "https://files.pythonhosted.org/packages/82/0a/1f09c12d15b1e83156b7f3f621561d38650fe5b8f39f38f04a64de1a87fc/orjson-3.10.7-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:479fd0844ddc3ca77e0fd99644c7fe2de8e8be1efcd57705b5c92e5186e8a250", size = 170166 }, + { url = "https://files.pythonhosted.org/packages/a6/d8/eee30caba21a8d6a9df06d2519bb0ecd0adbcd57f2e79d360de5570031cf/orjson-3.10.7-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:fdf5197a21dd660cf19dfd2a3ce79574588f8f5e2dbf21bda9ee2d2b46924d84", size = 167804 }, + { url = "https://files.pythonhosted.org/packages/44/fe/d1d89d3f15e343511417195f6ccd2bdeb7ebc5a48a882a79ab3bbcdf5fc7/orjson-3.10.7-cp310-none-win32.whl", hash = "sha256:d374d36726746c81a49f3ff8daa2898dccab6596864ebe43d50733275c629175", size = 143010 }, + { url = "https://files.pythonhosted.org/packages/88/8c/0e7b8d5a523927774758ac4ce2de4d8ca5dda569955ba3aeb5e208344eda/orjson-3.10.7-cp310-none-win_amd64.whl", hash = "sha256:cb61938aec8b0ffb6eef484d480188a1777e67b05d58e41b435c74b9d84e0b9c", size = 137306 }, ] [[package]] @@ -2338,6 +2340,18 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/88/5f/e351af9a41f866ac3f1fac4ca0613908d9a41741cfcf2228f4ad853b697d/pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669", size = 20556 }, ] +[[package]] +name = "portalocker" +version = "3.1.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pywin32", marker = "sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/ac/91/8bfe23e1f7f630f2061ef38b5225d9fda9068d6a30fcbc187951e678e630/portalocker-3.1.1.tar.gz", hash = "sha256:ec20f6dda2ad9ce89fa399a5f31f4f1495f515958f0cb7ca6543cef7bb5a749e", size = 43708 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f7/60/1974cfdd5bb770568ddc6f89f3e0df4cfdd1acffd5a609dff5e95f48c6e2/portalocker-3.1.1-py3-none-any.whl", hash = "sha256:80e984e24de292ff258a5bea0e4f3f778fff84c0ae1275dbaebc4658de4aacb3", size = 19661 }, +] + [[package]] name = "pre-commit" version = "4.2.0" @@ -3350,8 +3364,8 @@ dev = [ [package.metadata] requires-dist = [ { name = "dataclasses-json", specifier = "==0.5.7" }, - { name = "datumaro", marker = "extra == 'datumaro-conversion'", specifier = "==1.8.0" }, - { name = "datumaro", marker = "extra == 'shard-dataset'", specifier = "==1.8.0" }, + { name = "datumaro", marker = "extra == 'datumaro-conversion'", specifier = "==1.10.0" }, + { name = "datumaro", marker = "extra == 'shard-dataset'", specifier = "==1.10.0" }, { name = "flytekit", specifier = "==1.10.2" }, { name = "flytekitplugins-pod", specifier = "==1.2.4" }, { 
name = "geti-k8s-tools", editable = "../../../../libs/k8s_tools" }, diff --git a/interactive_ai/workflows/geti_domain/dataset_ie/job/utils/datumaro_parser.py b/interactive_ai/workflows/geti_domain/dataset_ie/job/utils/datumaro_parser.py index cfc837402..cf86e6838 100644 --- a/interactive_ai/workflows/geti_domain/dataset_ie/job/utils/datumaro_parser.py +++ b/interactive_ai/workflows/geti_domain/dataset_ie/job/utils/datumaro_parser.py @@ -24,11 +24,12 @@ class DatumaroProjectParser(ProjectParser): """ An implementation of ProjectParser for Datumaro. - ProjectFactory will call the fucntions in the parser to create a project. + ProjectFactory will call the functions in the parser to create a project. - :param dm_dataset: Datumaro dataset instance :param project_name: A name of project to be created :param project_type: Geti project type + :param dm_infos: Infos of datumaro dataset + :param dm_categories: Categories belonging to the datumaro dataset :param label_to_ann_types: A mapping of label names in dataset to ann_types with which they appear :param selected_labels: label_names that a user selected to include in the created project :param color_by_label: Optional, mapping between label names and their color. @@ -53,7 +54,6 @@ def __init__( # noqa: PLR0913 self._project_name: str = project_name self._project_type: GetiProjectType = project_type self._color_by_label = color_by_label if color_by_label else {} - ( self._task_names_ordered, self._task_name_to_task_type, @@ -99,9 +99,9 @@ def _validate(self): Raise an exception if parsed data is not proper to create a project """ - # 'Minimum 2 top-label calssifcation labels' rule + # 'Minimum 2 top-label classification labels' rule # For a classification task, at least 2 labels should be provided. - # Otherwise the task is ill-defined (a classification model that predicts only 1 class makes no sense). + # Otherwise, the task is ill-defined (a classification model that predicts only 1 class makes no sense). # Top-level means at the root level of the hierarchy. for task_name in self.get_tasks_names(): task_type = self.get_task_type_by_name(task_name=task_name) @@ -153,7 +153,6 @@ def _get_label_name_to_label_meta( # noqa: C901 :param label_groups: metadata for label group information each entry has 'name' and 'labels' :param label_name_to_parent: mapping from label_name to parent_name :param label_infos: metadata for label 'color' and 'hotkey' - :return: mapping from label_name to label_meta """ # Store label group name with label_name as key @@ -165,7 +164,7 @@ def _get_label_name_to_label_meta( # noqa: C901 for label_name in label_names: label_name_to_group[label_name] = group_name - # label_infos optionaly has 'color, 'hotkey' meta-data + # label_infos optionally has 'color, 'hotkey' meta-data label_meta: dict[str, dict[str, Any]] = {} for label_info in label_infos: label_name = label_info["name"] @@ -195,14 +194,14 @@ def _extract_labels_metadata_from_dm_dataset( include_all_labels: bool = False, ) -> dict[str, dict[str, dict[str, Any]]]: """ - Parse Datumaro dataset instance to extracte label meta data. - Label meta data includes 'parent', 'group', 'color', and 'hotkey' of labels + Parse Datumaro dataset instance to extract label metadata. + Label metadata includes 'parent', 'group', 'color', and 'hotkey' of labels :param dm_infos: Infos of datumaro dataset. :param dm_categories: Categories of datumaro dataset. 
:param selected_labels: label_names that a user selected to include in the created project :param include_all_labels: if True, ignore selected_labels then include all possible labels for each task_type - :return: A dictionary stores label_meta with label_name as key + :return: A dictionary storing label_meta with label_name as key """ ( label_groups, @@ -298,7 +297,7 @@ def _process_task_types( """ Assign task_name to task_type - :param trainable_task_types: task node types that are trainable + :param project_type: the Geti project type :return: A list of task_names (used in rest api), A dictionary mapping task_name to task_type """ trainable_task_types = ImportUtils.get_trainable_tasks_of_project_type(project_type=project_type) @@ -402,7 +401,7 @@ def get_keypoint_structure(self) -> dict[str, list]: def _get_label_meta(self, task_name: str, label_name: str) -> dict[str, Any]: """ Get the label_meta of label:label_name in the task:task_name - label_meta optionaly contains 'group', 'parent', 'color', and 'hotkey' + label_meta optionally contains 'group', 'parent', 'color', and 'hotkey' :param task_name: Name of the task node :param label_name: Name of the label @@ -479,7 +478,6 @@ def parse_project_pipeline(self) -> dict[str, Any]: Build the 'pipeline' field of the REST API 'datasets:prepare-for-import' endpoint retrieving the necessary information from a DatumaroProjectParser. - :param project_parser: A parser to extract project meta-data from the dataset :return: dictionary with the following structure. e.g.) { @@ -557,17 +555,6 @@ def parse_project_pipeline(self) -> dict[str, Any]: FeatureFlagProvider.is_enabled(FeatureFlag.FEATURE_FLAG_KEYPOINT_DETECTION) and task_type == TaskType.KEYPOINT_DETECTION ): - # need 'keypoint_structure' field in the task - # "keypoint_structure": [ - # "edges": [ - # {"nodes": [label1, label2]}, - # ..., - # ], - # "positions": [ - # {"label": label1, "x": 0.5, "y": 0.5}, - # ..., - # ] - # ] task["keypoint_structure"] = self.get_keypoint_structure() tasks.append(task) @@ -595,14 +582,19 @@ def get_filtered_supported_project_types( """ Return possible project types based on CVS-105432, excluding cross-mapping projects. - Project mapping is different based on the dataset format. + Project mapping is different based on the dataset format. (CVS-105432) For Geti-exported Datumaro format: - candaidates should be the task type itself (+ cross-mapping projects). + candidates should be the task type itself (+ cross-mapping projects). For public formats(coco, voc, yolo) + Datumaro format not exported from Geti: candidate tasks would be decided based on annotation types. - label -> classification - bbox -> detection - - polygon/ellipse/mask -> segmentation. + - polygon/ellipse/mask -> segmentation + - points -> keypoint detection + + :param dm_infos: datumaro dataset info + :param label_to_ann_types: mapping of labels in dataset to their ann_type + :return: A list of supported Geti project types """ exported_project_type = ImportUtils.get_exported_project_type(dm_infos) if exported_project_type != GetiProjectType.UNKNOWN: @@ -643,23 +635,16 @@ def get_project_metas_with_labels( Get mapping of domains to label names in the dataset. The list of labels for the domain will be filled with the labels which are compatible with the domain. 
- :param dm_dataset: datumaro dataset + :param dm_infos: datumaro dataset info + :param dm_categories: datumaro dataset categories + :param label_to_ann_types: mapping of labels in dataset to their ann_type :return: A list containing a dictionary for each supported task with their - compatible labels, list of possible domains in dataset based on present - annotation types + compatible labels, list of possible domains in dataset based on present + annotation types """ - - # Project mapping is different based on the dataset format. (CVS-105432) - # For Geti-exported Datumaro format: - # candaidates should be the task type itself + cross-mapping projects. - # For public formats(coco, voc, yolo) + Datumaro format not exported from Geti: - # candidate tasks would be decided based on annotation types. - # - label -> classification - # - bbox -> detection - # - polygon/ellipse/mask -> segmentation. - filtered_supported_project_types = get_filtered_supported_project_types( - dm_infos=dm_infos, label_to_ann_types=label_to_ann_types + dm_infos=dm_infos, + label_to_ann_types=label_to_ann_types, ) # Extract project meta-data that will be utilized when build a REST API response @@ -686,7 +671,7 @@ def get_project_metas_with_labels( f"{ImportUtils.project_type_to_rest_api_string(project_type)}: {e}" ) - # Re-map project_type for hierarchical classifcation task. + # Re-map project_type for hierarchical classification task. # Note that single-label/multi-label/hierarchical classification project has the same project_type in Geti. # But we should distinguish hierarchical classification on REST API for project_meta in project_metas_with_labels: diff --git a/interactive_ai/workflows/geti_domain/dataset_ie/tests/fixtures/datasets.py b/interactive_ai/workflows/geti_domain/dataset_ie/tests/fixtures/datasets.py index 3707586c3..175ef9f72 100644 --- a/interactive_ai/workflows/geti_domain/dataset_ie/tests/fixtures/datasets.py +++ b/interactive_ai/workflows/geti_domain/dataset_ie/tests/fixtures/datasets.py @@ -773,7 +773,7 @@ def get_dataset_info(fxt_dataset_id_str: str) -> DatasetInfo: "left_ankle", # 16 "right_ankle", # 17 } - keypoint_structure = { + keypoint_structure: dict[str, list] = { "edges": [ {"nodes": ["left_shoulder", "left_hip"]}, # [6,12] {"nodes": ["left_ear", "left_shoulder"]}, # [4,6] @@ -795,7 +795,25 @@ def get_dataset_info(fxt_dataset_id_str: str) -> DatasetInfo: {"nodes": ["left_eye", "right_eye"]}, # [2,3] {"nodes": ["left_ankle", "left_knee"]}, # [16,14] ], - "positions": [], # TODO CVS-156570 + "positions": [ + {"label": "nose", "x": 0.1, "y": 0.4}, + {"label": "left_eye", "x": 0.2, "y": 0.5}, + {"label": "right_eye", "x": 0.3, "y": 0.6}, + {"label": "left_ear", "x": 0.4, "y": 0.7}, + {"label": "right_ear", "x": 0.5, "y": 0.8}, + {"label": "left_shoulder", "x": 0.6, "y": 0.9}, + {"label": "right_shoulder", "x": 0.7, "y": 1.0}, + {"label": "left_elbow", "x": 0.8, "y": 0.9}, + {"label": "right_elbow", "x": 0.9, "y": 0.8}, + {"label": "left_wrist", "x": 1.0, "y": 0.7}, + {"label": "right_wrist", "x": 0.9, "y": 0.6}, + {"label": "left_hip", "x": 0.8, "y": 0.5}, + {"label": "right_hip", "x": 0.7, "y": 0.4}, + {"label": "left_knee", "x": 0.6, "y": 0.3}, + {"label": "right_knee", "x": 0.5, "y": 0.2}, + {"label": "left_ankle", "x": 0.4, "y": 0.1}, + {"label": "right_ankle", "x": 0.3, "y": 0.0}, + ], } return DatasetInfo( exported_project_type=GetiProjectType.KEYPOINT_DETECTION, diff --git a/interactive_ai/workflows/geti_domain/dataset_ie/tests/fixtures/projects.py 
b/interactive_ai/workflows/geti_domain/dataset_ie/tests/fixtures/projects.py index 5d030bd50..69066a970 100644 --- a/interactive_ai/workflows/geti_domain/dataset_ie/tests/fixtures/projects.py +++ b/interactive_ai/workflows/geti_domain/dataset_ie/tests/fixtures/projects.py @@ -9,6 +9,7 @@ from geti_types import ID from iai_core.algorithms import ModelTemplateList from iai_core.entities.color import Color +from iai_core.entities.keypoint_structure import KeypointEdge, KeypointPosition, KeypointStructure from iai_core.entities.label import Domain, Label from iai_core.entities.label_schema import LabelGroup, LabelGroupType, LabelSchema, LabelSchemaView from iai_core.entities.model_template import HyperParameterData, InstantiationType, ModelTemplate, TaskFamily, TaskType @@ -22,6 +23,7 @@ from tests.test_helpers import ( generate_ellipse_shape, generate_images_and_annotation_scenes, + generate_keypoint_shape, generate_polygon_shape, generate_random_video, generate_rectangle_shape, @@ -743,3 +745,46 @@ def fxt_label_schema_classification() -> LabelSchema: ), ], ) + + +@pytest.fixture +def fxt_keypoint_detection_project(request) -> Project: + name = "_test_keypoint_project" + model_template_id = "keypoint_detection" + register_model_template(request, type(None), model_template_id, trainable=True, task_type="KEYPOINT_DETECTION") + + edges = [] + positions = [] + edges.append(KeypointEdge(node_1=ID("rectangle"), node_2=ID("ellipse"))) + edges.append(KeypointEdge(node_1=ID("ellipse"), node_2=ID("triangle"))) + positions.append(KeypointPosition(node=ID("rectangle"), x=0.3, y=0.3)) + positions.append(KeypointPosition(node=ID("ellipse"), x=0.4, y=0.4)) + positions.append(KeypointPosition(node=ID("triangle"), x=0.5, y=0.5)) + keypoint_structure = KeypointStructure(edges=edges, positions=positions) + + project = ProjectFactory.create_project_single_task( + name=name, + description=name, + creator_id="", + labels=[ + {"name": "rectangle", "color": "#00ff00ff"}, + {"name": "ellipse", "color": "#0000ffff"}, + {"name": "triangle", "color": "#ff0000ff"}, + ], + model_template_id=model_template_id, + keypoint_structure=keypoint_structure, + ) + labels = get_project_labels(project) + + generate_images_and_annotation_scenes( + project=project, + num_annotated_images=10, + num_unannotated_images=2, + labels=labels, + shape_generator=generate_keypoint_shape, + ) + + generate_random_video(project=project, video_name="Annotated Video", generate_annotations=True) + generate_random_video(project=project, video_name="Unannotated Video", generate_annotations=False) + + return project diff --git a/interactive_ai/workflows/geti_domain/dataset_ie/tests/integration/test_export_management.py b/interactive_ai/workflows/geti_domain/dataset_ie/tests/integration/test_export_management.py index 0601c9a99..e7769383e 100644 --- a/interactive_ai/workflows/geti_domain/dataset_ie/tests/integration/test_export_management.py +++ b/interactive_ai/workflows/geti_domain/dataset_ie/tests/integration/test_export_management.py @@ -46,6 +46,7 @@ def setup_class(self): ("anom_class", "datumaro"): self._assert_datumaro_export_valid, ("anom_seg", "datumaro"): self._assert_datumaro_export_valid, ("anom_det", "datumaro"): self._assert_datumaro_export_valid, + ("keypoint", "datumaro"): self._assert_datumaro_export_valid, } @patch("jobs_common.tasks.utils.secrets.set_env_vars", new=return_none) @@ -69,6 +70,7 @@ def setup_class(self): ("fxt_annotated_anomaly_cls_project_with_video", "anom_class", "datumaro"), ("fxt_annotated_anomaly_det_project", 
"anom_det", "datumaro"), ("fxt_annotated_anomaly_seg_project", "anom_seg", "datumaro"), + ("fxt_annotated_keypoint_det_project", "keypoint", "datumaro"), ], ) def test_export_projects( diff --git a/interactive_ai/workflows/geti_domain/dataset_ie/tests/integration/test_parse_dataset_new_project.py b/interactive_ai/workflows/geti_domain/dataset_ie/tests/integration/test_parse_dataset_new_project.py index dc18b8453..b6eeb8e1c 100644 --- a/interactive_ai/workflows/geti_domain/dataset_ie/tests/integration/test_parse_dataset_new_project.py +++ b/interactive_ai/workflows/geti_domain/dataset_ie/tests/integration/test_parse_dataset_new_project.py @@ -283,7 +283,25 @@ def test_parse_dataset_for_import_to_new_project__datumaro_format__keypoint_dete {"nodes": ["left_eye", "right_eye"]}, # [2,3] {"nodes": ["left_ankle", "left_knee"]}, # [16,14] ], - "positions": [], # TODO CVS-156570 + "positions": [ + {"label": "nose", "x": 0.1, "y": 0.4}, + {"label": "left_eye", "x": 0.2, "y": 0.5}, + {"label": "right_eye", "x": 0.3, "y": 0.6}, + {"label": "left_ear", "x": 0.4, "y": 0.7}, + {"label": "right_ear", "x": 0.5, "y": 0.8}, + {"label": "left_shoulder", "x": 0.6, "y": 0.9}, + {"label": "right_shoulder", "x": 0.7, "y": 1.0}, + {"label": "left_elbow", "x": 0.8, "y": 0.9}, + {"label": "right_elbow", "x": 0.9, "y": 0.8}, + {"label": "left_wrist", "x": 1.0, "y": 0.7}, + {"label": "right_wrist", "x": 0.9, "y": 0.6}, + {"label": "left_hip", "x": 0.8, "y": 0.5}, + {"label": "right_hip", "x": 0.7, "y": 0.4}, + {"label": "left_knee", "x": 0.6, "y": 0.3}, + {"label": "right_knee", "x": 0.5, "y": 0.2}, + {"label": "left_ankle", "x": 0.4, "y": 0.1}, + {"label": "right_ankle", "x": 0.3, "y": 0.0}, + ], } if fxt_keypoint_detection: dataset_info = DatasetInfo( diff --git a/interactive_ai/workflows/geti_domain/dataset_ie/tests/test_helpers.py b/interactive_ai/workflows/geti_domain/dataset_ie/tests/test_helpers.py index 6f192a7b2..3b9b7b69d 100644 --- a/interactive_ai/workflows/geti_domain/dataset_ie/tests/test_helpers.py +++ b/interactive_ai/workflows/geti_domain/dataset_ie/tests/test_helpers.py @@ -195,6 +195,15 @@ def generated_rotated_rectangle_shape() -> Polygon: return Polygon(points=points) +def generate_keypoint_shape() -> Keypoint: + """ + Generate keypoint shape + + :return: keypoint entity + """ + return Keypoint(x=0.2, y=0.3, is_visible=True) + + def generate_images_and_annotation_scenes( project: Project, num_annotated_images: int, diff --git a/interactive_ai/workflows/geti_domain/dataset_ie/uv.lock b/interactive_ai/workflows/geti_domain/dataset_ie/uv.lock index 897c65613..6ae07e3ea 100644 --- a/interactive_ai/workflows/geti_domain/dataset_ie/uv.lock +++ b/interactive_ai/workflows/geti_domain/dataset_ie/uv.lock @@ -642,7 +642,7 @@ dev = [ [[package]] name = "datumaro" -version = "1.8.0" +version = "1.10.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "attrs" }, @@ -663,6 +663,7 @@ dependencies = [ { name = "ovmsclient" }, { name = "pandas" }, { name = "pillow" }, + { name = "portalocker" }, { name = "protobuf" }, { name = "pyarrow" }, { name = "pycocotools" }, @@ -680,11 +681,11 @@ dependencies = [ { name = "tritonclient", extra = ["all"] }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/12/7c/0d29f21a68e9e1543f87cb78213e5870f499cf28bee850fa519ba491e31a/datumaro-1.8.0.tar.gz", hash = "sha256:2a8ef4bd36f968ded6b6f4f56fe974431cd7bd4d1e3d9f75e447b1c928fa1dbc", size = 564692 } +sdist = { url = 
"https://files.pythonhosted.org/packages/50/62/a915845b2d650ec2e2dc9f07716872228f898ff4ee7c45a729304295b3b7/datumaro-1.10.0.tar.gz", hash = "sha256:408a07fb4c74a2d832d4493c9c8001283cc0f40b02fdddc26b9e05ffb6ee8477", size = 567726 } wheels = [ - { url = "https://files.pythonhosted.org/packages/04/a0/ada181dd1cb3d05f88ef24e6eb39c30e0065cfc27758f2c404fdcb715b9c/datumaro-1.8.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:74a4f3f5a0acecb331c91a4eac2127bf28fea90ece75fc98a51f81eaa65d8a4a", size = 1137441 }, - { url = "https://files.pythonhosted.org/packages/77/54/e1d97265dfbd6024b5166bda01289a4f630b6bae727d1dcb408b2ae4c1b2/datumaro-1.8.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f3fe0af8d503e36479e97a53295571008d38826d2a6483fb3747ae1ff24dde96", size = 1654376 }, - { url = "https://files.pythonhosted.org/packages/d4/25/5ad4d1f77ee40860b8e78265f128c69b4aa28c9011f4d8b12968c91ec342/datumaro-1.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:673556ebc0090956029a67ef29c0fe76681c4414d2c3a3ffb5890851946c4b0d", size = 954511 }, + { url = "https://files.pythonhosted.org/packages/46/62/3bb463635ebac05b2603416e7c79e56ac6af7c401056246ab448dc09c8f8/datumaro-1.10.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1e7cee5d990cf85c246e33b8dfe21a67298fbb2a8886aa5607d97924087a0b97", size = 1167120 }, + { url = "https://files.pythonhosted.org/packages/fc/a1/3aa0975be54a51480f83a6f7315e2d6dc31b1b446847faa4cc34589db2f8/datumaro-1.10.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d2f41647ed54bb09ca306614203ae90f69e65e89e18d7c36ed0f75c69278ee6a", size = 1682767 }, + { url = "https://files.pythonhosted.org/packages/84/46/035835142298a8eacea7bdc19a7b0949a7d5d33a480814666741c39a63ba/datumaro-1.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:2823e930c1e4ca2c75046354d6214401498eff5f3c27c63d7a62d84958be56d5", size = 981121 }, ] [[package]] @@ -2307,19 +2308,20 @@ wheels = [ [[package]] name = "orjson" -version = "3.10.5" +version = "3.10.7" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/f9/ba/a506ace6d9e4cb96cb4bed678fddc2605b8befe7fbbbecc309af1364b7c4/orjson-3.10.5.tar.gz", hash = "sha256:7a5baef8a4284405d96c90c7c62b755e9ef1ada84c2406c24a9ebec86b89f46d", size = 5249974 } +sdist = { url = "https://files.pythonhosted.org/packages/9e/03/821c8197d0515e46ea19439f5c5d5fd9a9889f76800613cfac947b5d7845/orjson-3.10.7.tar.gz", hash = "sha256:75ef0640403f945f3a1f9f6400686560dbfb0fb5b16589ad62cd477043c4eee3", size = 5056450 } wheels = [ - { url = "https://files.pythonhosted.org/packages/b3/7e/df5ca5a663af11e1b825c7909c31ed7db2dad8aaf0cced81ac03d1dabc2b/orjson-3.10.5-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:545d493c1f560d5ccfc134803ceb8955a14c3fcb47bbb4b2fee0232646d0b932", size = 258729 }, - { url = "https://files.pythonhosted.org/packages/7b/6d/7aafbe1f7e3c6e314c34e2201d7a0c09670753446351edbd939aa5efcd4e/orjson-3.10.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f4324929c2dd917598212bfd554757feca3e5e0fa60da08be11b4aa8b90013c1", size = 152635 }, - { url = "https://files.pythonhosted.org/packages/88/e3/adfce728f25dd40afaa54f90d193b3cd9892d8b0037fe8af057aa00c0957/orjson-3.10.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8c13ca5e2ddded0ce6a927ea5a9f27cae77eee4c75547b4297252cb20c4d30e6", size = 155820 }, - { url = 
"https://files.pythonhosted.org/packages/12/6c/69cd85db2486a4372525b5174a274ef9bbc0c997c0b74520e1b8d895a69e/orjson-3.10.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b6c8e30adfa52c025f042a87f450a6b9ea29649d828e0fec4858ed5e6caecf63", size = 168867 }, - { url = "https://files.pythonhosted.org/packages/6c/4d/313dcbecc7ab7d9d3590dba4be613e3e8b7df821d6ab77a6cb17e58fc7ae/orjson-3.10.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:338fd4f071b242f26e9ca802f443edc588fa4ab60bfa81f38beaedf42eda226c", size = 144984 }, - { url = "https://files.pythonhosted.org/packages/19/2f/66c02f10bdf5989d2ffb6e0b65975ae2eea286547b01115cffb552bd81dd/orjson-3.10.5-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:6970ed7a3126cfed873c5d21ece1cd5d6f83ca6c9afb71bbae21a0b034588d96", size = 176029 }, - { url = "https://files.pythonhosted.org/packages/cf/c7/b150a843e7aa30e3bd5fb5f00377a720ae0aaba4231db636c0726777d2da/orjson-3.10.5-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:235dadefb793ad12f7fa11e98a480db1f7c6469ff9e3da5e73c7809c700d746b", size = 171438 }, - { url = "https://files.pythonhosted.org/packages/28/89/243a52595958f62a0dae47450c8bb1e87945e71a1ac3af88f7f99ca73e16/orjson-3.10.5-cp310-none-win32.whl", hash = "sha256:be79e2393679eda6a590638abda16d167754393f5d0850dcbca2d0c3735cebe2", size = 144390 }, - { url = "https://files.pythonhosted.org/packages/a5/38/9f6537686f78de066688d6f2629963a645f0cbe6e79375b3f6b6b176f4e7/orjson-3.10.5-cp310-none-win_amd64.whl", hash = "sha256:c4a65310ccb5c9910c47b078ba78e2787cb3878cdded1702ac3d0da71ddc5228", size = 141289 }, + { url = "https://files.pythonhosted.org/packages/49/12/60931cf808b9334f26210ab496442f4a7a3d66e29d1cf12e0a01857e756f/orjson-3.10.7-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:74f4544f5a6405b90da8ea724d15ac9c36da4d72a738c64685003337401f5c12", size = 251312 }, + { url = "https://files.pythonhosted.org/packages/fe/0e/efbd0a2d25f8e82b230eb20b6b8424be6dd95b6811b669be9af16234b6db/orjson-3.10.7-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:34a566f22c28222b08875b18b0dfbf8a947e69df21a9ed5c51a6bf91cfb944ac", size = 148124 }, + { url = "https://files.pythonhosted.org/packages/dd/47/1ddff6e23fe5f4aeaaed996a3cde422b3eaac4558c03751723e106184c68/orjson-3.10.7-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bf6ba8ebc8ef5792e2337fb0419f8009729335bb400ece005606336b7fd7bab7", size = 147277 }, + { url = "https://files.pythonhosted.org/packages/04/da/d03d72b54bdd60d05de372114abfbd9f05050946895140c6ff5f27ab8f49/orjson-3.10.7-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ac7cf6222b29fbda9e3a472b41e6a5538b48f2c8f99261eecd60aafbdb60690c", size = 152955 }, + { url = "https://files.pythonhosted.org/packages/7f/7e/ef8522dbba112af6cc52227dcc746dd3447c7d53ea8cea35740239b547ee/orjson-3.10.7-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:de817e2f5fc75a9e7dd350c4b0f54617b280e26d1631811a43e7e968fa71e3e9", size = 163955 }, + { url = "https://files.pythonhosted.org/packages/b6/bc/fbd345d771a73cacc5b0e774d034cd081590b336754c511f4ead9fdc4cf1/orjson-3.10.7-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:348bdd16b32556cf8d7257b17cf2bdb7ab7976af4af41ebe79f9796c218f7e91", size = 141896 }, + { url = 
"https://files.pythonhosted.org/packages/82/0a/1f09c12d15b1e83156b7f3f621561d38650fe5b8f39f38f04a64de1a87fc/orjson-3.10.7-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:479fd0844ddc3ca77e0fd99644c7fe2de8e8be1efcd57705b5c92e5186e8a250", size = 170166 }, + { url = "https://files.pythonhosted.org/packages/a6/d8/eee30caba21a8d6a9df06d2519bb0ecd0adbcd57f2e79d360de5570031cf/orjson-3.10.7-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:fdf5197a21dd660cf19dfd2a3ce79574588f8f5e2dbf21bda9ee2d2b46924d84", size = 167804 }, + { url = "https://files.pythonhosted.org/packages/44/fe/d1d89d3f15e343511417195f6ccd2bdeb7ebc5a48a882a79ab3bbcdf5fc7/orjson-3.10.7-cp310-none-win32.whl", hash = "sha256:d374d36726746c81a49f3ff8daa2898dccab6596864ebe43d50733275c629175", size = 143010 }, + { url = "https://files.pythonhosted.org/packages/88/8c/0e7b8d5a523927774758ac4ce2de4d8ca5dda569955ba3aeb5e208344eda/orjson-3.10.7-cp310-none-win_amd64.whl", hash = "sha256:cb61938aec8b0ffb6eef484d480188a1777e67b05d58e41b435c74b9d84e0b9c", size = 137306 }, ] [[package]] @@ -2408,6 +2410,18 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/88/5f/e351af9a41f866ac3f1fac4ca0613908d9a41741cfcf2228f4ad853b697d/pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669", size = 20556 }, ] +[[package]] +name = "portalocker" +version = "3.1.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pywin32", marker = "sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/ac/91/8bfe23e1f7f630f2061ef38b5225d9fda9068d6a30fcbc187951e678e630/portalocker-3.1.1.tar.gz", hash = "sha256:ec20f6dda2ad9ce89fa399a5f31f4f1495f515958f0cb7ca6543cef7bb5a749e", size = 43708 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f7/60/1974cfdd5bb770568ddc6f89f3e0df4cfdd1acffd5a609dff5e95f48c6e2/portalocker-3.1.1-py3-none-any.whl", hash = "sha256:80e984e24de292ff258a5bea0e4f3f778fff84c0ae1275dbaebc4658de4aacb3", size = 19661 }, +] + [[package]] name = "pre-commit" version = "2.15.0" @@ -3364,8 +3378,8 @@ datumaro-conversion = [ [package.metadata] requires-dist = [ { name = "dataclasses-json", specifier = "==0.5.7" }, - { name = "datumaro", marker = "extra == 'datumaro-conversion'", specifier = "==1.8.0" }, - { name = "datumaro", marker = "extra == 'shard-dataset'", specifier = "==1.8.0" }, + { name = "datumaro", marker = "extra == 'datumaro-conversion'", specifier = "==1.10.0" }, + { name = "datumaro", marker = "extra == 'shard-dataset'", specifier = "==1.10.0" }, { name = "flytekit", specifier = "==1.10.2" }, { name = "flytekitplugins-pod", specifier = "==1.2.4" }, { name = "geti-k8s-tools", editable = "../../../../libs/k8s_tools" }, diff --git a/interactive_ai/workflows/geti_domain/model_test/uv.lock b/interactive_ai/workflows/geti_domain/model_test/uv.lock index 5e9803312..17838cc32 100644 --- a/interactive_ai/workflows/geti_domain/model_test/uv.lock +++ b/interactive_ai/workflows/geti_domain/model_test/uv.lock @@ -2627,8 +2627,8 @@ evaluation = [ [package.metadata] requires-dist = [ { name = "dataclasses-json", specifier = "==0.5.7" }, - { name = "datumaro", marker = "extra == 'datumaro-conversion'", specifier = "==1.8.0" }, - { name = "datumaro", marker = "extra == 'shard-dataset'", specifier = "==1.8.0" }, + { name = "datumaro", marker = "extra == 'datumaro-conversion'", specifier = "==1.10.0" }, + { name = "datumaro", marker = "extra == 'shard-dataset'", specifier = "==1.10.0" }, { name = "flytekit", 
specifier = "==1.10.2" }, { name = "flytekitplugins-pod", specifier = "==1.2.4" }, { name = "geti-k8s-tools", editable = "../../../../libs/k8s_tools" }, diff --git a/interactive_ai/workflows/geti_domain/optimize/uv.lock b/interactive_ai/workflows/geti_domain/optimize/uv.lock index fa4e1e14e..09d87ef2a 100644 --- a/interactive_ai/workflows/geti_domain/optimize/uv.lock +++ b/interactive_ai/workflows/geti_domain/optimize/uv.lock @@ -562,7 +562,7 @@ wheels = [ [[package]] name = "datumaro" -version = "1.8.0" +version = "1.10.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "attrs" }, @@ -583,6 +583,7 @@ dependencies = [ { name = "ovmsclient" }, { name = "pandas" }, { name = "pillow" }, + { name = "portalocker" }, { name = "protobuf" }, { name = "pyarrow" }, { name = "pycocotools" }, @@ -600,11 +601,11 @@ dependencies = [ { name = "tritonclient", extra = ["all"] }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/12/7c/0d29f21a68e9e1543f87cb78213e5870f499cf28bee850fa519ba491e31a/datumaro-1.8.0.tar.gz", hash = "sha256:2a8ef4bd36f968ded6b6f4f56fe974431cd7bd4d1e3d9f75e447b1c928fa1dbc", size = 564692 } +sdist = { url = "https://files.pythonhosted.org/packages/50/62/a915845b2d650ec2e2dc9f07716872228f898ff4ee7c45a729304295b3b7/datumaro-1.10.0.tar.gz", hash = "sha256:408a07fb4c74a2d832d4493c9c8001283cc0f40b02fdddc26b9e05ffb6ee8477", size = 567726 } wheels = [ - { url = "https://files.pythonhosted.org/packages/04/a0/ada181dd1cb3d05f88ef24e6eb39c30e0065cfc27758f2c404fdcb715b9c/datumaro-1.8.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:74a4f3f5a0acecb331c91a4eac2127bf28fea90ece75fc98a51f81eaa65d8a4a", size = 1137441 }, - { url = "https://files.pythonhosted.org/packages/77/54/e1d97265dfbd6024b5166bda01289a4f630b6bae727d1dcb408b2ae4c1b2/datumaro-1.8.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f3fe0af8d503e36479e97a53295571008d38826d2a6483fb3747ae1ff24dde96", size = 1654376 }, - { url = "https://files.pythonhosted.org/packages/d4/25/5ad4d1f77ee40860b8e78265f128c69b4aa28c9011f4d8b12968c91ec342/datumaro-1.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:673556ebc0090956029a67ef29c0fe76681c4414d2c3a3ffb5890851946c4b0d", size = 954511 }, + { url = "https://files.pythonhosted.org/packages/46/62/3bb463635ebac05b2603416e7c79e56ac6af7c401056246ab448dc09c8f8/datumaro-1.10.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1e7cee5d990cf85c246e33b8dfe21a67298fbb2a8886aa5607d97924087a0b97", size = 1167120 }, + { url = "https://files.pythonhosted.org/packages/fc/a1/3aa0975be54a51480f83a6f7315e2d6dc31b1b446847faa4cc34589db2f8/datumaro-1.10.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d2f41647ed54bb09ca306614203ae90f69e65e89e18d7c36ed0f75c69278ee6a", size = 1682767 }, + { url = "https://files.pythonhosted.org/packages/84/46/035835142298a8eacea7bdc19a7b0949a7d5d33a480814666741c39a63ba/datumaro-1.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:2823e930c1e4ca2c75046354d6214401498eff5f3c27c63d7a62d84958be56d5", size = 981121 }, ] [[package]] @@ -2260,19 +2261,20 @@ dev = [ [[package]] name = "orjson" -version = "3.10.5" +version = "3.10.7" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/f9/ba/a506ace6d9e4cb96cb4bed678fddc2605b8befe7fbbbecc309af1364b7c4/orjson-3.10.5.tar.gz", hash = "sha256:7a5baef8a4284405d96c90c7c62b755e9ef1ada84c2406c24a9ebec86b89f46d", size = 5249974 } +sdist = { url = 
"https://files.pythonhosted.org/packages/9e/03/821c8197d0515e46ea19439f5c5d5fd9a9889f76800613cfac947b5d7845/orjson-3.10.7.tar.gz", hash = "sha256:75ef0640403f945f3a1f9f6400686560dbfb0fb5b16589ad62cd477043c4eee3", size = 5056450 } wheels = [ - { url = "https://files.pythonhosted.org/packages/b3/7e/df5ca5a663af11e1b825c7909c31ed7db2dad8aaf0cced81ac03d1dabc2b/orjson-3.10.5-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:545d493c1f560d5ccfc134803ceb8955a14c3fcb47bbb4b2fee0232646d0b932", size = 258729 }, - { url = "https://files.pythonhosted.org/packages/7b/6d/7aafbe1f7e3c6e314c34e2201d7a0c09670753446351edbd939aa5efcd4e/orjson-3.10.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f4324929c2dd917598212bfd554757feca3e5e0fa60da08be11b4aa8b90013c1", size = 152635 }, - { url = "https://files.pythonhosted.org/packages/88/e3/adfce728f25dd40afaa54f90d193b3cd9892d8b0037fe8af057aa00c0957/orjson-3.10.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8c13ca5e2ddded0ce6a927ea5a9f27cae77eee4c75547b4297252cb20c4d30e6", size = 155820 }, - { url = "https://files.pythonhosted.org/packages/12/6c/69cd85db2486a4372525b5174a274ef9bbc0c997c0b74520e1b8d895a69e/orjson-3.10.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b6c8e30adfa52c025f042a87f450a6b9ea29649d828e0fec4858ed5e6caecf63", size = 168867 }, - { url = "https://files.pythonhosted.org/packages/6c/4d/313dcbecc7ab7d9d3590dba4be613e3e8b7df821d6ab77a6cb17e58fc7ae/orjson-3.10.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:338fd4f071b242f26e9ca802f443edc588fa4ab60bfa81f38beaedf42eda226c", size = 144984 }, - { url = "https://files.pythonhosted.org/packages/19/2f/66c02f10bdf5989d2ffb6e0b65975ae2eea286547b01115cffb552bd81dd/orjson-3.10.5-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:6970ed7a3126cfed873c5d21ece1cd5d6f83ca6c9afb71bbae21a0b034588d96", size = 176029 }, - { url = "https://files.pythonhosted.org/packages/cf/c7/b150a843e7aa30e3bd5fb5f00377a720ae0aaba4231db636c0726777d2da/orjson-3.10.5-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:235dadefb793ad12f7fa11e98a480db1f7c6469ff9e3da5e73c7809c700d746b", size = 171438 }, - { url = "https://files.pythonhosted.org/packages/28/89/243a52595958f62a0dae47450c8bb1e87945e71a1ac3af88f7f99ca73e16/orjson-3.10.5-cp310-none-win32.whl", hash = "sha256:be79e2393679eda6a590638abda16d167754393f5d0850dcbca2d0c3735cebe2", size = 144390 }, - { url = "https://files.pythonhosted.org/packages/a5/38/9f6537686f78de066688d6f2629963a645f0cbe6e79375b3f6b6b176f4e7/orjson-3.10.5-cp310-none-win_amd64.whl", hash = "sha256:c4a65310ccb5c9910c47b078ba78e2787cb3878cdded1702ac3d0da71ddc5228", size = 141289 }, + { url = "https://files.pythonhosted.org/packages/49/12/60931cf808b9334f26210ab496442f4a7a3d66e29d1cf12e0a01857e756f/orjson-3.10.7-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:74f4544f5a6405b90da8ea724d15ac9c36da4d72a738c64685003337401f5c12", size = 251312 }, + { url = "https://files.pythonhosted.org/packages/fe/0e/efbd0a2d25f8e82b230eb20b6b8424be6dd95b6811b669be9af16234b6db/orjson-3.10.7-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:34a566f22c28222b08875b18b0dfbf8a947e69df21a9ed5c51a6bf91cfb944ac", size = 148124 }, + { url = 
"https://files.pythonhosted.org/packages/dd/47/1ddff6e23fe5f4aeaaed996a3cde422b3eaac4558c03751723e106184c68/orjson-3.10.7-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bf6ba8ebc8ef5792e2337fb0419f8009729335bb400ece005606336b7fd7bab7", size = 147277 }, + { url = "https://files.pythonhosted.org/packages/04/da/d03d72b54bdd60d05de372114abfbd9f05050946895140c6ff5f27ab8f49/orjson-3.10.7-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ac7cf6222b29fbda9e3a472b41e6a5538b48f2c8f99261eecd60aafbdb60690c", size = 152955 }, + { url = "https://files.pythonhosted.org/packages/7f/7e/ef8522dbba112af6cc52227dcc746dd3447c7d53ea8cea35740239b547ee/orjson-3.10.7-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:de817e2f5fc75a9e7dd350c4b0f54617b280e26d1631811a43e7e968fa71e3e9", size = 163955 }, + { url = "https://files.pythonhosted.org/packages/b6/bc/fbd345d771a73cacc5b0e774d034cd081590b336754c511f4ead9fdc4cf1/orjson-3.10.7-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:348bdd16b32556cf8d7257b17cf2bdb7ab7976af4af41ebe79f9796c218f7e91", size = 141896 }, + { url = "https://files.pythonhosted.org/packages/82/0a/1f09c12d15b1e83156b7f3f621561d38650fe5b8f39f38f04a64de1a87fc/orjson-3.10.7-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:479fd0844ddc3ca77e0fd99644c7fe2de8e8be1efcd57705b5c92e5186e8a250", size = 170166 }, + { url = "https://files.pythonhosted.org/packages/a6/d8/eee30caba21a8d6a9df06d2519bb0ecd0adbcd57f2e79d360de5570031cf/orjson-3.10.7-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:fdf5197a21dd660cf19dfd2a3ce79574588f8f5e2dbf21bda9ee2d2b46924d84", size = 167804 }, + { url = "https://files.pythonhosted.org/packages/44/fe/d1d89d3f15e343511417195f6ccd2bdeb7ebc5a48a882a79ab3bbcdf5fc7/orjson-3.10.7-cp310-none-win32.whl", hash = "sha256:d374d36726746c81a49f3ff8daa2898dccab6596864ebe43d50733275c629175", size = 143010 }, + { url = "https://files.pythonhosted.org/packages/88/8c/0e7b8d5a523927774758ac4ce2de4d8ca5dda569955ba3aeb5e208344eda/orjson-3.10.7-cp310-none-win_amd64.whl", hash = "sha256:cb61938aec8b0ffb6eef484d480188a1777e67b05d58e41b435c74b9d84e0b9c", size = 137306 }, ] [[package]] @@ -2361,6 +2363,18 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/88/5f/e351af9a41f866ac3f1fac4ca0613908d9a41741cfcf2228f4ad853b697d/pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669", size = 20556 }, ] +[[package]] +name = "portalocker" +version = "3.1.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pywin32", marker = "sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/ac/91/8bfe23e1f7f630f2061ef38b5225d9fda9068d6a30fcbc187951e678e630/portalocker-3.1.1.tar.gz", hash = "sha256:ec20f6dda2ad9ce89fa399a5f31f4f1495f515958f0cb7ca6543cef7bb5a749e", size = 43708 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f7/60/1974cfdd5bb770568ddc6f89f3e0df4cfdd1acffd5a609dff5e95f48c6e2/portalocker-3.1.1-py3-none-any.whl", hash = "sha256:80e984e24de292ff258a5bea0e4f3f778fff84c0ae1275dbaebc4658de4aacb3", size = 19661 }, +] + [[package]] name = "pre-commit" version = "2.15.0" @@ -3276,8 +3290,8 @@ shard-dataset = [ [package.metadata] requires-dist = [ { name = "dataclasses-json", specifier = "==0.5.7" }, - { name = "datumaro", marker = "extra == 'datumaro-conversion'", specifier = "==1.8.0" }, - { name = "datumaro", marker = "extra == 'shard-dataset'", specifier 
= "==1.8.0" }, + { name = "datumaro", marker = "extra == 'datumaro-conversion'", specifier = "==1.10.0" }, + { name = "datumaro", marker = "extra == 'shard-dataset'", specifier = "==1.10.0" }, { name = "flytekit", specifier = "==1.10.2" }, { name = "flytekitplugins-pod", specifier = "==1.2.4" }, { name = "geti-k8s-tools", editable = "../../../../libs/k8s_tools" }, diff --git a/interactive_ai/workflows/geti_domain/project_ie/uv.lock b/interactive_ai/workflows/geti_domain/project_ie/uv.lock index 4b466c968..3d64d4f5f 100644 --- a/interactive_ai/workflows/geti_domain/project_ie/uv.lock +++ b/interactive_ai/workflows/geti_domain/project_ie/uv.lock @@ -2578,8 +2578,8 @@ dependencies = [ [package.metadata] requires-dist = [ { name = "dataclasses-json", specifier = "==0.5.7" }, - { name = "datumaro", marker = "extra == 'datumaro-conversion'", specifier = "==1.8.0" }, - { name = "datumaro", marker = "extra == 'shard-dataset'", specifier = "==1.8.0" }, + { name = "datumaro", marker = "extra == 'datumaro-conversion'", specifier = "==1.10.0" }, + { name = "datumaro", marker = "extra == 'shard-dataset'", specifier = "==1.10.0" }, { name = "flytekit", specifier = "==1.10.2" }, { name = "flytekitplugins-pod", specifier = "==1.2.4" }, { name = "geti-k8s-tools", editable = "../../../../libs/k8s_tools" }, diff --git a/interactive_ai/workflows/geti_domain/train/uv.lock b/interactive_ai/workflows/geti_domain/train/uv.lock index 21b487da9..e221dc27a 100644 --- a/interactive_ai/workflows/geti_domain/train/uv.lock +++ b/interactive_ai/workflows/geti_domain/train/uv.lock @@ -562,7 +562,7 @@ wheels = [ [[package]] name = "datumaro" -version = "1.8.0" +version = "1.10.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "attrs" }, @@ -583,6 +583,7 @@ dependencies = [ { name = "ovmsclient" }, { name = "pandas" }, { name = "pillow" }, + { name = "portalocker" }, { name = "protobuf" }, { name = "pyarrow" }, { name = "pycocotools" }, @@ -600,11 +601,11 @@ dependencies = [ { name = "tritonclient", extra = ["all"] }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/12/7c/0d29f21a68e9e1543f87cb78213e5870f499cf28bee850fa519ba491e31a/datumaro-1.8.0.tar.gz", hash = "sha256:2a8ef4bd36f968ded6b6f4f56fe974431cd7bd4d1e3d9f75e447b1c928fa1dbc", size = 564692 } +sdist = { url = "https://files.pythonhosted.org/packages/50/62/a915845b2d650ec2e2dc9f07716872228f898ff4ee7c45a729304295b3b7/datumaro-1.10.0.tar.gz", hash = "sha256:408a07fb4c74a2d832d4493c9c8001283cc0f40b02fdddc26b9e05ffb6ee8477", size = 567726 } wheels = [ - { url = "https://files.pythonhosted.org/packages/04/a0/ada181dd1cb3d05f88ef24e6eb39c30e0065cfc27758f2c404fdcb715b9c/datumaro-1.8.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:74a4f3f5a0acecb331c91a4eac2127bf28fea90ece75fc98a51f81eaa65d8a4a", size = 1137441 }, - { url = "https://files.pythonhosted.org/packages/77/54/e1d97265dfbd6024b5166bda01289a4f630b6bae727d1dcb408b2ae4c1b2/datumaro-1.8.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f3fe0af8d503e36479e97a53295571008d38826d2a6483fb3747ae1ff24dde96", size = 1654376 }, - { url = "https://files.pythonhosted.org/packages/d4/25/5ad4d1f77ee40860b8e78265f128c69b4aa28c9011f4d8b12968c91ec342/datumaro-1.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:673556ebc0090956029a67ef29c0fe76681c4414d2c3a3ffb5890851946c4b0d", size = 954511 }, + { url = 
"https://files.pythonhosted.org/packages/46/62/3bb463635ebac05b2603416e7c79e56ac6af7c401056246ab448dc09c8f8/datumaro-1.10.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1e7cee5d990cf85c246e33b8dfe21a67298fbb2a8886aa5607d97924087a0b97", size = 1167120 }, + { url = "https://files.pythonhosted.org/packages/fc/a1/3aa0975be54a51480f83a6f7315e2d6dc31b1b446847faa4cc34589db2f8/datumaro-1.10.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d2f41647ed54bb09ca306614203ae90f69e65e89e18d7c36ed0f75c69278ee6a", size = 1682767 }, + { url = "https://files.pythonhosted.org/packages/84/46/035835142298a8eacea7bdc19a7b0949a7d5d33a480814666741c39a63ba/datumaro-1.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:2823e930c1e4ca2c75046354d6214401498eff5f3c27c63d7a62d84958be56d5", size = 981121 }, ] [[package]] @@ -2213,19 +2214,20 @@ wheels = [ [[package]] name = "orjson" -version = "3.10.5" +version = "3.10.7" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/f9/ba/a506ace6d9e4cb96cb4bed678fddc2605b8befe7fbbbecc309af1364b7c4/orjson-3.10.5.tar.gz", hash = "sha256:7a5baef8a4284405d96c90c7c62b755e9ef1ada84c2406c24a9ebec86b89f46d", size = 5249974 } +sdist = { url = "https://files.pythonhosted.org/packages/9e/03/821c8197d0515e46ea19439f5c5d5fd9a9889f76800613cfac947b5d7845/orjson-3.10.7.tar.gz", hash = "sha256:75ef0640403f945f3a1f9f6400686560dbfb0fb5b16589ad62cd477043c4eee3", size = 5056450 } wheels = [ - { url = "https://files.pythonhosted.org/packages/b3/7e/df5ca5a663af11e1b825c7909c31ed7db2dad8aaf0cced81ac03d1dabc2b/orjson-3.10.5-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:545d493c1f560d5ccfc134803ceb8955a14c3fcb47bbb4b2fee0232646d0b932", size = 258729 }, - { url = "https://files.pythonhosted.org/packages/7b/6d/7aafbe1f7e3c6e314c34e2201d7a0c09670753446351edbd939aa5efcd4e/orjson-3.10.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f4324929c2dd917598212bfd554757feca3e5e0fa60da08be11b4aa8b90013c1", size = 152635 }, - { url = "https://files.pythonhosted.org/packages/88/e3/adfce728f25dd40afaa54f90d193b3cd9892d8b0037fe8af057aa00c0957/orjson-3.10.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8c13ca5e2ddded0ce6a927ea5a9f27cae77eee4c75547b4297252cb20c4d30e6", size = 155820 }, - { url = "https://files.pythonhosted.org/packages/12/6c/69cd85db2486a4372525b5174a274ef9bbc0c997c0b74520e1b8d895a69e/orjson-3.10.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b6c8e30adfa52c025f042a87f450a6b9ea29649d828e0fec4858ed5e6caecf63", size = 168867 }, - { url = "https://files.pythonhosted.org/packages/6c/4d/313dcbecc7ab7d9d3590dba4be613e3e8b7df821d6ab77a6cb17e58fc7ae/orjson-3.10.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:338fd4f071b242f26e9ca802f443edc588fa4ab60bfa81f38beaedf42eda226c", size = 144984 }, - { url = "https://files.pythonhosted.org/packages/19/2f/66c02f10bdf5989d2ffb6e0b65975ae2eea286547b01115cffb552bd81dd/orjson-3.10.5-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:6970ed7a3126cfed873c5d21ece1cd5d6f83ca6c9afb71bbae21a0b034588d96", size = 176029 }, - { url = "https://files.pythonhosted.org/packages/cf/c7/b150a843e7aa30e3bd5fb5f00377a720ae0aaba4231db636c0726777d2da/orjson-3.10.5-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:235dadefb793ad12f7fa11e98a480db1f7c6469ff9e3da5e73c7809c700d746b", size = 171438 }, - { url = 
"https://files.pythonhosted.org/packages/28/89/243a52595958f62a0dae47450c8bb1e87945e71a1ac3af88f7f99ca73e16/orjson-3.10.5-cp310-none-win32.whl", hash = "sha256:be79e2393679eda6a590638abda16d167754393f5d0850dcbca2d0c3735cebe2", size = 144390 }, - { url = "https://files.pythonhosted.org/packages/a5/38/9f6537686f78de066688d6f2629963a645f0cbe6e79375b3f6b6b176f4e7/orjson-3.10.5-cp310-none-win_amd64.whl", hash = "sha256:c4a65310ccb5c9910c47b078ba78e2787cb3878cdded1702ac3d0da71ddc5228", size = 141289 }, + { url = "https://files.pythonhosted.org/packages/49/12/60931cf808b9334f26210ab496442f4a7a3d66e29d1cf12e0a01857e756f/orjson-3.10.7-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:74f4544f5a6405b90da8ea724d15ac9c36da4d72a738c64685003337401f5c12", size = 251312 }, + { url = "https://files.pythonhosted.org/packages/fe/0e/efbd0a2d25f8e82b230eb20b6b8424be6dd95b6811b669be9af16234b6db/orjson-3.10.7-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:34a566f22c28222b08875b18b0dfbf8a947e69df21a9ed5c51a6bf91cfb944ac", size = 148124 }, + { url = "https://files.pythonhosted.org/packages/dd/47/1ddff6e23fe5f4aeaaed996a3cde422b3eaac4558c03751723e106184c68/orjson-3.10.7-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bf6ba8ebc8ef5792e2337fb0419f8009729335bb400ece005606336b7fd7bab7", size = 147277 }, + { url = "https://files.pythonhosted.org/packages/04/da/d03d72b54bdd60d05de372114abfbd9f05050946895140c6ff5f27ab8f49/orjson-3.10.7-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ac7cf6222b29fbda9e3a472b41e6a5538b48f2c8f99261eecd60aafbdb60690c", size = 152955 }, + { url = "https://files.pythonhosted.org/packages/7f/7e/ef8522dbba112af6cc52227dcc746dd3447c7d53ea8cea35740239b547ee/orjson-3.10.7-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:de817e2f5fc75a9e7dd350c4b0f54617b280e26d1631811a43e7e968fa71e3e9", size = 163955 }, + { url = "https://files.pythonhosted.org/packages/b6/bc/fbd345d771a73cacc5b0e774d034cd081590b336754c511f4ead9fdc4cf1/orjson-3.10.7-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:348bdd16b32556cf8d7257b17cf2bdb7ab7976af4af41ebe79f9796c218f7e91", size = 141896 }, + { url = "https://files.pythonhosted.org/packages/82/0a/1f09c12d15b1e83156b7f3f621561d38650fe5b8f39f38f04a64de1a87fc/orjson-3.10.7-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:479fd0844ddc3ca77e0fd99644c7fe2de8e8be1efcd57705b5c92e5186e8a250", size = 170166 }, + { url = "https://files.pythonhosted.org/packages/a6/d8/eee30caba21a8d6a9df06d2519bb0ecd0adbcd57f2e79d360de5570031cf/orjson-3.10.7-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:fdf5197a21dd660cf19dfd2a3ce79574588f8f5e2dbf21bda9ee2d2b46924d84", size = 167804 }, + { url = "https://files.pythonhosted.org/packages/44/fe/d1d89d3f15e343511417195f6ccd2bdeb7ebc5a48a882a79ab3bbcdf5fc7/orjson-3.10.7-cp310-none-win32.whl", hash = "sha256:d374d36726746c81a49f3ff8daa2898dccab6596864ebe43d50733275c629175", size = 143010 }, + { url = "https://files.pythonhosted.org/packages/88/8c/0e7b8d5a523927774758ac4ce2de4d8ca5dda569955ba3aeb5e208344eda/orjson-3.10.7-cp310-none-win_amd64.whl", hash = "sha256:cb61938aec8b0ffb6eef484d480188a1777e67b05d58e41b435c74b9d84e0b9c", size = 137306 }, ] [[package]] @@ -2314,6 +2316,18 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/88/5f/e351af9a41f866ac3f1fac4ca0613908d9a41741cfcf2228f4ad853b697d/pluggy-1.5.0-py3-none-any.whl", hash = 
"sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669", size = 20556 }, ] +[[package]] +name = "portalocker" +version = "3.1.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pywin32", marker = "sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/ac/91/8bfe23e1f7f630f2061ef38b5225d9fda9068d6a30fcbc187951e678e630/portalocker-3.1.1.tar.gz", hash = "sha256:ec20f6dda2ad9ce89fa399a5f31f4f1495f515958f0cb7ca6543cef7bb5a749e", size = 43708 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f7/60/1974cfdd5bb770568ddc6f89f3e0df4cfdd1acffd5a609dff5e95f48c6e2/portalocker-3.1.1-py3-none-any.whl", hash = "sha256:80e984e24de292ff258a5bea0e4f3f778fff84c0ae1275dbaebc4658de4aacb3", size = 19661 }, +] + [[package]] name = "pre-commit" version = "2.15.0" @@ -3285,8 +3299,8 @@ shard-dataset = [ [package.metadata] requires-dist = [ { name = "dataclasses-json", specifier = "==0.5.7" }, - { name = "datumaro", marker = "extra == 'datumaro-conversion'", specifier = "==1.8.0" }, - { name = "datumaro", marker = "extra == 'shard-dataset'", specifier = "==1.8.0" }, + { name = "datumaro", marker = "extra == 'datumaro-conversion'", specifier = "==1.10.0" }, + { name = "datumaro", marker = "extra == 'shard-dataset'", specifier = "==1.10.0" }, { name = "flytekit", specifier = "==1.10.2" }, { name = "flytekitplugins-pod", specifier = "==1.2.4" }, { name = "geti-k8s-tools", editable = "../../../../libs/k8s_tools" },