PoC for emit file reference record #443

1 change: 1 addition & 0 deletions airbyte_cdk/models/__init__.py
@@ -19,6 +19,7 @@
AirbyteMessage,
AirbyteProtocol,
AirbyteRecordMessage,
AirbyteRecordMessageFileReference,
AirbyteStateBlob,
AirbyteStateMessage,
AirbyteStateStats,
@@ -150,6 +150,7 @@ def on_record(self, record: Record) -> Iterable[AirbyteMessage]:
stream_name=record.stream_name,
data_or_message=record.data,
is_file_transfer_message=record.is_file_transfer_message,
file_reference=record.file_reference,
)
stream = self._stream_name_to_instance[record.stream_name]
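For context, the helper above now forwards record.file_reference into the emitted record message. The forwarded object is the newly re-exported AirbyteRecordMessageFileReference; a minimal sketch of what one looks like, with field names taken from FileUploader.upload() further down and purely illustrative values:

from airbyte_cdk.models import AirbyteRecordMessageFileReference

# Illustrative values only; in this PR the reference is built inside FileUploader.upload()
# and attached to the Record before it reaches on_record.
file_reference = AirbyteRecordMessageFileReference(
    file_url="/files/documents/42_report.pdf",      # absolute path where the file was written
    file_relative_path="documents/42_report.pdf",   # path relative to the files directory
    file_size_bytes=2048,
)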

23 changes: 6 additions & 17 deletions airbyte_cdk/sources/declarative/concurrent_declarative_source.py
@@ -207,19 +207,9 @@ def _group_streams(
# these legacy Python streams the way we do low-code streams to determine if they are concurrent compatible,
# so we need to treat them as synchronous

file_uploader = None
if isinstance(declarative_stream, DeclarativeStream):
file_uploader = (
self._constructor.create_component(
model_type=FileUploader,
component_definition=name_to_stream_mapping[declarative_stream.name][
"file_uploader"
],
config=config,
)
if "file_uploader" in name_to_stream_mapping[declarative_stream.name]
else None
)
supports_file_transfer = (
"file_uploader" in name_to_stream_mapping[declarative_stream.name]
)

if (
isinstance(declarative_stream, DeclarativeStream)
@@ -288,7 +278,6 @@ def _group_streams(
declarative_stream.get_json_schema(),
retriever,
self.message_repository,
file_uploader,
),
stream_slicer=declarative_stream.retriever.stream_slicer,
)
@@ -319,7 +308,6 @@ def _group_streams(
declarative_stream.get_json_schema(),
retriever,
self.message_repository,
file_uploader,
),
stream_slicer=cursor,
)
@@ -339,6 +327,7 @@ def _group_streams(
else None,
logger=self.logger,
cursor=cursor,
supports_file_transfer=supports_file_transfer,
)
)
elif (
@@ -350,7 +339,6 @@ def _group_streams(
declarative_stream.get_json_schema(),
declarative_stream.retriever,
self.message_repository,
file_uploader,
),
declarative_stream.retriever.stream_slicer,
)
@@ -371,6 +359,7 @@ def _group_streams(
cursor_field=None,
logger=self.logger,
cursor=final_state_cursor,
supports_file_transfer=supports_file_transfer,
)
)
elif (
@@ -410,7 +399,6 @@ def _group_streams(
declarative_stream.get_json_schema(),
retriever,
self.message_repository,
file_uploader,
),
perpartition_cursor,
)
@@ -425,6 +413,7 @@ def _group_streams(
cursor_field=perpartition_cursor.cursor_field.cursor_field_key,
logger=self.logger,
cursor=perpartition_cursor,
supports_file_transfer=supports_file_transfer,
)
)
else:
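A quick orientation note on the change above: _group_streams no longer builds a per-stream FileUploader (that now happens in the model factory's create_declarative_stream, shown below); it only derives a boolean flag from the manifest. A hedged sketch with made-up names:

# Hypothetical stream mapping; only the presence of the "file_uploader" key matters here.
name_to_stream_mapping = {"documents": {"retriever": {}, "file_uploader": {"type": "FileUploader"}}}
supports_file_transfer = "file_uploader" in name_to_stream_mapping["documents"]  # True
# The flag is then passed along to the concurrent DefaultStream constructor as
# supports_file_transfer=supports_file_transfer.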
@@ -1449,6 +1449,15 @@ definitions:
anyOf:
- "$ref": "#/definitions/CustomRecordExtractor"
- "$ref": "#/definitions/DpathExtractor"
filename_extractor:
description: Defines the name used to store the file. The stream name is automatically added to the file path. A unique file ID can be used to avoid overwriting files. A random UUID is used if the extractor is not provided.
type: string
interpolation_context:
- config
- record
examples:
- "{{ record.id }}/{{ record.file_name }}/"
- "{{ record.id }}_{{ record.file_name }}/"
$parameters:
type: object
additional_properties: true
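To see how the new filename_extractor field fits alongside the other FileUploader properties, here is a hedged sketch of a stream definition with a file_uploader block, written as the Python dict a declarative source accepts; every name and value is a placeholder, not taken from a real connector:

# Hypothetical manifest fragment for a declarative stream that also downloads one file per record.
stream_definition = {
    "type": "DeclarativeStream",
    "name": "documents",
    # "retriever": {...},  # the regular SimpleRetriever definition is elided here
    "file_uploader": {
        "type": "FileUploader",
        "requester": {
            "type": "HttpRequester",
            "url_base": "https://api.example.com",
            # How the requester consumes the extracted download target (e.g. via
            # interpolation in its path) is connector-specific and elided here.
            "path": "/download",
        },
        "download_target_extractor": {
            "type": "DpathExtractor",
            "field_path": ["download_url"],
        },
        "filename_extractor": "{{ record.id }}_{{ record.file_name }}",
    },
}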
@@ -18,6 +18,7 @@
from airbyte_cdk.sources.declarative.transformations import RecordTransformation
from airbyte_cdk.sources.types import Config, Record, StreamSlice, StreamState
from airbyte_cdk.sources.utils.transform import TypeTransformer
from airbyte_cdk.sources.declarative.retrievers.file_uploader import FileUploader


@dataclass
@@ -42,6 +43,7 @@ class RecordSelector(HttpSelector):
record_filter: Optional[RecordFilter] = None
transformations: List[RecordTransformation] = field(default_factory=lambda: [])
transform_before_filtering: bool = False
file_uploader: Optional[FileUploader] = None

def __post_init__(self, parameters: Mapping[str, Any]) -> None:
self._parameters = parameters
@@ -117,7 +119,10 @@ def filter_and_transform(
transformed_filtered_data, schema=records_schema
)
for data in normalized_data:
yield Record(data=data, stream_name=self.name, associated_slice=stream_slice)
record = Record(data=data, stream_name=self.name, associated_slice=stream_slice)
if self.file_uploader:
self.file_uploader.upload(record)
yield record

def _normalize_by_schema(
self, records: Iterable[Mapping[str, Any]], schema: Optional[Mapping[str, Any]]
@@ -1989,6 +1989,31 @@ class Config:
parameters: Optional[Dict[str, Any]] = Field(None, alias="$parameters")


class FileUploader(BaseModel):
type: Literal["FileUploader"]
requester: Union[CustomRequester, HttpRequester] = Field(
...,
description="Requester component that describes how to prepare HTTP requests to send to the source API.",
)
download_target_extractor: Union[CustomRecordExtractor, DpathExtractor] = Field(
...,
description="Responsible for fetching the url where the file is located. This is applied on each records and not on the HTTP response",
)
file_extractor: Optional[Union[CustomRecordExtractor, DpathExtractor]] = Field(
None,
description="Responsible for fetching the content of the file. If not defined, the assumption is that the whole response body is the file content",
)
filename_extractor: Optional[str] = Field(
None,
description="Defines the name to store the file. Stream name is automatically added to the file path. File unique ID can be used to avoid overwriting files. Random UUID will be used if the extractor is not provided.",
examples=[
"{{ record.id }}/{{ record.file_name }}/",
"{{ record.id }}_{{ record.file_name }}/",
],
)
parameters: Optional[Dict[str, Any]] = Field(None, alias="$parameters")


class DeclarativeStream(BaseModel):
class Config:
extra = Extra.allow
@@ -2047,6 +2072,11 @@ class Config:
description="Array of state migrations to be applied on the input state",
title="State Migrations",
)
file_uploader: Optional[FileUploader] = Field(
None,
description="(experimental) Describes how to fetch a file",
title="File Uploader",
)
parameters: Optional[Dict[str, Any]] = Field(None, alias="$parameters")


@@ -2278,22 +2308,6 @@ class StateDelegatingStream(BaseModel):
parameters: Optional[Dict[str, Any]] = Field(None, alias="$parameters")


class FileUploader(BaseModel):
type: Literal["FileUploader"]
requester: Union[CustomRequester, HttpRequester] = Field(
...,
description="Requester component that describes how to prepare HTTP requests to send to the source API.",
)
download_target_extractor: Union[CustomRecordExtractor, DpathExtractor] = Field(
...,
description="Responsible for fetching the url where the file is located. This is applied on each records and not on the HTTP response",
)
file_extractor: Optional[Union[CustomRecordExtractor, DpathExtractor]] = Field(
None,
description="Responsible for fetching the content of the file. If not defined, the assumption is that the whole response body is the file content",
)


class SimpleRetriever(BaseModel):
type: Literal["SimpleRetriever"]
record_selector: RecordSelector = Field(
@@ -2324,11 +2338,6 @@ class SimpleRetriever(BaseModel):
description="PartitionRouter component that describes how to partition the stream, enabling incremental syncs and checkpointing.",
title="Partition Router",
)
file_uploader: Optional[FileUploader] = Field(
None,
description="(experimental) Describes how to fetch a file",
title="File Uploader",
)
decoder: Optional[
Union[
CustomDecoder,
@@ -2485,6 +2494,7 @@ class DynamicDeclarativeStream(BaseModel):
DeclarativeSource1.update_forward_refs()
DeclarativeSource2.update_forward_refs()
SelectiveAuthenticator.update_forward_refs()
FileUploader.update_forward_refs()
DeclarativeStream.update_forward_refs()
SessionTokenAuthenticator.update_forward_refs()
DynamicSchemaLoader.update_forward_refs()
@@ -1755,6 +1755,11 @@ def create_declarative_stream(
transformations.append(
self._create_component_from_model(model=transformation_model, config=config)
)
file_uploader = None
if model.file_uploader:
file_uploader = self._create_component_from_model(
model=model.file_uploader, config=config
)

retriever = self._create_component_from_model(
model=model.retriever,
@@ -1766,6 +1771,7 @@
stop_condition_on_cursor=stop_condition_on_cursor,
client_side_incremental_sync=client_side_incremental_sync,
transformations=transformations,
file_uploader=file_uploader,
incremental_sync=model.incremental_sync,
)
cursor_field = model.incremental_sync.cursor_field if model.incremental_sync else None
@@ -2607,6 +2613,7 @@ def create_record_selector(
transformations: List[RecordTransformation] | None = None,
decoder: Decoder | None = None,
client_side_incremental_sync: Dict[str, Any] | None = None,
file_uploader: Optional[FileUploader] = None,
**kwargs: Any,
) -> RecordSelector:
extractor = self._create_component_from_model(
@@ -2644,6 +2651,7 @@
config=config,
record_filter=record_filter,
transformations=transformations or [],
file_uploader=file_uploader,
schema_normalization=schema_normalization,
parameters=model.parameters or {},
transform_before_filtering=transform_before_filtering,
@@ -2701,6 +2709,7 @@ def create_simple_retriever(
stop_condition_on_cursor: bool = False,
client_side_incremental_sync: Optional[Dict[str, Any]] = None,
transformations: List[RecordTransformation],
file_uploader: Optional[FileUploader] = None,
incremental_sync: Optional[
Union[
IncrementingCountCursorModel, DatetimeBasedCursorModel, CustomIncrementalSyncModel
@@ -2723,6 +2732,7 @@
decoder=decoder,
transformations=transformations,
client_side_incremental_sync=client_side_incremental_sync,
file_uploader=file_uploader,
)
url_base = (
model.requester.url_base
@@ -3338,7 +3348,13 @@ def create_file_uploader(
name=name,
**kwargs,
)
return FileUploader(requester, download_target_extractor)
return FileUploader(
requester=requester,
download_target_extractor=download_target_extractor,
config=config,
parameters=model.parameters or {},
filename_extractor=model.filename_extractor if model.filename_extractor else None,
)

def create_moving_window_call_rate_policy(
self, model: MovingWindowCallRatePolicyModel, config: Config, **kwargs: Any
77 changes: 61 additions & 16 deletions airbyte_cdk/sources/declarative/retrievers/file_uploader.py
@@ -1,44 +1,89 @@
#
# Copyright (c) 2025 Airbyte, Inc., all rights reserved.
#

import json
import logging
import uuid
from dataclasses import InitVar, dataclass, field
from pathlib import Path
from typing import Optional
from typing import Optional, Mapping, Union, Any

from airbyte_cdk.sources.declarative.interpolation.interpolated_string import (
InterpolatedString,
)
from airbyte_cdk.models import AirbyteRecordMessageFileReference
from airbyte_cdk.sources.declarative.extractors.record_extractor import RecordExtractor
from airbyte_cdk.sources.declarative.partition_routers.substream_partition_router import (
SafeResponse,
)
from airbyte_cdk.sources.declarative.requesters import Requester
from airbyte_cdk.sources.declarative.types import Record, StreamSlice
from airbyte_cdk.sources.types import Config
from airbyte_cdk.sources.utils.files_directory import get_files_directory

logger = logging.getLogger("airbyte")


@dataclass
class FileUploader:
def __init__(
self,
requester: Requester,
download_target_extractor: RecordExtractor,
content_extractor: Optional[RecordExtractor] = None,
) -> None:
self._requester = requester
self._download_target_extractor = download_target_extractor
self._content_extractor = content_extractor
requester: Requester
download_target_extractor: RecordExtractor
config: Config
parameters: InitVar[Mapping[str, Any]]

filename_extractor: Optional[Union[InterpolatedString, str]] = None
content_extractor: Optional[RecordExtractor] = None

def __post_init__(self, parameters: Mapping[str, Any]) -> None:
if self.filename_extractor:
self.filename_extractor = InterpolatedString.create(
self.filename_extractor,
parameters=parameters,
)

def upload(self, record: Record) -> None:
# TODO validate record shape - is the transformation applied at this point?
mocked_response = SafeResponse()
mocked_response.content = json.dumps(record.data)
download_target = list(self._download_target_extractor.extract_records(mocked_response))[0]
mocked_response.content = json.dumps(record.data).encode("utf-8")
download_target = list(self.download_target_extractor.extract_records(mocked_response))[0]
if not isinstance(download_target, str):
raise ValueError(
f"download_target is expected to be a str but was {type(download_target)}: {download_target}"
)

response = self._requester.send_request(
response = self.requester.send_request(
stream_slice=StreamSlice(
partition={}, cursor_slice={}, extra_fields={"download_target": download_target}
),
)

if self._content_extractor:
if self.content_extractor:
raise NotImplementedError("TODO")
else:
with open(str(Path(__file__).parent / record.data["file_name"]), "ab") as f:
files_directory = Path(get_files_directory())

file_name = (
self.filename_extractor.eval(self.config, record=record)
if self.filename_extractor
else str(uuid.uuid4())
)
file_name = file_name.lstrip("/")
file_relative_path = Path(record.stream_name) / Path(file_name)

full_path = files_directory / file_relative_path
full_path.parent.mkdir(parents=True, exist_ok=True)

with open(str(full_path), "wb") as f:
f.write(response.content)
file_size_bytes = full_path.stat().st_size

logger.info("File uploaded successfully")
logger.info(f"File url: {str(full_path)}")
logger.info(f"File size: {file_size_bytes / 1024} KB")
logger.info(f"File relative path: {str(file_relative_path)}")

record.file_reference = AirbyteRecordMessageFileReference(
file_url=str(full_path),
file_relative_path=str(file_relative_path),
file_size_bytes=file_size_bytes,
)
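As a closing illustration of the naming logic in upload(): filename_extractor is an interpolated string evaluated against the record, and the stream name is prepended before the file is written under the files directory. A hedged, self-contained sketch with made-up record values and an illustrative base directory:

from pathlib import Path

from airbyte_cdk.sources.declarative.interpolation.interpolated_string import InterpolatedString

# Evaluate the extractor the way upload() does; a plain dict stands in for the Record here.
extractor = InterpolatedString.create("{{ record.id }}_{{ record.file_name }}", parameters={})
file_name = extractor.eval(config={}, record={"id": 42, "file_name": "report.pdf"})  # "42_report.pdf"

# upload() then prepends the stream name and resolves the path under the files directory.
file_relative_path = Path("documents") / file_name            # documents/42_report.pdf
full_path = Path("/tmp/airbyte-files") / file_relative_path   # base dir actually comes from get_files_directory()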