fix(postgres-cache): Store timestamps with timezone #805
5 failed, 16 skipped, 347 passed in 18m 30s
368 tests in 1 suite (1 file): 347 ✅ passed · 16 💤 skipped · 5 ❌ failed · ⏱️ 18m 30s
Results for commit 26abdd9.
Annotations
Check warning on line 0 in tests.integration_tests.test_all_cache_types
github-actions / PyTest Results (Full)
test_faker_read[BigQuery] (tests.integration_tests.test_all_cache_types) failed
build/test-results/test-results.xml [took 13s]
Raw output
sqlalchemy.exc.UnsupportedCompilationError: Compiler <sqlalchemy_bigquery.base.BigQueryTypeCompiler object at 0x7fd1dc71e260> can't render element of type JSON (Background on this error at: https://sqlalche.me/e/20/l7de)
self = JSON()
visitor = <sqlalchemy_bigquery.base.BigQueryTypeCompiler object at 0x7fd1dc71e260>
kw = {}
def _compiler_dispatch(
self: Visitable, visitor: Any, **kw: Any
) -> str:
"""Look for an attribute named "visit_<visit_name>" on the
visitor, and call it with the same kw params.
"""
try:
> meth = getter(visitor)
E AttributeError: 'BigQueryTypeCompiler' object has no attribute 'visit_JSON'. Did you mean: 'visit_BLOB'?
.venv/lib/python3.10/site-packages/sqlalchemy/sql/visitors.py:134: AttributeError
The above exception was the direct cause of the following exception:
source_faker_seed_a = <airbyte.sources.base.Source object at 0x7fd22cfae920>
new_generic_cache = BigQueryCache(schema_name='test_deleteme_01k6jtesr', table_prefix='', cache_dir=PosixPath('/home/runner/work/PyAirbyte...nup=True, database_name='dataline-integration-testing', credentials_path='/tmp/tmp1mciox8i.txt', dataset_location='US')
tracer = <viztracer.viztracer.VizTracer object at 0x7fd1fc348c40>
@pytest.mark.requires_creds
@pytest.mark.slow
def test_faker_read(
source_faker_seed_a: ab.Source,
new_generic_cache: ab.caches.CacheBase,
tracer: VizTracer,
) -> None:
"""Test that the append strategy works as expected."""
with tracer:
> read_result = source_faker_seed_a.read(
new_generic_cache, write_strategy="replace", force_full_refresh=True
)
tests/integration_tests/test_all_cache_types.py:144:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
airbyte/sources/base.py:869: in read
result = self._read_to_cache(
airbyte/sources/base.py:953: in _read_to_cache
cache._write_airbyte_message_stream( # noqa: SLF001 # Non-public API
airbyte/caches/base.py:457: in _write_airbyte_message_stream
cache_processor.process_airbyte_messages(
airbyte/shared/sql_processor.py:331: in process_airbyte_messages
self._write_all_stream_data(
airbyte/shared/sql_processor.py:345: in _write_all_stream_data
self.write_stream_data(
airbyte/shared/sql_processor.py:750: in write_stream_data
final_table_name = self._ensure_final_table_exists(
airbyte/shared/sql_processor.py:640: in _ensure_final_table_exists
column_definition_str = ",\n ".join(
airbyte/shared/sql_processor.py:641: in <genexpr>
f"{self._quote_identifier(column_name)} {sql_type.compile(engine.dialect)}"
.venv/lib/python3.10/site-packages/sqlalchemy/sql/type_api.py:1092: in compile
return dialect.type_compiler_instance.process(self)
.venv/lib/python3.10/site-packages/sqlalchemy/sql/compiler.py:977: in process
return type_._compiler_dispatch(self, **kw)
.venv/lib/python3.10/site-packages/sqlalchemy/sql/visitors.py:136: in _compiler_dispatch
return visitor.visit_unsupported_compilation(self, err, **kw) # type: ignore # noqa: E501
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self = <sqlalchemy_bigquery.base.BigQueryTypeCompiler object at 0x7fd1dc71e260>
element = JSON()
err = AttributeError("'BigQueryTypeCompiler' object has no attribute 'visit_JSON'")
kw = {}
def visit_unsupported_compilation(
self, element: Any, err: Exception, **kw: Any
) -> NoReturn:
> raise exc.UnsupportedCompilationError(self, element) from err
E sqlalchemy.exc.UnsupportedCompilationError: Compiler <sqlalchemy_bigquery.base.BigQueryTypeCompiler object at 0x7fd1dc71e260> can't render element of type JSON (Background on this error at: https://sqlalche.me/e/20/l7de)
.venv/lib/python3.10/site-packages/sqlalchemy/sql/compiler.py:982: UnsupportedCompilationError
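Note: this failure and the four below share a single root cause. While building the final table DDL, airbyte/shared/sql_processor.py calls sql_type.compile(engine.dialect) on a generic sqlalchemy JSON() column, and sqlalchemy-bigquery's BigQueryTypeCompiler has no visit_JSON method, so SQLAlchemy raises UnsupportedCompilationError. A minimal sketch to reproduce the compilation error outside the test suite (assumes sqlalchemy and sqlalchemy-bigquery are installed; BigQueryDialect is the dialect class exported by sqlalchemy_bigquery):

# repro_json_compile.py: hypothetical standalone reproduction, not part of the test suite.
import sqlalchemy as sa
from sqlalchemy_bigquery import BigQueryDialect

json_type = sa.JSON()
try:
    # Same call shape as sql_processor.py:641, i.e. sql_type.compile(engine.dialect)
    print(json_type.compile(dialect=BigQueryDialect()))
except sa.exc.UnsupportedCompilationError as err:
    # Expected: "Compiler ... can't render element of type JSON"
    print(f"Reproduced: {err}")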
Check warning on line 0 in tests.integration_tests.test_all_cache_types
github-actions / PyTest Results (Full)
test_replace_strategy[BigQuery] (tests.integration_tests.test_all_cache_types) failed
build/test-results/test-results.xml [took 13s]
Raw output
sqlalchemy.exc.UnsupportedCompilationError: Compiler <sqlalchemy_bigquery.base.BigQueryTypeCompiler object at 0x7fd1a8da3dc0> can't render element of type JSON (Background on this error at: https://sqlalche.me/e/20/l7de)
self = JSON()
visitor = <sqlalchemy_bigquery.base.BigQueryTypeCompiler object at 0x7fd1a8da3dc0>
kw = {}
def _compiler_dispatch(
self: Visitable, visitor: Any, **kw: Any
) -> str:
"""Look for an attribute named "visit_<visit_name>" on the
visitor, and call it with the same kw params.
"""
try:
> meth = getter(visitor)
E AttributeError: 'BigQueryTypeCompiler' object has no attribute 'visit_JSON'. Did you mean: 'visit_BLOB'?
.venv/lib/python3.10/site-packages/sqlalchemy/sql/visitors.py:134: AttributeError
The above exception was the direct cause of the following exception:
source_faker_seed_a = <airbyte.sources.base.Source object at 0x7fd1a8da3160>
new_generic_cache = BigQueryCache(schema_name='test_deleteme_01k6jtnnw', table_prefix='', cache_dir=PosixPath('/home/runner/work/PyAirbyte...nup=True, database_name='dataline-integration-testing', credentials_path='/tmp/tmpppqqds3j.txt', dataset_location='US')
@pytest.mark.requires_creds
@pytest.mark.slow
def test_replace_strategy(
source_faker_seed_a: ab.Source,
new_generic_cache: ab.caches.CacheBase,
) -> None:
"""Test that the append strategy works as expected."""
result: ReadResult
for _ in range(2):
> result = source_faker_seed_a.read(
new_generic_cache, write_strategy="replace", force_full_refresh=True
)
tests/integration_tests/test_all_cache_types.py:201:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
airbyte/sources/base.py:869: in read
result = self._read_to_cache(
airbyte/sources/base.py:953: in _read_to_cache
cache._write_airbyte_message_stream( # noqa: SLF001 # Non-public API
airbyte/caches/base.py:457: in _write_airbyte_message_stream
cache_processor.process_airbyte_messages(
airbyte/shared/sql_processor.py:331: in process_airbyte_messages
self._write_all_stream_data(
airbyte/shared/sql_processor.py:345: in _write_all_stream_data
self.write_stream_data(
airbyte/shared/sql_processor.py:750: in write_stream_data
final_table_name = self._ensure_final_table_exists(
airbyte/shared/sql_processor.py:640: in _ensure_final_table_exists
column_definition_str = ",\n ".join(
airbyte/shared/sql_processor.py:641: in <genexpr>
f"{self._quote_identifier(column_name)} {sql_type.compile(engine.dialect)}"
.venv/lib/python3.10/site-packages/sqlalchemy/sql/type_api.py:1092: in compile
return dialect.type_compiler_instance.process(self)
.venv/lib/python3.10/site-packages/sqlalchemy/sql/compiler.py:977: in process
return type_._compiler_dispatch(self, **kw)
.venv/lib/python3.10/site-packages/sqlalchemy/sql/visitors.py:136: in _compiler_dispatch
return visitor.visit_unsupported_compilation(self, err, **kw) # type: ignore # noqa: E501
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self = <sqlalchemy_bigquery.base.BigQueryTypeCompiler object at 0x7fd1a8da3dc0>
element = JSON()
err = AttributeError("'BigQueryTypeCompiler' object has no attribute 'visit_JSON'")
kw = {}
def visit_unsupported_compilation(
self, element: Any, err: Exception, **kw: Any
) -> NoReturn:
> raise exc.UnsupportedCompilationError(self, element) from err
E sqlalchemy.exc.UnsupportedCompilationError: Compiler <sqlalchemy_bigquery.base.BigQueryTypeCompiler object at 0x7fd1a8da3dc0> can't render element of type JSON (Background on this error at: https://sqlalche.me/e/20/l7de)
.venv/lib/python3.10/site-packages/sqlalchemy/sql/compiler.py:982: UnsupportedCompilationError
Check warning on line 0 in tests.integration_tests.test_all_cache_types
github-actions / PyTest Results (Full)
test_cache_create_source_tables[BigQuery] (tests.integration_tests.test_all_cache_types) failed
build/test-results/test-results.xml [took 9s]
Raw output
sqlalchemy.exc.UnsupportedCompilationError: Compiler <sqlalchemy_bigquery.base.BigQueryTypeCompiler object at 0x7fd22e487610> can't render element of type JSON (Background on this error at: https://sqlalche.me/e/20/l7de)
self = JSON()
visitor = <sqlalchemy_bigquery.base.BigQueryTypeCompiler object at 0x7fd22e487610>
kw = {}
def _compiler_dispatch(
self: Visitable, visitor: Any, **kw: Any
) -> str:
"""Look for an attribute named "visit_<visit_name>" on the
visitor, and call it with the same kw params.
"""
try:
> meth = getter(visitor)
E AttributeError: 'BigQueryTypeCompiler' object has no attribute 'visit_JSON'. Did you mean: 'visit_BLOB'?
.venv/lib/python3.10/site-packages/sqlalchemy/sql/visitors.py:134: AttributeError
The above exception was the direct cause of the following exception:
source_faker_seed_a = <airbyte.sources.base.Source object at 0x7fd22cfe2ce0>
new_generic_cache = BigQueryCache(schema_name='test_deleteme_01k6jtyv3', table_prefix='', cache_dir=PosixPath('/home/runner/work/PyAirbyte...nup=True, database_name='dataline-integration-testing', credentials_path='/tmp/tmpe23_0r0f.txt', dataset_location='US')
@pytest.mark.requires_creds
@pytest.mark.slow
def test_cache_create_source_tables(
source_faker_seed_a: ab.Source,
new_generic_cache: ab.caches.CacheBase,
) -> None:
"""Test that the cache creation and source tables work as expected."""
> new_generic_cache.create_source_tables(source_faker_seed_a)
tests/integration_tests/test_all_cache_types.py:214:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
airbyte/caches/base.py:423: in create_source_tables
self.processor._ensure_final_table_exists( # noqa: SLF001
airbyte/shared/sql_processor.py:640: in _ensure_final_table_exists
column_definition_str = ",\n ".join(
airbyte/shared/sql_processor.py:641: in <genexpr>
f"{self._quote_identifier(column_name)} {sql_type.compile(engine.dialect)}"
.venv/lib/python3.10/site-packages/sqlalchemy/sql/type_api.py:1092: in compile
return dialect.type_compiler_instance.process(self)
.venv/lib/python3.10/site-packages/sqlalchemy/sql/compiler.py:977: in process
return type_._compiler_dispatch(self, **kw)
.venv/lib/python3.10/site-packages/sqlalchemy/sql/visitors.py:136: in _compiler_dispatch
return visitor.visit_unsupported_compilation(self, err, **kw) # type: ignore # noqa: E501
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self = <sqlalchemy_bigquery.base.BigQueryTypeCompiler object at 0x7fd22e487610>
element = JSON()
err = AttributeError("'BigQueryTypeCompiler' object has no attribute 'visit_JSON'")
kw = {}
def visit_unsupported_compilation(
self, element: Any, err: Exception, **kw: Any
) -> NoReturn:
> raise exc.UnsupportedCompilationError(self, element) from err
E sqlalchemy.exc.UnsupportedCompilationError: Compiler <sqlalchemy_bigquery.base.BigQueryTypeCompiler object at 0x7fd22e487610> can't render element of type JSON (Background on this error at: https://sqlalche.me/e/20/l7de)
.venv/lib/python3.10/site-packages/sqlalchemy/sql/compiler.py:982: UnsupportedCompilationError
Check warning on line 0 in tests.integration_tests.test_all_cache_types
github-actions / PyTest Results (Full)
test_merge_strategy[BigQuery] (tests.integration_tests.test_all_cache_types) failed
build/test-results/test-results.xml [took 18s]
Raw output
sqlalchemy.exc.UnsupportedCompilationError: Compiler <sqlalchemy_bigquery.base.BigQueryTypeCompiler object at 0x7fd22e7b8610> can't render element of type JSON (Background on this error at: https://sqlalche.me/e/20/l7de)
self = JSON()
visitor = <sqlalchemy_bigquery.base.BigQueryTypeCompiler object at 0x7fd22e7b8610>
kw = {}
def _compiler_dispatch(
self: Visitable, visitor: Any, **kw: Any
) -> str:
"""Look for an attribute named "visit_<visit_name>" on the
visitor, and call it with the same kw params.
"""
try:
> meth = getter(visitor)
E AttributeError: 'BigQueryTypeCompiler' object has no attribute 'visit_JSON'. Did you mean: 'visit_BLOB'?
.venv/lib/python3.10/site-packages/sqlalchemy/sql/visitors.py:134: AttributeError
The above exception was the direct cause of the following exception:
source_faker_seed_a = <airbyte.sources.base.Source object at 0x7fd22d09b850>
source_faker_seed_b = <airbyte.sources.base.Source object at 0x7fd22e7b9180>
new_generic_cache = BigQueryCache(schema_name='test_deleteme_01k6jtaa8', table_prefix='', cache_dir=PosixPath('/home/runner/work/PyAirbyte...nup=True, database_name='dataline-integration-testing', credentials_path='/tmp/tmpapkcbz6_.txt', dataset_location='US')
@pytest.mark.requires_creds
@pytest.mark.slow
def test_merge_strategy(
source_faker_seed_a: ab.Source,
source_faker_seed_b: ab.Source,
new_generic_cache: ab.caches.CacheBase,
) -> None:
"""Test that the merge strategy works as expected.
Since all streams have primary keys, we should expect the auto strategy to be identical to the
merge strategy.
"""
assert new_generic_cache, "Cache should not be None."
# First run, seed A (counts should match the scale or the product count)
> result = source_faker_seed_a.read(new_generic_cache, write_strategy="merge")
tests/integration_tests/test_all_cache_types.py:236:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
airbyte/sources/base.py:869: in read
result = self._read_to_cache(
airbyte/sources/base.py:953: in _read_to_cache
cache._write_airbyte_message_stream( # noqa: SLF001 # Non-public API
airbyte/caches/base.py:457: in _write_airbyte_message_stream
cache_processor.process_airbyte_messages(
airbyte/shared/sql_processor.py:331: in process_airbyte_messages
self._write_all_stream_data(
airbyte/shared/sql_processor.py:345: in _write_all_stream_data
self.write_stream_data(
airbyte/shared/sql_processor.py:750: in write_stream_data
final_table_name = self._ensure_final_table_exists(
airbyte/shared/sql_processor.py:640: in _ensure_final_table_exists
column_definition_str = ",\n ".join(
airbyte/shared/sql_processor.py:641: in <genexpr>
f"{self._quote_identifier(column_name)} {sql_type.compile(engine.dialect)}"
.venv/lib/python3.10/site-packages/sqlalchemy/sql/type_api.py:1092: in compile
return dialect.type_compiler_instance.process(self)
.venv/lib/python3.10/site-packages/sqlalchemy/sql/compiler.py:977: in process
return type_._compiler_dispatch(self, **kw)
.venv/lib/python3.10/site-packages/sqlalchemy/sql/visitors.py:136: in _compiler_dispatch
return visitor.visit_unsupported_compilation(self, err, **kw) # type: ignore # noqa: E501
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self = <sqlalchemy_bigquery.base.BigQueryTypeCompiler object at 0x7fd22e7b8610>
element = JSON()
err = AttributeError("'BigQueryTypeCompiler' object has no attribute 'visit_JSON'")
kw = {}
def visit_unsupported_compilation(
self, element: Any, err: Exception, **kw: Any
) -> NoReturn:
> raise exc.UnsupportedCompilationError(self, element) from err
E sqlalchemy.exc.UnsupportedCompilationError: Compiler <sqlalchemy_bigquery.base.BigQueryTypeCompiler object at 0x7fd22e7b8610> can't render element of type JSON (Background on this error at: https://sqlalche.me/e/20/l7de)
.venv/lib/python3.10/site-packages/sqlalchemy/sql/compiler.py:982: UnsupportedCompilationError
Check warning on line 0 in tests.integration_tests.test_all_cache_types
github-actions / PyTest Results (Full)
test_auto_add_columns[BigQuery] (tests.integration_tests.test_all_cache_types) failed
build/test-results/test-results.xml [took 15s]
Raw output
sqlalchemy.exc.UnsupportedCompilationError: Compiler <sqlalchemy_bigquery.base.BigQueryTypeCompiler object at 0x7fd22cf6d3f0> can't render element of type JSON (Background on this error at: https://sqlalche.me/e/20/l7de)
self = JSON()
visitor = <sqlalchemy_bigquery.base.BigQueryTypeCompiler object at 0x7fd22cf6d3f0>
kw = {}
def _compiler_dispatch(
self: Visitable, visitor: Any, **kw: Any
) -> str:
"""Look for an attribute named "visit_<visit_name>" on the
visitor, and call it with the same kw params.
"""
try:
> meth = getter(visitor)
E AttributeError: 'BigQueryTypeCompiler' object has no attribute 'visit_JSON'. Did you mean: 'visit_BLOB'?
.venv/lib/python3.10/site-packages/sqlalchemy/sql/visitors.py:134: AttributeError
The above exception was the direct cause of the following exception:
source_faker_seed_a = <airbyte.sources.base.Source object at 0x7fd22e75dcc0>
new_generic_cache = BigQueryCache(schema_name='test_deleteme_01k6jtd7s', table_prefix='', cache_dir=PosixPath('/home/runner/work/PyAirbyte...nup=True, database_name='dataline-integration-testing', credentials_path='/tmp/tmp42a1dbwb.txt', dataset_location='US')
@pytest.mark.requires_creds
@pytest.mark.slow
def test_auto_add_columns(
source_faker_seed_a: ab.Source,
new_generic_cache: ab.caches.CacheBase,
) -> None:
"""Test that the auto-add columns works as expected."""
# Start with a normal read.
> result = source_faker_seed_a.read(
new_generic_cache,
write_strategy="auto",
)
tests/integration_tests/test_all_cache_types.py:264:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
airbyte/sources/base.py:869: in read
result = self._read_to_cache(
airbyte/sources/base.py:953: in _read_to_cache
cache._write_airbyte_message_stream( # noqa: SLF001 # Non-public API
airbyte/caches/base.py:457: in _write_airbyte_message_stream
cache_processor.process_airbyte_messages(
airbyte/shared/sql_processor.py:331: in process_airbyte_messages
self._write_all_stream_data(
airbyte/shared/sql_processor.py:345: in _write_all_stream_data
self.write_stream_data(
airbyte/shared/sql_processor.py:750: in write_stream_data
final_table_name = self._ensure_final_table_exists(
airbyte/shared/sql_processor.py:640: in _ensure_final_table_exists
column_definition_str = ",\n ".join(
airbyte/shared/sql_processor.py:641: in <genexpr>
f"{self._quote_identifier(column_name)} {sql_type.compile(engine.dialect)}"
.venv/lib/python3.10/site-packages/sqlalchemy/sql/type_api.py:1092: in compile
return dialect.type_compiler_instance.process(self)
.venv/lib/python3.10/site-packages/sqlalchemy/sql/compiler.py:977: in process
return type_._compiler_dispatch(self, **kw)
.venv/lib/python3.10/site-packages/sqlalchemy/sql/visitors.py:136: in _compiler_dispatch
return visitor.visit_unsupported_compilation(self, err, **kw) # type: ignore # noqa: E501
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self = <sqlalchemy_bigquery.base.BigQueryTypeCompiler object at 0x7fd22cf6d3f0>
element = JSON()
err = AttributeError("'BigQueryTypeCompiler' object has no attribute 'visit_JSON'")
kw = {}
def visit_unsupported_compilation(
self, element: Any, err: Exception, **kw: Any
) -> NoReturn:
> raise exc.UnsupportedCompilationError(self, element) from err
E sqlalchemy.exc.UnsupportedCompilationError: Compiler <sqlalchemy_bigquery.base.BigQueryTypeCompiler object at 0x7fd22cf6d3f0> can't render element of type JSON (Background on this error at: https://sqlalche.me/e/20/l7de)
.venv/lib/python3.10/site-packages/sqlalchemy/sql/compiler.py:982: UnsupportedCompilationError
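One possible way to unblock these BigQuery tests, sketched below, is to register a type-level compilation hook so the generic JSON type renders as BigQuery's native JSON column type. The @compiles hook is SQLAlchemy's standard custom-compilation extension, but mapping JSON to the literal "JSON" DDL keyword for the "bigquery" dialect is an assumption here, not the change made in this PR (which targets Postgres cache timestamp handling).

# Hypothetical workaround sketch; not the change made in this PR.
import sqlalchemy as sa
from sqlalchemy.ext.compiler import compiles

@compiles(sa.JSON, "bigquery")
def _compile_json_for_bigquery(type_, compiler, **kw):
    # Render the generic JSON type as BigQuery's native JSON column type.
    return "JSON"

An alternative would be for the BigQuery cache's type conversion to emit a dialect-supported type (for example STRING) instead of the generic JSON type before DDL compilation.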