Skip to content
Merged
Show file tree
Hide file tree
Changes from 2 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
17 changes: 13 additions & 4 deletions tap_quickbooks/quickbooks/reportstreams/BaseReport.py
Original file line number Diff line number Diff line change
Expand Up @@ -30,12 +30,21 @@ def concurrent_get(self, report_entity, params):
else:
return response

def _get_column_metadata(self, resp):
def _get_column_metadata(self, resp, schema=None):
columns = []
for column in resp.get("Columns").get("Column"):
if column.get("ColTitle") == "Memo/Description":
columns.append("Memo")
# To handle multiple languages if schema is passed, always convert Col Titles to english
if schema is not None:
col_type = column["MetaData"][0].get("Value") if column.get("MetaData") else None
if not col_type:
LOGGER.info(f"Metadata for col {column.get('ColTitle')} not found, skipping.")
continue
# append col to columns
columns.append(schema.get(col_type))
else:
columns.append(column.get("ColTitle").replace(" ", ""))
if column.get("ColTitle") == "Memo/Description":
columns.append("Memo")
else:
columns.append(column.get("ColTitle").replace(" ", ""))
columns.append("Categories")
return columns
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@
import logging
import concurrent.futures
from calendar import monthrange
from tap_quickbooks.util import read_json_file


LOGGER = singer.get_logger()
Expand All @@ -19,6 +20,7 @@ class GeneralLedgerReport(BaseReportStream):
replication_method: ClassVar[str] = "FULL_TABLE"
gl_weekly = False
gl_daily = False
schema_file = "quickbooks/reportstreams/english_schemas/GeneralLedgerReportFields.json"


def _recursive_row_search(self, row, output, categories):
Expand Down Expand Up @@ -144,6 +146,9 @@ def sync(self, catalog_entry):
"columns": ",".join(cols),
}

# read col types from GeneralLedgerReportFields.json
eng_schema = read_json_file(self.schema_file)

if full_sync or self.qb.gl_full_sync:
LOGGER.info(f"Starting full sync of GeneralLedgerReport")
start_date = self.start_date
Expand Down Expand Up @@ -220,7 +225,7 @@ def sync(self, catalog_entry):
self.gl_daily = False

# Get column metadata.
columns = self._get_column_metadata(r)
columns = self._get_column_metadata(r, eng_schema)

# Recursively get row data.
row_group = r.get("Rows")
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,41 @@
{
"tx_date": "Date",
"txn_type": "TransactionType",
"doc_num": "Num",
"is_adj": "Adj",
"create_date": "CreateDate",
"create_by": "CreatedBy",
"last_mod_date": "LastModified",
"last_mod_by": "LastModifiedBy",
"name": "Name",
"cust_name": "Customer",
"emp_name": "Employee",
"item_name": "Product/Service",
"memo": "Memo",
"quantity": "Qty",
"rate": "Rate",
"account_name": "Account",
"split_acc": "Split",
"inv_date": "Invoice Date",
"is_ar_paid": "A/RPaid",
"is_ap_paid": "A/PPaid",
"is_cleared": "Clr",
"chk_print_state": "CheckPrinted",
"debt_home_amt": "Debit",
"credit_home_amt": "Credit",
"nat_home_open_bal": "OpenBalance",
"subt_nat_home_amount": "Amount",
"rbal_nat_home_amount": "Balance",
"exch_rate": "ExchangeRate",
"currency": "Currency",
"home_tax_amount": "TaxAmount",
"home_net_amount": "TaxableAmount",
"nat_foreign_open_bal": "ForeignOpenBalance",
"credit_amt": "ForeignCredit",
"nat_foreign_amount": "ForeignAmount",
"vend_name": "Vendor",
"dept_name": "Department",
"klass_name": "Class",
"account_num": "Account#",
"debt_amt": "ForeignDebit"
}
12 changes: 12 additions & 0 deletions tap_quickbooks/util.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,8 @@
_log_thread: Optional[threading.Thread] = None
_stop_event = threading.Event()

__location__ = os.path.realpath(os.path.join(os.getcwd(), os.path.dirname(__file__)))

def _log_writer():
"""Background thread that writes logs to file."""
LOG_FILE_PATH.parent.mkdir(parents=True, exist_ok=True)
Expand Down Expand Up @@ -69,3 +71,13 @@ def cleanup():
_log_queue.join()
if _log_thread:
_log_thread.join(timeout=2)

def read_json_file(filename):
    """Load and parse a JSON file shipped alongside this module.

    Args:
        filename: Path of the JSON file, relative to this module's directory
            (e.g. ``"quickbooks/reportstreams/english_schemas/...json"``).

    Returns:
        The parsed JSON content (typically a dict).

    Raises:
        FileNotFoundError: If the file does not exist.
        json.JSONDecodeError: If the file is not valid JSON.
    """
    # Resolve relative to the module directory, not the process CWD, so the
    # tap finds its bundled schema files regardless of where it is launched.
    base_dir = os.path.realpath(os.path.dirname(os.path.abspath(__file__)))
    # Bug fix: the previous version opened a literal placeholder path instead
    # of interpolating the *filename* argument, so the parameter was ignored
    # and every call targeted a nonexistent file.
    with open(os.path.join(base_dir, filename), "r") as fp:
        # json.load parses the stream directly; no need to read() then loads().
        return json.load(fp)