diff --git a/docker/base/Dockerfile.duckdb-dbt b/docker/base/Dockerfile.duckdb-dbt index cd6b0a1c..4b6a9240 100644 --- a/docker/base/Dockerfile.duckdb-dbt +++ b/docker/base/Dockerfile.duckdb-dbt @@ -4,6 +4,9 @@ RUN apt-get update && apt-get install -y \ git \ tmux asciinema \ curl \ + && curl -fsSL https://github.com/mikefarah/yq/releases/latest/download/yq_linux_amd64 \ + -o /usr/bin/yq \ + && chmod +x /usr/bin/yq \ && curl -fsSL https://deb.nodesource.com/setup_22.x | bash - \ && apt-get install -y nodejs \ && rm -rf /var/lib/apt/lists/* @@ -25,4 +28,4 @@ RUN pip install --no-cache-dir \ RUN mkdir -p /installed-agent /scripts /sage/solutions /sage /app /app/setup /app/migrations /seeds /solutions /logs /tests WORKDIR /app -CMD ["bash"] \ No newline at end of file +CMD ["bash"] diff --git a/shared/migrations/quickbooks__duckdb_to_snowflake/migration.sh b/shared/migrations/quickbooks__duckdb_to_snowflake/migration.sh new file mode 100644 index 00000000..c5b0fdde --- /dev/null +++ b/shared/migrations/quickbooks__duckdb_to_snowflake/migration.sh @@ -0,0 +1,10 @@ +#!/bin/bash + +# Update primary schema in dbt_project.yml file +yq -i '.vars.quickbooks_schema = "public"' dbt_project.yml + +# Copy Snowflake-specific solution models that handle epoch-to-timestamp conversion +MIGRATION_DIR="$(dirname "$(readlink -f "${BASH_SOURCE[0]}")")" +cp "$MIGRATION_DIR"/solutions/stg_quickbooks__refund_receipt.sql solutions/ +cp "$MIGRATION_DIR"/solutions/stg_quickbooks__sales_receipt.sql solutions/ +cp "$MIGRATION_DIR"/solutions/stg_quickbooks__estimate.sql solutions/ diff --git a/shared/migrations/quickbooks__duckdb_to_snowflake/solutions/stg_quickbooks__estimate.sql b/shared/migrations/quickbooks__duckdb_to_snowflake/solutions/stg_quickbooks__estimate.sql new file mode 100644 index 00000000..621a3f5e --- /dev/null +++ b/shared/migrations/quickbooks__duckdb_to_snowflake/solutions/stg_quickbooks__estimate.sql @@ -0,0 +1,61 @@ +--To disable this model, set the using_estimate variable within 
your dbt_project.yml file to False. +{{ config(enabled=var('using_estimate', True)) }} + +with base as ( + + select * + from {{ ref('stg_quickbooks__estimate_tmp') }} + +), + +fields as ( + + select + /* + The below macro is used to generate the correct SQL for package staging models. It takes a list of columns + that are expected/needed (staging_columns from dbt_quickbooks_source/models/tmp/) and compares it with columns + in the source (source_columns from dbt_quickbooks_source/macros/). + For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git). + */ + + {{ + fivetran_utils.fill_staging_columns( + source_columns=adapter.get_columns_in_relation(ref('stg_quickbooks__estimate_tmp')), + staging_columns=quickbooks_source.get_estimate_columns() + ) + }} + + {{ + fivetran_utils.source_relation( + union_schema_variable='quickbooks_union_schemas', + union_database_variable='quickbooks_union_databases' + ) + }} + + from base +), + +final as ( + + select + cast(id as {{ dbt.type_string() }}) as estimate_id, + cast(class_id as {{ dbt.type_string() }}) as class_id, + created_at, + currency_id, + customer_id, + cast(department_id as {{ dbt.type_string() }}) as department_id, + -- Convert unix epoch to timestamp, then truncate to date + cast( {{ dbt.date_trunc('day', 'TO_TIMESTAMP_NTZ(due_date)') }} as date) as due_date, + exchange_rate, + total_amount, + cast( {{ dbt.date_trunc('day', 'transaction_date') }} as date) as transaction_date, + transaction_status, + _fivetran_deleted, + source_relation + from fields +) + +select * +from final +where not coalesce(_fivetran_deleted, false) + diff --git a/shared/migrations/quickbooks__duckdb_to_snowflake/solutions/stg_quickbooks__refund_receipt.sql b/shared/migrations/quickbooks__duckdb_to_snowflake/solutions/stg_quickbooks__refund_receipt.sql new file mode 100644 index 00000000..55dd8a6b --- /dev/null +++ 
b/shared/migrations/quickbooks__duckdb_to_snowflake/solutions/stg_quickbooks__refund_receipt.sql @@ -0,0 +1,62 @@ +--To disable this model, set the using_refund_receipt variable within your dbt_project.yml file to False. +{{ config(enabled=var('using_refund_receipt', True)) }} + +with base as ( + + select * + from {{ ref('stg_quickbooks__refund_receipt_tmp') }} + +), + +fields as ( + + select + /* + The below macro is used to generate the correct SQL for package staging models. It takes a list of columns + that are expected/needed (staging_columns from dbt_quickbooks_source/models/tmp/) and compares it with columns + in the source (source_columns from dbt_quickbooks_source/macros/). + For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git). + */ + + {{ + fivetran_utils.fill_staging_columns( + source_columns=adapter.get_columns_in_relation(ref('stg_quickbooks__refund_receipt_tmp')), + staging_columns=quickbooks_source.get_refund_receipt_columns() + ) + }} + + {{ + fivetran_utils.source_relation( + union_schema_variable='quickbooks_union_schemas', + union_database_variable='quickbooks_union_databases' + ) + }} + + from base +), + +final as ( + + select + cast(id as {{ dbt.type_string() }}) as refund_id, + balance, + cast(doc_number as {{ dbt.type_string() }}) as doc_number, + total_amount, + cast(class_id as {{ dbt.type_string() }}) as class_id, + cast(deposit_to_account_id as {{ dbt.type_string() }}) as deposit_to_account_id, + created_at, + cast(department_id as {{ dbt.type_string() }}) as department_id, + cast(customer_id as {{ dbt.type_string() }}) as customer_id, + currency_id, + exchange_rate, + -- Convert unix epoch to timestamp, then truncate to date + cast( {{ dbt.date_trunc('day', 'TO_TIMESTAMP_NTZ(transaction_date)') }} as date) as transaction_date, + _fivetran_deleted, + source_relation + from fields +) + +select * +from final +where not coalesce(_fivetran_deleted, false) + diff --git 
a/shared/migrations/quickbooks__duckdb_to_snowflake/solutions/stg_quickbooks__sales_receipt.sql b/shared/migrations/quickbooks__duckdb_to_snowflake/solutions/stg_quickbooks__sales_receipt.sql new file mode 100644 index 00000000..71d04e4c --- /dev/null +++ b/shared/migrations/quickbooks__duckdb_to_snowflake/solutions/stg_quickbooks__sales_receipt.sql @@ -0,0 +1,60 @@ +{{ config(enabled=var('using_sales_receipt', True)) }} + +with base as ( + + select * + from {{ ref('stg_quickbooks__sales_receipt_tmp') }} + +), + +fields as ( + + select + /* + The below macro is used to generate the correct SQL for package staging models. It takes a list of columns + that are expected/needed (staging_columns from dbt_quickbooks_source/models/tmp/) and compares it with columns + in the source (source_columns from dbt_quickbooks_source/macros/). + For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git). + */ + + {{ + fivetran_utils.fill_staging_columns( + source_columns=adapter.get_columns_in_relation(ref('stg_quickbooks__sales_receipt_tmp')), + staging_columns=quickbooks_source.get_sales_receipt_columns() + ) + }} + + {{ + fivetran_utils.source_relation( + union_schema_variable='quickbooks_union_schemas', + union_database_variable='quickbooks_union_databases' + ) + }} + from base +), + +final as ( + + select + cast(id as {{ dbt.type_string() }}) as sales_receipt_id, + balance, + cast(doc_number as {{ dbt.type_string() }}) as doc_number, + total_amount, + cast(deposit_to_account_id as {{ dbt.type_string() }}) as deposit_to_account_id, + created_at, + cast(customer_id as {{ dbt.type_string() }}) as customer_id, + cast(department_id as {{ dbt.type_string() }}) as department_id, + cast(class_id as {{ dbt.type_string() }}) as class_id, + currency_id, + exchange_rate, + -- Convert unix epoch to timestamp, then truncate to date + cast( {{ dbt.date_trunc('day', 'TO_TIMESTAMP_NTZ(transaction_date)') }} as date) as 
transaction_date, + _fivetran_deleted, + source_relation + from fields +) + +select * +from final +where not coalesce(_fivetran_deleted, false) + diff --git a/shared/migrations/quickbooks__duckdb_to_snowflake_dbtf/migration.sh b/shared/migrations/quickbooks__duckdb_to_snowflake_dbtf/migration.sh new file mode 100644 index 00000000..c5b0fdde --- /dev/null +++ b/shared/migrations/quickbooks__duckdb_to_snowflake_dbtf/migration.sh @@ -0,0 +1,10 @@ +#!/bin/bash + +# Update primary schema in dbt_project.yml file +yq -i '.vars.quickbooks_schema = "public"' dbt_project.yml + +# Copy Snowflake-specific solution models that handle epoch-to-timestamp conversion +MIGRATION_DIR="$(dirname "$(readlink -f "${BASH_SOURCE[0]}")")" +cp "$MIGRATION_DIR"/solutions/stg_quickbooks__refund_receipt.sql solutions/ +cp "$MIGRATION_DIR"/solutions/stg_quickbooks__sales_receipt.sql solutions/ +cp "$MIGRATION_DIR"/solutions/stg_quickbooks__estimate.sql solutions/ diff --git a/shared/migrations/quickbooks__duckdb_to_snowflake_dbtf/solutions/stg_quickbooks__estimate.sql b/shared/migrations/quickbooks__duckdb_to_snowflake_dbtf/solutions/stg_quickbooks__estimate.sql new file mode 100644 index 00000000..621a3f5e --- /dev/null +++ b/shared/migrations/quickbooks__duckdb_to_snowflake_dbtf/solutions/stg_quickbooks__estimate.sql @@ -0,0 +1,61 @@ +--To disable this model, set the using_estimate variable within your dbt_project.yml file to False. +{{ config(enabled=var('using_estimate', True)) }} + +with base as ( + + select * + from {{ ref('stg_quickbooks__estimate_tmp') }} + +), + +fields as ( + + select + /* + The below macro is used to generate the correct SQL for package staging models. It takes a list of columns + that are expected/needed (staging_columns from dbt_quickbooks_source/models/tmp/) and compares it with columns + in the source (source_columns from dbt_quickbooks_source/macros/). 
+ For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git). + */ + + {{ + fivetran_utils.fill_staging_columns( + source_columns=adapter.get_columns_in_relation(ref('stg_quickbooks__estimate_tmp')), + staging_columns=quickbooks_source.get_estimate_columns() + ) + }} + + {{ + fivetran_utils.source_relation( + union_schema_variable='quickbooks_union_schemas', + union_database_variable='quickbooks_union_databases' + ) + }} + + from base +), + +final as ( + + select + cast(id as {{ dbt.type_string() }}) as estimate_id, + cast(class_id as {{ dbt.type_string() }}) as class_id, + created_at, + currency_id, + customer_id, + cast(department_id as {{ dbt.type_string() }}) as department_id, + -- Convert unix epoch to timestamp, then truncate to date + cast( {{ dbt.date_trunc('day', 'TO_TIMESTAMP_NTZ(due_date)') }} as date) as due_date, + exchange_rate, + total_amount, + cast( {{ dbt.date_trunc('day', 'transaction_date') }} as date) as transaction_date, + transaction_status, + _fivetran_deleted, + source_relation + from fields +) + +select * +from final +where not coalesce(_fivetran_deleted, false) + diff --git a/shared/migrations/quickbooks__duckdb_to_snowflake_dbtf/solutions/stg_quickbooks__refund_receipt.sql b/shared/migrations/quickbooks__duckdb_to_snowflake_dbtf/solutions/stg_quickbooks__refund_receipt.sql new file mode 100644 index 00000000..55dd8a6b --- /dev/null +++ b/shared/migrations/quickbooks__duckdb_to_snowflake_dbtf/solutions/stg_quickbooks__refund_receipt.sql @@ -0,0 +1,62 @@ +--To disable this model, set the using_refund_receipt variable within your dbt_project.yml file to False. +{{ config(enabled=var('using_refund_receipt', True)) }} + +with base as ( + + select * + from {{ ref('stg_quickbooks__refund_receipt_tmp') }} + +), + +fields as ( + + select + /* + The below macro is used to generate the correct SQL for package staging models. 
It takes a list of columns + that are expected/needed (staging_columns from dbt_quickbooks_source/models/tmp/) and compares it with columns + in the source (source_columns from dbt_quickbooks_source/macros/). + For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git). + */ + + {{ + fivetran_utils.fill_staging_columns( + source_columns=adapter.get_columns_in_relation(ref('stg_quickbooks__refund_receipt_tmp')), + staging_columns=quickbooks_source.get_refund_receipt_columns() + ) + }} + + {{ + fivetran_utils.source_relation( + union_schema_variable='quickbooks_union_schemas', + union_database_variable='quickbooks_union_databases' + ) + }} + + from base +), + +final as ( + + select + cast(id as {{ dbt.type_string() }}) as refund_id, + balance, + cast(doc_number as {{ dbt.type_string() }}) as doc_number, + total_amount, + cast(class_id as {{ dbt.type_string() }}) as class_id, + cast(deposit_to_account_id as {{ dbt.type_string() }}) as deposit_to_account_id, + created_at, + cast(department_id as {{ dbt.type_string() }}) as department_id, + cast(customer_id as {{ dbt.type_string() }}) as customer_id, + currency_id, + exchange_rate, + -- Convert unix epoch to timestamp, then truncate to date + cast( {{ dbt.date_trunc('day', 'TO_TIMESTAMP_NTZ(transaction_date)') }} as date) as transaction_date, + _fivetran_deleted, + source_relation + from fields +) + +select * +from final +where not coalesce(_fivetran_deleted, false) + diff --git a/shared/migrations/quickbooks__duckdb_to_snowflake_dbtf/solutions/stg_quickbooks__sales_receipt.sql b/shared/migrations/quickbooks__duckdb_to_snowflake_dbtf/solutions/stg_quickbooks__sales_receipt.sql new file mode 100644 index 00000000..71d04e4c --- /dev/null +++ b/shared/migrations/quickbooks__duckdb_to_snowflake_dbtf/solutions/stg_quickbooks__sales_receipt.sql @@ -0,0 +1,60 @@ +{{ config(enabled=var('using_sales_receipt', True)) }} + +with base as ( + + select * + from {{ 
ref('stg_quickbooks__sales_receipt_tmp') }} + +), + +fields as ( + + select + /* + The below macro is used to generate the correct SQL for package staging models. It takes a list of columns + that are expected/needed (staging_columns from dbt_quickbooks_source/models/tmp/) and compares it with columns + in the source (source_columns from dbt_quickbooks_source/macros/). + For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git). + */ + + {{ + fivetran_utils.fill_staging_columns( + source_columns=adapter.get_columns_in_relation(ref('stg_quickbooks__sales_receipt_tmp')), + staging_columns=quickbooks_source.get_sales_receipt_columns() + ) + }} + + {{ + fivetran_utils.source_relation( + union_schema_variable='quickbooks_union_schemas', + union_database_variable='quickbooks_union_databases' + ) + }} + from base +), + +final as ( + + select + cast(id as {{ dbt.type_string() }}) as sales_receipt_id, + balance, + cast(doc_number as {{ dbt.type_string() }}) as doc_number, + total_amount, + cast(deposit_to_account_id as {{ dbt.type_string() }}) as deposit_to_account_id, + created_at, + cast(customer_id as {{ dbt.type_string() }}) as customer_id, + cast(department_id as {{ dbt.type_string() }}) as department_id, + cast(class_id as {{ dbt.type_string() }}) as class_id, + currency_id, + exchange_rate, + -- Convert unix epoch to timestamp, then truncate to date + cast( {{ dbt.date_trunc('day', 'TO_TIMESTAMP_NTZ(transaction_date)') }} as date) as transaction_date, + _fivetran_deleted, + source_relation + from fields +) + +select * +from final +where not coalesce(_fivetran_deleted, false) + diff --git a/shared/projects/dbt/quickbooks/models/quickbooks.yml b/shared/projects/dbt/quickbooks/models/quickbooks.yml index 0b5964b3..2f7369b3 100644 --- a/shared/projects/dbt/quickbooks/models/quickbooks.yml +++ b/shared/projects/dbt/quickbooks/models/quickbooks.yml @@ -105,10 +105,11 @@ models: description: "{{ 
doc('ap_ar_enhanced_table') }}" tests: - dbt_utils.unique_combination_of_columns: - combination_of_columns: - - transaction_id - - source_relation - - estimate_id + arguments: + combination_of_columns: + - transaction_id + - source_relation + - estimate_id columns: - name: transaction_type description: Set to 'bill' of the transaction is a bill and 'invoice' if the transaction is an invoice. @@ -168,7 +169,8 @@ models: description: "{{ doc('expenses_sales_enhanced_table') }}" tests: - dbt_utils.unique_combination_of_columns: - combination_of_columns: + arguments: + combination_of_columns: - transaction_id - source_relation - transaction_line_id diff --git a/tasks/quickbooks001/seeds/_no-op.txt b/tasks/quickbooks001/seeds/_no-op.txt index bd4f12c1..ca748a16 100644 --- a/tasks/quickbooks001/seeds/_no-op.txt +++ b/tasks/quickbooks001/seeds/_no-op.txt @@ -69,3 +69,50 @@ seeds: recent_payment_date: date total_current_payment: double total_current_converted_payment: double + solution__stg_quickbooks__sales_receipt: + +column_types: + sales_receipt_id: varchar + balance: integer + doc_number: varchar + total_amount: integer + deposit_to_account_id: varchar + created_at: timestamp + customer_id: varchar + department_id: varchar + class_id: varchar + currency_id: varchar + exchange_rate: integer + transaction_date: date + _fivetran_deleted: boolean + source_relation: varchar + solution__stg_quickbooks__refund_receipt: + +column_types: + refund_id: varchar + balance: integer + doc_number: varchar + total_amount: integer + class_id: varchar + deposit_to_account_id: varchar + created_at: timestamp + department_id: varchar + customer_id: varchar + currency_id: varchar + exchange_rate: integer + transaction_date: date + _fivetran_deleted: boolean + source_relation: varchar + solution__stg_quickbooks__estimate: + +column_types: + estimate_id: varchar + class_id: varchar + created_at: timestamp + currency_id: varchar + customer_id: integer + department_id: varchar + due_date: date 
+ exchange_rate: integer + total_amount: double + transaction_date: date + transaction_status: varchar + _fivetran_deleted: boolean + source_relation: varchar diff --git a/tasks/quickbooks001/seeds/solution__stg_quickbooks__estimate.csv b/tasks/quickbooks001/seeds/solution__stg_quickbooks__estimate.csv new file mode 100644 index 00000000..e6d59049 --- /dev/null +++ b/tasks/quickbooks001/seeds/solution__stg_quickbooks__estimate.csv @@ -0,0 +1,11 @@ +estimate_id,class_id,created_at,currency_id,customer_id,department_id,due_date,exchange_rate,total_amount,transaction_date,transaction_status,_fivetran_deleted,source_relation +40085,,2021-01-18 23:34:30,USD,4953,,2021-01-18,,29690.0,2021-01-18,Accepted,false,"" +40087,,2021-01-18 23:54:01,USD,5073,,2021-01-18,,77282.0,2021-01-18,Accepted,false,"" +40079,,2021-01-18 23:09:42,USD,5121,,2021-01-18,,26564.0,2021-01-18,Accepted,false,"" +40091,,2021-01-19 00:11:28,USD,4895,,2021-01-19,,34363.5,2021-01-18,Accepted,false,"" +40083,,2021-01-18 23:22:28,USD,4547,,2021-01-18,,32645.0,2021-01-18,Accepted,false,"" +40081,,2021-01-18 23:15:52,USD,4132,,2021-01-18,,28760.0,2021-01-18,Accepted,false,"" +40089,,2021-01-19 00:05:10,USD,3226,,2021-01-19,,49442.0,2021-01-18,Accepted,false,"" +40095,,2021-01-19 00:43:48,USD,4892,,2021-01-19,,27832.0,2021-01-18,Accepted,false,"" +40093,,2021-01-19 00:24:12,USD,4536,,2021-01-19,,56661.5,2021-01-18,Accepted,false,"" +40112,,2021-01-19 20:49:07,USD,4783,,2021-01-19,,56321.0,2021-01-19,Accepted,false,"" diff --git a/tasks/quickbooks001/seeds/solution__stg_quickbooks__refund_receipt.csv b/tasks/quickbooks001/seeds/solution__stg_quickbooks__refund_receipt.csv new file mode 100644 index 00000000..8b6db5ab --- /dev/null +++ b/tasks/quickbooks001/seeds/solution__stg_quickbooks__refund_receipt.csv @@ -0,0 +1 @@ +refund_id,balance,doc_number,total_amount,class_id,deposit_to_account_id,created_at,department_id,customer_id,currency_id,exchange_rate,transaction_date,_fivetran_deleted,source_relation diff 
--git a/tasks/quickbooks001/seeds/solution__stg_quickbooks__sales_receipt.csv b/tasks/quickbooks001/seeds/solution__stg_quickbooks__sales_receipt.csv new file mode 100644 index 00000000..64be10a7 --- /dev/null +++ b/tasks/quickbooks001/seeds/solution__stg_quickbooks__sales_receipt.csv @@ -0,0 +1 @@ +sales_receipt_id,balance,doc_number,total_amount,deposit_to_account_id,created_at,customer_id,department_id,class_id,currency_id,exchange_rate,transaction_date,_fivetran_deleted,source_relation diff --git a/tasks/quickbooks001/setup.sh b/tasks/quickbooks001/setup.sh index 1f231f7d..4f6ff47f 100644 --- a/tasks/quickbooks001/setup.sh +++ b/tasks/quickbooks001/setup.sh @@ -1,26 +1,33 @@ #!/bin/bash -## Introduce an error by changing the data type of the underlying table. +## Introduce an error by converting date columns to unix epoch integers. -## Get the schema based on the database type. +## Get the schema and epoch function based on the database type. +## Using created_at column to populate the date columns because the sample data is null +## and I need something realistic for the agent to work with. if [[ "$*" == *"--db-type=duckdb"* ]]; then schema='main' + # DuckDB: epoch() returns seconds since 1970-01-01 + epoch_txn_date='epoch(created_at)::integer' + epoch_due_date='epoch(created_at)::integer' else schema='public' + # Snowflake: DATEDIFF to get seconds since epoch + epoch_txn_date="DATEDIFF('second', '1970-01-01'::timestamp, created_at::timestamp)::integer" + epoch_due_date="DATEDIFF('second', '1970-01-01'::timestamp, created_at::timestamp)::integer" fi -# Execute SQL using the run_sql utility. 
/scripts/run_sql.sh "$@" << SQL create or replace table ${schema}.refund_receipt_data_temp as - select * replace (transaction_date::integer as transaction_date) + select * replace (${epoch_txn_date} as transaction_date) from ${schema}.refund_receipt_data; create or replace table ${schema}.sales_receipt_data_temp as - select * replace (transaction_date::integer as transaction_date) + select * replace (${epoch_txn_date} as transaction_date) from ${schema}.sales_receipt_data; create or replace table ${schema}.estimate_data_temp as - select * replace (due_date::integer as due_date) + select * replace (${epoch_due_date} as due_date) from ${schema}.estimate_data; drop table ${schema}.sales_receipt_data; @@ -33,4 +40,5 @@ alter table ${schema}.estimate_data_temp rename to estimate_data; SQL ## Run the dbt project. -dbt run \ No newline at end of file +dbt deps +DBT_STATIC_ANALYSIS=off dbt run \ No newline at end of file diff --git a/tasks/quickbooks001/solution.sh b/tasks/quickbooks001/solution.sh index 797da121..dcf75deb 100755 --- a/tasks/quickbooks001/solution.sh +++ b/tasks/quickbooks001/solution.sh @@ -1,33 +1,21 @@ #!/bin/bash -## Introduce an error by changing the data type of the underlying table. +## Solution: Override Fivetran staging models to handle unix epoch timestamps. +## Instead of modifying the underlying data, we create custom versions of the +## staging models that convert the epoch integers back to proper dates. -## Get the schema based on the database type. -if [[ "$*" == *"--db-type=duckdb"* ]]; then - schema='main' -else - schema='public' -fi +# Get the directory where this script is located +SCRIPT_DIR="$(dirname "$(readlink -f "${BASH_SOURCE[0]}")")" -# Execute SQL using the run_sql utility. 
-/scripts/run_sql.sh "$@" << SQL -create or replace table ${schema}.refund_receipt_data_temp as - select * replace (transaction_date::date as transaction_date) - from ${schema}.refund_receipt_data; +# Disable the package models in dbt_project.yml +yq -i '.models.quickbooks_source.stg_quickbooks__refund_receipt."+enabled" = false' dbt_project.yml +yq -i '.models.quickbooks_source.stg_quickbooks__sales_receipt."+enabled" = false' dbt_project.yml +yq -i '.models.quickbooks_source.stg_quickbooks__estimate."+enabled" = false' dbt_project.yml -create or replace table ${schema}.sales_receipt_data_temp as - select * replace (transaction_date::date as transaction_date) - from ${schema}.sales_receipt_data; +# Create staging directory if it doesn't exist +mkdir -p models/staging -create or replace table ${schema}.estimate_data_temp as - select * replace (due_date::date as due_date) - from ${schema}.estimate_data; - -drop table ${schema}.sales_receipt_data; -drop table ${schema}.refund_receipt_data; -drop table ${schema}.estimate_data; - -alter table ${schema}.sales_receipt_data_temp rename to sales_receipt_data; -alter table ${schema}.refund_receipt_data_temp rename to refund_receipt_data; -alter table ${schema}.estimate_data_temp rename to estimate_data; -SQL \ No newline at end of file +# Copy our override models that handle the epoch-to-date conversion +cp "$SCRIPT_DIR"/solutions/stg_quickbooks__refund_receipt.sql models/staging/ +cp "$SCRIPT_DIR"/solutions/stg_quickbooks__sales_receipt.sql models/staging/ +cp "$SCRIPT_DIR"/solutions/stg_quickbooks__estimate.sql models/staging/ diff --git a/tasks/quickbooks001/solutions/stg_quickbooks__estimate.sql b/tasks/quickbooks001/solutions/stg_quickbooks__estimate.sql new file mode 100644 index 00000000..22bbc04f --- /dev/null +++ b/tasks/quickbooks001/solutions/stg_quickbooks__estimate.sql @@ -0,0 +1,61 @@ +--To disable this model, set the using_estimate variable within your dbt_project.yml file to False. 
+{{ config(enabled=var('using_estimate', True)) }} + +with base as ( + + select * + from {{ ref('stg_quickbooks__estimate_tmp') }} + +), + +fields as ( + + select + /* + The below macro is used to generate the correct SQL for package staging models. It takes a list of columns + that are expected/needed (staging_columns from dbt_quickbooks_source/models/tmp/) and compares it with columns + in the source (source_columns from dbt_quickbooks_source/macros/). + For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git). + */ + + {{ + fivetran_utils.fill_staging_columns( + source_columns=adapter.get_columns_in_relation(ref('stg_quickbooks__estimate_tmp')), + staging_columns=quickbooks_source.get_estimate_columns() + ) + }} + + {{ + fivetran_utils.source_relation( + union_schema_variable='quickbooks_union_schemas', + union_database_variable='quickbooks_union_databases' + ) + }} + + from base +), + +final as ( + + select + cast(id as {{ dbt.type_string() }}) as estimate_id, + cast(class_id as {{ dbt.type_string() }}) as class_id, + created_at, + currency_id, + customer_id, + cast(department_id as {{ dbt.type_string() }}) as department_id, + -- Convert unix epoch to timestamp, then truncate to date + cast( {{ dbt.date_trunc('day', 'to_timestamp(due_date)') }} as date) as due_date, + exchange_rate, + total_amount, + cast( {{ dbt.date_trunc('day', 'transaction_date') }} as date) as transaction_date, + transaction_status, + _fivetran_deleted, + source_relation + from fields +) + +select * +from final +where not coalesce(_fivetran_deleted, false) + diff --git a/tasks/quickbooks001/solutions/stg_quickbooks__refund_receipt.sql b/tasks/quickbooks001/solutions/stg_quickbooks__refund_receipt.sql new file mode 100644 index 00000000..15d8ba96 --- /dev/null +++ b/tasks/quickbooks001/solutions/stg_quickbooks__refund_receipt.sql @@ -0,0 +1,62 @@ +--To disable this model, set the using_refund_receipt variable within your 
dbt_project.yml file to False. +{{ config(enabled=var('using_refund_receipt', True)) }} + +with base as ( + + select * + from {{ ref('stg_quickbooks__refund_receipt_tmp') }} + +), + +fields as ( + + select + /* + The below macro is used to generate the correct SQL for package staging models. It takes a list of columns + that are expected/needed (staging_columns from dbt_quickbooks_source/models/tmp/) and compares it with columns + in the source (source_columns from dbt_quickbooks_source/macros/). + For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git). + */ + + {{ + fivetran_utils.fill_staging_columns( + source_columns=adapter.get_columns_in_relation(ref('stg_quickbooks__refund_receipt_tmp')), + staging_columns=quickbooks_source.get_refund_receipt_columns() + ) + }} + + {{ + fivetran_utils.source_relation( + union_schema_variable='quickbooks_union_schemas', + union_database_variable='quickbooks_union_databases' + ) + }} + + from base +), + +final as ( + + select + cast(id as {{ dbt.type_string() }}) as refund_id, + balance, + cast(doc_number as {{ dbt.type_string() }}) as doc_number, + total_amount, + cast(class_id as {{ dbt.type_string() }}) as class_id, + cast(deposit_to_account_id as {{ dbt.type_string() }}) as deposit_to_account_id, + created_at, + cast(department_id as {{ dbt.type_string() }}) as department_id, + cast(customer_id as {{ dbt.type_string() }}) as customer_id, + currency_id, + exchange_rate, + -- Convert unix epoch to timestamp, then truncate to date + cast( {{ dbt.date_trunc('day', 'to_timestamp(transaction_date)') }} as date) as transaction_date, + _fivetran_deleted, + source_relation + from fields +) + +select * +from final +where not coalesce(_fivetran_deleted, false) + diff --git a/tasks/quickbooks001/solutions/stg_quickbooks__sales_receipt.sql b/tasks/quickbooks001/solutions/stg_quickbooks__sales_receipt.sql new file mode 100644 index 00000000..b52e9794 --- /dev/null +++ 
b/tasks/quickbooks001/solutions/stg_quickbooks__sales_receipt.sql @@ -0,0 +1,60 @@ +{{ config(enabled=var('using_sales_receipt', True)) }} + +with base as ( + + select * + from {{ ref('stg_quickbooks__sales_receipt_tmp') }} + +), + +fields as ( + + select + /* + The below macro is used to generate the correct SQL for package staging models. It takes a list of columns + that are expected/needed (staging_columns from dbt_quickbooks_source/models/tmp/) and compares it with columns + in the source (source_columns from dbt_quickbooks_source/macros/). + For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git). + */ + + {{ + fivetran_utils.fill_staging_columns( + source_columns=adapter.get_columns_in_relation(ref('stg_quickbooks__sales_receipt_tmp')), + staging_columns=quickbooks_source.get_sales_receipt_columns() + ) + }} + + {{ + fivetran_utils.source_relation( + union_schema_variable='quickbooks_union_schemas', + union_database_variable='quickbooks_union_databases' + ) + }} + from base +), + +final as ( + + select + cast(id as {{ dbt.type_string() }}) as sales_receipt_id, + balance, + cast(doc_number as {{ dbt.type_string() }}) as doc_number, + total_amount, + cast(deposit_to_account_id as {{ dbt.type_string() }}) as deposit_to_account_id, + created_at, + cast(customer_id as {{ dbt.type_string() }}) as customer_id, + cast(department_id as {{ dbt.type_string() }}) as department_id, + cast(class_id as {{ dbt.type_string() }}) as class_id, + currency_id, + exchange_rate, + -- Convert unix epoch to timestamp, then truncate to date + cast( {{ dbt.date_trunc('day', 'to_timestamp(transaction_date)') }} as date) as transaction_date, + _fivetran_deleted, + source_relation + from fields +) + +select * +from final +where not coalesce(_fivetran_deleted, false) + diff --git a/tasks/quickbooks001/task.yaml b/tasks/quickbooks001/task.yaml index 30ed49bb..83f68a41 100644 --- a/tasks/quickbooks001/task.yaml +++ 
b/tasks/quickbooks001/task.yaml @@ -1,8 +1,8 @@ task_id: quickbooks001 status: ready -description: Fix a data type issue in the Quickbooks source. +description: Fix a data type issue where date columns are stored as unix timestamps, not the datetimes expected by the Fivetran package. notes: |- - The issue is with a few columns being numbers and not dates. + The source tables have date columns (transaction_date, due_date) stored as unix epoch integers instead of proper datetime values. The solution requires overriding the Fivetran staging models to convert the epochs back to dates. prompts: - key: base prompt: |- @@ -20,8 +20,7 @@ tags: - model-refactor test_setup: |- - dbt deps - dbt run + DBT_STATIC_ANALYSIS=off dbt run # Tables to compare for solution solution_seeds: @@ -30,10 +29,27 @@ solution_seeds: - table_name: int_quickbooks__invoice_join exclude_columns: - source_relation + - table_name: stg_quickbooks__sales_receipt + - table_name: stg_quickbooks__refund_receipt + - table_name: stg_quickbooks__estimate + exclude_columns: + - source_relation # Variants for different database and project combinations variants: - db_type: duckdb db_name: quickbooks project_type: dbt - project_name: quickbooks \ No newline at end of file + project_name: quickbooks + +- db_type: snowflake + db_name: quickbooks + project_type: dbt + project_name: quickbooks + migration_directory: quickbooks__duckdb_to_snowflake + +- db_type: snowflake + db_name: quickbooks + project_type: dbt-fusion + project_name: quickbooks + migration_directory: quickbooks__duckdb_to_snowflake_dbtf \ No newline at end of file diff --git a/tasks/quickbooks001/tests/AUTO_stg_quickbooks__estimate_equality.sql b/tasks/quickbooks001/tests/AUTO_stg_quickbooks__estimate_equality.sql new file mode 100644 index 00000000..85e39cc1 --- /dev/null +++ b/tasks/quickbooks001/tests/AUTO_stg_quickbooks__estimate_equality.sql @@ -0,0 +1,33 @@ +-- Define columns to compare +{% set table_name = 'stg_quickbooks__estimate' %} + 
+{% set cols_to_include = [ + +] %} + +{% set cols_to_exclude = [ + 'source_relation' +] %} + + + +------------------------------------- +---- DO NOT EDIT BELOW THIS LINE ---- +{% set answer_key = 'solution__' + table_name %} + +-- depends_on: {{ ref(answer_key) }} +-- depends_on: {{ ref(table_name) }} + +{% set table_a = adapter.get_relation(database=target.database, schema=target.schema, identifier=answer_key) %} +{% set table_b = adapter.get_relation(database=target.database, schema=target.schema, identifier=table_name) %} + +{% if table_a is none or table_b is none %} + select 1 +{% else %} + {{ dbt_utils.test_equality( + model=ref(answer_key), + compare_model=ref(table_name), + compare_columns=cols_to_include, + exclude_columns=cols_to_exclude + ) }} +{% endif %} diff --git a/tasks/quickbooks001/tests/AUTO_stg_quickbooks__estimate_existence.sql b/tasks/quickbooks001/tests/AUTO_stg_quickbooks__estimate_existence.sql new file mode 100644 index 00000000..5b6721b9 --- /dev/null +++ b/tasks/quickbooks001/tests/AUTO_stg_quickbooks__estimate_existence.sql @@ -0,0 +1,16 @@ +{% set table_name = 'stg_quickbooks__estimate' %} + + + +------------------------------------- +---- DO NOT EDIT BELOW THIS LINE ---- +{% set answer_key = 'solution__' + table_name %} + +{% set table_a = adapter.get_relation(database=target.database, schema=target.schema, identifier=answer_key) %} +{% set table_b = adapter.get_relation(database=target.database, schema=target.schema, identifier=table_name) %} + +{% if table_a is none or table_b is none %} + select 1 +{% else %} + select 1 where false +{% endif %} diff --git a/tasks/quickbooks001/tests/AUTO_stg_quickbooks__refund_receipt_equality.sql b/tasks/quickbooks001/tests/AUTO_stg_quickbooks__refund_receipt_equality.sql new file mode 100644 index 00000000..fe51453f --- /dev/null +++ b/tasks/quickbooks001/tests/AUTO_stg_quickbooks__refund_receipt_equality.sql @@ -0,0 +1,33 @@ +-- Define columns to compare +{% set table_name = 
'stg_quickbooks__refund_receipt' %} + +{% set cols_to_include = [ + +] %} + +{% set cols_to_exclude = [ + +] %} + + + +------------------------------------- +---- DO NOT EDIT BELOW THIS LINE ---- +{% set answer_key = 'solution__' + table_name %} + +-- depends_on: {{ ref(answer_key) }} +-- depends_on: {{ ref(table_name) }} + +{% set table_a = adapter.get_relation(database=target.database, schema=target.schema, identifier=answer_key) %} +{% set table_b = adapter.get_relation(database=target.database, schema=target.schema, identifier=table_name) %} + +{% if table_a is none or table_b is none %} + select 1 +{% else %} + {{ dbt_utils.test_equality( + model=ref(answer_key), + compare_model=ref(table_name), + compare_columns=cols_to_include, + exclude_columns=cols_to_exclude + ) }} +{% endif %} diff --git a/tasks/quickbooks001/tests/AUTO_stg_quickbooks__refund_receipt_existence.sql b/tasks/quickbooks001/tests/AUTO_stg_quickbooks__refund_receipt_existence.sql new file mode 100644 index 00000000..f0d686a5 --- /dev/null +++ b/tasks/quickbooks001/tests/AUTO_stg_quickbooks__refund_receipt_existence.sql @@ -0,0 +1,16 @@ +{% set table_name = 'stg_quickbooks__refund_receipt' %} + + + +------------------------------------- +---- DO NOT EDIT BELOW THIS LINE ---- +{% set answer_key = 'solution__' + table_name %} + +{% set table_a = adapter.get_relation(database=target.database, schema=target.schema, identifier=answer_key) %} +{% set table_b = adapter.get_relation(database=target.database, schema=target.schema, identifier=table_name) %} + +{% if table_a is none or table_b is none %} + select 1 +{% else %} + select 1 where false +{% endif %} diff --git a/tasks/quickbooks001/tests/AUTO_stg_quickbooks__sales_receipt_equality.sql b/tasks/quickbooks001/tests/AUTO_stg_quickbooks__sales_receipt_equality.sql new file mode 100644 index 00000000..7800b5c0 --- /dev/null +++ b/tasks/quickbooks001/tests/AUTO_stg_quickbooks__sales_receipt_equality.sql @@ -0,0 +1,33 @@ +-- Define columns to compare 
+{% set table_name = 'stg_quickbooks__sales_receipt' %} + +{% set cols_to_include = [ + +] %} + +{% set cols_to_exclude = [ + +] %} + + + +------------------------------------- +---- DO NOT EDIT BELOW THIS LINE ---- +{% set answer_key = 'solution__' + table_name %} + +-- depends_on: {{ ref(answer_key) }} +-- depends_on: {{ ref(table_name) }} + +{% set table_a = adapter.get_relation(database=target.database, schema=target.schema, identifier=answer_key) %} +{% set table_b = adapter.get_relation(database=target.database, schema=target.schema, identifier=table_name) %} + +{% if table_a is none or table_b is none %} + select 1 +{% else %} + {{ dbt_utils.test_equality( + model=ref(answer_key), + compare_model=ref(table_name), + compare_columns=cols_to_include, + exclude_columns=cols_to_exclude + ) }} +{% endif %} diff --git a/tasks/quickbooks001/tests/AUTO_stg_quickbooks__sales_receipt_existence.sql b/tasks/quickbooks001/tests/AUTO_stg_quickbooks__sales_receipt_existence.sql new file mode 100644 index 00000000..c43e0b81 --- /dev/null +++ b/tasks/quickbooks001/tests/AUTO_stg_quickbooks__sales_receipt_existence.sql @@ -0,0 +1,16 @@ +{% set table_name = 'stg_quickbooks__sales_receipt' %} + + + +------------------------------------- +---- DO NOT EDIT BELOW THIS LINE ---- +{% set answer_key = 'solution__' + table_name %} + +{% set table_a = adapter.get_relation(database=target.database, schema=target.schema, identifier=answer_key) %} +{% set table_b = adapter.get_relation(database=target.database, schema=target.schema, identifier=table_name) %} + +{% if table_a is none or table_b is none %} + select 1 +{% else %} + select 1 where false +{% endif %} diff --git a/tasks/quickbooks002/task.yaml b/tasks/quickbooks002/task.yaml index 3259f82c..883967d3 100644 --- a/tasks/quickbooks002/task.yaml +++ b/tasks/quickbooks002/task.yaml @@ -37,4 +37,10 @@ variants: - db_type: duckdb db_name: quickbooks project_type: dbt - project_name: quickbooks \ No newline at end of file + 
project_name: quickbooks + +- db_type: snowflake + db_name: quickbooks + project_type: dbt + project_name: quickbooks + migration_directory: quickbooks__duckdb_to_snowflake \ No newline at end of file diff --git a/tasks/quickbooks003/setup.sh b/tasks/quickbooks003/setup.sh index 95eb8ce4..4f6ff47f 100644 --- a/tasks/quickbooks003/setup.sh +++ b/tasks/quickbooks003/setup.sh @@ -1,24 +1,33 @@ #!/bin/bash -## Get the schema based on the database type. +## Introduce an error by converting date columns to unix epoch integers. + +## Get the schema and epoch function based on the database type. +## Using created_at column to populate the date columns because the sample data is null +## and I need something realistic for the agent to work with. if [[ "$*" == *"--db-type=duckdb"* ]]; then schema='main' + # DuckDB: epoch() returns seconds since 1970-01-01 + epoch_txn_date='epoch(created_at)::integer' + epoch_due_date='epoch(created_at)::integer' else schema='public' + # Snowflake: DATEDIFF to get seconds since epoch + epoch_txn_date="DATEDIFF('second', '1970-01-01'::timestamp, created_at::timestamp)::integer" + epoch_due_date="DATEDIFF('second', '1970-01-01'::timestamp, created_at::timestamp)::integer" fi -# Execute SQL using the run_sql utility. 
/scripts/run_sql.sh "$@" << SQL create or replace table ${schema}.refund_receipt_data_temp as - select * replace (transaction_date::integer as transaction_date) + select * replace (${epoch_txn_date} as transaction_date) from ${schema}.refund_receipt_data; create or replace table ${schema}.sales_receipt_data_temp as - select * replace (transaction_date::integer as transaction_date) + select * replace (${epoch_txn_date} as transaction_date) from ${schema}.sales_receipt_data; create or replace table ${schema}.estimate_data_temp as - select * replace (due_date::integer as due_date) + select * replace (${epoch_due_date} as due_date) from ${schema}.estimate_data; drop table ${schema}.sales_receipt_data; @@ -30,5 +39,6 @@ alter table ${schema}.refund_receipt_data_temp rename to refund_receipt_data; alter table ${schema}.estimate_data_temp rename to estimate_data; SQL -# Run dbt to create the models -dbt run +## Run the dbt project. +dbt deps +DBT_STATIC_ANALYSIS=off dbt run \ No newline at end of file diff --git a/tasks/quickbooks003/solution.sh b/tasks/quickbooks003/solution.sh index 3f4b81ef..9e4d68a0 100755 --- a/tasks/quickbooks003/solution.sh +++ b/tasks/quickbooks003/solution.sh @@ -1,34 +1,5 @@ #!/bin/bash -## Fix the data issue -if [[ "$*" == *"--db-type=duckdb"* ]]; then - schema='main' -else - schema='public' -fi - -# Execute SQL using the run_sql utility. 
-/scripts/run_sql.sh "$@" << SQL -create or replace table ${schema}.refund_receipt_data_temp as - select * replace (transaction_date::date as transaction_date) - from ${schema}.refund_receipt_data; - -create or replace table ${schema}.sales_receipt_data_temp as - select * replace (transaction_date::date as transaction_date) - from ${schema}.sales_receipt_data; - -create or replace table ${schema}.estimate_data_temp as - select * replace (due_date::date as due_date) - from ${schema}.estimate_data; - -drop table ${schema}.sales_receipt_data; -drop table ${schema}.refund_receipt_data; -drop table ${schema}.estimate_data; - -alter table ${schema}.sales_receipt_data_temp rename to sales_receipt_data; -alter table ${schema}.refund_receipt_data_temp rename to refund_receipt_data; -alter table ${schema}.estimate_data_temp rename to estimate_data; -SQL ## Remove the using_department variable @@ -51,5 +22,20 @@ SOLUTIONS_DIR="$(dirname "$(readlink -f "${BASH_SOURCE}")")/solutions" cp $SOLUTIONS_DIR/int_quickbooks__expenses_union.sql models/intermediate/int_quickbooks__expenses_union.sql cp $SOLUTIONS_DIR/int_quickbooks__sales_union.sql models/intermediate/int_quickbooks__sales_union.sql + ## Not in intermediate directory -cp $SOLUTIONS_DIR/quickbooks__ap_ar_enhanced.sql models/quickbooks__ap_ar_enhanced.sql \ No newline at end of file +cp $SOLUTIONS_DIR/quickbooks__ap_ar_enhanced.sql models/quickbooks__ap_ar_enhanced.sql + +## Fix the data issue from quickbooks001 +# Disable the package models in dbt_project.yml +yq -i '.models.quickbooks_source.stg_quickbooks__refund_receipt."+enabled" = false' dbt_project.yml +yq -i '.models.quickbooks_source.stg_quickbooks__sales_receipt."+enabled" = false' dbt_project.yml +yq -i '.models.quickbooks_source.stg_quickbooks__estimate."+enabled" = false' dbt_project.yml + +# Create staging directory if it doesn't exist +mkdir -p models/staging + +# Copy our override models that handle the epoch-to-date conversion +cp 
$SOLUTIONS_DIR/stg_quickbooks__refund_receipt.sql models/staging/ +cp $SOLUTIONS_DIR/stg_quickbooks__sales_receipt.sql models/staging/ +cp $SOLUTIONS_DIR/stg_quickbooks__estimate.sql models/staging/ diff --git a/tasks/quickbooks003/solutions/stg_quickbooks__estimate.sql b/tasks/quickbooks003/solutions/stg_quickbooks__estimate.sql new file mode 100644 index 00000000..22bbc04f --- /dev/null +++ b/tasks/quickbooks003/solutions/stg_quickbooks__estimate.sql @@ -0,0 +1,61 @@ +--To disable this model, set the using_estimate variable within your dbt_project.yml file to False. +{{ config(enabled=var('using_estimate', True)) }} + +with base as ( + + select * + from {{ ref('stg_quickbooks__estimate_tmp') }} + +), + +fields as ( + + select + /* + The below macro is used to generate the correct SQL for package staging models. It takes a list of columns + that are expected/needed (staging_columns from dbt_quickbooks_source/models/tmp/) and compares it with columns + in the source (source_columns from dbt_quickbooks_source/macros/). + For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git). 
+ */ + + {{ + fivetran_utils.fill_staging_columns( + source_columns=adapter.get_columns_in_relation(ref('stg_quickbooks__estimate_tmp')), + staging_columns=quickbooks_source.get_estimate_columns() + ) + }} + + {{ + fivetran_utils.source_relation( + union_schema_variable='quickbooks_union_schemas', + union_database_variable='quickbooks_union_databases' + ) + }} + + from base +), + +final as ( + + select + cast(id as {{ dbt.type_string() }}) as estimate_id, + cast(class_id as {{ dbt.type_string() }}) as class_id, + created_at, + currency_id, + customer_id, + cast(department_id as {{ dbt.type_string() }}) as department_id, + -- Convert unix epoch to timestamp, then truncate to date + cast( {{ dbt.date_trunc('day', 'to_timestamp(due_date)') }} as date) as due_date, + exchange_rate, + total_amount, + cast( {{ dbt.date_trunc('day', 'transaction_date') }} as date) as transaction_date, + transaction_status, + _fivetran_deleted, + source_relation + from fields +) + +select * +from final +where not coalesce(_fivetran_deleted, false) + diff --git a/tasks/quickbooks003/solutions/stg_quickbooks__refund_receipt.sql b/tasks/quickbooks003/solutions/stg_quickbooks__refund_receipt.sql new file mode 100644 index 00000000..15d8ba96 --- /dev/null +++ b/tasks/quickbooks003/solutions/stg_quickbooks__refund_receipt.sql @@ -0,0 +1,62 @@ +--To disable this model, set the using_refund_receipt variable within your dbt_project.yml file to False. +{{ config(enabled=var('using_refund_receipt', True)) }} + +with base as ( + + select * + from {{ ref('stg_quickbooks__refund_receipt_tmp') }} + +), + +fields as ( + + select + /* + The below macro is used to generate the correct SQL for package staging models. It takes a list of columns + that are expected/needed (staging_columns from dbt_quickbooks_source/models/tmp/) and compares it with columns + in the source (source_columns from dbt_quickbooks_source/macros/). 
+ For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git). + */ + + {{ + fivetran_utils.fill_staging_columns( + source_columns=adapter.get_columns_in_relation(ref('stg_quickbooks__refund_receipt_tmp')), + staging_columns=quickbooks_source.get_refund_receipt_columns() + ) + }} + + {{ + fivetran_utils.source_relation( + union_schema_variable='quickbooks_union_schemas', + union_database_variable='quickbooks_union_databases' + ) + }} + + from base +), + +final as ( + + select + cast(id as {{ dbt.type_string() }}) as refund_id, + balance, + cast(doc_number as {{ dbt.type_string() }}) as doc_number, + total_amount, + cast(class_id as {{ dbt.type_string() }}) as class_id, + cast(deposit_to_account_id as {{ dbt.type_string() }}) as deposit_to_account_id, + created_at, + cast(department_id as {{ dbt.type_string() }}) as department_id, + cast(customer_id as {{ dbt.type_string() }}) as customer_id, + currency_id, + exchange_rate, + -- Convert unix epoch to timestamp, then truncate to date + cast( {{ dbt.date_trunc('day', 'to_timestamp(transaction_date)') }} as date) as transaction_date, + _fivetran_deleted, + source_relation + from fields +) + +select * +from final +where not coalesce(_fivetran_deleted, false) + diff --git a/tasks/quickbooks003/solutions/stg_quickbooks__sales_receipt.sql b/tasks/quickbooks003/solutions/stg_quickbooks__sales_receipt.sql new file mode 100644 index 00000000..b52e9794 --- /dev/null +++ b/tasks/quickbooks003/solutions/stg_quickbooks__sales_receipt.sql @@ -0,0 +1,60 @@ +{{ config(enabled=var('using_sales_receipt', True)) }} + +with base as ( + + select * + from {{ ref('stg_quickbooks__sales_receipt_tmp') }} + +), + +fields as ( + + select + /* + The below macro is used to generate the correct SQL for package staging models. 
It takes a list of columns + that are expected/needed (staging_columns from dbt_quickbooks_source/models/tmp/) and compares it with columns + in the source (source_columns from dbt_quickbooks_source/macros/). + For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git). + */ + + {{ + fivetran_utils.fill_staging_columns( + source_columns=adapter.get_columns_in_relation(ref('stg_quickbooks__sales_receipt_tmp')), + staging_columns=quickbooks_source.get_sales_receipt_columns() + ) + }} + + {{ + fivetran_utils.source_relation( + union_schema_variable='quickbooks_union_schemas', + union_database_variable='quickbooks_union_databases' + ) + }} + from base +), + +final as ( + + select + cast(id as {{ dbt.type_string() }}) as sales_receipt_id, + balance, + cast(doc_number as {{ dbt.type_string() }}) as doc_number, + total_amount, + cast(deposit_to_account_id as {{ dbt.type_string() }}) as deposit_to_account_id, + created_at, + cast(customer_id as {{ dbt.type_string() }}) as customer_id, + cast(department_id as {{ dbt.type_string() }}) as department_id, + cast(class_id as {{ dbt.type_string() }}) as class_id, + currency_id, + exchange_rate, + -- Convert unix epoch to timestamp, then truncate to date + cast( {{ dbt.date_trunc('day', 'to_timestamp(transaction_date)') }} as date) as transaction_date, + _fivetran_deleted, + source_relation + from fields +) + +select * +from final +where not coalesce(_fivetran_deleted, false) + diff --git a/tasks/quickbooks003/task.yaml b/tasks/quickbooks003/task.yaml index e9167b03..18181cfe 100644 --- a/tasks/quickbooks003/task.yaml +++ b/tasks/quickbooks003/task.yaml @@ -45,4 +45,10 @@ variants: - db_type: duckdb db_name: quickbooks project_type: dbt - project_name: quickbooks \ No newline at end of file + project_name: quickbooks + +- db_type: snowflake + db_name: quickbooks + project_type: dbt + project_name: quickbooks + migration_directory: quickbooks__duckdb_to_snowflake \ No 
newline at end of file diff --git a/tasks/quickbooks004/task.yaml b/tasks/quickbooks004/task.yaml index 6f503b1d..0dfbf594 100644 --- a/tasks/quickbooks004/task.yaml +++ b/tasks/quickbooks004/task.yaml @@ -83,4 +83,10 @@ variants: - db_type: duckdb db_name: quickbooks project_type: dbt - project_name: quickbooks \ No newline at end of file + project_name: quickbooks + +- db_type: snowflake + db_name: quickbooks + project_type: dbt + project_name: quickbooks + migration_directory: quickbooks__duckdb_to_snowflake \ No newline at end of file