-
Notifications
You must be signed in to change notification settings - Fork 229
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
add support for text item type and text to sql job writer
- Loading branch information
Showing
12 changed files
with
111 additions
and
9 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,41 @@ | ||
# test the sql insert job loader, works only on duckdb for now | ||
|
||
from typing import Any | ||
|
||
import dlt | ||
|
||
from dlt.common.destination.dataset import SupportsReadableDataset | ||
|
||
from tests.pipeline.utils import load_table_counts | ||
|
||
|
||
def test_sql_job() -> None:
    """Verify that a resource declared with file_format="sql" runs its yielded
    raw SQL statements as load jobs (duckdb only for now)."""
    # build a source table with 10 rows and grab a readable dataset handle
    pipe = dlt.pipeline(
        pipeline_name="example_pipeline", destination="duckdb", dataset_name="example_dataset"
    )
    pipe.run([{"a": i} for i in range(10)], table_name="example_table")
    data = pipe.dataset()

    # resource yielding raw SQL statements that materialize two derived tables
    @dlt.resource(file_format="sql")
    def copied_table() -> Any:
        for target, row_limit in (("copied_table", 5), ("copied_table2", 7)):
            select_sql = data["example_table"].limit(row_limit).query()
            yield f"CREATE OR REPLACE TABLE {target} AS {select_sql};"

    # execute the sql jobs
    pipe.run(copied_table())

    # both derived tables were created with the expected row counts
    expected = {"example_table": 10, "copied_table": 5, "copied_table2": 7}
    assert load_table_counts(pipe, *expected) == expected

    # the main table "copied_table" is registered in the default schema ...
    assert "copied_table" in pipe.default_schema.tables
    # ... but carries no columns: providing a schema is left to the user
    assert not pipe.default_schema.tables["copied_table"]["columns"]