added tests for ExternalTableDataset
Signed-off-by: Minura Punchihewa <[email protected]>
MinuraPunchihewa committed Oct 11, 2024
1 parent e9d097e commit b896464
Showing 1 changed file with 42 additions and 0 deletions.
42 changes: 42 additions & 0 deletions kedro-datasets/tests/databricks/test_external_table_dataset.py
@@ -0,0 +1,42 @@
import pytest
from kedro.io.core import DatasetError
from pyspark.sql import DataFrame

from kedro_datasets_experimental.databricks.external_table_dataset import ExternalTableDataset


class TestExternalTableDataset:
    def test_location_for_non_existing_table(self):
        with pytest.raises(DatasetError):
            ExternalTableDataset(table="test")

    def test_invalid_upsert_write_mode(self):
        with pytest.raises(DatasetError):
            ExternalTableDataset(table="test", write_mode="upsert", format="parquet")

    def test_invalid_overwrite_write_mode(self):
        with pytest.raises(DatasetError):
            ExternalTableDataset(table="test", write_mode="overwrite", format="parquet")

    def test_save_overwrite_without_location(self):
        with pytest.raises(DatasetError):
            ExternalTableDataset(table="test", write_mode="overwrite", format="delta")

    def test_save_overwrite(
        self,
        sample_spark_df: DataFrame,
        append_spark_df: DataFrame,
        external_location: str,
    ):
        unity_ds = ExternalTableDataset(
            database="test",
            table="test_save",
            write_mode="overwrite",
            location=f"{external_location}/test_save_overwrite_external",
        )
        unity_ds.save(sample_spark_df)
        unity_ds.save(append_spark_df)

        overwritten_table = unity_ds.load()

        assert append_spark_df.exceptAll(overwritten_table).count() == 0
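
The fixtures referenced in test_save_overwrite (sample_spark_df, append_spark_df, external_location) are not part of this diff and are presumably provided by a conftest.py elsewhere in the test suite. Below is a minimal sketch of what such fixtures could look like, assuming a local SparkSession and a pytest tmp_path standing in for the external storage location; the fixture names match the tests, but the schema and data are illustrative, not the repository's actual fixtures.

# Hypothetical conftest.py fixtures assumed by the tests above; the real
# fixtures live elsewhere in the repository and may differ.
import pytest
from pyspark.sql import SparkSession
from pyspark.sql.types import IntegerType, StringType, StructField, StructType


@pytest.fixture(scope="session")
def spark_session():
    # Local Spark session for running the tests without a Databricks cluster.
    spark = SparkSession.builder.master("local[*]").appName("tests").getOrCreate()
    yield spark
    spark.stop()


@pytest.fixture
def sample_spark_df(spark_session):
    schema = StructType(
        [StructField("name", StringType(), True), StructField("age", IntegerType(), True)]
    )
    return spark_session.createDataFrame([("Alex", 31), ("Bob", 12)], schema)


@pytest.fixture
def append_spark_df(spark_session):
    schema = StructType(
        [StructField("name", StringType(), True), StructField("age", IntegerType(), True)]
    )
    return spark_session.createDataFrame([("Evan", 23), ("Frank", 13)], schema)


@pytest.fixture
def external_location(tmp_path):
    # Throwaway local path standing in for an external location (e.g. an S3/ABFS URI).
    return str(tmp_path / "external")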
