Commit

Update dataset_generator_test.py
VanyaBK authored Dec 8, 2023
1 parent 2d81bc3 commit 16fdc0e
Showing 1 changed file with 16 additions and 16 deletions.
32 changes: 16 additions & 16 deletions tests/dataset_generator_test.py
@@ -19,14 +19,14 @@
"train": datasets.Dataset.from_dict(
{
"input_col": ["foo", "bar"],
"explain_col": ["abc","xyz"],
"explain_col": ["abc", "xyz"],
"output_col": ["baz", "qux"],
}
),
"test": datasets.Dataset.from_dict(
{
"input_col": ["foo", "bar"],
"explain_col": ["abc","xyz"],
"explain_col": ["abc", "xyz"],
"output_col": ["baz", "qux"],
}
),
@@ -37,14 +37,14 @@
"train": datasets.Dataset.from_dict(
{
"input_col": ["spam", "eggs"],
"explain_col": ["lmn","opq"],
"explain_col": ["lmn", "opq"],
"output_col": ["ham", "sau"],
}
),
"val": datasets.Dataset.from_dict(
{
"input_col": ["spam", "eggs"],
"explain_col": ["lmn","opq"],
"explain_col": ["lmn", "opq"],
"output_col": ["ham", "sau"],
}
),
@@ -60,14 +60,14 @@
     datasets.DatasetDict(
         {
             "full": datasets.Dataset.from_dict(
-                {"input_col": ["foo", "bar"], "explain_col": ["abc","xyz"], "output_col": ["baz", "qux"]}
+                {"input_col": ["foo", "bar"], "explain_col": ["abc", "xyz"], "output_col": ["baz", "qux"]} # noqa E501
             )
         }
     ),
     datasets.DatasetDict(
         {
             "train": datasets.Dataset.from_dict(
-                {"input_col": ["spam", "eggs"], "explain_col": ["lmn","opq"], "output_col": ["ham", "sau"]}
+                {"input_col": ["spam", "eggs"], "explain_col": ["lmn", "opq"], "output_col": ["ham", "sau"]} # noqa E501
             )
         }
     ),
@@ -78,14 +78,14 @@
     datasets.DatasetDict(
         {
             "train": datasets.Dataset.from_dict(
-                {"input_col": ["foo", "bar"], "explain_col": ["abc","xyz"], "output_col": ["baz", "qux"]}
+                {"input_col": ["foo", "bar"], "explain_col": ["abc", "xyz"], "output_col": ["baz", "qux"]} # noqa E501
             )
         }
     ),
     datasets.DatasetDict(
         {
             "train": datasets.Dataset.from_dict(
-                {"input_col": ["spam", "eggs"], "explain_col": ["lmn","opq"], "output": ["ham", "sau"]}
+                {"input_col": ["spam", "eggs"], "explain_col": ["lmn", "opq"], "output": ["ham", "sau"]} # noqa E501
             )
         }
     ),
@@ -198,14 +198,14 @@ def test_dataset_processor_with_numerical_column():
"train": datasets.Dataset.from_dict(
{
"input_col": ["foo", "bar"],
"explain_col": ["abc","xyz"],
"explain_col": ["abc", "xyz"],
"output_col": ["baz", "qux"],
}
),
"test": datasets.Dataset.from_dict(
{
"input_col": ["spam", "eggs"],
"explain_col": ["lmn","opq"],
"explain_col": ["lmn", "opq"],
"output_col": ["ham", "sau"],
}
),
@@ -216,14 +216,14 @@
"train": datasets.Dataset.from_dict(
{
"input_col": ["foo", "bar"],
"explain_col": ["abc","xyz"],
"explain_col": ["abc", "xyz"],
"output_col": [0, 1],
}
),
"test": datasets.Dataset.from_dict(
{
"input_col": ["spam", "eggs"],
"explain_col": ["lmn","opq"],
"explain_col": ["lmn", "opq"],
"output_col": [1, 2],
}
),
@@ -371,7 +371,7 @@ def test_unexpected_columns():
         INSTRUCTION, UNEXPECTED_DATASET_DICTS_WITH_WRONG_COLUMNS
     )
     assert str(exc_info.value) == (
-        "Example dictionary must have 'input_col', 'explain_col' and 'output_col' keys."
+        "Example dictionary must have 'input_col', 'explain_col' and 'output_col' keys." # noqa E501
     )
     gc.collect()

@@ -382,14 +382,14 @@
"train": datasets.Dataset.from_dict(
{
"input_col": ["foo", "", "test"],
"explain_col": ["abc","","xyz"],
"explain_col": ["abc", "", "xyz"],
"output_col": ["", "qux", "key"],
}
),
"test": datasets.Dataset.from_dict(
{
"input_col": ["foo", ""],
"explain_col": ["abc",""],
"explain_col": ["abc", ""],
"output_col": ["baz", "qux"],
}
),
@@ -400,7 +400,7 @@
"train": datasets.Dataset.from_dict(
{
"input_col": ["", ""],
"explain_col": ["abc","xyz"],
"explain_col": ["abc", "xyz"],
"output_col": ["ham", "sau"],
}
),
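For reference, every fixture touched in this commit shares one shape: a Hugging Face datasets.DatasetDict whose splits each expose the input_col, explain_col, and output_col columns that these tests validate. Below is a minimal standalone sketch of that shape, assuming only the datasets package; the split contents are copied from the diff, and the final assert is illustrative rather than part of the test file.

import datasets

# One split built the same way as the fixtures above: from_dict turns
# parallel lists into columns, and DatasetDict groups splits by name.
dataset_dict = datasets.DatasetDict(
    {
        "train": datasets.Dataset.from_dict(
            {
                "input_col": ["foo", "bar"],
                "explain_col": ["abc", "xyz"],
                "output_col": ["baz", "qux"],
            }
        )
    }
)

# Columns are preserved in insertion order; a split missing any of these
# keys is what test_unexpected_columns expects to be rejected with the
# error message asserted in the hunk at line 371.
assert dataset_dict["train"].column_names == [
    "input_col",
    "explain_col",
    "output_col",
]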
