Skip to content

Commit

Permalink
Refactor test project (#205)
Browse files Browse the repository at this point in the history
  • Loading branch information
yoshidan authored Oct 30, 2023
1 parent 337506d commit 00d3365
Show file tree
Hide file tree
Showing 16 changed files with 311 additions and 358 deletions.
2 changes: 1 addition & 1 deletion bigquery/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -33,7 +33,7 @@ google-cloud-auth = { optional = true, version = "0.13", path="../foundation/aut
[dev-dependencies]
tokio = { version="1.32", features=["rt-multi-thread"] }
serial_test = "0.9"
tracing-subscriber = "0.3.17"
tracing-subscriber = { version="0.3.17", features=["env-filter"] }
ctor = "0.1.26"
tokio-util = {version ="0.7", features = ["codec"] }
google-cloud-auth = { path = "../foundation/auth", default-features=false }
Expand Down
86 changes: 55 additions & 31 deletions bigquery/src/client.rs
Original file line number Diff line number Diff line change
Expand Up @@ -551,15 +551,16 @@ mod tests {
use serial_test::serial;
use std::ops::AddAssign;
use std::time::Duration;
use time::macros::datetime;

use time::{Date, OffsetDateTime, Time};

use google_cloud_googleapis::cloud::bigquery::storage::v1::read_session::TableReadOptions;

use crate::client::{Client, ClientConfig, ReadTableOption};
use crate::http::bigquery_client::test::TestData;
use crate::http::bigquery_client::test::{create_table_schema, dataset_name, TestData};
use crate::http::job::query::QueryRequest;
use crate::http::table::TableReference;
use crate::http::table::{Table, TableReference};
use crate::http::tabledata::insert_all::{InsertAllRequest, Row};
use crate::query;
use crate::query::QueryOption;

Expand Down Expand Up @@ -730,15 +731,38 @@ mod tests {
test_query_table(Some(1), QueryOption::default()).await
}

async fn insert(client: &Client, project: &str, dataset: &str, table: &str, size: usize, now: &OffsetDateTime) {
let mut table1 = Table::default();
table1.table_reference.dataset_id = dataset.to_string();
table1.table_reference.project_id = project.to_string();
table1.table_reference.table_id = table.to_string();
table1.schema = Some(create_table_schema());
let _table1 = client.table_client.create(&table1).await.unwrap();
let mut req = InsertAllRequest::<TestData>::default();
for i in 0..size {
req.rows.push(Row {
insert_id: None,
json: TestData::default(i, *now + Duration::from_secs(i as u64)),
});
}
client.tabledata().insert(project, dataset, table, &req).await.unwrap();
}

async fn test_query_table(max_results: Option<i64>, option: QueryOption) {
let dataset = dataset_name("table");
let (client, project_id) = create_client().await;
let now = OffsetDateTime::from_unix_timestamp(OffsetDateTime::now_utc().unix_timestamp()).unwrap();
let table = format!("test_query_table_{}", now.unix_timestamp());
insert(&client, &project_id, &dataset, &table, 3, &now).await;

// query
let mut data_as_row: Vec<TestData> = vec![];
let mut iterator_as_row = client
.query_with_option::<query::row::Row>(
&project_id,
QueryRequest {
max_results,
query: "SELECT * FROM rust_test_job.table_data_1686707863".to_string(),
query: format!("SELECT * FROM {}.{}", dataset, table),
..Default::default()
},
option.clone(),
Expand All @@ -763,7 +787,7 @@ mod tests {
.query_with_option::<TestData>(
&project_id,
QueryRequest {
query: "SELECT * FROM rust_test_job.table_data_1686707863".to_string(),
query: format!("SELECT * FROM {}.{}", dataset, table),
..Default::default()
},
option,
Expand All @@ -778,22 +802,23 @@ mod tests {
assert_eq!(data_as_struct.len(), 3);
assert_eq!(data_as_row.len(), 3);

for (i, d) in data_as_struct.iter().enumerate() {
assert_data(i, d.clone());
}
for (i, d) in data_as_row.iter().enumerate() {
assert_data(i, d.clone());
}
assert_data(&now, data_as_struct);
assert_data(&now, data_as_row);
}

#[tokio::test(flavor = "multi_thread")]
#[serial]
async fn test_read_table() {
let dataset = dataset_name("table");
let (client, project_id) = create_client().await;
let now = OffsetDateTime::from_unix_timestamp(OffsetDateTime::now_utc().unix_timestamp()).unwrap();
let table = format!("test_read_table_{}", now.unix_timestamp());
insert(&client, &project_id, &dataset, &table, 3, &now).await;

let table = TableReference {
project_id,
dataset_id: "rust_test_job".to_string(),
table_id: "table_data_1686707863".to_string(),
dataset_id: dataset.to_string(),
table_id: table.to_string(),
};
let mut iterator_as_struct = client.read_table::<TestData>(&table, None).await.unwrap();

Expand Down Expand Up @@ -853,10 +878,8 @@ mod tests {
assert_eq!(data_as_struct.len(), 3);
assert_eq!(data_as_row.len(), 1);

for (i, d) in data_as_struct.iter().enumerate() {
assert_data(i, d.clone());
}
assert_data(0, data_as_row[0].clone());
assert_data(&now, data_as_struct);
assert_data(&now, data_as_row);
}

#[tokio::test(flavor = "multi_thread")]
Expand All @@ -872,16 +895,22 @@ mod tests {
}

async fn test_query_job_incomplete(max_results: Option<i64>, option: QueryOption) {
let dataset = dataset_name("table");
let (client, project_id) = create_client().await;
let mut data: Vec<TestData> = vec![];
let now = OffsetDateTime::now_utc();
let table = format!("test_query_job_incomplete_{}", now.unix_timestamp());
const SIZE: usize = 10000;
insert(&client, &project_id, &dataset, &table, SIZE, &now).await;

let mut data: Vec<query::row::Row> = vec![];
let mut iter = client
.query_with_option::<TestData>(
.query_with_option::<query::row::Row>(
&project_id,
QueryRequest {
timeout_ms: Some(5), // pass wait_for_query
use_query_cache: Some(false),
max_results,
query: "SELECT * FROM rust_test_job.table_data_10000v2".to_string(),
query: format!("SELECT 1 FROM {}.{}", dataset, table),
..Default::default()
},
option,
Expand All @@ -891,18 +920,13 @@ mod tests {
while let Some(row) = iter.next().await.unwrap() {
data.push(row);
}
assert_eq!(iter.total_size, 10000);
assert_eq!(data.len(), 10000);
assert_eq!(iter.total_size, SIZE as i64);
assert_eq!(data.len(), SIZE);
}

fn assert_data(index: usize, d: TestData) {
let now = if index == 0 {
datetime!(2023-06-14 01:57:43.438086 UTC)
} else if index == 1 {
datetime!(2023-06-14 01:57:43.438296 UTC)
} else {
datetime!(2023-06-14 01:57:43.438410 UTC)
};
assert_eq!(TestData::default(index, now), d);
// Verify `data` against the rows produced by `insert`: element `i` must equal
// `TestData::default(i, now + i seconds)`. Panics on the first mismatch.
fn assert_data(now: &OffsetDateTime, data: Vec<TestData>) {
    let base = *now;
    for (index, actual) in data.into_iter().enumerate() {
        let expected = TestData::default(index, base + Duration::from_secs(index as u64));
        assert_eq!(expected, actual);
    }
}
}
15 changes: 15 additions & 0 deletions bigquery/src/http/bigquery_client.rs
Original file line number Diff line number Diff line change
Expand Up @@ -114,6 +114,21 @@ pub(crate) mod test {

base64_serde_type!(Base64Standard, STANDARD);

// One-time tracing setup for the test binary, executed before any test by `ctor`.
#[ctor::ctor]
fn init() {
    // Start from RUST_LOG (if present) and always enable trace output for this crate.
    let crate_directive = "google_cloud_bigquery=trace".parse().unwrap();
    let env_filter =
        tracing_subscriber::filter::EnvFilter::from_default_env().add_directive(crate_directive);
    // try_init so a second initialization (another ctor/test harness) is ignored.
    let _ = tracing_subscriber::fmt().with_env_filter(env_filter).try_init();
}

/// Dataset id used by integration tests, namespaced with the `gcrbq_` prefix
/// so test datasets are easy to identify and clean up.
pub fn dataset_name(name: &str) -> String {
    const PREFIX: &str = "gcrbq_";
    let mut id = String::with_capacity(PREFIX.len() + name.len());
    id.push_str(PREFIX);
    id.push_str(name);
    id
}

/// Bucket name used by integration tests: `{project}_gcrbq_{name}`, prefixed
/// with the project id to keep names globally unique.
pub fn bucket_name(project: &str, name: &str) -> String {
    [project, "_gcrbq_", name].concat()
}

pub async fn create_client() -> (BigqueryClient, String) {
let tsp = DefaultTokenSourceProvider::new(Config {
audience: None,
Expand Down
21 changes: 8 additions & 13 deletions bigquery/src/http/bigquery_dataset_client.rs
Original file line number Diff line number Diff line change
Expand Up @@ -73,35 +73,24 @@ mod test {

use serial_test::serial;

use crate::http::bigquery_client::test::create_client;
use crate::http::bigquery_client::test::{create_client, dataset_name};
use crate::http::bigquery_dataset_client::BigqueryDatasetClient;
use crate::http::dataset::list::ListDatasetsRequest;
use crate::http::dataset::{Access, Dataset, DatasetReference, SpecialGroup, StorageBillingModel};
use crate::http::types::{Collation, EncryptionConfiguration};

#[ctor::ctor]
fn init() {
let _ = tracing_subscriber::fmt::try_init();
}

#[tokio::test]
#[serial]
pub async fn crud_dataset() {
let (client, project) = create_client().await;
let client = BigqueryDatasetClient::new(Arc::new(client));

// minimum dataset
let mut ds1 = Dataset::default();
ds1.dataset_reference.dataset_id = "rust_test_empty".to_string();
ds1.dataset_reference.project_id = project.clone();
ds1 = client.create(&ds1).await.unwrap();

// full prop dataset
let mut labels = HashMap::new();
labels.insert("key".to_string(), "value".to_string());
let ds2 = Dataset {
dataset_reference: DatasetReference {
dataset_id: "rust_test_full".to_string(),
dataset_id: dataset_name("crud_full"),
project_id: project.to_string(),
},
friendly_name: Some("gcr_test_friendly_name".to_string()),
Expand Down Expand Up @@ -129,6 +118,12 @@ mod test {
};
let ds2 = client.create(&ds2).await.unwrap();

// minimum dataset
let mut ds1 = Dataset::default();
ds1.dataset_reference.dataset_id = dataset_name("crud_empty");
ds1.dataset_reference.project_id = project.clone();
ds1 = client.create(&ds1).await.unwrap();

// test get
let mut res1 = client
.get(project.as_str(), &ds1.dataset_reference.dataset_id)
Expand Down
Loading

0 comments on commit 00d3365

Please sign in to comment.