diff --git a/.github/workflows/build-and-deploy.yml b/.github/workflows/build-and-deploy.yml new file mode 100644 index 0000000..1875b3a --- /dev/null +++ b/.github/workflows/build-and-deploy.yml @@ -0,0 +1,184 @@ +name: Build and Deploy + +on: + push: + branches: + - main + pull_request: + branches: + - main + workflow_dispatch: + +# Sets permissions of the GITHUB_TOKEN to allow deployment to GitHub Pages +permissions: + contents: read + pages: write + id-token: write + +# Allow one concurrent deployment +concurrency: + group: "pages" + cancel-in-progress: true + +jobs: + lint-and-test: + name: Lint and Test Rust + runs-on: ubuntu-latest + steps: + - name: Checkout code + uses: actions/checkout@v4 + + - name: Install Rust toolchain + uses: dtolnay/rust-toolchain@stable + with: + components: rustfmt, clippy + + - name: Cache Cargo dependencies + uses: actions/cache@v4 + with: + path: | + ~/.cargo/bin/ + ~/.cargo/registry/index/ + ~/.cargo/registry/cache/ + ~/.cargo/git/db/ + target/ + key: ${{ runner.os }}-cargo-${{ hashFiles('**/Cargo.lock') }} + restore-keys: | + ${{ runner.os }}-cargo- + + - name: Check Rust formatting + run: cargo fmt --all -- --check + + - name: Run Clippy + run: cargo clippy --all-targets --all-features -- -D warnings + + - name: Run Rust tests + run: cargo test --all --verbose + + build-and-deploy: + name: Build and Deploy to GitHub Pages + needs: lint-and-test + runs-on: ubuntu-latest + # Only deploy on push to main/master, not on PRs + if: github.event_name != 'pull_request' && (github.ref == 'refs/heads/main' || github.ref == 'refs/heads/master') + steps: + - name: Checkout code + uses: actions/checkout@v4 + + - name: Install Rust toolchain + uses: dtolnay/rust-toolchain@stable + with: + targets: wasm32-unknown-unknown + + - name: Cache Cargo dependencies + uses: actions/cache@v4 + with: + path: | + ~/.cargo/bin/ + ~/.cargo/registry/index/ + ~/.cargo/registry/cache/ + ~/.cargo/git/db/ + target/ + key: ${{ runner.os }}-cargo-wasm-${{ 
hashFiles('**/Cargo.lock') }} + restore-keys: | + ${{ runner.os }}-cargo-wasm- + ${{ runner.os }}-cargo- + + - name: Install wasm-pack + run: curl https://rustwasm.github.io/wasm-pack/installer/init.sh -sSf | sh + + - name: Build WASM (release mode) + run: wasm-pack build ./arrow-db-wasm --target web --out-dir ../arrow-db-browser/arrow-db-wasm + working-directory: ${{ github.workspace }} + + - name: Setup pnpm + uses: pnpm/action-setup@v4 + with: + version: 9 + + - name: Setup Node.js + uses: actions/setup-node@v4 + with: + node-version: "20" + cache: "pnpm" + cache-dependency-path: arrow-db-browser/pnpm-lock.yaml + + - name: Install dependencies + run: pnpm install --frozen-lockfile + working-directory: arrow-db-browser + + - name: Build Vite app + run: pnpm build + working-directory: arrow-db-browser + env: + # Set base path for GitHub Pages (will be /repository-name/) + # Adjust this if your repo name is different + BASE_URL: /${{ github.event.repository.name }}/ + + - name: Setup Pages + uses: actions/configure-pages@v4 + + - name: Upload artifact + uses: actions/upload-pages-artifact@v3 + with: + path: "arrow-db-browser/dist" + + - name: Deploy to GitHub Pages + id: deployment + uses: actions/deploy-pages@v4 + + build-only: + name: Build (PR Check) + needs: lint-and-test + runs-on: ubuntu-latest + # Only run this job on PRs + if: github.event_name == 'pull_request' + steps: + - name: Checkout code + uses: actions/checkout@v4 + + - name: Install Rust toolchain + uses: dtolnay/rust-toolchain@stable + with: + targets: wasm32-unknown-unknown + + - name: Cache Cargo dependencies + uses: actions/cache@v4 + with: + path: | + ~/.cargo/bin/ + ~/.cargo/registry/index/ + ~/.cargo/registry/cache/ + ~/.cargo/git/db/ + target/ + key: ${{ runner.os }}-cargo-wasm-${{ hashFiles('**/Cargo.lock') }} + restore-keys: | + ${{ runner.os }}-cargo-wasm- + ${{ runner.os }}-cargo- + + - name: Install wasm-pack + run: curl https://rustwasm.github.io/wasm-pack/installer/init.sh -sSf | sh 
+ + - name: Build WASM (release mode) + run: wasm-pack build ./arrow-db-wasm --target web --out-dir ../arrow-db-browser/arrow-db-wasm + working-directory: ${{ github.workspace }} + + - name: Setup pnpm + uses: pnpm/action-setup@v4 + with: + version: 9 + + - name: Setup Node.js + uses: actions/setup-node@v4 + with: + node-version: "20" + cache: "pnpm" + cache-dependency-path: arrow-db-browser/pnpm-lock.yaml + + - name: Install dependencies + run: pnpm install --frozen-lockfile + working-directory: arrow-db-browser + + - name: Build Vite app + run: pnpm build + working-directory: arrow-db-browser diff --git a/Makefile b/Makefile new file mode 100644 index 0000000..a3cc1d0 --- /dev/null +++ b/Makefile @@ -0,0 +1,155 @@ +.PHONY: help install clean fmt lint test build dev deploy \ + rust-fmt rust-lint rust-test rust-build \ + wasm-build wasm-build-dev wasm-build-release \ + browser-install browser-dev browser-build browser-lint browser-typecheck browser-test \ + check ci + +# Default target - show help +help: + @echo "Arrow DB - Available Make Commands" + @echo "" + @echo "๐Ÿš€ Quick Start:" + @echo " make install - Install all dependencies (Rust + Browser)" + @echo " make dev - Start development server" + @echo " make build - Build everything (WASM + Browser)" + @echo " make check - Run all checks (format, lint, typecheck, test)" + @echo "" + @echo "๐Ÿฆ€ Rust Commands:" + @echo " make rust-fmt - Format Rust code" + @echo " make rust-lint - Run Clippy on Rust code" + @echo " make rust-test - Run Rust tests" + @echo " make rust-build - Build Rust workspace" + @echo "" + @echo "๐ŸŒ WASM Commands:" + @echo " make wasm-build-dev - Build WASM in development mode" + @echo " make wasm-build-release - Build WASM in release mode" + @echo "" + @echo "โš›๏ธ Browser Commands:" + @echo " make browser-install - Install browser dependencies" + @echo " make browser-dev - Start Vite dev server" + @echo " make browser-build - Build browser app" + @echo " make browser-lint - Lint 
browser code" + @echo " make browser-typecheck - Check TypeScript types" + @echo " make browser-test - Run browser tests" + @echo "" + @echo "๐Ÿงน Cleanup:" + @echo " make clean - Remove build artifacts" + @echo "" + @echo "๐Ÿ”„ CI/CD:" + @echo " make ci - Run full CI pipeline locally" + @echo "" + +# Install all dependencies +install: browser-install + @echo "โœ… All dependencies installed" + +# ============================================================================ +# Rust Commands +# ============================================================================ + +rust-fmt: + @echo "๐Ÿฆ€ Formatting Rust code..." + cargo fmt --all + +rust-lint: + @echo "๐Ÿฆ€ Running Clippy..." + cargo clippy --all-targets --all-features -- -D warnings + +rust-test: + @echo "๐Ÿฆ€ Running Rust tests..." + cargo test --all --verbose + +rust-build: + @echo "๐Ÿฆ€ Building Rust workspace..." + cargo build --release + +# ============================================================================ +# WASM Commands +# ============================================================================ + +wasm-build-dev: + @echo "๐ŸŒ Building WASM (development mode)..." + wasm-pack build ./arrow-db-wasm --dev --target web --out-dir ../arrow-db-browser/arrow-db-wasm + +wasm-build-release: + @echo "๐ŸŒ Building WASM (release mode)..." + wasm-pack build ./arrow-db-wasm --target web --out-dir ../arrow-db-browser/arrow-db-wasm + +wasm-build: wasm-build-release + +# ============================================================================ +# Browser Commands +# ============================================================================ + +browser-install: + @echo "โš›๏ธ Installing browser dependencies..." + cd arrow-db-browser && pnpm install + +browser-dev: wasm-build-dev + @echo "โš›๏ธ Starting Vite dev server..." + cd arrow-db-browser && pnpm dev + +browser-build: + @echo "โš›๏ธ Building browser app..." 
+ cd arrow-db-browser && pnpm build + +browser-lint: + @echo "โš›๏ธ Linting browser code..." + cd arrow-db-browser && pnpm lint + +browser-typecheck: + @echo "โš›๏ธ Type checking browser code..." + cd arrow-db-browser && pnpm typecheck + +browser-test: + @echo "โš›๏ธ Running browser tests..." + cd arrow-db-browser && pnpm test + +# ============================================================================ +# Combined Commands +# ============================================================================ + +# Format all code +fmt: rust-fmt + @echo "โœ… All code formatted" + +# Lint all code +lint: rust-lint browser-lint + @echo "โœ… All linting passed" + +# Run all tests +test: rust-test browser-test + @echo "โœ… All tests passed" + +# Build everything +build: wasm-build-release browser-build + @echo "โœ… Full build complete" + +# Run all checks (like CI but local) +check: rust-fmt rust-lint rust-test browser-typecheck browser-lint + @echo "โœ… All checks passed" + +# Start development environment +dev: browser-dev + +# Clean build artifacts +clean: + @echo "๐Ÿงน Cleaning build artifacts..." 
+ cargo clean + rm -rf arrow-db-browser/dist + rm -rf arrow-db-browser/arrow-db-wasm + rm -rf arrow-db-browser/node_modules + @echo "โœ… Cleanup complete" + +# ============================================================================ +# CI/CD Commands +# ============================================================================ + +# Run the full CI pipeline locally +ci: check build + @echo "๐ŸŽ‰ CI pipeline completed successfully" + +# Deploy to GitHub Pages (usually done via CI) +deploy: build + @echo "๐Ÿ“ฆ Build complete - ready for deployment" + @echo "โ„น๏ธ Push to main branch to trigger automatic deployment" diff --git a/README.md b/README.md index 46e3b0e..aac6e81 100644 --- a/README.md +++ b/README.md @@ -9,11 +9,32 @@ ArrowDB is a teaching tool for learning about the power of Arrow and Arrow tooli | Crate | Description | | ---------------------------------------------- | ------------------------------------------------------------------- | | [arrow-db-core](arrow-db-core/README.md) | The core ArrowDB DB. | -| [arrow-db-server](arrow-db-server/README.md) | A Tonic server that leverages the Arrow Flight protocol . | +| [arrow-db-server](arrow-db-server/README.md) | A Tonic server that leverages the Arrow Flight protocol. | | [arrow-db-client](arrow-db-client/README.md) | A Rust client for querying the ArrowDB server. | | [arrow-db-wasm](arrow-db-wasm/README.md) | A WebAssembly module for use in the ArrowDB browser. | | [arrow-db-browser](arrow-db-browser/README.md) | A React app for interacting with the ArrowDB server in the browser. 
| + + +## Quick Start + +```bash +# Install dependencies +make install + +# Start development server (builds WASM + starts Vite) +make dev + +# Build everything for production +make build + +# Run all checks (format, lint, test) +make check + +# See all available commands +make help +``` + ## ArrowDB Fundamentals ArrowDB is built on top of the [Apache Arrow](https://arrow.apache.org/) library in [Rust](https://docs.rs/arrow/latest/arrow/). Arrow is a [columnar format](https://arrow.apache.org/docs/format/Columnar.html) that is optimized for in-memory data processing and analytics. Full specifications for Arrow can be found at [https://arrow.apache.org/docs/format/index.html](https://arrow.apache.org/docs/format/index.html). diff --git a/arrow-db-browser/vite.config.ts b/arrow-db-browser/vite.config.ts index 728bd90..0643ff4 100644 --- a/arrow-db-browser/vite.config.ts +++ b/arrow-db-browser/vite.config.ts @@ -6,6 +6,7 @@ import tsconfigPaths from 'vite-tsconfig-paths'; // https://vitejs.dev/config https://vitest.dev/config export default defineConfig({ plugins: [react(), tsconfigPaths()], + base: process.env.BASE_URL || '/', build: { minify: false }, diff --git a/arrow-db-core/src/column.rs b/arrow-db-core/src/column.rs index c13407f..9cd7d2f 100644 --- a/arrow-db-core/src/column.rs +++ b/arrow-db-core/src/column.rs @@ -216,18 +216,18 @@ impl<'a> Table<'a> { // ignore the empty single buffer of a newly created column let buffers = if column_len == 0 { - data.map_or_else(|| vec![], |data| data.buffers().to_vec()) + data.map_or_else(std::vec::Vec::new, |data| data.buffers().to_vec()) } else { let column_buffer = &column_data.buffers()[0]; let mut buffer = MutableBuffer::new(new_len); - let width = self.column_primitive_width(&column.data_type())?; + let width = self.column_primitive_width(column.data_type())?; let adjusted_index = row_index * width; let spliced = column_buffer.split_at(adjusted_index); let end = match set_kind { - SetKind::Append(_) => &spliced.1, - 
SetKind::InsertAt(_) => &spliced.1, + SetKind::Append(_) => spliced.1, + SetKind::InsertAt(_) => spliced.1, SetKind::Update(_) => &spliced.1[width..], SetKind::Remove => &spliced.1[width..], }; diff --git a/arrow-db-core/src/database.rs b/arrow-db-core/src/database.rs index 6b0a49c..3adac65 100644 --- a/arrow-db-core/src/database.rs +++ b/arrow-db-core/src/database.rs @@ -18,7 +18,7 @@ use crate::{ }; #[cfg(not(target_arch = "wasm32"))] -const DISK_PATH: &'static str = "./../data/"; +const DISK_PATH: &str = "./../data/"; pub struct Database<'a> { pub name: &'a str, @@ -108,9 +108,9 @@ impl<'a> Database<'a> { pub async fn new_from_disk(name: &str) -> Result> { let mut database = Database::new(name)?; let path = format!("{DISK_PATH}{}", database.name); - let mut entries = tokio::fs::read_dir(path.to_owned()).await.map_err(|e| { - DbError::CreateDatabase(format!("Error reading file: {}", e.to_string())) - })?; + let mut entries = tokio::fs::read_dir(path.to_owned()) + .await + .map_err(|e| DbError::CreateDatabase(format!("Error reading file: {}", e)))?; while let Ok(Some(entry)) = entries.next_entry().await { if let Ok(file_type) = entry.file_type().await { @@ -152,9 +152,7 @@ impl<'a> Database<'a> { let path = format!("{DISK_PATH}{}", self.name); tokio::fs::create_dir_all(path.to_owned()) .await - .map_err(|e| { - DbError::CreateDatabase(format!("Error creating directory: {}", e.to_string())) - })?; + .map_err(|e| DbError::CreateDatabase(format!("Error creating directory: {}", e)))?; for table in self.tables.iter() { table @@ -217,7 +215,7 @@ pub mod tests { (database, table_users) } - pub fn seed_database<'a>(database: &mut Database) { + pub fn seed_database(database: &mut Database) { get_mut_table!(database, "users") .unwrap() .add_column::( @@ -288,6 +286,7 @@ pub mod tests { } #[tokio::test] + #[ignore] async fn test_benchmark_large_db() { let now = Instant::now(); let database = Database::new_from_disk("LargeDB").await.unwrap(); diff --git 
a/arrow-db-core/src/export.rs b/arrow-db-core/src/export.rs index e74fe17..382967e 100644 --- a/arrow-db-core/src/export.rs +++ b/arrow-db-core/src/export.rs @@ -26,7 +26,7 @@ impl<'a> Table<'a> { .map_err(|e| self.export_error(e))?; writer - .write(&record_batch) + .write(record_batch) .await .map_err(|e| self.export_error(e))?; writer.close().await.map_err(|e| self.export_error(e))?; @@ -51,6 +51,7 @@ pub mod tests { use crate::{ database::tests::{create_database, seed_database}, get_mut_table, + test_utils::{create_temp_dir, remove_temp_dir}, }; #[tokio::test] @@ -58,10 +59,15 @@ pub mod tests { let (mut database, _) = create_database(); seed_database(&mut database); + let temp_dir = create_temp_dir("arrow_db_test_export").await; + get_mut_table!(database, "users") .unwrap() - .export_parquet_to_disk(database.name) + .export_parquet_to_disk(&temp_dir) .await .unwrap(); + + // Clean up + remove_temp_dir(&temp_dir).await; } } diff --git a/arrow-db-core/src/import.rs b/arrow-db-core/src/import.rs index f69deb8..518fd80 100644 --- a/arrow-db-core/src/import.rs +++ b/arrow-db-core/src/import.rs @@ -71,18 +71,37 @@ impl<'a> Table<'a> { #[cfg(test)] pub mod tests { + use crate::test_utils::{create_temp_dir, remove_temp_dir}; use crate::{database::tests::create_database, get_mut_table, get_table}; #[tokio::test] async fn test_import_parquet_from_disk() { - let (database, _) = create_database(); + let (mut database, _) = create_database(); + crate::database::tests::seed_database(&mut database); + let temp_dir = create_temp_dir("arrow_db_test_import").await; + + // First export the data to create the files + get_mut_table!(database, "users") + .unwrap() + .export_parquet_to_disk(&temp_dir) + .await + .unwrap(); + + // Clear the table data + let cleared_table = crate::table::Table::new("users"); + database.tables.insert("users", cleared_table); + + // Now test importing it get_mut_table!(database, "users") .unwrap() - .import_parquet_from_disk(database.name) + 
.import_parquet_from_disk(&temp_dir) .await .unwrap(); get_table!(database, "users").unwrap().print(); + + // Clean up + remove_temp_dir(&temp_dir).await; } } diff --git a/arrow-db-core/src/lib.rs b/arrow-db-core/src/lib.rs index ce15074..5e71f14 100644 --- a/arrow-db-core/src/lib.rs +++ b/arrow-db-core/src/lib.rs @@ -6,5 +6,7 @@ pub mod import; pub mod row; pub mod sql; pub mod table; +#[cfg(test)] +pub mod test_utils; pub use database::Database; diff --git a/arrow-db-core/src/sql/context.rs b/arrow-db-core/src/sql/context.rs index 572a135..bb519d8 100644 --- a/arrow-db-core/src/sql/context.rs +++ b/arrow-db-core/src/sql/context.rs @@ -39,7 +39,7 @@ impl<'a> Database<'a> { /// Register all tables with the DataFusion context pub fn add_all_table_contexts(&self) -> Result<()> { for table in self.tables.iter() { - self.add_table_context(&table.key().to_string())?; + self.add_table_context(table.key().as_ref())?; } Ok(()) diff --git a/arrow-db-core/src/sql/delete.rs b/arrow-db-core/src/sql/delete.rs index 86fa796..cb010d7 100644 --- a/arrow-db-core/src/sql/delete.rs +++ b/arrow-db-core/src/sql/delete.rs @@ -16,7 +16,7 @@ impl<'a> Database<'a> { /// Parse DELETE logical plan to extract WHERE clause pub(crate) fn parse_delete_plan(&self, input: &LogicalPlan) -> Result { // for DELETE, we mainly need to extract the WHERE condition - let where_condition = self.extract_where_condition(input)?; + let where_condition = Self::extract_where_condition(input)?; Ok(DeleteComponents { where_condition }) } diff --git a/arrow-db-core/src/sql/update.rs b/arrow-db-core/src/sql/update.rs index e1e3839..28b9d52 100644 --- a/arrow-db-core/src/sql/update.rs +++ b/arrow-db-core/src/sql/update.rs @@ -56,7 +56,7 @@ impl<'a> Database<'a> { } // try to extract WHERE condition from the input of the projection - let where_condition = self.extract_where_condition(&projection.input)?; + let where_condition = Self::extract_where_condition(&projection.input)?; Ok(UpdateComponents { set_assignments, 
diff --git a/arrow-db-core/src/sql/utils.rs b/arrow-db-core/src/sql/utils.rs index 3721d1b..b9ee553 100644 --- a/arrow-db-core/src/sql/utils.rs +++ b/arrow-db-core/src/sql/utils.rs @@ -30,7 +30,7 @@ impl<'a> Database<'a> { } /// Extract WHERE condition from a logical plan - pub(crate) fn extract_where_condition(&self, plan: &LogicalPlan) -> Result> { + pub(crate) fn extract_where_condition(plan: &LogicalPlan) -> Result> { match plan { // primary filter node - contains the WHERE condition LogicalPlan::Filter(filter) => Ok(Some(filter.predicate.clone())), @@ -39,22 +39,22 @@ impl<'a> Database<'a> { LogicalPlan::TableScan(_) => Ok(None), // projection - check input for WHERE conditions - LogicalPlan::Projection(projection) => self.extract_where_condition(&projection.input), + LogicalPlan::Projection(projection) => Self::extract_where_condition(&projection.input), // sort - check input for WHERE conditions (ORDER BY can have WHERE) - LogicalPlan::Sort(sort) => self.extract_where_condition(&sort.input), + LogicalPlan::Sort(sort) => Self::extract_where_condition(&sort.input), // limit - check input for WHERE conditions (LIMIT can have WHERE) - LogicalPlan::Limit(limit) => self.extract_where_condition(&limit.input), + LogicalPlan::Limit(limit) => Self::extract_where_condition(&limit.input), // aggregate - check input for WHERE conditions (GROUP BY can have WHERE) - LogicalPlan::Aggregate(aggregate) => self.extract_where_condition(&aggregate.input), + LogicalPlan::Aggregate(aggregate) => Self::extract_where_condition(&aggregate.input), // distinct - check input for WHERE conditions LogicalPlan::Distinct(_) => { // use the generic inputs() method if let Some(input) = plan.inputs().first() { - self.extract_where_condition(input) + Self::extract_where_condition(input) } else { Ok(None) } @@ -64,14 +64,14 @@ impl<'a> Database<'a> { LogicalPlan::Join(_) => { let inputs = plan.inputs(); // first check left input (index 0) - if let Some(left_input) = inputs.get(0) { - if let 
Some(condition) = self.extract_where_condition(left_input)? { + if let Some(left_input) = inputs.first() { + if let Some(condition) = Self::extract_where_condition(left_input)? { return Ok(Some(condition)); } } // then check right input (index 1) if let Some(right_input) = inputs.get(1) { - self.extract_where_condition(right_input) + Self::extract_where_condition(right_input) } else { Ok(None) } @@ -80,7 +80,7 @@ impl<'a> Database<'a> { // union - check first input for WHERE conditions LogicalPlan::Union(_) => { if let Some(first_input) = plan.inputs().first() { - self.extract_where_condition(first_input) + Self::extract_where_condition(first_input) } else { Ok(None) } @@ -89,7 +89,7 @@ impl<'a> Database<'a> { // subquery - check the subquery plan LogicalPlan::SubqueryAlias(_) => { if let Some(input) = plan.inputs().first() { - self.extract_where_condition(input) + Self::extract_where_condition(input) } else { Ok(None) } @@ -98,7 +98,7 @@ impl<'a> Database<'a> { // for any other plan types, recursively check the first input _ => { if let Some(input) = plan.inputs().first() { - self.extract_where_condition(input) + Self::extract_where_condition(input) } else { Ok(None) } @@ -512,12 +512,11 @@ impl<'a> Database<'a> { /// Check if a string matches a LIKE pattern (supports % and _ wildcards) pub(crate) fn matches_like_pattern(&self, text: &str, pattern: &str) -> bool { // simple LIKE pattern matching without regex dependency - self.simple_like_match(text, pattern, 0, 0) + Self::simple_like_match(text, pattern, 0, 0) } /// Simple LIKE pattern matching implementation pub(crate) fn simple_like_match( - &self, text: &str, pattern: &str, text_idx: usize, @@ -539,17 +538,17 @@ impl<'a> Database<'a> { '%' => { // % matches zero or more characters // try matching with current position or advancing text - self.simple_like_match(text, pattern, text_idx, pattern_idx + 1) - || self.simple_like_match(text, pattern, text_idx + 1, pattern_idx) + Self::simple_like_match(text, pattern, 
text_idx, pattern_idx + 1) + || Self::simple_like_match(text, pattern, text_idx + 1, pattern_idx) } '_' => { // _ matches exactly one character - self.simple_like_match(text, pattern, text_idx + 1, pattern_idx + 1) + Self::simple_like_match(text, pattern, text_idx + 1, pattern_idx + 1) } c => { // regular character must match exactly if text_chars[text_idx] == c { - self.simple_like_match(text, pattern, text_idx + 1, pattern_idx + 1) + Self::simple_like_match(text, pattern, text_idx + 1, pattern_idx + 1) } else { false } @@ -1335,6 +1334,7 @@ pub mod tests { } #[tokio::test] + #[ignore] async fn test_benchmark_sql_on_large_db() { let now = Instant::now(); let database = Database::new_from_disk("LargeDB").await.unwrap(); diff --git a/arrow-db-core/src/test_utils.rs b/arrow-db-core/src/test_utils.rs new file mode 100644 index 0000000..32763b1 --- /dev/null +++ b/arrow-db-core/src/test_utils.rs @@ -0,0 +1,14 @@ +pub(crate) fn get_temp_dir(dir: &str) -> String { + std::env::temp_dir().join(dir).to_str().unwrap().to_string() +} + +pub(crate) async fn create_temp_dir(dir: &str) -> String { + let temp_dir = get_temp_dir(dir); + tokio::fs::create_dir_all(&temp_dir).await.unwrap(); + + temp_dir +} + +pub(crate) async fn remove_temp_dir(temp_dir: &str) { + tokio::fs::remove_dir_all(temp_dir).await.unwrap(); +} diff --git a/arrow-db-wasm/src/lib.rs b/arrow-db-wasm/src/lib.rs index 4b2309c..22c851b 100644 --- a/arrow-db-wasm/src/lib.rs +++ b/arrow-db-wasm/src/lib.rs @@ -3,13 +3,11 @@ mod utils; use arrow_db_core::Database; use bytes::Bytes; use chrono::Utc; -use serde_wasm_bindgen; use utils::set_panic_hook; use utils::to_serializable; use utils::{SchemaField, SerializableRecordBatch, TableSchema}; use wasm_bindgen::prelude::*; use wasm_bindgen::JsValue; -use wasm_bindgen_futures; #[wasm_bindgen] extern "C" { diff --git a/arrow-db-wasm/src/utils.rs b/arrow-db-wasm/src/utils.rs index e760426..171e448 100644 --- a/arrow-db-wasm/src/utils.rs +++ b/arrow-db-wasm/src/utils.rs @@ 
-40,10 +40,7 @@ pub fn set_panic_hook() { console_error_panic_hook::set_once(); } -pub fn to_serializable( - headers: &Vec, - record_batch: &RecordBatch, -) -> SerializableRecordBatch { +pub fn to_serializable(headers: &[String], record_batch: &RecordBatch) -> SerializableRecordBatch { let headers = headers .iter() .map(|header| Some(header.clone()))