diff --git a/Cargo.lock b/Cargo.lock
index 8844e46b..b8d9af5f 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -852,6 +852,16 @@ dependencies = [
  "windows-sys 0.52.0",
 ]
 
+[[package]]
+name = "console_error_panic_hook"
+version = "0.1.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a06aeb73f470f66dcdbf7223caeebb85984942f22f1adb2a088cf9668146bbbc"
+dependencies = [
+ "cfg-if",
+ "wasm-bindgen",
+]
+
 [[package]]
 name = "const-oid"
 version = "0.9.6"
@@ -1438,8 +1448,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "c4567c8db10ae91089c99af84c68c38da3ec2f087c3f82960bcdbf3656b6f4d7"
 dependencies = [
  "cfg-if",
+ "js-sys",
  "libc",
  "wasi",
+ "wasm-bindgen",
 ]
 
 [[package]]
@@ -2353,7 +2365,6 @@ dependencies = [
  "pgt_console",
  "pgt_diagnostics",
  "pgt_query_ext",
- "pgt_schema_cache",
  "pgt_text_size",
  "rustc-hash 2.1.0",
  "schemars",
@@ -2431,10 +2442,6 @@ dependencies = [
  "pgt_test_utils",
  "pgt_text_size",
  "pgt_treesitter_queries",
- "schemars",
- "serde",
- "serde_json",
- "sqlx",
  "tokio",
  "tree-sitter",
  "tree_sitter_sql",
@@ -2739,6 +2746,25 @@ dependencies = [
  "tree_sitter_sql",
 ]
 
+[[package]]
+name = "pgt_wasm"
+version = "0.0.0"
+dependencies = [
+ "biome_js_factory",
+ "biome_js_formatter",
+ "biome_rowan",
+ "console_error_panic_hook",
+ "getrandom",
+ "js-sys",
+ "pgt_console",
+ "pgt_diagnostics",
+ "pgt_workspace",
+ "quote",
+ "serde",
+ "serde-wasm-bindgen",
+ "wasm-bindgen",
+]
+
 [[package]]
 name = "pgt_workspace"
 version = "0.0.0"
@@ -3366,6 +3392,17 @@ dependencies = [
  "serde_derive",
 ]
 
+[[package]]
+name = "serde-wasm-bindgen"
+version = "0.6.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8302e169f0eddcc139c70f139d19d6467353af16f9fce27e8c30158036a1e16b"
+dependencies = [
+ "js-sys",
+ "serde",
+ "wasm-bindgen",
+]
+
 [[package]]
 name = "serde_derive"
 version = "1.0.215"
@@ -4497,6 +4534,8 @@ checksum = "a474f6281d1d70c17ae7aa6a613c87fce69a127e2624002df63dcb39d6cf6396"
 dependencies = [
  "cfg-if",
  "once_cell",
+ "serde",
+ "serde_json",
  "wasm-bindgen-macro",
 ]
 
diff --git a/crates/pgt_analyse/Cargo.toml b/crates/pgt_analyse/Cargo.toml
index 4d30784c..a2976516 100644
--- a/crates/pgt_analyse/Cargo.toml
+++ b/crates/pgt_analyse/Cargo.toml
@@ -16,7 +16,6 @@ version              = "0.0.0"
 pgt_console.workspace      = true
 pgt_diagnostics.workspace  = true
 pgt_query_ext.workspace    = true
-pgt_schema_cache.workspace = true
 rustc-hash                 = { workspace = true }
 
 biome_deserialize        = { workspace = true, optional = true }
diff --git a/crates/pgt_cli/Cargo.toml b/crates/pgt_cli/Cargo.toml
index ca61cf65..3740499d 100644
--- a/crates/pgt_cli/Cargo.toml
+++ b/crates/pgt_cli/Cargo.toml
@@ -28,7 +28,7 @@ pgt_flags                = { workspace = true }
 pgt_fs                   = { workspace = true }
 pgt_lsp                  = { workspace = true }
 pgt_text_edit            = { workspace = true }
-pgt_workspace            = { workspace = true }
+pgt_workspace            = { workspace = true, features = ["db-connection"] }
 quick-junit              = "0.5.0"
 rayon                    = { workspace = true }
 rustc-hash               = { workspace = true }
diff --git a/crates/pgt_completions/Cargo.toml b/crates/pgt_completions/Cargo.toml
index dba88f41..09109886 100644
--- a/crates/pgt_completions/Cargo.toml
+++ b/crates/pgt_completions/Cargo.toml
@@ -10,30 +10,18 @@ name                 = "pgt_completions"
 repository.workspace = true
 version              = "0.0.0"
 
-
 [dependencies]
 async-std = "1.12.0"
 
-pgt_text_size.workspace = true
-
-
 pgt_schema_cache.workspace       = true
+pgt_text_size.workspace          = true
 pgt_treesitter_queries.workspace = true
-schemars                         = { workspace = true, optional = true }
-serde                            = { workspace = true, features = ["derive"] }
-serde_json                       = { workspace = true }
 tree-sitter.workspace            = true
 tree_sitter_sql.workspace        = true
 
-sqlx.workspace = true
-
-tokio = { version = "1.41.1", features = ["full"] }
-
 [dev-dependencies]
 pgt_test_utils.workspace = true
+tokio                    = { version = "1.41.1", features = ["full"] }
 
 [lib]
 doctest = false
-
-[features]
-schema = ["dep:schemars"]
diff --git a/crates/pgt_completions/src/complete.rs b/crates/pgt_completions/src/complete.rs
index 4acc1a24..adb93b45 100644
--- a/crates/pgt_completions/src/complete.rs
+++ b/crates/pgt_completions/src/complete.rs
@@ -1,5 +1,4 @@
 use pgt_text_size::TextSize;
-use serde::{Deserialize, Serialize};
 
 use crate::{
     builder::CompletionBuilder,
@@ -18,10 +17,9 @@ pub struct CompletionParams<'a> {
     pub tree: Option<&'a tree_sitter::Tree>,
 }
 
-#[derive(Debug, Default, Serialize, Deserialize)]
-#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
+#[derive(Debug, Default)]
 pub struct CompletionResult {
-    pub(crate) items: Vec<CompletionItem>,
+    pub items: Vec<CompletionItem>,
 }
 
 impl IntoIterator for CompletionResult {
diff --git a/crates/pgt_completions/src/item.rs b/crates/pgt_completions/src/item.rs
index 8f0e3b95..a18ac91d 100644
--- a/crates/pgt_completions/src/item.rs
+++ b/crates/pgt_completions/src/item.rs
@@ -1,19 +1,14 @@
-use serde::{Deserialize, Serialize};
-
-#[derive(Debug, PartialEq, Eq, Serialize, Deserialize)]
-#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
-#[serde(rename_all = "camelCase")]
+#[derive(Debug, PartialEq, Eq)]
 pub enum CompletionItemKind {
     Table,
     Function,
     Column,
 }
 
-#[derive(Debug, Serialize, Deserialize)]
-#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
+#[derive(Debug)]
 pub struct CompletionItem {
     pub label: String,
-    pub(crate) score: i32,
+    pub score: i32,
     pub description: String,
     pub preselected: bool,
     pub kind: CompletionItemKind,
diff --git a/crates/pgt_lsp/Cargo.toml b/crates/pgt_lsp/Cargo.toml
index 56086da0..fde61e5e 100644
--- a/crates/pgt_lsp/Cargo.toml
+++ b/crates/pgt_lsp/Cargo.toml
@@ -24,7 +24,7 @@ pgt_fs                  = { workspace = true }
 pgt_lsp_converters      = { workspace = true }
 pgt_text_edit           = { workspace = true }
 pgt_text_size.workspace = true
-pgt_workspace           = { workspace = true }
+pgt_workspace           = { workspace = true, features = ["db-connection"] }
 rustc-hash              = { workspace = true }
 serde                   = { workspace = true, features = ["derive"] }
 serde_json              = { workspace = true }
diff --git a/crates/pgt_lsp/src/handlers/completions.rs b/crates/pgt_lsp/src/handlers/completions.rs
index 4eb0436a..0a49db33 100644
--- a/crates/pgt_lsp/src/handlers/completions.rs
+++ b/crates/pgt_lsp/src/handlers/completions.rs
@@ -62,11 +62,15 @@ pub fn get_completions(
 }
 
 fn to_lsp_types_completion_item_kind(
-    pg_comp_kind: pgt_completions::CompletionItemKind,
+    pg_comp_kind: pgt_workspace::workspace::CompletionItemKind,
 ) -> lsp_types::CompletionItemKind {
     match pg_comp_kind {
-        pgt_completions::CompletionItemKind::Function => lsp_types::CompletionItemKind::FUNCTION,
-        pgt_completions::CompletionItemKind::Table => lsp_types::CompletionItemKind::CLASS,
-        pgt_completions::CompletionItemKind::Column => lsp_types::CompletionItemKind::FIELD,
+        pgt_workspace::workspace::CompletionItemKind::Function => {
+            lsp_types::CompletionItemKind::FUNCTION
+        }
+        pgt_workspace::workspace::CompletionItemKind::Table => lsp_types::CompletionItemKind::CLASS,
+        pgt_workspace::workspace::CompletionItemKind::Column => {
+            lsp_types::CompletionItemKind::FIELD
+        }
     }
 }
diff --git a/crates/pgt_wasm/Cargo.toml b/crates/pgt_wasm/Cargo.toml
new file mode 100644
index 00000000..da59a3be
--- /dev/null
+++ b/crates/pgt_wasm/Cargo.toml
@@ -0,0 +1,44 @@
+[package]
+authors.workspace    = true
+categories.workspace = true
+description          = "<DESCRIPTION>"
+edition.workspace    = true
+homepage.workspace   = true
+keywords.workspace   = true
+license.workspace    = true
+name                 = "pgt_wasm"
+repository.workspace = true
+version              = "0.0.0"
+
+[dependencies]
+pgt_console        = { workspace = true }
+pgt_diagnostics    = { workspace = true }
+pgt_workspace      = { workspace = true }
+js-sys             = "0.3.72"
+serde              = { workspace = true }
+serde-wasm-bindgen = "0.6.5"
+wasm-bindgen       = { version = "0.2.95", features = ["serde-serialize"] }
+getrandom          = { version = "0.2.15", features = ["js"] }
+
+# The `console_error_panic_hook` crate provides better debugging of panics by
+# logging them with `console.error`. This is great for development, but requires
+# all the `std::fmt` and `std::panicking` infrastructure, so isn't great for
+# code size when deploying.
+console_error_panic_hook = { version = "0.1.7", optional = true }
+
+[build-dependencies]
+biome_js_factory   = { workspace = true }
+biome_js_formatter = { workspace = true }
+biome_rowan        = { workspace = true }
+pgt_workspace      = { workspace = true, features = ["schema"] }
+quote              = "1.0.14"
+
+[lib]
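+# cdylib produces the WebAssembly artifact; rlib keeps the crate linkable as a normal Rust dependency.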
+crate-type = ["cdylib", "rlib"]
+
+[features]
+default = ["console_error_panic_hook"]
+
+[package.metadata.wasm-pack.profile.profiling]
+wasm-opt = false
+
diff --git a/crates/pgt_wasm/build.rs b/crates/pgt_wasm/build.rs
new file mode 100644
index 00000000..88f4c896
--- /dev/null
+++ b/crates/pgt_wasm/build.rs
@@ -0,0 +1,102 @@
+use std::{env, fs, io, path::PathBuf};
+
+use quote::{format_ident, quote};
+
+use biome_js_factory::syntax::JsFileSource;
+use biome_js_factory::{
+    make,
+    syntax::{AnyJsDeclaration, AnyJsModuleItem, AnyJsStatement},
+};
+use biome_js_formatter::{context::JsFormatOptions, format_node};
+use biome_rowan::AstNode;
+use pgt_workspace::workspace_types::{generate_type, methods, ModuleQueue};
+
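+// Generates TypeScript declarations plus matching wasm-bindgen extern types for
+// the params and results of every workspace method, and writes them to
+// `$OUT_DIR/ts_types.rs`, which `src/lib.rs` pulls in via `include!`.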
+fn main() -> io::Result<()> {
+    let methods = methods();
+
+    let mut items = Vec::new();
+    let mut queue = ModuleQueue::default();
+
+    for method in &methods {
+        generate_type(&mut items, &mut queue, &method.params);
+        generate_type(&mut items, &mut queue, &method.result);
+    }
+
+    let module = make::js_module(
+        make::js_directive_list(None),
+        make::js_module_item_list(items.into_iter().map(|(decl, _)| {
+            AnyJsModuleItem::AnyJsStatement(match decl {
+                AnyJsDeclaration::JsClassDeclaration(decl) => {
+                    AnyJsStatement::JsClassDeclaration(decl)
+                }
+                AnyJsDeclaration::JsFunctionDeclaration(decl) => {
+                    AnyJsStatement::JsFunctionDeclaration(decl)
+                }
+                AnyJsDeclaration::JsVariableDeclaration(decl) => {
+                    AnyJsStatement::JsVariableStatement(make::js_variable_statement(decl).build())
+                }
+                AnyJsDeclaration::TsDeclareFunctionDeclaration(decl) => {
+                    AnyJsStatement::TsDeclareFunctionDeclaration(decl)
+                }
+                AnyJsDeclaration::TsEnumDeclaration(decl) => {
+                    AnyJsStatement::TsEnumDeclaration(decl)
+                }
+                AnyJsDeclaration::TsExternalModuleDeclaration(decl) => {
+                    AnyJsStatement::TsExternalModuleDeclaration(decl)
+                }
+                AnyJsDeclaration::TsGlobalDeclaration(decl) => {
+                    AnyJsStatement::TsGlobalDeclaration(decl)
+                }
+                AnyJsDeclaration::TsImportEqualsDeclaration(decl) => {
+                    AnyJsStatement::TsImportEqualsDeclaration(decl)
+                }
+                AnyJsDeclaration::TsInterfaceDeclaration(decl) => {
+                    AnyJsStatement::TsInterfaceDeclaration(decl)
+                }
+                AnyJsDeclaration::TsModuleDeclaration(decl) => {
+                    AnyJsStatement::TsModuleDeclaration(decl)
+                }
+                AnyJsDeclaration::TsTypeAliasDeclaration(decl) => {
+                    AnyJsStatement::TsTypeAliasDeclaration(decl)
+                }
+            })
+        })),
+        make::eof(),
+    )
+    .build();
+
+    // Wasm-bindgen pastes the generated TS code as-is into the final .d.ts file,
+    // so run it through the formatter to make sure it looks good.
+    let formatted = format_node(JsFormatOptions::new(JsFileSource::ts()), module.syntax()).unwrap();
+    let printed = formatted.print().unwrap();
+    let definitions = printed.into_code();
+
+    // Generate wasm-bindgen extern type imports for all the types defined in the TS code
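+    // Each extern alias gets an `I` prefix on the Rust side, while `typescript_type`
+    // keeps the original TypeScript name.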
+    let types = queue.visited().iter().map(|name| {
+        let ident = format_ident!("I{name}");
+        quote! {
+            #[wasm_bindgen(typescript_type = #name)]
+            #[allow(non_camel_case_types)]
+            pub type #ident;
+        }
+    });
+
+    let tokens = quote! {
+        #[wasm_bindgen(typescript_custom_section)]
+        const TS_TYPEDEFS: &'static str = #definitions;
+
+        #[wasm_bindgen]
+        extern "C" {
+            #( #types )*
+        }
+    };
+
+    let out_dir = env::var("OUT_DIR").unwrap();
+    fs::write(
+        PathBuf::from(out_dir).join("ts_types.rs"),
+        tokens.to_string(),
+    )?;
+
+    Ok(())
+}
+
diff --git a/crates/pgt_wasm/src/lib.rs b/crates/pgt_wasm/src/lib.rs
new file mode 100644
index 00000000..56c2a4d8
--- /dev/null
+++ b/crates/pgt_wasm/src/lib.rs
@@ -0,0 +1,102 @@
+use js_sys::Error;
+use wasm_bindgen::prelude::*;
+
+use pgt_workspace::workspace::{
+    self, ChangeFileParams, CloseFileParams, GetFileContentParams, OpenFileParams,
+    PullDiagnosticsParams, UpdateSettingsParams, GetCompletionsParams
+};
+
+mod utils;
+
+pub use crate::utils::DiagnosticPrinter;
+use crate::utils::{into_error, set_panic_hook};
+
+#[wasm_bindgen(start)]
+pub fn main() {
+    set_panic_hook();
+}
+
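+// Generated by `build.rs`: the TypeScript custom section plus the `I*` extern
+// types (e.g. `IUpdateSettingsParams`) used as parameter and result types below.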
+include!(concat!(env!("OUT_DIR"), "/ts_types.rs"));
+
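+/// Thin WebAssembly wrapper around an in-process workspace server.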
+#[wasm_bindgen]
+pub struct Workspace {
+    inner: Box<dyn workspace::Workspace>,
+}
+
+#[wasm_bindgen]
+impl Workspace {
+    #[wasm_bindgen(constructor)]
+    #[allow(clippy::new_without_default)]
+    pub fn new() -> Workspace {
+        Workspace {
+            inner: workspace::server(),
+        }
+    }
+
+    #[wasm_bindgen(js_name = updateSettings)]
+    pub fn update_settings(&self, params: IUpdateSettingsParams) -> Result<(), Error> {
+        let params: UpdateSettingsParams =
+            serde_wasm_bindgen::from_value(params.into()).map_err(into_error)?;
+        self.inner.update_settings(params).map_err(into_error)
+    }
+
+    #[wasm_bindgen(js_name = openFile)]
+    pub fn open_file(&self, params: IOpenFileParams) -> Result<(), Error> {
+        let params: OpenFileParams =
+            serde_wasm_bindgen::from_value(params.into()).map_err(into_error)?;
+        self.inner.open_file(params).map_err(into_error)
+    }
+
+    #[wasm_bindgen(js_name = getFileContent)]
+    pub fn get_file_content(&self, params: IGetFileContentParams) -> Result<String, Error> {
+        let params: GetFileContentParams =
+            serde_wasm_bindgen::from_value(params.into()).map_err(into_error)?;
+        self.inner.get_file_content(params).map_err(into_error)
+    }
+
+    #[wasm_bindgen(js_name = changeFile)]
+    pub fn change_file(&self, params: IChangeFileParams) -> Result<(), Error> {
+        let params: ChangeFileParams =
+            serde_wasm_bindgen::from_value(params.into()).map_err(into_error)?;
+        self.inner.change_file(params).map_err(into_error)
+    }
+
+    #[wasm_bindgen(js_name = closeFile)]
+    pub fn close_file(&self, params: ICloseFileParams) -> Result<(), Error> {
+        let params: CloseFileParams =
+            serde_wasm_bindgen::from_value(params.into()).map_err(into_error)?;
+        self.inner.close_file(params).map_err(into_error)
+    }
+
+    #[wasm_bindgen(js_name = pullDiagnostics)]
+    pub fn pull_diagnostics(
+        &self,
+        params: IPullDiagnosticsParams,
+    ) -> Result<IPullDiagnosticsResult, Error> {
+        let params: PullDiagnosticsParams =
+            serde_wasm_bindgen::from_value(params.into()).map_err(into_error)?;
+        let result = self.inner.pull_diagnostics(params).map_err(into_error)?;
+        to_value(&result)
+            .map(IPullDiagnosticsResult::from)
+            .map_err(into_error)
+    }
+
+    #[wasm_bindgen(js_name = getCompletions)]
+    pub fn get_completions(
+        &self,
+        params: IGetCompletionsParams,
+    ) -> Result<IGetCompletionsResult, Error> {
+        let params: GetCompletionsParams =
+            serde_wasm_bindgen::from_value(params.into()).map_err(into_error)?;
+        let result = self.inner.get_completions(params).map_err(into_error)?;
+        to_value(&result)
+            .map(IGetCompletionsResult::from)
+            .map_err(into_error)
+    }
+}
+
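+// Serializer configured to emit `null` for `None`/missing values instead of `undefined`.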
+fn to_value<T: serde::ser::Serialize + ?Sized>(
+    value: &T,
+) -> Result<JsValue, serde_wasm_bindgen::Error> {
+    value.serialize(&serde_wasm_bindgen::Serializer::new().serialize_missing_as_null(true))
+}
diff --git a/crates/pgt_wasm/src/utils.rs b/crates/pgt_wasm/src/utils.rs
new file mode 100644
index 00000000..37928870
--- /dev/null
+++ b/crates/pgt_wasm/src/utils.rs
@@ -0,0 +1,80 @@
+use std::fmt::Display;
+
+use js_sys::Error;
+use wasm_bindgen::prelude::*;
+
+use pgt_console::fmt::HTML;
+use pgt_console::{fmt::Formatter, markup};
+use pgt_diagnostics::serde::Diagnostic;
+use pgt_diagnostics::{DiagnosticExt, LineIndexBuf, PrintDiagnostic, SourceCode};
+
+use super::IDiagnostic;
+
+pub(crate) fn set_panic_hook() {
+    // When the `console_error_panic_hook` feature is enabled, we can call the
+    // `set_panic_hook` function at least once during initialization, and then
+    // we will get better error messages if our code ever panics.
+    //
+    // For more details see
+    // https://github.com/rustwasm/console_error_panic_hook#readme
+    #[cfg(feature = "console_error_panic_hook")]
+    console_error_panic_hook::set_once();
+}
+
+#[wasm_bindgen]
+pub struct DiagnosticPrinter {
+    file_name: String,
+    file_source: SourceCode<String, LineIndexBuf>,
+    buffer: Vec<u8>,
+}
+
+#[wasm_bindgen]
+impl DiagnosticPrinter {
+    #[wasm_bindgen(constructor)]
+    pub fn new(file_name: String, file_source: String) -> Self {
+        let line_starts = LineIndexBuf::from_source_text(&file_source);
+        Self {
+            file_name,
+            file_source: SourceCode {
+                text: file_source,
+                line_starts: Some(line_starts),
+            },
+            buffer: Vec::new(),
+        }
+    }
+
+    pub fn print_simple(&mut self, diagnostic: IDiagnostic) -> Result<(), Error> {
+        self.print(diagnostic, |err| PrintDiagnostic::simple(err))
+    }
+
+    pub fn print_verbose(&mut self, diagnostic: IDiagnostic) -> Result<(), Error> {
+        self.print(diagnostic, |err| PrintDiagnostic::verbose(err))
+    }
+
+    fn print(
+        &mut self,
+        diagnostic: IDiagnostic,
+        printer: fn(&pgt_diagnostics::Error) -> PrintDiagnostic<pgt_diagnostics::Error>,
+    ) -> Result<(), Error> {
+        let diag: Diagnostic =
+            serde_wasm_bindgen::from_value(diagnostic.into()).map_err(into_error)?;
+        let err = diag
+            .with_file_path(&self.file_name)
+            .with_file_source_code(&self.file_source);
+
+        let mut html = HTML::new(&mut self.buffer);
+        Formatter::new(&mut html)
+            .write_markup(markup!({ printer(&err) }))
+            .map_err(into_error)?;
+
+        Ok(())
+    }
+
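+    /// Returns the accumulated HTML for all diagnostics printed so far.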
+    pub fn finish(self) -> Result<String, Error> {
+        String::from_utf8(self.buffer).map_err(into_error)
+    }
+}
+
+pub(crate) fn into_error<E: Display>(err: E) -> Error {
+    Error::new(&err.to_string())
+}
diff --git a/crates/pgt_workspace/Cargo.toml b/crates/pgt_workspace/Cargo.toml
index 33b0b231..4df2c4b6 100644
--- a/crates/pgt_workspace/Cargo.toml
+++ b/crates/pgt_workspace/Cargo.toml
@@ -14,35 +14,38 @@ version              = "0.0.0"
 [dependencies]
 biome_deserialize         = "0.6.0"
 dashmap                   = "5.5.3"
-futures                   = "0.3.31"
 ignore                    = { workspace = true }
 pgt_analyse               = { workspace = true, features = ["serde"] }
 pgt_analyser              = { workspace = true }
-pgt_completions           = { workspace = true }
 pgt_configuration         = { workspace = true }
 pgt_console               = { workspace = true }
 pgt_diagnostics           = { workspace = true }
 pgt_fs                    = { workspace = true, features = ["serde"] }
 pgt_query_ext             = { workspace = true }
-pgt_schema_cache          = { workspace = true }
 pgt_statement_splitter    = { workspace = true }
 pgt_text_size.workspace   = true
-pgt_typecheck             = { workspace = true }
 rustc-hash                = { workspace = true }
 schemars                  = { workspace = true, optional = true }
 serde                     = { workspace = true, features = ["derive"] }
 serde_json                = { workspace = true, features = ["raw_value"] }
 sqlx.workspace            = true
-tokio                     = { workspace = true, features = ["rt", "rt-multi-thread"] }
 tracing                   = { workspace = true, features = ["attributes", "log"] }
 tree-sitter.workspace     = true
 tree_sitter_sql.workspace = true
 
+# these dependencies require a database connection
+futures                   = { version = "0.3.31", optional = true }
+pgt_completions           = { workspace = true, optional = true }
+pgt_schema_cache          = { workspace = true, optional = true }
+pgt_typecheck             = { workspace = true, optional = true }
+tokio                     = { workspace = true, features = ["rt", "rt-multi-thread"], optional = true }
+
 biome_js_factory = { workspace = true, optional = true }
 biome_js_syntax  = { workspace = true, optional = true }
 biome_rowan      = { workspace = true, optional = true }
 
 [features]
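+# everything that requires a live database connection: completions, schema cache and typechecking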
+db-connection = ["dep:pgt_completions", "dep:pgt_schema_cache", "dep:pgt_typecheck", "dep:tokio", "dep:futures"]
 schema = [
   "dep:schemars",
   "dep:biome_rowan",
@@ -52,7 +55,6 @@ schema = [
   "pgt_diagnostics/schema",
   "pgt_fs/schema",
   "pgt_analyse/schema",
-  "pgt_completions/schema",
 ]
 
 [dev-dependencies]
diff --git a/crates/pgt_workspace/src/workspace.rs b/crates/pgt_workspace/src/workspace.rs
index 8b192bf7..3fc13ec1 100644
--- a/crates/pgt_workspace/src/workspace.rs
+++ b/crates/pgt_workspace/src/workspace.rs
@@ -53,6 +53,73 @@ pub struct GetCompletionsParams {
     pub position: TextSize,
 }
 
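+/// Mirror of `pgt_completions::CompletionResult`, kept here so the completions
+/// API stays available without the `db-connection` feature.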
+#[derive(Debug, Default, Serialize, Deserialize)]
+#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
+pub struct CompletionResult {
+    pub(crate) items: Vec<CompletionItem>,
+}
+
+#[cfg(feature = "db-connection")]
+impl IntoIterator for CompletionResult {
+    type Item = CompletionItem;
+    type IntoIter = <Vec<CompletionItem> as IntoIterator>::IntoIter;
+    fn into_iter(self) -> Self::IntoIter {
+        self.items.into_iter()
+    }
+}
+
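+// With `db-connection` enabled, these conversions map the `pgt_completions`
+// types onto the workspace-level mirror types one-to-one.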
+#[cfg(feature = "db-connection")]
+impl From<pgt_completions::CompletionResult> for CompletionResult {
+    fn from(external: pgt_completions::CompletionResult) -> Self {
+        CompletionResult {
+            items: external.items.into_iter().map(Into::into).collect(),
+        }
+    }
+}
+
+#[cfg(feature = "db-connection")]
+impl From<pgt_completions::CompletionItem> for CompletionItem {
+    fn from(external: pgt_completions::CompletionItem) -> Self {
+        CompletionItem {
+            label: external.label,
+            score: external.score,
+            description: external.description,
+            preselected: external.preselected,
+            kind: external.kind.into(),
+        }
+    }
+}
+
+#[cfg(feature = "db-connection")]
+impl From<pgt_completions::CompletionItemKind> for CompletionItemKind {
+    fn from(external: pgt_completions::CompletionItemKind) -> Self {
+        match external {
+            pgt_completions::CompletionItemKind::Table => CompletionItemKind::Table,
+            pgt_completions::CompletionItemKind::Function => CompletionItemKind::Function,
+            pgt_completions::CompletionItemKind::Column => CompletionItemKind::Column,
+        }
+    }
+}
+
+#[derive(Debug, PartialEq, Eq, Serialize, Deserialize)]
+#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
+#[serde(rename_all = "camelCase")]
+pub enum CompletionItemKind {
+    Table,
+    Function,
+    Column,
+}
+
+#[derive(Debug, Serialize, Deserialize)]
+#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
+pub struct CompletionItem {
+    pub label: String,
+    pub(crate) score: i32,
+    pub description: String,
+    pub preselected: bool,
+    pub kind: CompletionItemKind,
+}
+
 #[derive(Debug, serde::Serialize, serde::Deserialize)]
 #[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
 pub struct PullDiagnosticsResult {
@@ -118,7 +185,7 @@ pub trait Workspace: Send + Sync + RefUnwindSafe {
     fn get_completions(
         &self,
         params: GetCompletionsParams,
-    ) -> Result<pgt_completions::CompletionResult, WorkspaceError>;
+    ) -> Result<CompletionResult, WorkspaceError>;
 
     /// Update the global settings for this workspace
     fn update_settings(&self, params: UpdateSettingsParams) -> Result<(), WorkspaceError>;
diff --git a/crates/pgt_workspace/src/workspace/client.rs b/crates/pgt_workspace/src/workspace/client.rs
index a6955c1e..13a9291c 100644
--- a/crates/pgt_workspace/src/workspace/client.rs
+++ b/crates/pgt_workspace/src/workspace/client.rs
@@ -127,7 +127,7 @@ where
     fn get_completions(
         &self,
         params: super::GetCompletionsParams,
-    ) -> Result<pgt_completions::CompletionResult, WorkspaceError> {
+    ) -> Result<super::CompletionResult, WorkspaceError> {
         self.request("pgt/get_completions", params)
     }
 }
diff --git a/crates/pgt_workspace/src/workspace/server.rs b/crates/pgt_workspace/src/workspace/server.rs
index 942ae34d..e56b9cbb 100644
--- a/crates/pgt_workspace/src/workspace/server.rs
+++ b/crates/pgt_workspace/src/workspace/server.rs
@@ -1,22 +1,20 @@
 use std::{fs, panic::RefUnwindSafe, path::Path, sync::RwLock};
 
 use analyser::AnalyserVisitorBuilder;
-use async_helper::run_async;
 use change::StatementChange;
 use dashmap::DashMap;
-use db_connection::DbConnection;
 use document::{Document, Statement};
-use futures::{StreamExt, stream};
 use pg_query::PgQueryStore;
 use pgt_analyse::{AnalyserOptions, AnalysisFilter};
 use pgt_analyser::{Analyser, AnalyserConfig, AnalyserContext};
 use pgt_diagnostics::{Diagnostic, DiagnosticExt, Severity, serde::Diagnostic as SDiagnostic};
 use pgt_fs::{ConfigName, PgTPath};
-use pgt_typecheck::TypecheckParams;
-use schema_cache_manager::SchemaCacheManager;
 use tracing::info;
 use tree_sitter::TreeSitterStore;
 
+#[cfg(feature = "db-connection")]
+use futures::StreamExt;
+
 use crate::{
     WorkspaceError,
     configuration::to_analyser_rules,
@@ -25,26 +23,31 @@ use crate::{
 };
 
 use super::{
-    GetFileContentParams, IsPathIgnoredParams, OpenFileParams, ServerInfo, UpdateSettingsParams,
-    Workspace,
+    CompletionResult, GetFileContentParams, IsPathIgnoredParams, OpenFileParams, ServerInfo,
+    UpdateSettingsParams, Workspace,
 };
 
 mod analyser;
-mod async_helper;
 mod change;
-mod db_connection;
 mod document;
 mod migration;
 mod pg_query;
-mod schema_cache_manager;
 mod tree_sitter;
 
+#[cfg(feature = "db-connection")]
+mod async_helper;
+#[cfg(feature = "db-connection")]
+mod db_connection;
+#[cfg(feature = "db-connection")]
+mod schema_cache_manager;
+
 pub(super) struct WorkspaceServer {
     /// global settings object for this workspace
     settings: RwLock<Settings>,
 
+    #[cfg(feature = "db-connection")]
     /// Stores the schema cache for this workspace
-    schema_cache: SchemaCacheManager,
+    schema_cache: schema_cache_manager::SchemaCacheManager,
 
     /// Stores the document (text content + version number) associated with a URL
     documents: DashMap<PgTPath, Document>,
@@ -52,7 +55,8 @@ pub(super) struct WorkspaceServer {
     tree_sitter: TreeSitterStore,
     pg_query: PgQueryStore,
 
-    connection: RwLock<DbConnection>,
+    #[cfg(feature = "db-connection")]
+    connection: RwLock<db_connection::DbConnection>,
 }
 
 /// The `Workspace` object is long-lived, so we want it to be able to cross
@@ -75,8 +79,10 @@ impl WorkspaceServer {
             documents: DashMap::default(),
             tree_sitter: TreeSitterStore::new(),
             pg_query: PgQueryStore::new(),
-            schema_cache: SchemaCacheManager::default(),
+            #[cfg(feature = "db-connection")]
             connection: RwLock::default(),
+            #[cfg(feature = "db-connection")]
+            schema_cache: schema_cache_manager::SchemaCacheManager::default(),
         }
     }
 
@@ -157,11 +163,14 @@ impl Workspace for WorkspaceServer {
 
         tracing::info!("Updated settings in workspace");
 
-        if !params.skip_db {
-            self.connection
-                .write()
-                .unwrap()
-                .set_conn_settings(&self.settings().as_ref().db);
+        #[cfg(feature = "db-connection")]
+        {
+            if !params.skip_db {
+                self.connection
+                    .write()
+                    .unwrap()
+                    .set_conn_settings(&self.settings().as_ref().db);
+            }
         }
 
         tracing::info!("Updated Db connection settings");
@@ -289,55 +298,58 @@ impl Workspace for WorkspaceServer {
 
         let mut diagnostics: Vec<SDiagnostic> = doc.diagnostics().to_vec();
 
-        if let Some(pool) = self
-            .connection
-            .read()
-            .expect("DbConnection RwLock panicked")
-            .get_pool()
+        #[cfg(feature = "db-connection")]
         {
-            let typecheck_params: Vec<_> = doc
-                .iter_statements_with_text_and_range()
-                .map(|(stmt, range, text)| {
-                    let ast = self.pg_query.get_ast(&stmt);
-                    let tree = self.tree_sitter.get_parse_tree(&stmt);
-                    (text.to_string(), ast, tree, *range)
-                })
-                .collect();
-
-            // run diagnostics for each statement in parallel if its mostly i/o work
-            let path_clone = params.path.clone();
-            let async_results = run_async(async move {
-                stream::iter(typecheck_params)
-                    .map(|(text, ast, tree, range)| {
-                        let pool = pool.clone();
-                        let path = path_clone.clone();
-                        async move {
-                            if let Some(ast) = ast {
-                                pgt_typecheck::check_sql(TypecheckParams {
-                                    conn: &pool,
-                                    sql: &text,
-                                    ast: &ast,
-                                    tree: tree.as_deref(),
-                                })
-                                .await
-                                .map(|d| {
-                                    let r = d.location().span.map(|span| span + range.start());
-
-                                    d.with_file_path(path.as_path().display().to_string())
-                                        .with_file_span(r.unwrap_or(range))
-                                })
-                            } else {
-                                None
-                            }
-                        }
+            if let Some(pool) = self
+                .connection
+                .read()
+                .expect("DbConnection RwLock panicked")
+                .get_pool()
+            {
+                let typecheck_params: Vec<_> = doc
+                    .iter_statements_with_text_and_range()
+                    .map(|(stmt, range, text)| {
+                        let ast = self.pg_query.get_ast(&stmt);
+                        let tree = self.tree_sitter.get_parse_tree(&stmt);
+                        (text.to_string(), ast, tree, *range)
                     })
-                    .buffer_unordered(10)
-                    .collect::<Vec<_>>()
-                    .await
-            })?;
-
-            for result in async_results.into_iter().flatten() {
-                diagnostics.push(SDiagnostic::new(result));
+                    .collect();
+
+                // run diagnostics for each statement in parallel if it's mostly I/O work
+                let path_clone = params.path.clone();
+                let async_results = async_helper::run_async(async move {
+                    futures::stream::iter(typecheck_params)
+                        .map(|(text, ast, tree, range)| {
+                            let pool = pool.clone();
+                            let path = path_clone.clone();
+                            async move {
+                                if let Some(ast) = ast {
+                                    pgt_typecheck::check_sql(pgt_typecheck::TypecheckParams {
+                                        conn: &pool,
+                                        sql: &text,
+                                        ast: &ast,
+                                        tree: tree.as_deref(),
+                                    })
+                                    .await
+                                    .map(|d| {
+                                        let r = d.location().span.map(|span| span + range.start());
+
+                                        d.with_file_path(path.as_path().display().to_string())
+                                            .with_file_span(r.unwrap_or(range))
+                                    })
+                                } else {
+                                    None
+                                }
+                            }
+                        })
+                        .buffer_unordered(10)
+                        .collect::<Vec<_>>()
+                        .await
+                })?;
+
+                for result in async_results.into_iter().flatten() {
+                    diagnostics.push(SDiagnostic::new(result));
+                }
             }
         }
 
@@ -398,63 +410,70 @@ impl Workspace for WorkspaceServer {
     fn get_completions(
         &self,
         params: super::GetCompletionsParams,
-    ) -> Result<pgt_completions::CompletionResult, WorkspaceError> {
-        tracing::debug!(
-            "Getting completions for file {:?} at position {:?}",
-            &params.path,
-            &params.position
-        );
-
-        let pool = match self.connection.read().unwrap().get_pool() {
-            Some(pool) => pool,
-            None => return Ok(pgt_completions::CompletionResult::default()),
-        };
-
-        let doc = self
-            .documents
-            .get(&params.path)
-            .ok_or(WorkspaceError::not_found())?;
-
-        tracing::debug!(
-            "Found the document. Looking for statement in file {:?} at position: {:?}",
-            &params.path,
-            &params.position
-        );
-
-        let (statement, stmt_range, text) = match doc
-            .iter_statements_with_text_and_range()
-            .find(|(_, r, _)| r.contains(params.position))
+    ) -> Result<CompletionResult, WorkspaceError> {
+        #[cfg(not(feature = "db-connection"))]
         {
-            Some(s) => s,
-            None => return Ok(pgt_completions::CompletionResult::default()),
-        };
-
-        // `offset` is the position in the document,
-        // but we need the position within the *statement*.
-        let position = params.position - stmt_range.start();
-
-        let tree = self.tree_sitter.get_parse_tree(&statement);
-
-        tracing::debug!(
-            "Found the statement. We're looking for position {:?}. Statement Range {:?} to {:?}. Statement: {}",
-            position,
-            stmt_range.start(),
-            stmt_range.end(),
-            text
-        );
-
-        let schema_cache = self.schema_cache.load(pool)?;
-
-        tracing::debug!("Loaded schema cache for completions");
-
-        let result = pgt_completions::complete(pgt_completions::CompletionParams {
-            position,
-            schema: schema_cache.as_ref(),
-            tree: tree.as_deref(),
-            text: text.to_string(),
-        });
-
-        Ok(result)
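+            // Without a database connection there is no schema to complete against,
+            // so always return an empty result.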
+            return Ok(CompletionResult::default());
+        }
+        #[cfg(feature = "db-connection")]
+        {
+            tracing::debug!(
+                "Getting completions for file {:?} at position {:?}",
+                &params.path,
+                &params.position
+            );
+
+            let pool = match self.connection.read().unwrap().get_pool() {
+                Some(pool) => pool,
+                None => return Ok(CompletionResult::default()),
+            };
+
+            let doc = self
+                .documents
+                .get(&params.path)
+                .ok_or(WorkspaceError::not_found())?;
+
+            tracing::debug!(
+                "Found the document. Looking for statement in file {:?} at position: {:?}",
+                &params.path,
+                &params.position
+            );
+
+            let (statement, stmt_range, text) = match doc
+                .iter_statements_with_text_and_range()
+                .find(|(_, r, _)| r.contains(params.position))
+            {
+                Some(s) => s,
+                None => return Ok(CompletionResult::default()),
+            };
+
+            // `offset` is the position in the document,
+            // but we need the position within the *statement*.
+            let position = params.position - stmt_range.start();
+
+            let tree = self.tree_sitter.get_parse_tree(&statement);
+
+            tracing::debug!(
+                "Found the statement. We're looking for position {:?}. Statement Range {:?} to {:?}. Statement: {}",
+                position,
+                stmt_range.start(),
+                stmt_range.end(),
+                text
+            );
+
+            let schema_cache = self.schema_cache.load(pool)?;
+
+            tracing::debug!("Loaded schema cache for completions");
+
+            let result = pgt_completions::complete(pgt_completions::CompletionParams {
+                position,
+                schema: schema_cache.as_ref(),
+                tree: tree.as_deref(),
+                text: text.to_string(),
+            });
+
+            Ok(result.into())
+        }
     }
 }
 
diff --git a/docs/codegen/Cargo.toml b/docs/codegen/Cargo.toml
index 96092a7a..30080391 100644
--- a/docs/codegen/Cargo.toml
+++ b/docs/codegen/Cargo.toml
@@ -27,7 +27,7 @@ pgt_analyse = { workspace = true }
 pgt_analyser = { workspace = true }
 pgt_diagnostics = { workspace = true }
 pgt_query_ext = { workspace = true }
-pgt_workspace = { workspace = true }
+pgt_workspace = { workspace = true, features = ["db-connection"] }
 pgt_statement_splitter = { workspace = true }
 pgt_console = { workspace = true }
 biome_string_case = { workspace = true }
diff --git a/justfile b/justfile
index 6a008560..b4418fd2 100644
--- a/justfile
+++ b/justfile
@@ -9,7 +9,7 @@ alias t := test
 # Installs the tools needed to develop
 install-tools:
 	cargo install cargo-binstall
-	cargo binstall cargo-insta taplo-cli
+	cargo binstall cargo-insta taplo-cli wasm-pack wasm-tools
 	cargo binstall --git "https://github.com/astral-sh/uv" uv
 	bun install
 
diff --git a/packages/@postgrestools/wasm-bundler/package.json b/packages/@postgrestools/wasm-bundler/package.json
new file mode 100644
index 00000000..c72ed29b
--- /dev/null
+++ b/packages/@postgrestools/wasm-bundler/package.json
@@ -0,0 +1,26 @@
+{
+	"name": "@postgrestools/wasm-bundler",
+	"collaborators": ["Supabase Community"],
+	"description": "WebAssembly bindings to the Postgres Tools workspace API",
+	"version": "0.1.2",
+	"license": "MIT",
+	"repository": {
+		"type": "git",
+		"url": "git+https://github.com/supabase-community/postgres-language-server.git",
+		"directory": "packages/@postgrestools/wasm-bundler"
+	},
+	"files": [
+		"postgrestools_wasm_bg.wasm",
+		"postgrestools_wasm.js",
+		"postgrestools_wasm_bg.js",
+		"postgrestools_wasm.d.ts"
+	],
+	"module": "postgrestools_wasm.js",
+	"homepage": "https://pgtools.dev/",
+	"publishConfig": {
+		"provenance": true
+	},
+	"types": "postgrestools_wasm.d.ts",
+	"sideEffects": ["./postgrestools_wasm.js", "./snippets/*"],
+	"keywords": ["parser", "linter", "wasm"]
+}
diff --git a/packages/@postgrestools/wasm-node/package.json b/packages/@postgrestools/wasm-node/package.json
new file mode 100644
index 00000000..982462c6
--- /dev/null
+++ b/packages/@postgrestools/wasm-node/package.json
@@ -0,0 +1,21 @@
+{
+	"name": "@postgrestools/wasm-nodejs",
+	"collaborators": ["Supabase Community"],
+	"description": "WebAssembly bindings to the Postgres Tools workspace API",
+	"version": "0.1.2",
+	"license": "MIT",
+	"repository": {
+		"type": "git",
+		"url": "git+https://github.com/supabase-community/postgres-language-server.git",
+		"directory": "packages/@postgrestools/wasm-nodejs"
+	},
+	"files": [
+		"postgrestools_wasm_bg.wasm",
+		"postgrestools_wasm.js",
+		"postgrestools_wasm.d.ts"
+	],
+	"main": "postgrestools_wasm.js",
+	"homepage": "https://pgtools.dev/",
+	"types": "postgrestools_wasm.d.ts",
+	"keywords": ["parser", "linter", "wasm"]
+}
diff --git a/packages/@postgrestools/wasm-web/package.json b/packages/@postgrestools/wasm-web/package.json
new file mode 100644
index 00000000..7fefeba1
--- /dev/null
+++ b/packages/@postgrestools/wasm-web/package.json
@@ -0,0 +1,25 @@
+{
+	"name": "@postgrestools/wasm-web",
+	"collaborators": ["Supabase Community"],
+	"description": "WebAssembly bindings to the Postgres Tools workspace API",
+	"version": "0.1.2",
+	"license": "MIT",
+	"repository": {
+		"type": "git",
+		"url": "git+https://github.com/supabase-community/postgres-language-server.git",
+		"directory": "packages/@postgrestools/wasm-web"
+	},
+	"files": [
+		"postgrestools_wasm_bg.wasm",
+		"postgrestools_wasm.js",
+		"postgrestools_wasm.d.ts"
+	],
+	"module": "postgrestools_wasm.js",
+	"homepage": "https://pgtools.dev/",
+	"publishConfig": {
+		"provenance": true
+	},
+	"types": "postgrestools_wasm.d.ts",
+	"sideEffects": ["./snippets/*"],
+	"keywords": ["parser", "linter", "wasm"]
+}
diff --git a/xtask/codegen/Cargo.toml b/xtask/codegen/Cargo.toml
index b5497b2c..ba7ab5cf 100644
--- a/xtask/codegen/Cargo.toml
+++ b/xtask/codegen/Cargo.toml
@@ -14,7 +14,7 @@ biome_string_case  = { workspace = true }
 bpaf               = { workspace = true, features = ["derive"] }
 pgt_analyse        = { workspace = true }
 pgt_analyser       = { workspace = true }
-pgt_workspace      = { workspace = true, features = ["schema"] }
+pgt_workspace      = { workspace = true, features = ["schema", "db-connection"] }
 proc-macro2        = { workspace = true, features = ["span-locations"] }
 pulldown-cmark     = { version = "0.12.2" }
 quote              = "1.0.36"
diff --git a/xtask/rules_check/Cargo.toml b/xtask/rules_check/Cargo.toml
index 3f0198d1..846e3cf0 100644
--- a/xtask/rules_check/Cargo.toml
+++ b/xtask/rules_check/Cargo.toml
@@ -13,5 +13,5 @@ pgt_console            = { workspace = true }
 pgt_diagnostics        = { workspace = true }
 pgt_query_ext          = { workspace = true }
 pgt_statement_splitter = { workspace = true }
-pgt_workspace          = { workspace = true }
+pgt_workspace          = { workspace = true, features = ["db-connection"] }
 pulldown-cmark         = "0.12.2"