3 changes: 3 additions & 0 deletions .github/scripts/generate-benchmark-matrix.js
@@ -21,6 +21,7 @@ const ALL_COMPONENTS = [
"codegen",
"formatter",
"linter",
"language_server",
];

// Files that when changed affect all benchmarks
@@ -59,6 +60,8 @@ function checkGlobalChanges(changedFiles) {
function getFeatureForComponent(component) {
if (component === "linter") {
return "linter";
} else if (component === "language_server") {
return "language_server";
}
return "compiler";
}
3 changes: 3 additions & 0 deletions .github/workflows/benchmark.yml
@@ -80,6 +80,9 @@ jobs:
if [ "${{ matrix.component }}" = "linter" ]; then
cargo build --release -p oxc_benchmark --bench linter \
--no-default-features --features ${{ matrix.feature }} --features codspeed
elif [ "${{ matrix.component }}" = "language_server" ]; then
cargo build --release -p oxc_benchmark --bench language_server \
--no-default-features --features ${{ matrix.feature }} --features codspeed
else
cargo build --release -p oxc_benchmark \
--bench lexer --bench parser --bench transformer --bench semantic \
5 changes: 5 additions & 0 deletions Cargo.lock

Some generated files are not rendered by default.

1 change: 1 addition & 0 deletions crates/oxc_language_server/Cargo.toml
@@ -63,3 +63,4 @@ formatter = [
#
"dep:ignore",
]
benchmark = ["linter"]
11 changes: 10 additions & 1 deletion crates/oxc_language_server/src/lib.rs
@@ -1,5 +1,5 @@
use rustc_hash::FxBuildHasher;
use tower_lsp_server::{LspService, Server, ls_types::ServerInfo};
use tower_lsp_server::{Client, LspService, Server, ls_types::ServerInfo};

mod backend;
mod capabilities;
@@ -24,6 +24,15 @@ pub use crate::tool::{Tool, ToolBuilder, ToolRestartChanges, ToolShutdownChanges

pub type ConcurrentHashMap<K, V> = papaya::HashMap<K, V, FxBuildHasher>;

#[cfg(feature = "benchmark")]
pub fn build_backend(
client: Client,
server_name: String,
server_version: String,
tools: Vec<Box<dyn ToolBuilder>>,
) -> Backend {
Backend::new(client, ServerInfo { name: server_name, version: Some(server_version) }, tools)
}
/// Run the language server
pub async fn run_server(
server_name: String,
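The `benchmark` feature exposes `build_backend` so the bench crate can construct the `Backend` directly and drive it through `LspService` over an arbitrary transport, instead of going through `run_server`, which owns the stdio wiring. A minimal sketch of the intended call site, assuming only what the diff shows (`build_backend`, `ServerLinterBuilder`, and the `tower_lsp_server` service/server types); the benchmark below does the same thing over an in-memory `tokio::io::duplex` pair:

use oxc_language_server::{ServerLinterBuilder, build_backend};
use tower_lsp_server::{LspService, Server};

#[tokio::main]
async fn main() {
    // `build_backend` is only compiled with the `benchmark` feature; in production,
    // `run_server` performs this wiring internally.
    let (service, socket) = LspService::build(|client| {
        build_backend(
            client,
            "benchmark".to_string(),
            "0.0.0".to_string(),
            vec![Box::new(ServerLinterBuilder)],
        )
    })
    .finish();

    // Serve over any AsyncRead/AsyncWrite pair: stdio here for illustration,
    // an in-memory duplex stream in the benchmark.
    Server::new(tokio::io::stdin(), tokio::io::stdout(), socket).serve(service).await;
}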
27 changes: 22 additions & 5 deletions tasks/benchmark/Cargo.toml
@@ -50,6 +50,10 @@ harness = false
name = "minifier"
harness = false

[[bench]]
name = "language_server"
harness = false

# Only run in CI
[[bench]]
name = "parser_napi"
@@ -65,6 +69,7 @@ oxc_ast_visit = { workspace = true, optional = true, features = ["serialize"] }
oxc_codegen = { workspace = true, optional = true }
oxc_formatter = { workspace = true, optional = true }
oxc_isolated_declarations = { workspace = true, optional = true }
oxc_language_server = { workspace = true, features = ["benchmark"], optional = true }
oxc_linter = { workspace = true, optional = true }
oxc_mangler = { workspace = true, optional = true }
oxc_minifier = { workspace = true, optional = true }
@@ -74,17 +79,21 @@ oxc_span = { workspace = true, optional = true, features = ["schemars", "seriali
oxc_tasks_common = { workspace = true, optional = true }
oxc_transformer = { workspace = true, optional = true }

criterion2 = { workspace = true }
criterion2 = { workspace = true, features = ["default", "async_tokio"] }

# For NAPI & language server benchmark
serde = { workspace = true, optional = true }
serde_json = { workspace = true, optional = true }

# Only for lexer benchmark
cow-utils = { workspace = true, optional = true }

# Only for NAPI benchmark
serde = { workspace = true, optional = true }
serde_json = { workspace = true, optional = true }
# Only for language server benchmark
tokio = { workspace = true, features = ["io-util"], optional = true }
tower-lsp-server = { workspace = true, features = ["proposed"], optional = true }

[features]
default = ["compiler", "linter"]
default = ["compiler", "linter", "language_server"]
codspeed = ["criterion2/codspeed"]
codspeed_napi = ["criterion2/codspeed", "dep:serde", "dep:serde_json"]

@@ -116,6 +125,14 @@ linter = [
"dep:oxc_tasks_common",
"oxc_semantic/cfg",
]
language_server = [
"dep:oxc_language_server",
"dep:oxc_tasks_common",
"dep:serde",
"dep:serde_json",
"dep:tokio",
"dep:tower-lsp-server",
]

[dev-dependencies]
rustc-hash = { workspace = true }
176 changes: 176 additions & 0 deletions tasks/benchmark/benches/language_server.rs
@@ -0,0 +1,176 @@
use std::collections::VecDeque;

use oxc_benchmark::{BenchmarkId, Criterion, criterion_group, criterion_main};
use oxc_language_server::{ServerLinterBuilder, build_backend};
use oxc_tasks_common::TestFiles;
use serde_json::json;
use tokio::io::{AsyncReadExt, AsyncWriteExt, DuplexStream};
use tower_lsp_server::{
Client, LanguageServer, LspService, Server,
jsonrpc::{Request, Response},
ls_types::{
DidOpenTextDocumentParams, InitializeParams, InitializedParams, TextDocumentItem,
WorkspaceFolder,
},
};

/// Creates an `initialize` request using a single workspace folder at `WORKSPACE`.
///
/// # Panics
/// - If the workspace URI is not a valid URI.
fn initialize_request() -> Request {
let params = InitializeParams {
workspace_folders: Some(vec![WorkspaceFolder {
uri: WORKSPACE.parse().unwrap(),
name: "workspace".to_string(),
}]),
..Default::default()
};

Request::build("initialize").params(json!(params)).id(1).finish()
}

fn initialized_notification() -> Request {
let params = InitializedParams {};

Request::build("initialized").params(json!(params)).finish()
}

pub fn shutdown_request(id: i64) -> Request {
Request::build("shutdown").id(id).finish()
}

/// Creates a didOpen notification for the given URI and text.
///
/// # Panics
/// - If the URI is not a valid URI.
fn did_open(uri: &str, text: &str) -> Request {
let params = DidOpenTextDocumentParams {
text_document: TextDocumentItem {
uri: uri.parse().unwrap(),
language_id: "plaintext".to_string(),
version: 1,
text: text.to_string(),
},
};

Request::build("textDocument/didOpen").params(json!(params)).finish()
}

const WORKSPACE: &str = "file:///path/to/workspace";

// A test server that can send requests and receive responses.
// Copied from <https://github.com/veryl-lang/veryl/blob/888d83abaa58ca5a7ffef501a1c557e48c750b92/crates/languageserver/src/tests.rs>
struct TestServer {
req_stream: DuplexStream,
res_stream: DuplexStream,
responses: VecDeque<String>,
}

impl TestServer {
fn new<F, S>(init: F) -> Self
where
F: FnOnce(Client) -> S,
S: LanguageServer,
{
let (req_client, req_server) = tokio::io::duplex(1024);
let (res_server, res_client) = tokio::io::duplex(1024);

let (service, socket) = LspService::build(init).finish();

tokio::spawn(Server::new(req_server, res_server, socket).serve(service));

Self { req_stream: req_client, res_stream: res_client, responses: VecDeque::new() }
}

fn encode(payload: &str) -> String {
format!("Content-Length: {}\r\n\r\n{}", payload.len(), payload)
}

fn decode(text: &str) -> Vec<String> {
let mut ret = Vec::new();
let mut temp = text;

while !temp.is_empty() {
let p = temp.find("\r\n\r\n").unwrap();
let (header, body) = temp.split_at(p + 4);
let len =
header.strip_prefix("Content-Length: ").unwrap().strip_suffix("\r\n\r\n").unwrap();
let len: usize = len.parse().unwrap();
let (body, rest) = body.split_at(len);
ret.push(body.to_string());
temp = rest;
}

ret
}

/// Sends a request to the server.
///
/// # Panics
/// - If the stream cannot be written to.
pub async fn send_request(&mut self, req: Request) {
let req = serde_json::to_string(&req).unwrap();
let req = Self::encode(&req);
self.req_stream.write_all(req.as_bytes()).await.unwrap();
}

/// Receives a response from the server.
///
/// # Panics
/// - If the stream cannot be read.
/// - If the response cannot be deserialized.
pub async fn recv_response(&mut self) -> Response {
if self.responses.is_empty() {
let mut buf = vec![0; 1024];
let n = self.res_stream.read(&mut buf).await.unwrap();
let ret = String::from_utf8(buf[..n].to_vec()).unwrap();
for x in Self::decode(&ret) {
self.responses.push_front(x);
}
}
let res = self.responses.pop_back().unwrap();
serde_json::from_str(&res).unwrap()
}
}

fn bench_linter(criterion: &mut Criterion) {
let mut group = criterion.benchmark_group("language_server");

for file in TestFiles::minimal().files() {
let id = BenchmarkId::from_parameter(format!("{}-linter", file.file_name));
let source_text = &file.source_text;
let uri = format!("{WORKSPACE}/{}", file.file_name);

group.bench_function(id, |b| {
b.to_async(tokio::runtime::Runtime::new().unwrap()).iter(|| async {
let mut server = TestServer::new(|client| {
build_backend(
client,
"benchmark".to_string(),
"0.0.0".to_string(),
vec![Box::new(ServerLinterBuilder)],
)
});
// Send initialize request
server.send_request(initialize_request()).await;
let _ = server.recv_response().await;

// Send initialized notification
server.send_request(initialized_notification()).await;

// Send didOpen notification, expecting the linter to run
server.send_request(did_open(&uri, source_text)).await;

// Shutdown the server
server.send_request(shutdown_request(2)).await;
let _ = server.recv_response().await;
});
});
}
group.finish();
}

criterion_group!(language_server, bench_linter);
criterion_main!(language_server);
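Since `language_server` is now part of the crate's default features, the new bench should be runnable locally with `cargo bench -p oxc_benchmark --bench language_server`; CI builds it separately with `--no-default-features --features language_server --features codspeed`, as in the workflow change above.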