
Commit 397b391

Merge branch 'master' into fix/empty_labels_check
2 parents: a400919 + 15e3dc0


42 files changed, +5784 -587 lines

.github/workflows/node-clickhouse.js.yml

+1 -1

@@ -26,7 +26,7 @@ jobs:
     runs-on: ubuntu-latest
     strategy:
       matrix:
-        node-version: [18, 16, 20]
+        node-version: [18, 20, 22]
         # See supported Node.js release schedule at https://nodejs.org/en/about/releases/
     services:
       clickhouse:

.gitignore

+3

@@ -9,3 +9,6 @@ node_modules
 /wasm_parts/vendor/
 /wasm_parts/main.wasm
 /wasm_parts/wasm_parts.iml
+/test/qryn_test_env/clickhouse/_data/
+/test/qryn_test_env/grafana/_data/
+/test/qryn_test_cluster_env/grafana/_data/

Dockerfile

+1 -1

@@ -1,5 +1,5 @@
 # qryn
-FROM node:20.10-slim
+FROM node:20.17-slim
 
 COPY . /app
 WORKDIR /app

lib/db/maintain/scripts.js

+1 -1

@@ -198,7 +198,7 @@ module.exports.traces = [
 ]
 
 module.exports.overall_dist = [
-  `CREATE TABLE {{DB}}.metrics_15s_dist {{{OnCluster}}} (
+  `CREATE TABLE IF NOT EXISTS {{DB}}.metrics_15s_dist {{{OnCluster}}} (
     \`fingerprint\` UInt64,
     \`timestamp_ns\` Int64 CODEC(DoubleDelta),
     \`last\` AggregateFunction(argMax, Float64, Int64),
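
Note: CREATE TABLE IF NOT EXISTS makes this DDL idempotent: if metrics_15s_dist already exists, the statement is a no-op instead of an error, so the maintenance script can be re-applied safely to an existing cluster.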

lib/handlers/push.js

+2 -2

@@ -71,7 +71,7 @@ function processStream (stream, labels, bulkLabels, bulk, toJSON, fingerPrint) {
       values.push([
         finger,
         ts,
-        (typeof entry.value === 'undefined') ? null : entry.value,
+        (typeof entry.value !== 'number') ? 0 : entry.value,
         entry.line || '',
         type === 3 ? bothType : type
       ])
@@ -98,7 +98,7 @@ function processStream (stream, labels, bulkLabels, bulk, toJSON, fingerPrint) {
       values.push([
         finger,
         BigInt(value[0]),
-        (typeof value[2] === 'undefined') ? null : value[2],
+        (typeof value[2] !== 'number') ? 0 : value[2],
         value[1] || '',
         type === 3 ? bothType : type
       ])
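
Illustration only (not part of the commit): the new check coerces any non-numeric value (undefined, null, strings) to 0, whereas the old check only mapped undefined to null and passed everything else through. A minimal sketch with a hypothetical coerceValue helper:

  const coerceValue = (v) => (typeof v !== 'number') ? 0 : v   // new behaviour

  coerceValue(undefined)  // 0    (previously null)
  coerceValue(null)       // 0    (previously passed through as null)
  coerceValue('1.5')      // 0    (previously passed through as a string)
  coerceValue(1.5)        // 1.5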

package-lock.json

+3 -3

(Generated file: diff not rendered.)

parser/registry/common.js

+1 -1

@@ -445,7 +445,7 @@ module.exports.preJoinLabels = (token, query, dist) => {
   dist = dist || ''
   const timeSeriesReq = new Sql.Select()
     .select('fingerprint', 'labels')
-    .from([`${DATABASE_NAME()}.time_series${dist}`, 'time_series'])
+    .from([`${DATABASE_NAME()}.time_series`, 'time_series'])
     .where(new Sql.And(
       new Sql.In('time_series.fingerprint', 'in', inRightSide),
       Sql.Gte(new Sql.Raw('date'), sqlFrom),

parser/registry/smart_optimizations/optimization_v3_2.js

+9 -3

@@ -1,4 +1,4 @@
-const { getDuration, preJoinLabels, dist } = require('../common')
+const { getDuration, preJoinLabels, dist, sharedParamNames } = require('../common')
 const reg = require('./log_range_agg_reg_v3_2')
 const Sql = require('@cloki/clickhouse-sql')
 const { DATABASE_NAME, checkVersion } = require('../../../lib/utils')
@@ -44,24 +44,30 @@ function isLogPipeline (token) {
  */
 module.exports.apply = (token, fromNS, toNS, stepNS) => {
   fromNS = Math.floor(fromNS / 15000000000) * 15000000000
+  const fromParam = new Sql.Parameter(sharedParamNames.from)
+  const toParam = new Sql.Parameter(sharedParamNames.to)
   const tsClause = toNS
     ? Sql.between('samples.timestamp_ns', fromNS, toNS)
     : Sql.Gt('samples.timestamp_ns', fromNS)
   let q = (new Sql.Select())
     .select(['samples.fingerprint', 'fingerprint'])
     .from([`${DATABASE_NAME()}.metrics_15s${_dist}`, 'samples'])
     .where(tsClause)
+    .addParam(fromParam)
+    .addParam(toParam)
+  fromParam.set(fromNS)
+  toParam.set(toNS)
 
   q.ctx = {
     step: stepNS / 1000000000,
     inline: !!clusterName
   }
 
-  preJoinLabels(token, q, dist)
-
   for (const streamSelectorRule of token.Children('log_stream_selector_rule')) {
     q = streamSelectorReg[streamSelectorRule.Child('operator').value](streamSelectorRule, q)
   }
+  preJoinLabels(token, q, dist)
+  q = q.groupBy('labels')
 
   const lra = token.Child('log_range_aggregation')
   q = reg[lra.Child('log_range_aggregation_fn').value](lra, q)
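
A standalone sketch of the parameter pattern introduced above, assuming only the @cloki/clickhouse-sql calls visible in this hunk (Sql.Parameter, Select#addParam, Parameter#set); the 'from'/'to' names stand in for sharedParamNames, and the table name and bound values are placeholders:

  const Sql = require('@cloki/clickhouse-sql')

  // declare named parameters, register them on the query, then bind values
  const fromParam = new Sql.Parameter('from')
  const toParam = new Sql.Parameter('to')
  const q = (new Sql.Select())
    .select(['samples.fingerprint', 'fingerprint'])
    .from(['qryn.metrics_15s', 'samples'])
    .addParam(fromParam)
    .addParam(toParam)
  fromParam.set(0)            // example nanosecond bounds
  toParam.set(15000000000)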

promql/index.js

+3 -1

@@ -60,7 +60,9 @@ module.exports.series = async (query, fromMs, toMs) => {
     const data = await rawRequest(req.toString() + ' FORMAT JSON',
       null,
       DATABASE_NAME())
-    return data.data.data.map(l => JSON.parse(l.labels))
+    return data.data.data.map(l =>
+      Object.fromEntries(Object.entries(JSON.parse(l.labels)).filter(e => e[1]))
+    )
   } catch (e) {
     if (e instanceof prometheus.WasmError) {
       throw new PSQLError(e.message)
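
Presumably the change the fix/empty_labels_check branch name refers to: label keys whose value is empty (falsy) are now dropped from each series before it is returned. An illustration with made-up data:

  const labels = { __name__: 'up', job: 'node', instance: '' }
  const cleaned = Object.fromEntries(Object.entries(labels).filter(e => e[1]))
  // cleaned => { __name__: 'up', job: 'node' }   (the empty instance label is removed)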

pyroscope/pprof-bin/Cargo.toml

+6

@@ -27,9 +27,15 @@ flate2 = "1.0"
 # code size when deploying.
 console_error_panic_hook = { version = "0.1.7", optional = true }
 base64 = "0.22.1"
+memchr = "2.7.4"
 
 [dev-dependencies]
 wasm-bindgen-test = "0.3.34"
+criterion = { version = "0.5.1", features = ["html_reports"] }
+
+[[bench]]
+name = "my_benchmark"
+harness = false
 
 [profile.release]
 # Tell `rustc` to optimize for small code size.
(New benchmark file; filename not shown in this extract, presumably pyroscope/pprof-bin/benches/my_benchmark.rs given the [[bench]] entry above.)

+18

@@ -0,0 +1,18 @@
+use pprof_bin::merge_prof;
+use pprof_bin::utest::get_test_pprof_data;
+use criterion::{black_box, criterion_group, criterion_main, Criterion};
+
+fn merge_bench(pprofs: &Vec<Vec<u8>>) {
+
+    for pprof in pprofs {
+        merge_prof(0, pprof.as_slice(), "process_cpu:samples:count:cpu:nanoseconds".to_string());
+    }
+}
+
+fn criterion_benchmark(c: &mut Criterion) {
+    let pprofs = get_test_pprof_data();
+    c.bench_function("merge", |b| b.iter(|| merge_bench(&pprofs)));
+}
+
+criterion_group!(benches, criterion_benchmark);
+criterion_main!(benches);
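
Usage note: because the [[bench]] entry sets harness = false, Criterion supplies the benchmark runner itself, so this benchmark is run with the standard cargo bench command from the pprof-bin crate; with the html_reports feature enabled above, Criterion also writes HTML reports under target/criterion/.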
(A binary file changed, -6.83 KB; contents not shown.)
