
Commit df80dd6

Merge remote-tracking branch 'origin/main' into siyuan/cdc-handle-schema-event

2 parents: 766313c + f09f195

439 files changed (+8164, -2267 lines)

Some file contents are hidden by default for this large commit.


Cargo.lock (+249, -152)

(Generated file; diff not rendered.)

Cargo.toml (+12, -7)

@@ -121,19 +121,20 @@ aws-smithy-types = { version = "1", default-features = false, features = [
 aws-endpoint = "0.60"
 aws-types = "1"
 axum = "=0.7.4" # TODO: 0.7.5+ does not work with current toolchain
-etcd-client = { package = "madsim-etcd-client", version = "0.4" }
+etcd-client = { package = "madsim-etcd-client", version = "0.6" }
 futures-async-stream = "0.2.9"
 hytra = "0.1"
 rdkafka = { package = "madsim-rdkafka", version = "0.4.1", features = [
     "cmake-build",
 ] }
 hashbrown = { version = "0.14", features = ["ahash", "inline-more", "nightly"] }
 criterion = { version = "0.5", features = ["async_futures"] }
-tonic = { package = "madsim-tonic", version = "0.4.1" }
-tonic-build = { package = "madsim-tonic-build", version = "0.4.2" }
-otlp-embedded = { git = "https://github.com/risingwavelabs/otlp-embedded", rev = "492c244e0be91feb659c0cd48a624bbd96045a33" }
-prost = { version = "0.12" }
-icelake = { git = "https://github.com/icelake-io/icelake", rev = "07d53893d7788b4e41fc11efad8a6be828405c31", features = [
+tonic = { package = "madsim-tonic", version = "0.5.1" }
+tonic-build = { package = "madsim-tonic-build", version = "0.5" }
+otlp-embedded = { git = "https://github.com/risingwavelabs/otlp-embedded", rev = "e6cd165b9bc85783b42c106e99186b86b73e3507" }
+prost = { version = "0.13" }
+prost-build = { version = "0.13" }
+icelake = { git = "https://github.com/risingwavelabs/icelake.git", rev = "1860eb315183a5f3f72b4097c1e40d49407f8373", features = [
     "prometheus",
 ] }
 arrow-array-iceberg = { package = "arrow-array", version = "52" }

@@ -179,6 +180,7 @@ tikv-jemallocator = { git = "https://github.com/risingwavelabs/jemallocator.git"
     "profiling",
     "stats",
 ], rev = "64a2d9" }
+# TODO(http-bump): bump to use tonic 0.12 once minitrace-opentelemetry is updated
 opentelemetry = "0.23"
 opentelemetry-otlp = "0.16"
 opentelemetry_sdk = { version = "0.23", default-features = false }

@@ -194,6 +196,7 @@ sea-orm = { version = "0.12.14", features = [
     "runtime-tokio-native-tls",
 ] }
 sqlx = "0.7"
+tokio-stream = { git = "https://github.com/madsim-rs/tokio.git", rev = "0dd1055", features = ["net", "fs"] }
 tokio-util = "0.7"
 tracing-opentelemetry = "0.24"
 rand = { version = "0.8", features = ["small_rng"] }

@@ -334,7 +337,9 @@ opt-level = 2
 # Patch third-party crates for deterministic simulation.
 quanta = { git = "https://github.com/madsim-rs/quanta.git", rev = "948bdc3" }
 getrandom = { git = "https://github.com/madsim-rs/getrandom.git", rev = "e79a7ae" }
-tokio-stream = { git = "https://github.com/madsim-rs/tokio.git", rev = "fe39bb8e" }
+# Don't patch `tokio-stream`, but only use the madsim version for **direct** dependencies.
+# Imagine an unpatched dependency depends on the original `tokio` and the patched `tokio-stream`.
+# tokio-stream = { git = "https://github.com/madsim-rs/tokio.git", rev = "0dd1055" }
 tokio-retry = { git = "https://github.com/madsim-rs/rust-tokio-retry.git", rev = "95e2fd3" }
 tokio-postgres = { git = "https://github.com/madsim-rs/rust-postgres.git", rev = "ac00d88" }
 futures-timer = { git = "https://github.com/madsim-rs/futures-timer.git", rev = "05b33b4" }
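
The comment replacing the removed patch entry carries the reasoning for this change: a patch rewrites `tokio-stream` for every crate in the dependency graph, so an unpatched transitive dependency could end up pairing the original `tokio` with the madsim `tokio-stream`, while a workspace-level dependency only affects crates that explicitly opt in. A minimal sketch of that distinction, assuming the usual `[workspace.dependencies]` / `[patch.crates-io]` layout (the member-crate snippet is illustrative, not taken from this commit):

# Workspace-wide dependency declaration (what this commit adds): only members
# that explicitly inherit it get the madsim fork of tokio-stream.
[workspace.dependencies]
tokio-stream = { git = "https://github.com/madsim-rs/tokio.git", rev = "0dd1055", features = ["net", "fs"] }

# In a member crate's Cargo.toml, opting in explicitly (illustrative):
# [dependencies]
# tokio-stream = { workspace = true }

# A patch entry (what this commit removes) would instead replace the crate for
# every dependent, including unpatched third-party crates that still link
# against the original tokio:
# [patch.crates-io]
# tokio-stream = { git = "https://github.com/madsim-rs/tokio.git", rev = "0dd1055" }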

README.md (+1, -1)

@@ -56,7 +56,7 @@

 RisingWave is a Postgres-compatible SQL engine engineered to provide the <i><b>simplest</b></i> and <i><b>most cost-efficient</b></i> approach for <b>processing</b>, <b>analyzing</b>, and <b>managing</b> real-time event streaming data.

-![RisingWave](https://github.com/risingwavelabs/risingwave/assets/41638002/10c44404-f78b-43ce-bbd9-3646690acc59)
+![RisingWave](./docs/dev/src/images/architecture_20240814.png)

 ## When to use RisingWave?
 RisingWave can ingest millions of events per second, continuously join live data streams with historical tables, and serve ad-hoc queries in real-time. Typical use cases include, but are not limited to:

ci/Dockerfile (+1, -1)

@@ -11,7 +11,7 @@ ENV LANG en_US.utf8
 # Use AWS ubuntu mirror
 RUN sed -i 's|http://archive.ubuntu.com/ubuntu|http://us-east-2.ec2.archive.ubuntu.com/ubuntu/|g' /etc/apt/sources.list
 RUN apt-get update -yy && \
-    DEBIAN_FRONTEND=noninteractive apt-get -y install sudo make build-essential cmake protobuf-compiler curl parallel python3 python3-pip python3-venv software-properties-common \
+    DEBIAN_FRONTEND=noninteractive apt-get -y install sudo make build-essential cmake protobuf-compiler curl parallel python3 python3-pip python3-venv software-properties-common psmisc \
     openssl libssl-dev libsasl2-dev libcurl4-openssl-dev pkg-config bash openjdk-17-jdk wget unzip git tmux lld postgresql-client kcat netcat-openbsd mysql-client \
     maven zstd libzstd-dev locales \
     python3.12 python3.12-dev \

ci/build-ci-image.sh (+1, -1)

@@ -10,7 +10,7 @@ cat ../rust-toolchain
 # shellcheck disable=SC2155

 # REMEMBER TO ALSO UPDATE ci/docker-compose.yml
-export BUILD_ENV_VERSION=v20240731
+export BUILD_ENV_VERSION=v20240812

 export BUILD_TAG="public.ecr.aws/w1p7b4n3/rw-build-env:${BUILD_ENV_VERSION}"

ci/docker-compose.yml (+5, -6)

@@ -71,7 +71,7 @@ services:
       retries: 5

   source-test-env:
-    image: public.ecr.aws/w1p7b4n3/rw-build-env:v20240731
+    image: public.ecr.aws/w1p7b4n3/rw-build-env:v20240812
     depends_on:
       - mysql
       - sqlserver-server

@@ -85,7 +85,7 @@ services:
       - ..:/risingwave

   sink-test-env:
-    image: public.ecr.aws/w1p7b4n3/rw-build-env:v20240731
+    image: public.ecr.aws/w1p7b4n3/rw-build-env:v20240812
     depends_on:
       - mysql
       - db

@@ -108,12 +108,12 @@ services:


   rw-build-env:
-    image: public.ecr.aws/w1p7b4n3/rw-build-env:v20240731
+    image: public.ecr.aws/w1p7b4n3/rw-build-env:v20240812
     volumes:
       - ..:/risingwave

   ci-flamegraph-env:
-    image: public.ecr.aws/w1p7b4n3/rw-build-env:v20240731
+    image: public.ecr.aws/w1p7b4n3/rw-build-env:v20240812
     # NOTE(kwannoel): This is used in order to permit
     # syscalls for `nperf` (perf_event_open),
     # so it can do CPU profiling.

@@ -124,7 +124,7 @@ services:
       - ..:/risingwave

   regress-test-env:
-    image: public.ecr.aws/w1p7b4n3/rw-build-env:v20240731
+    image: public.ecr.aws/w1p7b4n3/rw-build-env:v20240812
     depends_on:
       db:
         condition: service_healthy

@@ -266,7 +266,6 @@ services:
       SCHEMA_REGISTRY_HOST_NAME: schemaregistry
       SCHEMA_REGISTRY_LISTENERS: http://schemaregistry:8082
       SCHEMA_REGISTRY_KAFKASTORE_BOOTSTRAP_SERVERS: message_queue:29092
-      SCHEMA_REGISTRY_DEBUG: 'true'

   pulsar-server:
     container_name: pulsar-server

ci/scripts/common.sh (-1)

@@ -97,7 +97,6 @@ function filter_stack_trace() {
   touch tmp
   cat "$1" \
   | sed -E '/ [1-9][0-9]+:/d' \
-  | sed -E '/ [3-9]+:/d' \
  | sed -E '/ at .rustc/d' \
  | sed -E '/ at ...cargo/d' > tmp
  cp tmp "$1"

ci/scripts/deterministic-recovery-test.sh (+1, -1)

@@ -9,7 +9,7 @@ echo "--- Download artifacts"
 download-and-decompress-artifact risingwave_simulation .
 chmod +x ./risingwave_simulation

-export RUST_LOG="risingwave_meta::barrier::recovery=debug,\
+export RUST_LOG="info,risingwave_meta::barrier::recovery=debug,\
 risingwave_meta::manager::catalog=debug,\
 risingwave_meta::rpc::ddl_controller=debug,\
 risingwave_meta::barrier::mod=debug,\

ci/scripts/gen-integration-test-yaml.py (+1)

@@ -65,6 +65,7 @@ def gen_pipeline_steps():
           env:
             GHCR_USERNAME: ghcr-username
             GHCR_TOKEN: ghcr-token
+            RW_LICENSE_KEY: rw-license-key
         - ./ci/plugins/docker-compose-logs
 """
     return pipeline_steps

ci/scripts/run-e2e-test.sh (+1, -1)

@@ -77,7 +77,7 @@ mv target/debug/risingwave_e2e_extended_mode_test-"$profile" target/debug/rising
 chmod +x ./target/debug/risingwave_e2e_extended_mode_test

 echo "--- e2e, $mode, streaming"
-RUST_LOG="info,risingwave_stream=info,risingwave_batch=info,risingwave_storage=info" \
+RUST_LOG="info,risingwave_stream=info,risingwave_batch=info,risingwave_storage=info,risingwave_stream::common::table::state_table=warn" \
 cluster_start
 # Please make sure the regression is expected before increasing the timeout.
 sqllogictest -p 4566 -d dev './e2e_test/streaming/**/*.slt' --junit "streaming-${profile}"

ci/scripts/single-node-utils.sh (+1, -2)

@@ -19,7 +19,7 @@ start_single_node() {
 }

 stop_single_node() {
-  pkill risingwave
+  killall --wait risingwave
   rm -rf "$HOME/.risingwave/state_store"
   rm -rf "$HOME/.risingwave/meta_store"
 }

@@ -47,7 +47,6 @@ wait_single_node() {

 restart_single_node() {
   stop_single_node
-  sleep 5
   start_single_node "$PREFIX_LOG"/single-node-restarted.log &
   wait_single_node
 }

@@ -0,0 +1,3 @@
+-- noinspection SqlNoDataSourceInspectionForFile
+-- noinspection SqlResolveForFile
+DROP SINK nexmark_q0_temporal_filter;

@@ -0,0 +1,7 @@
+-- noinspection SqlNoDataSourceInspectionForFile
+-- noinspection SqlResolveForFile
+CREATE SINK nexmark_q0_temporal_filter
+AS
+SELECT auction, bidder, price, date_time
+FROM bid_filtered
+WITH ( connector = 'blackhole', type = 'append-only', force_append_only = 'true');

ci/scripts/sql/nexmark/q0.sql (+1, -1)

@@ -4,4 +4,4 @@ CREATE SINK nexmark_q0
 AS
 SELECT auction, bidder, price, date_time
 FROM bid
-WITH ( connector = 'blackhole', type = 'append-only');
+WITH ( connector = 'blackhole', type = 'append-only', force_append_only = 'true');

@@ -0,0 +1,3 @@
+-- noinspection SqlNoDataSourceInspectionForFile
+-- noinspection SqlResolveForFile
+DROP SINK nexmark_q1_temporal_filter;

@@ -0,0 +1,10 @@
+-- noinspection SqlNoDataSourceInspectionForFile
+-- noinspection SqlResolveForFile
+CREATE SINK nexmark_q1_temporal_filter
+AS
+SELECT auction,
+       bidder,
+       0.908 * price as price,
+       date_time
+FROM bid_filtered
+WITH ( connector = 'blackhole', type = 'append-only', force_append_only = 'true');

ci/scripts/sql/nexmark/q1.sql (+1, -1)

@@ -7,4 +7,4 @@ SELECT auction,
        0.908 * price as price,
        date_time
 FROM bid
-WITH ( connector = 'blackhole', type = 'append-only');
+WITH ( connector = 'blackhole', type = 'append-only', force_append_only = 'true');

@@ -0,0 +1,3 @@
+-- noinspection SqlNoDataSourceInspectionForFile
+-- noinspection SqlResolveForFile
+DROP SINK nexmark_q10_temporal_filter;

@@ -0,0 +1,11 @@
+-- noinspection SqlNoDataSourceInspectionForFile
+-- noinspection SqlResolveForFile
+CREATE SINK nexmark_q10_temporal_filter AS
+SELECT auction,
+       bidder,
+       price,
+       date_time,
+       TO_CHAR(date_time, 'YYYY-MM-DD') as date,
+       TO_CHAR(date_time, 'HH:MI') as time
+FROM bid_filtered
+WITH ( connector = 'blackhole', type = 'append-only', force_append_only = 'true');

ci/scripts/sql/nexmark/q10.sql (+1, -1)

@@ -8,4 +8,4 @@ SELECT auction,
        TO_CHAR(date_time, 'YYYY-MM-DD') as date,
        TO_CHAR(date_time, 'HH:MI') as time
 FROM bid
-WITH ( connector = 'blackhole', type = 'append-only');
+WITH ( connector = 'blackhole', type = 'append-only', force_append_only = 'true');

@@ -0,0 +1,3 @@
+-- noinspection SqlNoDataSourceInspectionForFile
+-- noinspection SqlResolveForFile
+DROP SINK nexmark_q101_temporal_filter;

@@ -0,0 +1,16 @@
+-- noinspection SqlNoDataSourceInspectionForFile
+-- noinspection SqlResolveForFile
+CREATE SINK nexmark_q101_temporal_filter AS
+SELECT
+    a.id AS auction_id,
+    a.item_name AS auction_item_name,
+    b.max_price AS current_highest_bid
+FROM auction a
+LEFT OUTER JOIN (
+    SELECT
+        b1.auction,
+        MAX(b1.price) max_price
+    FROM bid_filtered b1
+    GROUP BY b1.auction
+) b ON a.id = b.auction
+WITH ( connector = 'blackhole', type = 'append-only', force_append_only = 'true');

@@ -0,0 +1,3 @@
+-- noinspection SqlNoDataSourceInspectionForFile
+-- noinspection SqlResolveForFile
+DROP SINK nexmark_q102_temporal_filter;

@@ -0,0 +1,14 @@
+-- noinspection SqlNoDataSourceInspectionForFile
+-- noinspection SqlResolveForFile
+CREATE SINK nexmark_q102_temporal_filter AS
+SELECT
+    a.id AS auction_id,
+    a.item_name AS auction_item_name,
+    COUNT(b.auction) AS bid_count
+FROM auction a
+JOIN bid_filtered b ON a.id = b.auction
+GROUP BY a.id, a.item_name
+HAVING COUNT(b.auction) >= (
+    SELECT COUNT(*) / COUNT(DISTINCT auction) FROM bid_filtered
+)
+WITH ( connector = 'blackhole', type = 'append-only', force_append_only = 'true');

@@ -0,0 +1,3 @@
+-- noinspection SqlNoDataSourceInspectionForFile
+-- noinspection SqlResolveForFile
+DROP SINK nexmark_q103_temporal_filter;

@@ -0,0 +1,13 @@
+-- noinspection SqlNoDataSourceInspectionForFile
+-- noinspection SqlResolveForFile
+CREATE SINK nexmark_q103_temporal_filter AS
+SELECT
+    a.id AS auction_id,
+    a.item_name AS auction_item_name
+FROM auction a
+WHERE a.id IN (
+    SELECT b.auction FROM bid_filtered b
+    GROUP BY b.auction
+    HAVING COUNT(*) >= 20
+)
+WITH ( connector = 'blackhole', type = 'append-only', force_append_only = 'true');

@@ -0,0 +1,3 @@
+-- noinspection SqlNoDataSourceInspectionForFile
+-- noinspection SqlResolveForFile
+DROP SINK nexmark_q104_temporal_filter;

@@ -0,0 +1,13 @@
+-- noinspection SqlNoDataSourceInspectionForFile
+-- noinspection SqlResolveForFile
+CREATE SINK nexmark_q104_temporal_filter AS
+SELECT
+    a.id AS auction_id,
+    a.item_name AS auction_item_name
+FROM auction a
+WHERE a.id NOT IN (
+    SELECT b.auction FROM bid_filtered b
+    GROUP BY b.auction
+    HAVING COUNT(*) < 20
+)
+WITH ( connector = 'blackhole', type = 'append-only', force_append_only = 'true');

@@ -0,0 +1,3 @@
+-- noinspection SqlNoDataSourceInspectionForFile
+-- noinspection SqlResolveForFile
+DROP SINK nexmark_q105_temporal_filter;

@@ -0,0 +1,13 @@
+-- noinspection SqlNoDataSourceInspectionForFile
+-- noinspection SqlResolveForFile
+CREATE SINK nexmark_q105_temporal_filter AS
+SELECT
+    a.id AS auction_id,
+    a.item_name AS auction_item_name,
+    COUNT(b.auction) AS bid_count
+FROM auction a
+JOIN bid_filtered b ON a.id = b.auction
+GROUP BY a.id, a.item_name
+ORDER BY bid_count DESC
+LIMIT 1000
+WITH ( connector = 'blackhole', type = 'append-only', force_append_only = 'true');

@@ -0,0 +1,3 @@
+-- noinspection SqlNoDataSourceInspectionForFile
+-- noinspection SqlResolveForFile
+DROP SINK nexmark_q105_without_limit_temporal_filter;

@@ -0,0 +1,11 @@
+-- noinspection SqlNoDataSourceInspectionForFile
+-- noinspection SqlResolveForFile
+CREATE SINK nexmark_q105_without_limit_temporal_filter AS
+SELECT
+    a.id AS auction_id,
+    a.item_name AS auction_item_name,
+    COUNT(b.auction) AS bid_count
+FROM auction a
+JOIN bid_filtered b ON a.id = b.auction
+GROUP BY a.id, a.item_name
+WITH ( connector = 'blackhole', type = 'append-only', force_append_only = 'true');

@@ -0,0 +1,3 @@
+-- noinspection SqlNoDataSourceInspectionForFile
+-- noinspection SqlResolveForFile
+DROP SINK nexmark_q105_without_limit;

@@ -0,0 +1,11 @@
+-- noinspection SqlNoDataSourceInspectionForFile
+-- noinspection SqlResolveForFile
+CREATE SINK nexmark_q105_without_limit AS
+SELECT
+    a.id AS auction_id,
+    a.item_name AS auction_item_name,
+    COUNT(b.auction) AS bid_count
+FROM auction a
+JOIN bid b ON a.id = b.auction
+GROUP BY a.id, a.item_name
+WITH ( connector = 'blackhole', type = 'append-only', force_append_only = 'true');

ci/scripts/sql/nexmark/q106.drop.sql (+3)

@@ -0,0 +1,3 @@
+-- noinspection SqlNoDataSourceInspectionForFile
+-- noinspection SqlResolveForFile
+DROP SINK nexmark_q106;

ci/scripts/sql/nexmark/q106.sql (+21)

@@ -0,0 +1,21 @@
+-- noinspection SqlNoDataSourceInspectionForFile
+-- noinspection SqlResolveForFile
+CREATE SINK nexmark_q106
+AS
+SELECT
+    MIN(final) AS min_final
+FROM
+    (
+        SELECT
+            auction.id,
+            MAX(price) AS final
+        FROM
+            auction,
+            bid
+        WHERE
+            bid.auction = auction.id
+            AND bid.date_time BETWEEN auction.date_time AND auction.expires
+        GROUP BY
+            auction.id
+    )
+WITH ( connector = 'blackhole', type = 'append-only', force_append_only = 'true');

ci/scripts/sql/nexmark/q107.drop.sql (+3)

@@ -0,0 +1,3 @@
+-- noinspection SqlNoDataSourceInspectionForFile
+-- noinspection SqlResolveForFile
+DROP SINK nexmark_q107;
