diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml
new file mode 100644
index 0000000..f50ba7c
--- /dev/null
+++ b/.github/workflows/build.yml
@@ -0,0 +1,47 @@
+name: Build
+
+on:
+ push:
+ branches:
+ - master
+ - develop
+ paths-ignore:
+ - "**.md"
+ - "misc/**"
+
+ pull_request:
+ types:
+ - opened
+ - synchronize
+ - reopened
+ paths-ignore:
+ - "**.md"
+ - "misc/**"
+
+jobs:
+ build:
+ runs-on: ubuntu-latest
+
+ steps:
+ - name: Setup Dependencies
+ env:
+ DEBIAN_FRONTEND: noninteractive
+ CH_REPO: deb [trusted=yes] https://repo.clickhouse.tech/deb/stable/ main/
+ CH_CONF_DIR: /etc/clickhouse-server/config.d
+ run: |
+ sudo apt-get update -y -qq
+ sudo apt-get install -y -qq --no-install-recommends ca-certificates dpkg fakeroot rpm
+ echo $CH_REPO | sudo tee -a /etc/apt/sources.list
+ sudo apt-get update -y -qq
+ sudo apt-get install -y -qq clickhouse-server
+ sudo mkdir -p $CH_CONF_DIR
+ sudo touch $CH_CONF_DIR/test.xml
+ echo "<yandex><jdbc_bridge><host>127.0.0.1</host><port>9019</port></jdbc_bridge></yandex>" | sudo tee -a $CH_CONF_DIR/test.xml
+ sudo /usr/sbin/clickhouse-server --config=/etc/clickhouse-server/config.xml &
+ - uses: actions/checkout@v2
+ - name: Set up JDK 1.8
+ uses: actions/setup-java@v1
+ with:
+ java-version: 1.8
+ - name: Build with Maven
+ run: mvn --batch-mode --update-snapshots -Prelease verify
diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
new file mode 100644
index 0000000..a887c21
--- /dev/null
+++ b/.github/workflows/release.yml
@@ -0,0 +1,78 @@
+name: Release
+
+on:
+ workflow_dispatch:
+ inputs:
+ major:
+ description: "Major version"
+ required: true
+ default: "2"
+ minor:
+ description: "Minor version"
+ required: false
+ default: "0"
+ patch:
+ description: "Patch"
+ required: false
+ default: "0"
+ organization:
+ description: "DockerHub organization"
+ required: false
+ default: "yandex"
+
+jobs:
+ pre-release:
+ name: "Pre Release"
+ runs-on: "ubuntu-latest"
+
+ steps:
+ - uses: actions/checkout@v2
+ - name: Set up JDK 1.8
+ uses: actions/setup-java@v1
+ with:
+ java-version: 1.8
+ - name: Build with Maven
+ run: mvn -Dmaven.test.skip=true -Drevision=${{ github.event.inputs.major }}.${{ github.event.inputs.minor }}.${{ github.event.inputs.patch }} --batch-mode -Prelease package
+ - uses: "marvinpinto/action-automatic-releases@latest"
+ with:
+ repo_token: "${{ secrets.GITHUB_TOKEN }}"
+ automatic_release_tag: "v${{ github.event.inputs.major }}.${{ github.event.inputs.minor }}.${{ github.event.inputs.patch }}"
+ prerelease: true
+ title: "Release v${{ github.event.inputs.major }}.${{ github.event.inputs.minor }}.${{ github.event.inputs.patch }}"
+ files: |
+ LICENSE
+ NOTICE
+ target/clickhouse*.jar
+ target/*.deb
+ target/**/*.rpm
+
+ publish:
+ name: "Build and Publish Docker Image"
+ runs-on: ubuntu-latest
+ needs: pre-release
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v2
+ - name: Set up QEMU
+ uses: docker/setup-qemu-action@v1
+ - name: Set up Docker Buildx
+ uses: docker/setup-buildx-action@v1
+ - name: Login to DockerHub
+ uses: docker/login-action@v1
+ with:
+ username: ${{ secrets.DOCKER_HUB_USER }}
+ password: ${{ secrets.DOCKER_HUB_PASSWD }}
+ - name: Build and Push Docker Image
+ uses: docker/build-push-action@v2
+ with:
+ context: .
+ file: ./Dockerfile
+ push: true
+ build-args: |
+ revision=${{ github.event.inputs.major }}.${{ github.event.inputs.minor }}.${{ github.event.inputs.patch }}
+ repository=${{ github.repository }}
+ tags: |
+ ${{ github.event.inputs.organization }}/clickhouse-jdbc-bridge:latest
+ ${{ github.event.inputs.organization }}/clickhouse-jdbc-bridge:${{ github.event.inputs.major }}
+ ${{ github.event.inputs.organization }}/clickhouse-jdbc-bridge:${{ github.event.inputs.major }}.${{ github.event.inputs.minor }}
+ ${{ github.event.inputs.organization }}/clickhouse-jdbc-bridge:${{ github.event.inputs.major }}.${{ github.event.inputs.minor }}.${{ github.event.inputs.patch }}
diff --git a/.gitignore b/.gitignore
index a6f8a7b..24a0006 100644
--- a/.gitignore
+++ b/.gitignore
@@ -7,3 +7,4 @@ dependency-reduced-pom.xml
.vscode/
demo/
target/
+test.sh
diff --git a/Dockerfile b/Dockerfile
index 8f9d614..af5e862 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -17,39 +17,17 @@
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
+FROM adoptopenjdk/openjdk8-openj9:jre8u275-b01_openj9-0.23.0-ubuntu
-# docker build --squash --build-arg revision=2.0.0 -t yandex/clickhouse-jdbc-bridge .
-ARG revision=2.0.0-SNAPSHOT
-
-#
-# Stage 1/2: Build
-#
-FROM maven:3-openjdk-8 as builder
-
-ARG revision
-
-COPY LICENSE NOTICE pom.xml /app/
-COPY docker /app/docker/
-COPY misc /app/misc/
-COPY src /app/src/
-
-WORKDIR /app
-
-RUN mvn -Drevision=${revision} package
-
-
-#
-# Stage 2/2: Pack
-#
-FROM adoptopenjdk/openjdk8-openj9:jre8u265-b01_openj9-0.21.0-ubuntu
-
-ARG revision
+ARG revision=2.0.0
+ARG repository=ClickHouse/clickhouse-jdbc-bridge
# Maintainer
LABEL maintainer="zhicwu@gmail.com"
# Environment variables
-ENV JDBC_BRIDGE_HOME=/app JDBC_BRIDGE_VERSION=${revision}
+ENV JDBC_BRIDGE_HOME=/app JDBC_BRIDGE_VERSION=${revision} \
+ JDBC_BRIDGE_REL_URL=https://github.com/${repository}/releases/download/v${revision}/
# Labels
LABEL app_name="ClickHouse JDBC Bridge" app_version="$JDBC_BRIDGE_VERSION"
@@ -59,10 +37,10 @@ RUN apt-get update \
&& DEBIAN_FRONTEND=noninteractive apt-get install -y --allow-unauthenticated apache2-utils \
apt-transport-https curl htop iftop iptraf iputils-ping jq lsof net-tools tzdata wget \
&& apt-get clean \
+ && wget -q -P $JDBC_BRIDGE_HOME $JDBC_BRIDGE_REL_URL/LICENSE $JDBC_BRIDGE_REL_URL/NOTICE \
+ $JDBC_BRIDGE_REL_URL/clickhouse-jdbc-bridge-${revision}.jar \
&& rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*
-COPY --from=builder /app/LICENSE /app/NOTICE \
- /app/target/clickhouse-jdbc-bridge-$JDBC_BRIDGE_VERSION.jar $JDBC_BRIDGE_HOME/
COPY --chown=root:root docker/ $JDBC_BRIDGE_HOME
RUN chmod +x $JDBC_BRIDGE_HOME/*.sh \
diff --git a/README.md b/README.md
index 30135e9..1432aca 100644
--- a/README.md
+++ b/README.md
@@ -10,13 +10,12 @@ JDBC bridge for ClickHouse. It acts as a stateless proxy passing queries to exte
## Known Issues / Limitation
-**Caution: this is not ready for production use, as it's still under development and lack of testing.**
-
-* ClickHouse server should be patched or you may run into issues like `JDBC bridge is not running` and `Timeout: connect timed out`
-* Pushdown is not supported
+* Use the most recent LTS/stable version of ClickHouse
+ Note: you may need to patch ClickHouse if you're using an older version released before July 2020. Please refer to [this](https://github.com/ClickHouse/ClickHouse/pull/11690) for details.
+* Pushdown is not supported by design
* Query may execute twice because of type inferring
-* Complex data types like Array and Tuple are not supported
-* Mutation is not fully supported - only insertion in simple cases
+* Complex data types like Array and Tuple are not supported - they'll be treated as String
+* Mutation is not fully supported by design - only insertion in simple cases
* Scripting is experimental
@@ -32,9 +31,24 @@ Below is a rough performance comparison to help you understand overhead caused b
* Java CLI
```bash
+ # add named datasource
+ wget -P config/datasources https://raw.githubusercontent.com/ClickHouse/clickhouse-jdbc-bridge/master/misc/quick-start/jdbc-bridge/config/datasources/ch-server.json
+ # start jdbc bridge, and then you can issue below query in ClickHouse for testing
+ # select * from jdbc('ch-server', 'select 1')
java -jar clickhouse-jdbc-bridge-<version>.jar
```
+ If you need adhoc datasource, which is disabled by default for security reason, you may try below:
+ ```bash
+ # download jdbc drivers and required libs if any
+ wget -P drivers https://repo1.maven.org/maven2/org/mariadb/jdbc/mariadb-java-client/2.7.1/mariadb-java-client-2.7.1.jar
+ # start jdbc bridge, and you'll be able to run query like below in ClickHouse:
+ # select * from jdbc('jdbc:mariadb://localhost:3306/test?user=root&password=root', 'select 1')
+ java -Djdbc-bridge.driver.loader=false \
+ -cp `pwd`/drivers/mariadb-java-client-2.7.1.jar:`pwd`/clickhouse-jdbc-bridge-<version>.jar:. \
+ ru.yandex.clickhouse.jdbcbridge.JdbcBridgeVerticle
+ ```
+
* Docker CLI
It's simple to get started using all-in-one docker image:
@@ -212,7 +226,7 @@ Below is a rough performance comparison to help you understand overhead caused b
* Monitoring
- You can use [Prometheus](https://prometheus.io/) to monitor metrics exposed by JDBC brige.
+ You can use [Prometheus](https://prometheus.io/) to monitor metrics exposed by JDBC bridge.
```bash
curl -v http://jdbc-bridge:9019/metrics
```
@@ -256,11 +270,11 @@ Below is a rough performance comparison to help you understand overhead caused b
* Vert.x
- If you're familiar with [Vert.x](https://vertx.io/). You can customize its configuration by changing `config/httpd.json` and `config/vertx.json`.
+ If you're familiar with [Vert.x](https://vertx.io/), you can customize its configuration by changing `config/httpd.json` and `config/vertx.json`.
* Query Parameters
- All supported query parameters can be found at [here](src/main/java/ru/yandex/clickhouse/jdbcbridge/core/QueryPamraters.java). `datasource_column=true` can be simplied as `datasource_column`, for example:
+ All supported query parameters can be found [here](src/main/java/ru/yandex/clickhouse/jdbcbridge/core/QueryParameters.java). `datasource_column=true` can be simplified as `datasource_column`, for example:
```sql
select * from jdbc('ch-server?datasource_column=true', 'select 1')
@@ -270,7 +284,7 @@ Below is a rough performance comparison to help you understand overhead caused b
* Timeout
Couple of timeout settings you should be aware of:
- 1. datasource timeout, for example: `max_execution_time` in MySQL and ClickHouse
+ 1. datasource timeout, for example: `max_execution_time` in MySQL
2. JDBC driver timeout, for example: `connectTimeout` and `socketTimeout` in [MySQL Connector/J](https://dev.mysql.com/doc/connector-j/8.0/en/connector-j-reference-configuration-properties.html)
3. Vertx timeout - see `config/server.json` and `config/vertx.json`
4. Client(ClickHouse JDBC driver) timeout - see timeout settings in ClickHouse JDBC driver
@@ -327,6 +341,7 @@ An extension for JDBC bridge is basically a Java class with 3 optional parts:
```
3. Instantiation Method
+
In order to create instance of your extension, in general you should define a static method like below so that JDBC bridge knows how(besides walking through all possible constructors):
```java
public static MyExtension newInstance(Object... args) {
diff --git a/all-in-one.Dockerfile b/all-in-one.Dockerfile
index 771a67d..074f44f 100644
--- a/all-in-one.Dockerfile
+++ b/all-in-one.Dockerfile
@@ -18,8 +18,8 @@
# specific language governing permissions and limitations
# under the License.
-# docker build --squash --build-arg revision=20.9.3 -f all-in-one.Dockerfile -t yandex/clickhouse-all-in-one:20.9.3 .
-ARG revision=20.9.3
+# docker build --squash --build-arg revision=20.8 -f all-in-one.Dockerfile -t yandex/clickhouse-all-in-one:20.8 .
+ARG revision=20.8
#
# Stage 1/2: Build
diff --git a/docker/config/datasource-schema.json b/docker/config/datasource.jschema
similarity index 74%
rename from docker/config/datasource-schema.json
rename to docker/config/datasource.jschema
index f9994ea..46ec97b 100644
--- a/docker/config/datasource-schema.json
+++ b/docker/config/datasource.jschema
@@ -1,7 +1,88 @@
{
"$schema": "http://json-schema.org/draft-07/schema#",
-
"definitions": {
+ "chType": {
+ "description": "Data type.",
+ "type": "string",
+ "enum": [
+ "Boolean",
+ "Int8",
+ "Int16",
+ "Int32",
+ "Int64",
+ "Int128",
+ "Int256",
+ "UInt8",
+ "UInt16",
+ "UInt32",
+ "UInt64",
+ "UInt128",
+ "UInt256",
+ "Float32",
+ "Float64",
+ "Date",
+ "DateTime",
+ "DateTime64",
+ "Decimal",
+ "Decimal32",
+ "Decimal64",
+ "Decimal128",
+ "Decimal256",
+ "Enum",
+ "Enum8",
+ "Enum16",
+ "IPv4",
+ "IPv6",
+ "FixedString",
+ "String",
+ "UUID"
+ ]
+ },
+ "jdbcType": {
+ "description": "JDBC data type.",
+ "type": "string",
+ "enum": [
+ "BIT",
+ "TINYINT",
+ "SMALLINT",
+ "INTEGER",
+ "BIGINT",
+ "FLOAT",
+ "REAL",
+ "DOUBLE",
+ "NUMERIC",
+ "DECIMAL",
+ "CHAR",
+ "VARCHAR",
+ "LONGVARCHAR",
+ "DATE",
+ "TIME",
+ "TIMESTAMP",
+ "BINARY",
+ "VARBINARY",
+ "LONGVARBINARY",
+ "NULL",
+ "OTHER",
+ "JAVA_OBJECT",
+ "DISTINCT",
+ "STRUCT",
+ "ARRAY",
+ "BLOB",
+ "CLOB",
+ "REF",
+ "DATALINK",
+ "BOOLEAN",
+ "ROWID",
+ "NCHAR",
+ "NVARCHAR",
+ "LONGNVARCHAR",
+ "NCLOB",
+ "SQLXML",
+ "REF_CURSOR",
+ "TIME_WITH_TIMEZONE",
+ "TIMESTAMP_WITH_TIMEZONE"
+ ]
+ },
"column": {
"description": "Column.",
"type": "object",
@@ -12,40 +93,7 @@
},
"type": {
"description": "Data type.",
- "type": "string",
- "enum": [
- "Boolean",
- "Int8",
- "Int16",
- "Int32",
- "Int64",
- "Int128",
- "Int256",
- "UInt8",
- "UInt16",
- "UInt32",
- "UInt64",
- "UInt128",
- "UInt256",
- "Float32",
- "Float64",
- "Date",
- "DateTime",
- "DateTime64",
- "Decimal",
- "Decimal32",
- "Decimal64",
- "Decimal128",
- "Decimal256",
- "Enum",
- "Enum8",
- "Enum16",
- "IPv4",
- "IPv6",
- "FixedString",
- "String",
- "UUID"
- ]
+ "$ref": "#/definitions/chType"
},
"value": {
"default": "",
@@ -77,6 +125,59 @@
},
"minItems": 1
},
+ "converter": {
+ "description": "Custom data type converter.",
+ "type": "object",
+ "properties": {
+ "class": {
+ "description": "Optional Java class name of the custom type converter.",
+ "type": "string"
+ },
+ "mappings": {
+ "description": "Optional list of mapping rules.",
+ "type": "array",
+ "items": {
+ "type": "object",
+ "properties": {
+ "jdbcType": {
+ "description": "JDBC type.",
+ "$ref": "#/definitions/jdbcType"
+ },
+ "nativeType": {
+ "description": "Native type.",
+ "type": "string"
+ },
+ "toType": {
+ "description": "Corresponding data type in ClickHouse.",
+ "$ref": "#/definitions/chType"
+ }
+ },
+ "anyOf": [
+ {
+ "required": [
+ "jdbcType",
+ "toType"
+ ]
+ },
+ {
+ "required": [
+ "nativeType",
+ "toType"
+ ]
+ },
+ {
+ "required": [
+ "jdbcType",
+ "nativeType",
+ "toType"
+ ]
+ }
+ ]
+ },
+ "minItems": 1
+ }
+ }
+ },
"defaults": {
"description": "Default values.",
"type": "object",
@@ -101,6 +202,16 @@
"description": "Default value for Int64 columns.",
"type": "integer"
},
+ "Int128": {
+ "default": 0,
+ "description": "Default value for Int128 columns.",
+ "type": "number"
+ },
+ "Int256": {
+ "default": 0,
+ "description": "Default value for Int256 columns.",
+ "type": "number"
+ },
"UInt8": {
"default": 0,
"description": "Default value for UInt8 columns.",
@@ -121,36 +232,51 @@
"description": "Default value for UInt64 columns.",
"type": "integer"
},
+ "UInt128": {
+ "default": 0,
+ "description": "Default value for UInt128 columns.",
+ "type": "number"
+ },
+ "UInt256": {
+ "default": 0,
+ "description": "Default value for UInt256 columns.",
+ "type": "number"
+ },
"Float32": {
- "default": 0.0,
+ "default": 0,
"description": "Default value for Float32 columns.",
"type": "number"
},
"Float64": {
- "default": 0.0,
+ "default": 0,
"description": "Default value for Float64 columns.",
"type": "number"
},
"Decimal": {
- "default": 0.0,
+ "default": 0,
"description": "Default value for Decimal columns.",
"type": "number"
},
"Decimal32": {
- "default": 0.0,
+ "default": 0,
"description": "Default value for Decimal32 columns.",
"type": "number"
},
"Decimal64": {
- "default": 0.0,
+ "default": 0,
"description": "Default value for Decimal64 columns.",
"type": "number"
},
"Decimal128": {
- "default": 0.0,
+ "default": 0,
"description": "Default value for Decimal128 columns.",
"type": "number"
},
+ "Decimal256": {
+ "default": 0,
+ "description": "Default value for Decimal256 columns.",
+ "type": "number"
+ },
"Date": {
"default": 1,
"description": "Default value for Date columns.",
@@ -162,7 +288,7 @@
"type": "integer"
},
"DateTime64": {
- "default": 1.0,
+ "default": 1,
"description": "Default value for DateTime64 columns.",
"type": "number"
},
@@ -228,7 +354,10 @@
}
},
"additionalProperties": true,
- "required": ["user", "password"]
+ "required": [
+ "user",
+ "password"
+ ]
},
"hikaricp": {
"description": "HikariCP properties.",
@@ -355,30 +484,33 @@
"default": 0,
"description": "This property controls the amount of time that a connection can be out of the pool before a message is logged indicating a possible connection leak. Lowest acceptable value for enabling leak detection is 2000 (2 seconds).",
"type": "integer"
- },
- "columns": {
- "$ref": "#/definitions/columns"
- },
- "defaults": {
- "$ref": "#/definitions/defaults"
- },
- "parameters": {
- "$ref": "#/definitions/parameters"
}
},
"anyOf": [
- { "required": ["dataSource"] },
- { "required": ["jdbcUrl", "username", "password"] }
+ {
+ "required": [
+ "dataSource"
+ ]
+ },
+ {
+ "required": [
+ "jdbcUrl",
+ "username",
+ "password"
+ ]
+ }
]
},
- "general": {
+ "entity": {
"description": "Named datasource.",
"type": "object",
"properties": {
"type": {
"description": "Type of the datasource, defaults to jdbc.",
"type": "string",
- "examples": ["jdbc"]
+ "examples": [
+ "jdbc"
+ ]
},
"aliases": {
"description": "List of aliases for this datasource.",
@@ -402,14 +534,29 @@
"https://repo1.maven.org/maven2/net/jpountz/lz4/lz4/1.3.0/lz4-1.3.0.jar"
]
]
+ },
+ "columns": {
+ "$ref": "#/definitions/columns"
+ },
+ "converter": {
+ "$ref": "#/definitions/converter"
+ },
+ "defaults": {
+ "$ref": "#/definitions/defaults"
+ },
+ "parameters": {
+ "$ref": "#/definitions/parameters"
}
},
- "anyOf": [{ "$ref": "#/definitions/hikaricp" }]
+ "anyOf": [
+ {
+ "$ref": "#/definitions/hikaricp"
+ }
+ ]
}
},
-
"type": "object",
"additionalProperties": {
- "$ref": "#/definitions/general"
+ "$ref": "#/definitions/entity"
}
}
diff --git a/docker/config/datasources/datasource.json.example b/docker/config/datasources/datasource.json.example
index 71473e3..7bf7b5b 100644
--- a/docker/config/datasources/datasource.json.example
+++ b/docker/config/datasources/datasource.json.example
@@ -1,5 +1,5 @@
{
- "$schema": "../datasource-schema.json",
+ "$schema": "../datasource.jschema",
"ch-server": {
"aliases": [
"self"
diff --git a/docker/config/query-schema.json b/docker/config/query.jschema
similarity index 94%
rename from docker/config/query-schema.json
rename to docker/config/query.jschema
index b3bcabb..ee81ea2 100644
--- a/docker/config/query-schema.json
+++ b/docker/config/query.jschema
@@ -1,6 +1,5 @@
{
"$schema": "http://json-schema.org/draft-07/schema#",
-
"definitions": {
"column": {
"description": "Column.",
@@ -118,7 +117,7 @@
}
}
},
- "query": {
+ "entity": {
"description": "Named query.",
"type": "object",
"properties": {
@@ -131,6 +130,10 @@
"scripts/query_some_table.sql"
]
},
+ "schema": {
+ "description": "Named schema which overrides columns.",
+ "type": "string"
+ },
"columns": {
"$ref": "#/definitions/columns"
},
@@ -138,12 +141,14 @@
"$ref": "#/definitions/parameters"
}
},
- "required": ["query", "columns"]
+ "required": [
+ "query",
+ "columns"
+ ]
}
},
-
"type": "object",
"additionalProperties": {
- "$ref": "#/definitions/query"
+ "$ref": "#/definitions/entity"
}
}
diff --git a/docker/config/schema.jschema b/docker/config/schema.jschema
new file mode 100644
index 0000000..43b92ad
--- /dev/null
+++ b/docker/config/schema.jschema
@@ -0,0 +1,96 @@
+{
+ "$schema": "http://json-schema.org/draft-07/schema#",
+ "definitions": {
+ "column": {
+ "description": "Column.",
+ "type": "object",
+ "properties": {
+ "name": {
+ "description": "Column name.",
+ "type": "string"
+ },
+ "type": {
+ "description": "Data type.",
+ "type": "string",
+ "enum": [
+ "Boolean",
+ "Int8",
+ "Int16",
+ "Int32",
+ "Int64",
+ "Int128",
+ "Int256",
+ "UInt8",
+ "UInt16",
+ "UInt32",
+ "UInt64",
+ "UInt128",
+ "UInt256",
+ "Float32",
+ "Float64",
+ "Date",
+ "DateTime",
+ "DateTime64",
+ "Decimal",
+ "Decimal32",
+ "Decimal64",
+ "Decimal128",
+ "Decimal256",
+ "Enum",
+ "Enum8",
+ "Enum16",
+ "IPv4",
+ "IPv6",
+ "FixedString",
+ "String",
+ "UUID"
+ ]
+ },
+ "value": {
+ "default": "",
+ "description": "Constant value in string format.",
+ "type": "string"
+ },
+ "nullable": {
+ "default": true,
+ "description": "Whether the columns may contain null value or not.",
+ "type": "boolean"
+ },
+ "precision": {
+ "default": 0,
+ "description": "Precision of decimal value.",
+ "type": "integer"
+ },
+ "scale": {
+ "default": 0,
+ "description": "Scale of decimal value.",
+ "type": "integer"
+ }
+ }
+ },
+ "columns": {
+ "description": "List of columns.",
+ "type": "array",
+ "items": {
+ "$ref": "#/definitions/column"
+ },
+ "minItems": 1
+ },
+ "entity": {
+ "description": "Named schema.",
+ "type": "object",
+ "properties": {
+ "columns": {
+ "$ref": "#/definitions/columns"
+ }
+ },
+ "required": [
+ "columns"
+ ]
+ }
+ },
+ "type": "object",
+ "additionalProperties": {
+ "$ref": "#/definitions/entity"
+ }
+}
diff --git a/docker/config/schemas/schema.json.example b/docker/config/schemas/schema.json.example
index b7c472d..11d2cd6 100644
--- a/docker/config/schemas/schema.json.example
+++ b/docker/config/schemas/schema.json.example
@@ -1,5 +1,6 @@
{
- "show-query-logs": {
+ "$schema": "../schema.jschema",
+ "query-log": {
"columns": [
{
"name": "query_id",
diff --git a/docker/config/server.json b/docker/config/server.json
index a3f05c8..d037623 100644
--- a/docker/config/server.json
+++ b/docker/config/server.json
@@ -2,6 +2,20 @@
"requestTimeout": 5000,
"queryTimeout": 30000,
"configScanPeriod": 5000,
+ "repositories": [
+ {
+ "entity": "ru.yandex.clickhouse.jdbcbridge.core.NamedDataSource",
+ "repository": "ru.yandex.clickhouse.jdbcbridge.impl.JsonFileRepository"
+ },
+ {
+ "entity": "ru.yandex.clickhouse.jdbcbridge.core.NamedSchema",
+ "repository": "ru.yandex.clickhouse.jdbcbridge.impl.JsonFileRepository"
+ },
+ {
+ "entity": "ru.yandex.clickhouse.jdbcbridge.core.NamedQuery",
+ "repository": "ru.yandex.clickhouse.jdbcbridge.impl.JsonFileRepository"
+ }
+ ],
"extensions": [
{
"class": "ru.yandex.clickhouse.jdbcbridge.impl.JdbcDataSource"
diff --git a/misc/perf-test/docker-compose.yml b/misc/perf-test/docker-compose.yml
index 834adb1..77c4eea 100644
--- a/misc/perf-test/docker-compose.yml
+++ b/misc/perf-test/docker-compose.yml
@@ -5,7 +5,7 @@ version: "3"
#
services:
clickhouse:
- image: yandex/clickhouse-server:20.9
+ image: yandex/clickhouse-server:20.8
hostname: ch-server
ports:
- "3307:3307"
@@ -87,12 +87,12 @@ services:
command: >
/bin/bash -cx '
export TEST_CONF_URL="https://raw.githubusercontent.com/ClickHouse/clickhouse-jdbc-bridge/develop/misc/perf-test/jdbc-bridge";
- export DRIVER_URL="https://repo1.maven.org/maven2/org/mariadb/jdbc/mariadb-java-client/2.7.0/mariadb-java-client-2.7.0.jar"
- mkdir -p /app/config/{datasources,queries} /app/drivers/mariadb /app/scripts;
+ mkdir -p /app/config/{datasources,queries,schemas} /app/drivers/mariadb /app/scripts;
curl -sSL -o /app/config/datasources/mariadb.json $$TEST_CONF_URL/config/datasources/mariadb.json;
curl -sSL -o /app/config/queries/constant.json $$TEST_CONF_URL/config/queries/constant.json;
curl -sSL -o /app/config/queries/small-table.json $$TEST_CONF_URL/config/queries/small-table.json;
- curl -sSL -o /app/drivers/mariadb/mariadb-java-client.jar $$DRIVER_URL;
+ curl -sSL -o /app/config/schemas/simple-num.json $$TEST_CONF_URL/config/schemas/simple-num.json;
+ curl -sSL -o /app/config/schemas/simple-row.json $$TEST_CONF_URL/config/schemas/simple-row.json;
curl -sSL -o /app/scripts/constant.sql $$TEST_CONF_URL/scripts/constant.sql;
curl -sSL -o /app/scripts/small-table.sql $$TEST_CONF_URL/scripts/small-table.sql;
./docker-entrypoint.sh
diff --git a/misc/perf-test/jdbc-bridge/config/datasources/mariadb.json b/misc/perf-test/jdbc-bridge/config/datasources/mariadb.json
index 407bf88..75df424 100644
--- a/misc/perf-test/jdbc-bridge/config/datasources/mariadb.json
+++ b/misc/perf-test/jdbc-bridge/config/datasources/mariadb.json
@@ -1,6 +1,9 @@
{
+ "$schema": "../../../../../docker/config/datasource.jschema",
"mariadb": {
- "driverUrls": ["/app/drivers/mariadb"],
+ "driverUrls": [
+ "https://repo1.maven.org/maven2/org/mariadb/jdbc/mariadb-java-client/2.7.0/mariadb-java-client-2.7.0.jar"
+ ],
"driverClassName": "org.mariadb.jdbc.Driver",
"jdbcUrl": "jdbc:mariadb://mariadb:3306/test?useSSL=false&useCompression=false",
"dataSource": {
@@ -9,6 +12,9 @@
},
"initializationFailTimeout": 0,
"minimumIdle": 0,
- "maximumPoolSize": 15
+ "maximumPoolSize": 15,
+ "parameters": {
+ "fetch_size": 10000
+ }
}
}
diff --git a/misc/perf-test/jdbc-bridge/config/queries/constant.json b/misc/perf-test/jdbc-bridge/config/queries/constant.json
index e171daa..6848491 100644
--- a/misc/perf-test/jdbc-bridge/config/queries/constant.json
+++ b/misc/perf-test/jdbc-bridge/config/queries/constant.json
@@ -1,10 +1,11 @@
{
+ "$schema": "../../../../../docker/config/query.jschema",
"constant": {
"query": "scripts/constant.sql",
"columns": [
{
"name": "1",
- "type": "Int32",
+ "type": "UInt8",
"nullable": false
}
]
diff --git a/misc/perf-test/jdbc-bridge/config/queries/small-table.json b/misc/perf-test/jdbc-bridge/config/queries/small-table.json
index 9b50a17..e80cf63 100644
--- a/misc/perf-test/jdbc-bridge/config/queries/small-table.json
+++ b/misc/perf-test/jdbc-bridge/config/queries/small-table.json
@@ -1,4 +1,5 @@
{
+ "$schema": "../../../../../docker/config/query.jschema",
"small-table": {
"query": "scripts/small-table.sql",
"columns": [
diff --git a/misc/perf-test/jdbc-bridge/config/schemas/simple-num.json b/misc/perf-test/jdbc-bridge/config/schemas/simple-num.json
new file mode 100644
index 0000000..d814f51
--- /dev/null
+++ b/misc/perf-test/jdbc-bridge/config/schemas/simple-num.json
@@ -0,0 +1,12 @@
+{
+ "$schema": "../../../../../docker/config/schema.jschema",
+ "simple-num": {
+ "columns": [
+ {
+ "name": "1",
+ "type": "UInt8",
+ "nullable": false
+ }
+ ]
+ }
+}
diff --git a/misc/perf-test/jdbc-bridge/config/schemas/simple-row.json b/misc/perf-test/jdbc-bridge/config/schemas/simple-row.json
new file mode 100644
index 0000000..7f33005
--- /dev/null
+++ b/misc/perf-test/jdbc-bridge/config/schemas/simple-row.json
@@ -0,0 +1,17 @@
+{
+ "$schema": "../../../../../docker/config/schema.jschema",
+ "simple-row": {
+ "columns": [
+ { "name": "id", "type": "Int64", "nullable": false },
+ { "name": "name", "type": "String", "nullable": false },
+ {
+ "name": "datetime",
+ "type": "DateTime64",
+ "nullable": true,
+ "scale": 3
+ },
+ { "name": "num", "type": "Int32", "nullable": true },
+ { "name": "value", "type": "Float32", "nullable": true }
+ ]
+ }
+}
diff --git a/misc/quick-start/docker-compose.yml b/misc/quick-start/docker-compose.yml
index ad3566b..a01d808 100644
--- a/misc/quick-start/docker-compose.yml
+++ b/misc/quick-start/docker-compose.yml
@@ -1,8 +1,20 @@
version: "2"
# Only 'ch-server' and 'jdbc-bridge' are mandatory.
-# Feel free to remove any db-xxx to save memory
+# You may remove any db-xxx to save memory.
services:
+ db-elasticsearch:
+ image: amazon/opendistro-for-elasticsearch:1.11.0
+ hostname: db-elasticsearch
+ environment:
+ # admin/admin
+ # curl -XGET https://localhost:9200 -u admin:admin --insecure
+ # curl -XGET https://localhost:9200/_cat/nodes?v -u admin:admin --insecure
+ # curl -XGET https://localhost:9200/_cat/plugins?v -u admin:admin --insecure
+ discovery.type: single-node
+ mem_limit: 512m
+ restart: always
+
db-mariadb10:
image: mariadb:10
hostname: db-mariadb10
@@ -41,7 +53,7 @@ services:
restart: always
ch-server:
- image: yandex/clickhouse-server
+ image: yandex/clickhouse-server:20.8
hostname: ch-server
volumes:
- ./ch-server/config:/etc/clickhouse-server/config.d
@@ -51,21 +63,21 @@ services:
restart: always
jdbc-bridge:
- image: yandex/clickhouse-jdbc-bridge
+ image: yandex/clickhouse-jdbc-bridge:2.0
hostname: jdbc-bridge
# In general you don't need to define any environment variable
# Below are all default settings just for demonstration
environment:
- CONFIG_DIR: config # configuration directory
- HTTPD_CONFIG_FILE: httpd.json # httpd configuration file
- SERVER_CONFIG_FILE: server.json # server configuration file
- VERTX_CONFIG_FILE: vertx.json # vertx configuration file
- DATASOURCE_CONFIG_DIR: datasources # named datasource directory
- DRIVER_DIR: drivers # driver directory
- EXTENSION_DIR: extensions # extension directory
- QUERY_CONFIG_DIR: queries # named query directory
- CUSTOM_DRIVER_LOADER: "true" # whether use custom driver loader or not
- JDBC_BRIDGE_JVM_OPTS: # use CPU and memory allocated by container
+ CONFIG_DIR: config # configuration directory
+ HTTPD_CONFIG_FILE: httpd.json # httpd configuration file
+ SERVER_CONFIG_FILE: server.json # server configuration file
+ VERTX_CONFIG_FILE: vertx.json # vertx configuration file
+ DATASOURCE_CONFIG_DIR: datasources # named datasource directory
+ DRIVER_DIR: drivers # driver directory
+ EXTENSION_DIR: extensions # extension directory
+ QUERY_CONFIG_DIR: queries # named query directory
+ CUSTOM_DRIVER_LOADER: "true" # whether use custom driver loader or not
+ JDBC_BRIDGE_JVM_OPTS: # use CPU and memory allocated by container
# You may want to keep datasources, queries, SQL scripts, and maybe drivers in a git repo
volumes:
diff --git a/misc/quick-start/jdbc-bridge/config/datasources/ch-server.json b/misc/quick-start/jdbc-bridge/config/datasources/ch-server.json
index 992f5f5..03e6a3e 100644
--- a/misc/quick-start/jdbc-bridge/config/datasources/ch-server.json
+++ b/misc/quick-start/jdbc-bridge/config/datasources/ch-server.json
@@ -1,4 +1,5 @@
{
+ "$schema": "../../../../../docker/config/datasource.jschema",
"ch-server": {
"aliases": ["self"],
"driverUrls": [
diff --git a/misc/quick-start/jdbc-bridge/config/datasources/elasticsearch.json b/misc/quick-start/jdbc-bridge/config/datasources/elasticsearch.json
new file mode 100644
index 0000000..0af16b7
--- /dev/null
+++ b/misc/quick-start/jdbc-bridge/config/datasources/elasticsearch.json
@@ -0,0 +1,17 @@
+{
+ "$schema": "../../../../../docker/config/datasource.jschema",
+ "elasticsearch": {
+ "driverUrls": [
+ "https://repo1.maven.org/maven2/com/amazon/opendistroforelasticsearch/client/opendistro-sql-jdbc/1.11.0.0/opendistro-sql-jdbc-1.11.0.0.jar"
+ ],
+ "readOnly": true,
+ "connectionTestQuery": "",
+ "driverClassName": "com.amazon.opendistroforelasticsearch.jdbc.Driver",
+ "jdbcUrl": "jdbc:elasticsearch://elasticsearch:9200?useSSL=true&trustSelfSigned=true&hostnameVerification=false",
+ "username": "admin",
+ "password": "admin",
+ "initializationFailTimeout": 0,
+ "minimumIdle": 0,
+ "maximumPoolSize": 10
+ }
+}
diff --git a/misc/quick-start/jdbc-bridge/config/datasources/mariadb10.json b/misc/quick-start/jdbc-bridge/config/datasources/mariadb10.json
index 4e420c7..fa73d28 100644
--- a/misc/quick-start/jdbc-bridge/config/datasources/mariadb10.json
+++ b/misc/quick-start/jdbc-bridge/config/datasources/mariadb10.json
@@ -1,4 +1,5 @@
{
+ "$schema": "../../../../../docker/config/datasource.jschema",
"mariadb10": {
"driverUrls": [
"https://repo1.maven.org/maven2/org/mariadb/jdbc/mariadb-java-client/2.7.0/mariadb-java-client-2.7.0.jar"
diff --git a/misc/quick-start/jdbc-bridge/config/datasources/mysql5.json b/misc/quick-start/jdbc-bridge/config/datasources/mysql5.json
index 64402c9..2cdb05b 100644
--- a/misc/quick-start/jdbc-bridge/config/datasources/mysql5.json
+++ b/misc/quick-start/jdbc-bridge/config/datasources/mysql5.json
@@ -1,4 +1,5 @@
{
+ "$schema": "../../../../../docker/config/datasource.jschema",
"mysql5": {
"driverUrls": [
"https://repo1.maven.org/maven2/mysql/mysql-connector-java/5.1.49/mysql-connector-java-5.1.49.jar"
diff --git a/misc/quick-start/jdbc-bridge/config/datasources/mysql8.json b/misc/quick-start/jdbc-bridge/config/datasources/mysql8.json
index 3984ec2..27fcec1 100644
--- a/misc/quick-start/jdbc-bridge/config/datasources/mysql8.json
+++ b/misc/quick-start/jdbc-bridge/config/datasources/mysql8.json
@@ -1,4 +1,5 @@
{
+ "$schema": "../../../../../docker/config/datasource.jschema",
"mysql8": {
"driverUrls": [
"https://repo1.maven.org/maven2/mysql/mysql-connector-java/8.0.21/mysql-connector-java-8.0.21.jar"
diff --git a/misc/quick-start/jdbc-bridge/config/datasources/postgres13.json b/misc/quick-start/jdbc-bridge/config/datasources/postgres13.json
index 64eb6c6..df86e08 100644
--- a/misc/quick-start/jdbc-bridge/config/datasources/postgres13.json
+++ b/misc/quick-start/jdbc-bridge/config/datasources/postgres13.json
@@ -1,7 +1,11 @@
{
+ "$schema": "../../../../../docker/config/datasource.jschema",
"postgres13": {
+ "converter": {
+ "mappings": [{ "nativeType": "bool", "toType": "String" }]
+ },
"driverUrls": [
- "https://repo1.maven.org/maven2/org/postgresql/postgresql/42.2.16/postgresql-42.2.16.jar"
+ "https://repo1.maven.org/maven2/org/postgresql/postgresql/42.2.18/postgresql-42.2.18.jar"
],
"driverClassName": "org.postgresql.Driver",
"jdbcUrl": "jdbc:postgresql://db-postgres13/test",
diff --git a/misc/quick-start/jdbc-bridge/config/queries/show-query-logs.json b/misc/quick-start/jdbc-bridge/config/queries/show-query-logs.json
index e991a21..1081057 100644
--- a/misc/quick-start/jdbc-bridge/config/queries/show-query-logs.json
+++ b/misc/quick-start/jdbc-bridge/config/queries/show-query-logs.json
@@ -1,4 +1,5 @@
{
+ "$schema": "../../../../../docker/config/datasource.jschema",
"show-query-logs": {
"query": "scripts/show-query-logs.sql",
"columns": [
diff --git a/misc/quick-start/jdbc-bridge/config/schemas/query-log.json b/misc/quick-start/jdbc-bridge/config/schemas/query-log.json
new file mode 100644
index 0000000..205099b
--- /dev/null
+++ b/misc/quick-start/jdbc-bridge/config/schemas/query-log.json
@@ -0,0 +1,37 @@
+{
+ "$schema": "../../../../../docker/config/schema.jschema",
+ "query-log": {
+ "columns": [
+ {
+ "name": "query_id",
+ "type": "String",
+ "nullable": false
+ },
+ {
+ "name": "type",
+ "type": "String",
+ "nullable": false
+ },
+ {
+ "name": "event_time",
+ "type": "DateTime",
+ "nullable": false
+ },
+ {
+ "name": "query_start_time",
+ "type": "DateTime",
+ "nullable": false
+ },
+ {
+ "name": "query",
+ "type": "String",
+ "nullable": true
+ },
+ {
+ "name": "user",
+ "type": "String",
+ "nullable": true
+ }
+ ]
+ }
+}
diff --git a/pom.xml b/pom.xml
index 430331b..2aa2d57 100644
--- a/pom.xml
+++ b/pom.xml
@@ -13,10 +13,10 @@
2.0.0-SNAPSHOT
- 2.8.5
- 3.3.0
- 3.9.3
- 1.1.18
+ 2.8.6
+ 3.3.1
+ 3.9.5
+ 1.1.19
1.7.30
3.4.5
3.32.3.2
@@ -29,10 +29,10 @@
1.5.3
2.11
1.1.0
- 3.5.1
- 3.0.0
+ 3.8.1
+ 3.2.1
3.2.0
- 2.10.3
+ 3.2.0
2.22.2
3.2.4
ru.yandex.clickhouse.jdbcbridge.internal
diff --git a/src/main/java/ru/yandex/clickhouse/jdbcbridge/JdbcBridgeVerticle.java b/src/main/java/ru/yandex/clickhouse/jdbcbridge/JdbcBridgeVerticle.java
index b83e4c1..9410d3c 100644
--- a/src/main/java/ru/yandex/clickhouse/jdbcbridge/JdbcBridgeVerticle.java
+++ b/src/main/java/ru/yandex/clickhouse/jdbcbridge/JdbcBridgeVerticle.java
@@ -53,21 +53,21 @@
import ru.yandex.clickhouse.jdbcbridge.core.ByteBuffer;
import ru.yandex.clickhouse.jdbcbridge.core.ColumnDefinition;
import ru.yandex.clickhouse.jdbcbridge.core.TableDefinition;
-import ru.yandex.clickhouse.jdbcbridge.core.DataSourceManager;
import ru.yandex.clickhouse.jdbcbridge.core.DataType;
import ru.yandex.clickhouse.jdbcbridge.core.Extension;
import ru.yandex.clickhouse.jdbcbridge.core.ExtensionManager;
import ru.yandex.clickhouse.jdbcbridge.core.NamedDataSource;
import ru.yandex.clickhouse.jdbcbridge.core.NamedQuery;
import ru.yandex.clickhouse.jdbcbridge.core.NamedSchema;
-import ru.yandex.clickhouse.jdbcbridge.core.QueryManager;
import ru.yandex.clickhouse.jdbcbridge.core.QueryParameters;
import ru.yandex.clickhouse.jdbcbridge.core.QueryParser;
+import ru.yandex.clickhouse.jdbcbridge.core.Repository;
+import ru.yandex.clickhouse.jdbcbridge.core.RepositoryManager;
import ru.yandex.clickhouse.jdbcbridge.core.ResponseWriter;
-import ru.yandex.clickhouse.jdbcbridge.core.SchemaManager;
import ru.yandex.clickhouse.jdbcbridge.core.Utils;
import ru.yandex.clickhouse.jdbcbridge.impl.ConfigDataSource;
import ru.yandex.clickhouse.jdbcbridge.impl.JdbcDataSource;
+import ru.yandex.clickhouse.jdbcbridge.impl.JsonFileRepository;
import ru.yandex.clickhouse.jdbcbridge.impl.ScriptDataSource;
import static ru.yandex.clickhouse.jdbcbridge.core.DataType.*;
@@ -80,7 +80,7 @@
public class JdbcBridgeVerticle extends AbstractVerticle implements ExtensionManager {
private static final org.slf4j.Logger log = org.slf4j.LoggerFactory.getLogger(JdbcBridgeVerticle.class);
- private static long startTime;
+ private static volatile long startTime;
private static final String CONFIG_PATH = Utils.getConfiguration("config", "CONFIG_DIR", "jdbc-bridge.config.dir");
@@ -93,15 +93,64 @@ public class JdbcBridgeVerticle extends AbstractVerticle implements ExtensionMan
private final List> extensions;
- private final DataSourceManager datasources;
- private final QueryManager queries;
- private final SchemaManager schemas;
+ private final RepositoryManager repos;
- private DataSourceManager customDataSourceManager;
- private QueryManager customQueryManager;
- private SchemaManager customSchemaManager;
private long scanInterval = 5000L;
+ List> loadRepositories(JsonObject serverConfig) {
+ List> repos = new ArrayList<>();
+
+ Extension defaultExt = new Extension<>(JsonFileRepository.class);
+ JsonArray declaredRepos = serverConfig == null ? null : serverConfig.getJsonArray("repositories");
+ if (declaredRepos == null) {
+ // let's go with default extensions
+ declaredRepos = new JsonArray();
+
+ declaredRepos.add(NamedDataSource.class.getName());
+ declaredRepos.add(NamedSchema.class.getName());
+ declaredRepos.add(NamedQuery.class.getName());
+ }
+
+ for (Object item : declaredRepos) {
+ Repository> repo = null;
+
+ if (item instanceof String) {
+ repo = defaultExt.newInstance(this, defaultExt.loadClass(String.valueOf(item)));
+ } else {
+ JsonObject o = (JsonObject) item;
+
+ String entityClassName = o.getString("entity");
+ if (entityClassName == null || entityClassName.isEmpty()) {
+ continue;
+ }
+
+ String repoClassName = o.getString("repository");
+
+ ArrayList urls = null;
+ JsonArray libUrls = o.getJsonArray("libUrls");
+ if (repoClassName != null && !repoClassName.isEmpty() && libUrls != null) {
+ urls = new ArrayList<>(libUrls.size());
+ for (Object u : libUrls) {
+ if (u instanceof String) {
+ urls.add((String) u);
+ }
+ }
+ }
+
+ Extension> ext = Utils.loadExtension(urls, repoClassName);
+ if (ext != null) {
+ repo = (Repository>) ext.newInstance(this, ext.loadClass(entityClassName));
+ }
+ }
+
+ if (repo != null) {
+ repos.add(repo);
+ }
+ }
+
+ return repos;
+ }
+
List> loadExtensions(JsonObject serverConfig) {
List> exts = new ArrayList<>();
@@ -154,10 +203,7 @@ public JdbcBridgeVerticle() {
this.extensions = new ArrayList<>();
- this.datasources = Utils.loadService(DataSourceManager.class);
- this.schemas = Utils.loadService(SchemaManager.class);
- // Query has dependency of schema
- this.queries = Utils.loadService(QueryManager.class);
+ this.repos = Utils.loadService(RepositoryManager.class);
}
@Override
@@ -169,14 +215,11 @@ public void start() {
this.scanInterval = config.getLong("configScanPeriod", 5000L);
- // initialize default implementations
- for (Class> clazz : new Class>[] { this.datasources.getClass(), this.schemas.getClass(),
- this.queries.getClass() }) {
- new Extension(clazz).initialize(this);
- }
-
+ this.repos.update(this.loadRepositories(config));
+ // extension must be loaded *after* repository is initialized
this.extensions.addAll(this.loadExtensions(config));
+ // initialize extensions so that they're fully ready for use
for (Extension> ext : this.extensions) {
ext.initialize(this);
}
@@ -188,6 +231,11 @@ public void start() {
}
private void startServer(JsonObject bridgeServerConfig, JsonObject httpServerConfig) {
+ if (httpServerConfig.isEmpty()) {
+ // make sure we can pass long query/script by default
+ httpServerConfig.put("maxInitialLineLength", 2147483647L);
+ }
+
HttpServer server = vertx.createHttpServer(new HttpServerOptions(httpServerConfig));
// vertx.createHttpServer(new
// HttpServerOptions().setTcpNoDelay(false).setTcpKeepAlive(true)
@@ -232,23 +280,34 @@ private void responseHandlers(RoutingContext ctx) {
HttpServerRequest req = ctx.request();
String path = ctx.normalisedPath();
- log.debug("[{}] Context:\n{}", path, ctx.data());
- log.debug("[{}] Headers:\n{}", path, req.headers());
- log.debug("[{}] Parameters:\n{}", path, req.params());
- log.trace("[{}] Body:\n{}", path, ctx.getBodyAsString());
+ if (log.isDebugEnabled()) {
+ log.debug("[{}] Context:\n{}", path, ctx.data());
+ log.debug("[{}] Headers:\n{}", path, req.headers());
+ log.debug("[{}] Parameters:\n{}", path, req.params());
+ }
+
+ if (log.isTraceEnabled()) {
+ log.trace("[{}] Body:\n{}", path, ctx.getBodyAsString());
+ }
HttpServerResponse resp = ctx.response();
resp.endHandler(handler -> {
- log.trace("[{}] About to end response...", ctx.normalisedPath());
+ if (log.isTraceEnabled()) {
+ log.trace("[{}] About to end response...", ctx.normalisedPath());
+ }
});
resp.closeHandler(handler -> {
- log.trace("[{}] About to close response...", ctx.normalisedPath());
+ if (log.isTraceEnabled()) {
+ log.trace("[{}] About to close response...", ctx.normalisedPath());
+ }
});
resp.drainHandler(handler -> {
- log.trace("[{}] About to drain response...", ctx.normalisedPath());
+ if (log.isTraceEnabled()) {
+ log.trace("[{}] About to drain response...", ctx.normalisedPath());
+ }
});
resp.exceptionHandler(throwable -> {
@@ -267,8 +326,18 @@ private void handlePing(RoutingContext ctx) {
ctx.response().end(PING_RESPONSE);
}
+ private NamedDataSource getDataSource(String uri, boolean orCreate) {
+ return getDataSource(getDataSourceRepository(), uri, orCreate);
+ }
+
+ private NamedDataSource getDataSource(Repository repo, String uri, boolean orCreate) {
+ NamedDataSource ds = repo.get(uri);
+
+ return ds == null && orCreate ? new NamedDataSource(uri, null, null) : ds;
+ }
+
private void handleColumnsInfo(RoutingContext ctx) {
- final QueryParser parser = QueryParser.fromRequest(ctx, getDataSourceManager());
+ final QueryParser parser = QueryParser.fromRequest(ctx, getDataSourceRepository());
String rawQuery = parser.getRawQuery();
@@ -277,7 +346,7 @@ private void handleColumnsInfo(RoutingContext ctx) {
String uri = parser.getConnectionString();
QueryParameters params = parser.getQueryParameters();
- NamedDataSource ds = getDataSourceManager().get(uri, params.isDebug());
+ NamedDataSource ds = getDataSource(uri, params.isDebug());
String dsId = uri;
if (ds != null) {
dsId = ds.getId();
@@ -285,12 +354,21 @@ private void handleColumnsInfo(RoutingContext ctx) {
}
// even it's a named query, the column list could be empty
- NamedQuery namedQuery = getQueryManager().get(rawQuery);
+ NamedQuery namedQuery = getQueryRepository().get(rawQuery);
// priority: name query -> named schema -> type inferring
- NamedSchema namedSchema = getSchemaManager().get(parser.getNormalizedSchema());
- TableDefinition tableDef = namedQuery != null && namedQuery.hasColumn() ? namedQuery.getColumns(params)
- : (namedSchema != null ? namedSchema.getColumns()
- : ds.getResultColumns(parser.getSchema(), parser.getNormalizedQuery(), params));
+ NamedSchema namedSchema = getSchemaRepository().get(parser.getNormalizedSchema());
+
+ TableDefinition tableDef;
+ if (namedQuery != null) {
+ if (namedSchema == null) {
+ namedSchema = getSchemaRepository().get(namedQuery.getSchema());
+ }
+
+ tableDef = namedQuery.getColumns(params);
+ } else {
+ tableDef = namedSchema != null ? namedSchema.getColumns()
+ : ds.getResultColumns(parser.getSchema(), parser.getNormalizedQuery(), params);
+ }
List additionalColumns = new ArrayList();
if (params.showDatasourceColumn()) {
@@ -308,7 +386,9 @@ private void handleColumnsInfo(RoutingContext ctx) {
String columnsInfo = tableDef.toString();
- log.debug("Columns info:\n[{}]", columnsInfo);
+ if (log.isDebugEnabled()) {
+ log.debug("Columns info:\n[{}]", columnsInfo);
+ }
ctx.response().end(ByteBuffer.asBuffer(columnsInfo));
}
@@ -317,29 +397,33 @@ private void handleIdentifierQuote(RoutingContext ctx) {
}
private void handleQuery(RoutingContext ctx) {
- final DataSourceManager manager = getDataSourceManager();
+ final Repository manager = getDataSourceRepository();
final QueryParser parser = QueryParser.fromRequest(ctx, manager);
ctx.response().setChunked(true);
vertx.executeBlocking(promise -> {
- log.trace("About to execute query...");
+ if (log.isTraceEnabled()) {
+ log.trace("About to execute query...");
+ }
QueryParameters params = parser.getQueryParameters();
- NamedDataSource ds = manager.get(parser.getConnectionString(), params.isDebug());
+ NamedDataSource ds = getDataSource(manager, parser.getConnectionString(), params.isDebug());
params = ds.newQueryParameters(params);
String schema = parser.getSchema();
- NamedSchema namedSchema = getSchemaManager().get(parser.getNormalizedSchema());
+ NamedSchema namedSchema = getSchemaRepository().get(parser.getNormalizedSchema());
String generatedQuery = parser.getRawQuery();
String normalizedQuery = parser.getNormalizedQuery();
// try if it's a named query first
- NamedQuery namedQuery = getQueryManager().get(normalizedQuery);
+ NamedQuery namedQuery = getQueryRepository().get(normalizedQuery);
// in case the "query" is a local file...
normalizedQuery = ds.loadSavedQueryAsNeeded(normalizedQuery, params);
- log.debug("Generated query:\n{}\nNormalized query:\n{}", generatedQuery, normalizedQuery);
+ if (log.isDebugEnabled()) {
+ log.debug("Generated query:\n{}\nNormalized query:\n{}", generatedQuery, normalizedQuery);
+ }
final HttpServerResponse resp = ctx.response();
@@ -348,8 +432,13 @@ private void handleQuery(RoutingContext ctx) {
long executionStartTime = System.currentTimeMillis();
if (namedQuery != null) {
- log.debug("Found named query: [{}]", namedQuery);
+ if (log.isDebugEnabled()) {
+ log.debug("Found named query: [{}]", namedQuery);
+ }
+ if (namedSchema == null) {
+ namedSchema = getSchemaRepository().get(namedQuery.getSchema());
+ }
// columns in request might just be a subset of defined list
// for example:
// - named query 'test' is: select a, b, c from table
@@ -394,12 +483,16 @@ private void handleQuery(RoutingContext ctx) {
params, writer);
}
- log.debug("Completed execution in {} ms.", System.currentTimeMillis() - executionStartTime);
+ if (log.isDebugEnabled()) {
+ log.debug("Completed execution in {} ms.", System.currentTimeMillis() - executionStartTime);
+ }
promise.complete();
}, false, res -> {
if (res.succeeded()) {
- log.debug("Wrote back query result");
+ if (log.isDebugEnabled()) {
+ log.debug("Wrote back query result");
+ }
ctx.response().end();
} else {
ctx.fail(res.cause());
@@ -409,16 +502,18 @@ private void handleQuery(RoutingContext ctx) {
// https://github.com/ClickHouse/ClickHouse/blob/bee5849c6a7dba20dbd24dfc5fd5a786745d90ff/programs/odbc-bridge/MainHandler.cpp#L169
private void handleWrite(RoutingContext ctx) {
- final DataSourceManager manager = getDataSourceManager();
+ final Repository manager = getDataSourceRepository();
final QueryParser parser = QueryParser.fromRequest(ctx, manager, true);
ctx.response().setChunked(true);
vertx.executeBlocking(promise -> {
- log.trace("About to execute mutation...");
+ if (log.isTraceEnabled()) {
+ log.trace("About to execute mutation...");
+ }
QueryParameters params = parser.getQueryParameters();
- NamedDataSource ds = manager.get(parser.getConnectionString(), params.isDebug());
+ NamedDataSource ds = getDataSource(manager, parser.getConnectionString(), params.isDebug());
params = ds == null ? params : ds.newQueryParameters(params);
// final HttpServerRequest req = ctx.request();
@@ -427,13 +522,17 @@ private void handleWrite(RoutingContext ctx) {
final String generatedQuery = parser.getRawQuery();
String normalizedQuery = parser.getNormalizedQuery();
- log.debug("Generated query:\n{}\nNormalized query:\n{}", generatedQuery, normalizedQuery);
+ if (log.isDebugEnabled()) {
+ log.debug("Generated query:\n{}\nNormalized query:\n{}", generatedQuery, normalizedQuery);
+ }
// try if it's a named query first
- NamedQuery namedQuery = getQueryManager().get(normalizedQuery);
+ NamedQuery namedQuery = getQueryRepository().get(normalizedQuery);
// in case the "query" is a local file...
normalizedQuery = ds.loadSavedQueryAsNeeded(normalizedQuery, params);
+ // TODO: use named schema as table name?
+
String table = parser.getRawQuery();
if (namedQuery != null) {
table = parser.extractTable(ds.loadSavedQueryAsNeeded(namedQuery.getQuery(), params));
@@ -448,7 +547,9 @@ private void handleWrite(RoutingContext ctx) {
promise.complete();
}, false, res -> {
if (res.succeeded()) {
- log.debug("Wrote back query result");
+ if (log.isDebugEnabled()) {
+ log.debug("Wrote back query result");
+ }
ctx.response().end();
} else {
ctx.fail(res.cause());
@@ -456,27 +557,16 @@ private void handleWrite(RoutingContext ctx) {
});
}
- public static void main(String[] args) {
- startTime = System.currentTimeMillis();
-
- MeterRegistry registry = Utils.getDefaultMetricRegistry();
- new ClassLoaderMetrics().bindTo(registry);
- new JvmGcMetrics().bindTo(registry);
- new JvmMemoryMetrics().bindTo(registry);
- new JvmThreadMetrics().bindTo(registry);
- new ProcessorMetrics().bindTo(registry);
- new UptimeMetrics().bindTo(registry);
+ private Repository getDataSourceRepository() {
+ return getRepositoryManager().getRepository(NamedDataSource.class);
+ }
- // https://github.com/eclipse-vertx/vert.x/blob/master/src/main/generated/io/vertx/core/VertxOptionsConverter.java
- Vertx vertx = Vertx.vertx(new VertxOptions(Utils.loadJsonFromFile(Paths
- .get(CONFIG_PATH,
- Utils.getConfiguration("vertx.json", "VERTX_CONFIG_FILE", "jdbc-bridge.vertx.config.file"))
- .toString()))
- .setMetricsOptions(new MicrometerMetricsOptions()
- .setPrometheusOptions(new VertxPrometheusOptions().setEnabled(true))
- .setMicrometerRegistry(registry).setEnabled(true)));
+ private Repository getSchemaRepository() {
+ return getRepositoryManager().getRepository(NamedSchema.class);
+ }
- vertx.deployVerticle(new JdbcBridgeVerticle());
+ private Repository getQueryRepository() {
+ return getRepositoryManager().getRepository(NamedQuery.class);
}
@Override
@@ -495,40 +585,15 @@ public Extension getExtension(Class extends T> clazz) {
}
@Override
- public DataSourceManager getDataSourceManager() {
- return this.customDataSourceManager == null ? datasources : this.customDataSourceManager;
- }
-
- @Override
- public void setDataSourceManager(DataSourceManager manager) {
- this.customDataSourceManager = manager;
- }
-
- @Override
- public QueryManager getQueryManager() {
- return this.customQueryManager == null ? queries : this.customQueryManager;
- }
-
- @Override
- public void setQueryManager(QueryManager manager) {
- this.customQueryManager = manager;
- }
-
- @Override
- public SchemaManager getSchemaManager() {
- return this.customSchemaManager == null ? schemas : this.customSchemaManager;
- }
-
- @Override
- public void setSchemaManager(SchemaManager manager) {
- this.customSchemaManager = manager;
+ public RepositoryManager getRepositoryManager() {
+ return this.repos;
}
@Override
public void registerConfigLoader(String configPath, Consumer consumer) {
final String path = Paths.get(CONFIG_PATH, configPath).toString();
- log.info("Registering consumer to monitor configuration file(s) at [{}]", path);
+ log.info("Start to monitor configuration file(s) at [{}]", path);
ConfigRetriever retriever = ConfigRetriever.create(vertx,
new ConfigRetrieverOptions().setScanPeriod(this.scanInterval)
@@ -569,10 +634,33 @@ public Map getScriptableObjects() {
// TODO and some utilities?
vars.put("__vertx", vertx);
- vars.put("__datasources", getDataSourceManager());
- vars.put("__schemas", getSchemaManager());
- vars.put("__queries", getQueryManager());
+ vars.put("__datasources", getDataSourceRepository());
+ vars.put("__schemas", getSchemaRepository());
+ vars.put("__queries", getQueryRepository());
return vars;
}
+
+ public static void main(String[] args) {
+ startTime = System.currentTimeMillis();
+
+ MeterRegistry registry = Utils.getDefaultMetricRegistry();
+ new ClassLoaderMetrics().bindTo(registry);
+ new JvmGcMetrics().bindTo(registry);
+ new JvmMemoryMetrics().bindTo(registry);
+ new JvmThreadMetrics().bindTo(registry);
+ new ProcessorMetrics().bindTo(registry);
+ new UptimeMetrics().bindTo(registry);
+
+ // https://github.com/eclipse-vertx/vert.x/blob/master/src/main/generated/io/vertx/core/VertxOptionsConverter.java
+ Vertx vertx = Vertx.vertx(new VertxOptions(Utils.loadJsonFromFile(Paths
+ .get(CONFIG_PATH,
+ Utils.getConfiguration("vertx.json", "VERTX_CONFIG_FILE", "jdbc-bridge.vertx.config.file"))
+ .toString()))
+ .setMetricsOptions(new MicrometerMetricsOptions()
+ .setPrometheusOptions(new VertxPrometheusOptions().setEnabled(true))
+ .setMicrometerRegistry(registry).setEnabled(true)));
+
+ vertx.deployVerticle(new JdbcBridgeVerticle());
+ }
}
diff --git a/src/main/java/ru/yandex/clickhouse/jdbcbridge/core/BaseRepository.java b/src/main/java/ru/yandex/clickhouse/jdbcbridge/core/BaseRepository.java
new file mode 100644
index 0000000..f93c568
--- /dev/null
+++ b/src/main/java/ru/yandex/clickhouse/jdbcbridge/core/BaseRepository.java
@@ -0,0 +1,340 @@
+/**
+ * Copyright 2019-2020, Zhichun Wu
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package ru.yandex.clickhouse.jdbcbridge.core;
+
+import java.io.Closeable;
+import java.net.URI;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Objects;
+import java.util.Set;
+import java.util.Map.Entry;
+
+import io.vertx.core.json.JsonObject;
+
+import static ru.yandex.clickhouse.jdbcbridge.core.Utils.EMPTY_STRING;
+
+/**
+ * Base class for implementing a repository managing entities by ID and type.
+ *
+ * @since 2.0
+ */
+public abstract class BaseRepository implements Repository {
+ private static final org.slf4j.Logger log = org.slf4j.LoggerFactory.getLogger(BaseRepository.class);
+
+ protected final Map> types = Collections.synchronizedMap(new LinkedHashMap<>());
+ protected final Map mappings = Collections.synchronizedMap(new HashMap<>());
+
+ private final Class clazz;
+ private final String name;
+ private final DnsResolver resolver;
+ private final List stats;
+
+ private String defaultType = null;
+
+ protected final Extension defaultExtension;
+
+ protected String getEntityName() {
+ return this.name;
+ }
+
+ protected Extension getExtensionByType(String type, boolean autoCreate) {
+ Extension extension = types.size() > 0 ? types.get(type) : null;
+
+ if (extension == null) {
+ if (autoCreate) {
+ extension = defaultExtension;
+ } else {
+ throw new IllegalArgumentException("Unsupported type of " + getEntityName() + ": " + type);
+ }
+ }
+
+ return extension;
+ }
+
+ /**
+ * Create entity of given type.
+ *
+ * @param id id of the entity
+ * @param type type of the entity
+ * @return non-null entity
+ */
+ protected T createFromType(String id, String type) {
+ Extension extension = getExtensionByType(type, false);
+ return extension == null ? null : extension.newInstance(id, this, null);
+ }
+
+ /**
+ * Create entity based on the {@code type} defined in the given configuration.
+ *
+ * @param id id of the entity
+ * @param config configuration in JSON format
+ * @return non-null entity
+ */
+ protected T createFromConfig(String id, JsonObject config) {
+ String type = config == null ? null : config.getString(ManagedEntity.CONF_TYPE);
+ if (type == null) {
+ type = defaultType;
+ }
+ Extension extension = getExtensionByType(type, true);
+
+ return extension.newInstance(id, this, config);
+ }
+
+ /**
+ * Atomic add operation. For example, save the entity into a database table.
+ *
+ * @param entity entity
+ */
+ protected void atomicAdd(T entity) {
+ }
+
+ /**
+ * Atomic remove operation. For example, delete the entity from database.
+ *
+ * @param entity entity
+ */
+ protected void atomicRemove(T entity) {
+ }
+
+ /**
+ * Remove an entity and all its aliases (when {@code id} is not an alias) based
+ * on given id.
+ *
+ * @param id id of the entity, could be an alias
+ */
+ protected void remove(String id) {
+ // empty id is reserved for ConfigDataSource
+ T entity = EMPTY_STRING.equals(id) ? null : mappings.remove(id);
+
+ if (entity == null) {
+ return;
+ }
+
+ final List removedEntities;
+ if (id != null && id.equals(entity.getId())) {
+ log.info("Removing {}(id={}) and all its aliases...", getEntityName(), id);
+
+ Set aliases = entity.getAliases();
+ removedEntities = new ArrayList<>(aliases.size());
+ for (String alias : entity.getAliases()) {
+ T ref = mappings.get(alias);
+ // we don't want to remove an entity when its id is same as an
+ // alias of another entity
+ if (ref != null && !alias.equals(ref.getId())) {
+ log.info("Removing alias [{}] of {}(id={})...", alias, getEntityName(), ref.getId());
+ T removedEntity = mappings.remove(alias);
+ if (removedEntity != entity) {
+ removedEntities.add(removedEntity);
+ }
+ }
+ }
+
+ if (entity instanceof Closeable) {
+ if (log.isDebugEnabled()) {
+ log.debug("Closing {}(id={})...", getEntityName(), id);
+ }
+ // TODO async close in case it's too slow?
+ try {
+ ((Closeable) entity).close();
+ } catch (Exception e) {
+ }
+ }
+
+ atomicRemove(entity);
+ } else { // just an alias
+ log.info("Removing alias [{}] of {}(id={})...", id, getEntityName(), entity.getId());
+ removedEntities = Collections.singletonList(entity);
+ }
+
+ // close removed entries as needed
+ for (T e : removedEntities) {
+ if (!(e instanceof Closeable)) {
+ continue;
+ }
+
+ boolean matched = false;
+ for (T v : mappings.values()) {
+ if (e == v) {
+ matched = true;
+ break;
+ }
+ }
+
+ if (matched) {
+ if (log.isDebugEnabled()) {
+ log.debug("Closing {}(id={})...", getEntityName(), e.getId());
+ }
+ try {
+ ((Closeable) e).close();
+ } catch (Exception exp) {
+ }
+ }
+ }
+ }
+
+ protected void update(String id, JsonObject config) {
+ T entity = mappings.get(id);
+
+ boolean addEntity = entity == null;
+
+ if (!addEntity) {
+ if (entity.isDifferentFrom(config)) {
+ remove(id);
+ addEntity = true;
+ }
+ }
+
+ if (addEntity && config != null) {
+ log.info("Adding {}(id={})...", getEntityName(), id);
+
+ try {
+ entity = createFromConfig(id, config);
+ mappings.put(id, entity);
+
+ for (String alias : entity.getAliases()) {
+ if (mappings.containsKey(alias)) {
+ log.warn("Not able to add {} alias [{}] as it exists already", getEntityName(), alias);
+ } else {
+ mappings.put(alias, entity);
+ }
+ }
+
+ atomicAdd(entity);
+ } catch (RuntimeException e) {
+ log.warn("Failed to add " + getEntityName() + "(id=" + id + ")", e);
+ }
+ }
+ }
+
+ public BaseRepository(Class clazz) {
+ this.clazz = Objects.requireNonNull(clazz);
+ this.name = clazz.getSimpleName();
+ this.resolver = new DnsResolver();
+ this.stats = new ArrayList<>();
+
+ this.defaultExtension = new Extension<>(clazz);
+ }
+
+ @Override
+ public Class getEntityClass() {
+ return this.clazz;
+ }
+
+ @Override
+ public boolean accept(Class> clazz) {
+ return clazz != null && clazz.isAssignableFrom(this.clazz);
+ }
+
+ @Override
+ public String resolve(String name) {
+ return Utils.applyVariables(name, resolver::apply);
+ }
+
+ @Override
+ public List getUsageStats() {
+ List list = new ArrayList<>(this.mappings.size());
+ for (Entry entry : mappings.entrySet()) {
+ UsageStats usage = entry.getValue().getUsage(entry.getKey());
+ if (usage != null) {
+ list.add(usage);
+ }
+ }
+
+ return Collections.unmodifiableList(list);
+ }
+
+ @Override
+ public void registerType(String type, Extension extension) {
+ String className = Objects.requireNonNull(extension).getProviderClass().getName();
+ type = type == null || type.isEmpty() ? extension.getName() : type;
+
+ Extension registered = this.types.put(type, extension);
+
+ if (registered != null) {
+ log.warn("Discard [{}] as type [{}] is reserved by [{}]", className, type, registered.getClass().getName());
+ return;
+ }
+
+ log.info("Registering new type of {}: [{}] -> [{}]", getEntityName(), type, className);
+
+ if (types.size() == 1) {
+ log.info("Default type of {} is set to [{}]", getEntityName(), defaultType = type);
+ }
+ }
+
+ @Override
+ public void put(String id, T entity) {
+ Objects.requireNonNull(entity);
+
+ if (id == null) {
+ id = entity.getId();
+ }
+
+ this.remove(id);
+ this.mappings.put(id, entity);
+ atomicAdd(entity);
+
+ // now update aliases...
+ for (String alias : entity.getAliases()) {
+ if (alias == null || alias.isEmpty()) {
+ continue;
+ }
+
+ T e = this.mappings.get(alias);
+ if (e != null && alias.equals(e.getId())) {
+ log.warn("Not going to add alias [{}] as it's an ID reserved by another {}", alias, getEntityName());
+ continue;
+ } else {
+ this.remove(alias);
+ }
+
+ this.mappings.put(alias, entity);
+ }
+ }
+
+ @Override
+ public T get(final String id) {
+ T entity = null;
+
+ String normalizedId = id == null ? EMPTY_STRING : id;
+ // [:][?], for example: "my-db" and "jdbc:my-db"
+ // connection string like "jdbc:clickhouse:..."
+ if (this.types.size() > 0) { // multi-type repository
+ // check if any type is declared first
+ int index = normalizedId.indexOf(':');
+ if (index >= 0) {
+ return createFromType(normalizedId, normalizedId.substring(0, index));
+ } else {
+ if ((index = normalizedId.indexOf('?')) >= 0) {
+ normalizedId = normalizedId.substring(0, index);
+ }
+
+ if ((entity = this.mappings.get(normalizedId)) == null) {
+ throw new IllegalArgumentException(getEntityName() + " [" + normalizedId + "] does not exist!");
+ }
+ }
+ } else {
+ entity = this.mappings.get(id);
+ }
+
+ return entity;
+ }
+}
diff --git a/src/main/java/ru/yandex/clickhouse/jdbcbridge/core/ByteBuffer.java b/src/main/java/ru/yandex/clickhouse/jdbcbridge/core/ByteBuffer.java
index 8edcc26..d09ac5e 100644
--- a/src/main/java/ru/yandex/clickhouse/jdbcbridge/core/ByteBuffer.java
+++ b/src/main/java/ru/yandex/clickhouse/jdbcbridge/core/ByteBuffer.java
@@ -20,7 +20,6 @@
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.sql.Timestamp;
-import java.util.Calendar;
import java.util.Date;
import java.util.Objects;
import java.util.TimeZone;
@@ -47,7 +46,7 @@ public static ByteBuffer wrap(Buffer buffer, TimeZone timezone) {
}
public static ByteBuffer wrap(Buffer buffer) {
- return wrap(buffer, null);
+ return wrap(buffer, TimeZone.getDefault());
}
public static ByteBuffer newInstance(int initialSizeHint, TimeZone timezone) {
@@ -55,7 +54,7 @@ public static ByteBuffer newInstance(int initialSizeHint, TimeZone timezone) {
}
public static ByteBuffer newInstance(int initialSizeHint) {
- return newInstance(initialSizeHint, null);
+ return newInstance(initialSizeHint, TimeZone.getDefault());
}
public static Buffer asBuffer(String str) {
@@ -64,7 +63,7 @@ public static Buffer asBuffer(String str) {
private ByteBuffer(Buffer buffer, TimeZone timezone) {
this.buffer = buffer != null ? buffer : Buffer.buffer();
- this.timezone = timezone;
+ this.timezone = timezone == null ? TimeZone.getDefault() : timezone;
}
public int length() {
@@ -318,12 +317,15 @@ public BigInteger readInt128() {
}
public ByteBuffer writeInt128(BigInteger value) {
+ byte empty = value.signum() == -1 ? (byte) 0xFF : 0x00;
byte[] bytes = value.toByteArray();
for (int i = bytes.length - 1; i >= 0; i--) {
writeByte(bytes[i]);
}
- writeBytes(new byte[16 - bytes.length]);
+ for (int i = 16 - bytes.length; i > 0; i--) {
+ writeByte(empty);
+ }
return this;
}
@@ -338,12 +340,15 @@ public BigInteger readInt256() {
}
public ByteBuffer writeInt256(BigInteger value) {
+ byte empty = value.signum() == -1 ? (byte) 0xFF : 0x00;
byte[] bytes = value.toByteArray();
for (int i = bytes.length - 1; i >= 0; i--) {
writeByte(bytes[i]);
}
- writeBytes(new byte[32 - bytes.length]);
+ for (int i = 32 - bytes.length; i > 0; i--) {
+ writeByte(empty);
+ }
return this;
}
@@ -420,12 +425,15 @@ private BigInteger toBigInteger(BigDecimal value, int scale) {
}
public BigDecimal readDecimal(int precision, int scale) {
- return precision > 18 ? readDecimal128(scale) : (precision > 9 ? readDecimal64(scale) : readDecimal32(scale));
+ return precision > 38 ? readDecimal256(scale)
+ : (precision > 18 ? readDecimal128(scale)
+ : (precision > 9 ? readDecimal64(scale) : readDecimal32(scale)));
}
public ByteBuffer writeDecimal(BigDecimal value, int precision, int scale) {
- return precision > 18 ? writeDecimal128(value, scale)
- : (precision > 9 ? writeDecimal64(value, scale) : writeDecimal32(value, scale));
+ return precision > 38 ? writeDecimal256(value, scale)
+ : (precision > 18 ? writeDecimal128(value, scale)
+ : (precision > 9 ? writeDecimal64(value, scale) : writeDecimal32(value, scale)));
}
public BigDecimal readDecimal32(int scale) {
@@ -454,15 +462,7 @@ public BigDecimal readDecimal128(int scale) {
}
public ByteBuffer writeDecimal128(BigDecimal value, int scale) {
- byte[] bytes = toBigInteger(value, scale).toByteArray();
-
- for (int i = bytes.length - 1; i >= 0; i--) {
- writeByte(bytes[i]);
- }
-
- writeBytes(new byte[16 - bytes.length]);
-
- return this;
+ return writeInt128(toBigInteger(value, scale));
}
public BigDecimal readDecimal256(int scale) {
@@ -475,15 +475,7 @@ public BigDecimal readDecimal256(int scale) {
}
public ByteBuffer writeDecimal256(BigDecimal value, int scale) {
- byte[] bytes = toBigInteger(value, scale).toByteArray();
-
- for (int i = bytes.length - 1; i >= 0; i--) {
- writeByte(bytes[i]);
- }
-
- writeBytes(new byte[32 - bytes.length]);
-
- return this;
+ return writeInt256(toBigInteger(value, scale));
}
public Timestamp readDateTime() {
@@ -556,23 +548,44 @@ public Timestamp readDateTime64(TimeZone tz) {
return new Timestamp(time.longValue());
}
- public ByteBuffer writeDateTime64(Date value) {
- return writeDateTime64(value, null);
+ public ByteBuffer writeDateTime64(Date value, int scale) {
+ return writeDateTime64(value, scale, null);
+ }
+
+ public ByteBuffer writeDateTime64(Timestamp value, int scale) {
+ return writeDateTime64(value, scale, null);
+ }
+
+ public ByteBuffer writeDateTime64(Date value, int scale, TimeZone tz) {
+ return writeDateTime64(Objects.requireNonNull(value).getTime(), 0, scale, tz);
}
- public ByteBuffer writeDateTime64(Date value, TimeZone tz) {
- return writeDateTime64(Objects.requireNonNull(value).getTime(), tz);
+ public ByteBuffer writeDateTime64(Timestamp value, int scale, TimeZone tz) {
+ return writeDateTime64(Objects.requireNonNull(value).getTime(), value.getNanos(), scale, tz);
}
// ClickHouse's DateTime64 supports precision from 0 to 18, but JDBC only
// supports 3(millisecond)
- public ByteBuffer writeDateTime64(long time, TimeZone tz) {
+ public ByteBuffer writeDateTime64(long time, int nanos, int scale, TimeZone tz) {
if ((tz = tz == null ? this.timezone : tz) != null) {
time += tz.getOffset(time);
}
if (time <= 0L) { // 0000-00-00 00:00:00.000
- time = 1L;
+ time = nanos > 0 ? nanos / 1000000 : 1L;
+ }
+
+ if (scale > 0) {
+ double normalizedTime = time;
+ if (nanos != 0) {
+ normalizedTime = time - nanos / 1000000 + nanos / 1000000.0;
+ }
+
+ if (scale < 3) {
+ time = BigDecimal.valueOf(normalizedTime).divide(BigDecimal.valueOf(10).pow(3 - scale)).longValue();
+ } else if (scale > 3) {
+ time = BigDecimal.valueOf(normalizedTime).multiply(BigDecimal.valueOf(10).pow(scale - 3)).longValue();
+ }
}
return this.writeUInt64(time);
diff --git a/src/main/java/ru/yandex/clickhouse/jdbcbridge/core/ColumnDefinition.java b/src/main/java/ru/yandex/clickhouse/jdbcbridge/core/ColumnDefinition.java
index bf9e02d..a1220d0 100644
--- a/src/main/java/ru/yandex/clickhouse/jdbcbridge/core/ColumnDefinition.java
+++ b/src/main/java/ru/yandex/clickhouse/jdbcbridge/core/ColumnDefinition.java
@@ -458,11 +458,17 @@ public ColumnDefinition(String name, DataType type, boolean nullable, int length
int recommendedScale = DEFAULT_SCALE;
switch (type) {
+ case DateTime64:
+ recommendedPrecision = precision < 0 ? DEFAULT_DATETIME64_PRECISION : precision;
+ recommendedScale = scale < 0 ? DEFAULT_DATETIME64_SCALE
+ : (scale > MAX_DATETIME64_SCALE ? MAX_DATETIME64_SCALE : scale);
+ break;
case Decimal:
recommendedPrecision = DEFAULT_DECIMAL_PRECISON;
recommendedPrecision = precision <= 0 ? recommendedPrecision
: (precision > MAX_PRECISON ? MAX_PRECISON : precision);
- recommendedScale = DEFAULT_DECIMAL_SCALE;
+ recommendedScale = scale < 0 ? DEFAULT_DECIMAL_SCALE
+ : (scale > recommendedPrecision ? recommendedPrecision : scale);
break;
case Decimal32:
recommendedPrecision = DEFAULT_DECIMAL32_PRECISON;
@@ -487,8 +493,12 @@ public ColumnDefinition(String name, DataType type, boolean nullable, int length
this.length = type == FixedStr ? (length <= 0 ? 1 : length) : type.getLength();
this.precision = recommendedPrecision < type.getPrecision() ? recommendedPrecision : type.getPrecision();
- this.scale = this.type == DataType.DateTime64 ? DEFAULT_DATETIME64_SCALE
- : (scale <= 0 ? recommendedScale : (scale > this.precision ? this.precision : scale));
+ this.scale = scale <= 0 ? recommendedScale : (scale > this.precision ? this.precision : scale);
+ /*
+ * this.scale = this.type == DataType.DateTime64 ? DEFAULT_DATETIME64_SCALE :
+ * (scale <= 0 ? recommendedScale : (scale > this.precision ? this.precision :
+ * scale));
+ */
}
public String getName() {
diff --git a/src/main/java/ru/yandex/clickhouse/jdbcbridge/core/DataSourceManager.java b/src/main/java/ru/yandex/clickhouse/jdbcbridge/core/DataSourceManager.java
deleted file mode 100644
index 33cb2f1..0000000
--- a/src/main/java/ru/yandex/clickhouse/jdbcbridge/core/DataSourceManager.java
+++ /dev/null
@@ -1,77 +0,0 @@
-/**
- * Copyright 2019-2020, Zhichun Wu
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package ru.yandex.clickhouse.jdbcbridge.core;
-
-import java.util.List;
-
-/**
- * This interface defines a service responsible for registering new datasource
- * type, as well as adding/retrieving named datasources using ID or alias.
- *
- * @since 2.0
- */
-public interface DataSourceManager extends Reloadable {
- static final String DEFAULT_TYPE = "default";
-
- final DnsResolver resolver = new DnsResolver();
-
- /**
- * Resolve given connection string.
- *
- * @param uri connection string
- * @return resolved connection string
- */
- default String resolve(String uri) {
- return Utils.applyVariables(uri, resolver::apply);
- }
-
- /**
- * Get statistics of all datasources.
- *
- * @return statistics of all datasources
- */
- List getDataSourceStats();
-
- /**
- * Register new datasource type.
- *
- * @param typeName name of the new datasource type to register
- * @param extension extension of the new datasource type
- */
- void registerType(String typeName, Extension extension);
-
- /**
- * Add a new datasource.
- *
- * @param id id of the new datasource; if it's null, datasource.getId()
- * will be used instead
- * @param datasource non-null datasource to be added
- */
- void put(String id, NamedDataSource datasource);
-
- default NamedDataSource get(String id) {
- return get(id, false);
- }
-
- /**
- * Get or create a data source from given URI.
- *
- * @param uri connection string
- * @param orCreate true to create the data source if no found; false otherwise
- * @return desired data source
- */
- NamedDataSource get(String uri, boolean orCreate);
-}
diff --git a/src/main/java/ru/yandex/clickhouse/jdbcbridge/core/DataSourceStats.java b/src/main/java/ru/yandex/clickhouse/jdbcbridge/core/DataSourceStats.java
index cbc6531..64edf04 100644
--- a/src/main/java/ru/yandex/clickhouse/jdbcbridge/core/DataSourceStats.java
+++ b/src/main/java/ru/yandex/clickhouse/jdbcbridge/core/DataSourceStats.java
@@ -23,7 +23,7 @@
*
* @since 2.0
*/
-public class DataSourceStats {
+public class DataSourceStats implements UsageStats {
private final String idOrAlias;
private final int instance;
private final boolean alias;
diff --git a/src/main/java/ru/yandex/clickhouse/jdbcbridge/core/DataTableReader.java b/src/main/java/ru/yandex/clickhouse/jdbcbridge/core/DataTableReader.java
index e3b31dc..4e57848 100644
--- a/src/main/java/ru/yandex/clickhouse/jdbcbridge/core/DataTableReader.java
+++ b/src/main/java/ru/yandex/clickhouse/jdbcbridge/core/DataTableReader.java
@@ -15,6 +15,8 @@
*/
package ru.yandex.clickhouse.jdbcbridge.core;
+import java.util.HashMap;
+import java.util.Map;
import java.util.Objects;
import java.util.TimeZone;
@@ -74,6 +76,7 @@ default void process(String dataSourceId, ColumnDefinition[] requestColumns, Col
Objects.requireNonNull(params);
Objects.requireNonNull(writer);
+ // Map colName2Index = new HashMap<>();
// build column indices: 0 -> Request column index; 1 -> ResultSet column index
int length = requestColumns.length;
int[][] colIndices = new int[length][2];
@@ -128,7 +131,7 @@ default void process(String dataSourceId, ColumnDefinition[] requestColumns, Col
}
// now let's read rows
- int rowCount = 0;
+ int rowCount = params.isMutation() ? 0 : this.skipRows(params);
int batchSize = params.getBatchSize();
if (batchSize <= 0) {
batchSize = 1;
@@ -136,8 +139,10 @@ default void process(String dataSourceId, ColumnDefinition[] requestColumns, Col
int estimatedBufferSize = length * 4 * batchSize;
ByteBuffer buffer = ByteBuffer.newInstance(estimatedBufferSize, timezone);
+ boolean skipped = rowCount > 0;
+ while (skipped || nextRow()) {
+ skipped = false;
- while (nextRow()) {
for (int i = 0; i < length; i++) {
int[] indices = colIndices[i];
@@ -184,4 +189,53 @@ default void process(String dataSourceId, ColumnDefinition[] requestColumns, Col
writer.write(buffer);
}
}
+
+ default int skipRows(QueryParameters parameters) {
+ int rowCount = 0;
+
+ if (parameters == null) {
+ return rowCount;
+ }
+
+ int position = parameters.getPosition();
+ // absolute position takes priority
+ if (position != 0) {
+ if (position < 0) {
+ throw new IllegalArgumentException("Only positive position is supported!");
+ }
+
+ // position of the first row is 1
+ for (int i = 0; i < position; i++) {
+ if (nextRow()) {
+ rowCount++;
+ continue;
+ } else {
+ throw new IllegalStateException(
+ "Not able to move cursor to row #" + position + " as we only got " + i);
+ }
+ }
+ } else { // now skip rows as needed
+ int offset = parameters.getOffset();
+
+ if (offset < 0) {
+ throw new IllegalArgumentException("Only positive offset is supported!");
+ } else if (offset != 0) {
+ int counter = offset;
+ while (nextRow()) {
+ rowCount++;
+
+ if (--offset <= 0) {
+ break;
+ }
+ }
+
+ if (offset != 0) {
+ throw new IllegalStateException("Not able to move cursor to row #" + (counter + 1)
+ + " as we only got " + (counter - offset));
+ }
+ }
+ }
+
+ return rowCount;
+ }
}
diff --git a/src/main/java/ru/yandex/clickhouse/jdbcbridge/core/DataType.java b/src/main/java/ru/yandex/clickhouse/jdbcbridge/core/DataType.java
index a89bd38..41535b2 100644
--- a/src/main/java/ru/yandex/clickhouse/jdbcbridge/core/DataType.java
+++ b/src/main/java/ru/yandex/clickhouse/jdbcbridge/core/DataType.java
@@ -30,7 +30,7 @@ public enum DataType {
Float32(4, 8, 8), Float64(16, 17, 17),
// Date time
- Date(4, 10, 0), DateTime(8, 19, 0), DateTime64(16, 29, 0),
+ Date(4, 10, 0), DateTime(8, 19, 0), DateTime64(16, 38, 18),
// Decimals
Decimal(32, 76, 76), Decimal32(4, 9, 9), Decimal64(8, 18, 18), Decimal128(16, 38, 38), Decimal256(32, 76, 76),
@@ -65,7 +65,9 @@ public enum DataType {
public static final int DEFAULT_PRECISION = 0;
public static final int DEFAULT_SCALE = 0;
+ public static final int MAX_DATETIME64_PRECISION = 38; // 19 + 1 + 18
public static final int MAX_DATETIME64_SCALE = 18;
+ public static final int DEFAULT_DATETIME64_PRECISION = 23; // 19 + 1 + 3
// Tick size (precision): 10-precision seconds
public static final int DEFAULT_DATETIME64_SCALE = 3;
diff --git a/src/main/java/ru/yandex/clickhouse/jdbcbridge/core/DataTypeConverter.java b/src/main/java/ru/yandex/clickhouse/jdbcbridge/core/DataTypeConverter.java
index ff05554..5f7ae3b 100644
--- a/src/main/java/ru/yandex/clickhouse/jdbcbridge/core/DataTypeConverter.java
+++ b/src/main/java/ru/yandex/clickhouse/jdbcbridge/core/DataTypeConverter.java
@@ -139,7 +139,7 @@ default T as(Class type, Object value) {
return (T) result;
}
- DataType from(JDBCType jdbcType, boolean signed, boolean useDateTime);
+ DataType from(JDBCType jdbcType, String typeName, int precision, int scale, boolean signed);
DataType from(Object javaObject);
diff --git a/src/main/java/ru/yandex/clickhouse/jdbcbridge/core/DataTypeMapping.java b/src/main/java/ru/yandex/clickhouse/jdbcbridge/core/DataTypeMapping.java
new file mode 100644
index 0000000..0669cac
--- /dev/null
+++ b/src/main/java/ru/yandex/clickhouse/jdbcbridge/core/DataTypeMapping.java
@@ -0,0 +1,88 @@
+/**
+ * Copyright 2019-2020, Zhichun Wu
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package ru.yandex.clickhouse.jdbcbridge.core;
+
+import java.sql.JDBCType;
+
+/**
+ * This defines a mapping from specific JDBC/native type to {@link DataType}.
+ * Native type is case-sensitive. {@code "*"} represents any type.
+ *
+ * @since 2.0
+ */
+public class DataTypeMapping {
+ public static final String ANY_NATIVE_TYPE = "*";
+
+ private final JDBCType fromJdbcType;
+ private final String fromNativeType;
+ private final DataType toType;
+
+ private static JDBCType parse(String fromJdbcType) {
+ JDBCType jdbcType = JDBCType.OTHER;
+
+ if (fromJdbcType != null) {
+ try {
+ fromJdbcType = fromJdbcType.trim().toUpperCase();
+ jdbcType = JDBCType.valueOf(fromJdbcType);
+ } catch (RuntimeException e) {
+ }
+ }
+
+ return jdbcType;
+ }
+
+ private static JDBCType parse(int fromJdbcType) {
+ JDBCType jdbcType = JDBCType.OTHER;
+
+ try {
+ jdbcType = JDBCType.valueOf(fromJdbcType);
+ } catch (RuntimeException e) {
+ }
+
+ return jdbcType;
+ }
+
+ public DataTypeMapping(String fromJdbcType, String fromNativeType, String toType) {
+ this(parse(fromJdbcType), fromNativeType, DataType.from(toType));
+ }
+
+ public DataTypeMapping(int fromJdbcType, String fromNativeType, DataType toType) {
+ this(parse(fromJdbcType), fromNativeType, toType);
+ }
+
+ public DataTypeMapping(JDBCType fromJdbcType, String fromNativeType, DataType toType) {
+ this.fromJdbcType = fromJdbcType;
+ this.fromNativeType = ANY_NATIVE_TYPE.equals(fromNativeType) ? ANY_NATIVE_TYPE : fromNativeType;
+ this.toType = toType;
+ }
+
+ public JDBCType getSourceJdbcType() {
+ return fromJdbcType;
+ }
+
+ public String getSourceNativeType() {
+ return fromNativeType;
+ }
+
+ public DataType getMappedType() {
+ return toType;
+ }
+
+ public boolean accept(JDBCType jdbcType, String nativeType) {
+ return fromNativeType != null ? (ANY_NATIVE_TYPE == fromNativeType || fromNativeType.equals(nativeType))
+ : fromJdbcType == jdbcType;
+ }
+}
diff --git a/src/main/java/ru/yandex/clickhouse/jdbcbridge/core/ExpandedUrlClassLoader.java b/src/main/java/ru/yandex/clickhouse/jdbcbridge/core/ExpandedUrlClassLoader.java
index 819aaef..ce6093b 100644
--- a/src/main/java/ru/yandex/clickhouse/jdbcbridge/core/ExpandedUrlClassLoader.java
+++ b/src/main/java/ru/yandex/clickhouse/jdbcbridge/core/ExpandedUrlClassLoader.java
@@ -38,9 +38,9 @@
public class ExpandedUrlClassLoader extends URLClassLoader {
private static final org.slf4j.Logger log = org.slf4j.LoggerFactory.getLogger(ExpandedUrlClassLoader.class);
- private static final String PROTOCOL_FILE = "file";
- private static final String FILE_URL_PREFIX = PROTOCOL_FILE + ":///";
- private static final String DRIVER_EXTENSION = ".jar";
+ static final String PROTOCOL_FILE = "file";
+ static final String FILE_URL_PREFIX = PROTOCOL_FILE + ":///";
+ static final String DRIVER_EXTENSION = ".jar";
// not going to use OSGi and maven which are over-complex
protected static URL[] expandURLs(String... urls) {
@@ -63,12 +63,15 @@ protected static URL[] expandURLs(String... urls) {
url = cache.add(s) ? new URL(s) : null;
} catch (MalformedURLException e) {
// might be a local path?
- if (cache.add(s = FILE_URL_PREFIX + Paths.get(s).normalize().toFile().getAbsolutePath())) {
- try {
- url = new URL(s);
- } catch (MalformedURLException exp) {
- log.warn("Skip malformed URL [{}]", s);
+ try {
+ URL tmp = Paths.get(s).normalize().toFile().toURI().toURL();
+ if (cache.add(s = tmp.toString())) {
+ url = tmp;
}
+ } catch (InvalidPathException exp) {
+ log.warn("Skip invalid path [{}]", s);
+ } catch (MalformedURLException exp) {
+ log.warn("Skip malformed URL [{}]", s);
}
}
@@ -90,11 +93,11 @@ protected static URL[] expandURLs(String... urls) {
}
if (path != null && Files.isDirectory(path)) {
- File dir = path.toFile();
+ File dir = path.normalize().toFile();
for (String file : dir.list()) {
if (file.endsWith(DRIVER_EXTENSION)) {
- file = new StringBuilder().append(FILE_URL_PREFIX).append(dir.getPath()).append('/')
- .append(file).toString();
+ file = new StringBuilder().append(FILE_URL_PREFIX).append(dir.getPath())
+ .append(File.separatorChar).append(file).toString();
if (isNegative) {
try {
@@ -122,7 +125,9 @@ protected static URL[] expandURLs(String... urls) {
}
if (list.removeAll(negativeSet)) {
- log.debug("Excluded URLs: {}", negativeSet);
+ if (log.isDebugEnabled()) {
+ log.debug("Excluded URLs: {}", negativeSet);
+ }
}
return list.toArray(new URL[list.size()]);
diff --git a/src/main/java/ru/yandex/clickhouse/jdbcbridge/core/Extension.java b/src/main/java/ru/yandex/clickhouse/jdbcbridge/core/Extension.java
index a363f65..96ecf31 100644
--- a/src/main/java/ru/yandex/clickhouse/jdbcbridge/core/Extension.java
+++ b/src/main/java/ru/yandex/clickhouse/jdbcbridge/core/Extension.java
@@ -42,6 +42,10 @@ public final class Extension {
private final Method newMethod;
public Extension(Class extends T> clazz) {
+ this(null, clazz);
+ }
+
+ public Extension(String name, Class extends T> clazz) {
this.loader = Thread.currentThread().getContextClassLoader();
this.extClass = Objects.requireNonNull(clazz);
@@ -55,23 +59,29 @@ public Extension(Class extends T> clazz) {
}
}
} catch (Exception e) {
- log.trace("Extension [{}] does not have [{}] declared, use [{}] as its name instead", clazz, EXTENSION_NAME,
- extName);
+ if (log.isTraceEnabled()) {
+ log.trace("Extension [{}] does not have [{}] declared, use [{}] as its name instead", clazz,
+ EXTENSION_NAME, extName);
+ }
}
- this.name = extName;
+ this.name = name == null ? extName : name;
Method m = null;
try {
m = this.extClass.getDeclaredMethod(METHOD_INITIALIZE, ExtensionManager.class);
} catch (Exception e) {
- log.trace("Extension [{}] does not have static method for initialization.", clazz);
+ if (log.isTraceEnabled()) {
+ log.trace("Extension [{}] does not have static method for initialization.", clazz);
+ }
}
this.initMethod = m;
try {
m = this.extClass.getDeclaredMethod(METHOD_NEW_INSTANCE, Object[].class);
} catch (Exception e) {
- log.trace("Extension [{}] does not have static method for instantiation.", clazz);
+ if (log.isTraceEnabled()) {
+ log.trace("Extension [{}] does not have static method for instantiation.", clazz);
+ }
}
this.newMethod = m;
}
@@ -96,6 +106,27 @@ public Class extends T> getProviderClass() {
return this.extClass;
}
+ /**
+ * Load a specific class.
+ *
+ * @param className class name
+ * @return desired class
+ */
+ public Class> loadClass(String className) {
+ Class> clazz = null;
+
+ ClassLoader loader = this.loader == null ? getClass().getClassLoader() : this.loader;
+ try {
+ clazz = loader.loadClass(className);
+ } catch (ClassNotFoundException e) {
+ log.warn("Not able to load class: " + className);
+ } catch (Exception e) {
+ log.warn("Failed to load class: " + className, e);
+ }
+
+ return clazz;
+ }
+
/**
* Initialize the extension. This will be only called once at startup of the
* application.
@@ -123,8 +154,9 @@ public void initialize(ExtensionManager manager) {
*
* @param args list of arguments for instantiation
* @return new instance of the extension
- * @throws UnsupportedOperationException if no static newInstance method and
- * suitable constructor for instantiation
+ * @throws UnsupportedOperationException if no static {@code newInstance} method
+ * and suitable constructor for
+ * instantiation
* @throws IllegalArgumentException if failed to create new instance using
* given arguments
*/
diff --git a/src/main/java/ru/yandex/clickhouse/jdbcbridge/core/ExtensionManager.java b/src/main/java/ru/yandex/clickhouse/jdbcbridge/core/ExtensionManager.java
index 510e194..5750b6e 100644
--- a/src/main/java/ru/yandex/clickhouse/jdbcbridge/core/ExtensionManager.java
+++ b/src/main/java/ru/yandex/clickhouse/jdbcbridge/core/ExtensionManager.java
@@ -28,19 +28,21 @@
* @since 2.0
*/
public interface ExtensionManager {
+ /**
+ * Get extension implemented by given class.
+ *
+ * @param type of the extension
+ * @param clazz implementation class of the extension
+ * @return desired extension
+ */
Extension getExtension(Class extends T> clazz);
- DataSourceManager getDataSourceManager();
-
- void setDataSourceManager(DataSourceManager manager);
-
- QueryManager getQueryManager();
-
- void setQueryManager(QueryManager manager);
-
- SchemaManager getSchemaManager();
-
- void setSchemaManager(SchemaManager manager);
+ /**
+ * Get repository manager.
+ *
+ * @return repository manager
+ */
+ RepositoryManager getRepositoryManager();
/**
* Register a consumer to load configuration files(in JSON format) based on
diff --git a/src/main/java/ru/yandex/clickhouse/jdbcbridge/core/ManagedEntity.java b/src/main/java/ru/yandex/clickhouse/jdbcbridge/core/ManagedEntity.java
new file mode 100644
index 0000000..83dd408
--- /dev/null
+++ b/src/main/java/ru/yandex/clickhouse/jdbcbridge/core/ManagedEntity.java
@@ -0,0 +1,123 @@
+/**
+ * Copyright 2019-2020, Zhichun Wu
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package ru.yandex.clickhouse.jdbcbridge.core;
+
+import java.util.Collections;
+import java.util.Date;
+import java.util.LinkedHashSet;
+import java.util.Objects;
+import java.util.Set;
+
+import io.vertx.core.json.JsonArray;
+import io.vertx.core.json.JsonObject;
+
+/**
+ * This class defines an entity which can be initialized using JSON format
+ * configuration.
+ *
+ * @since 2.0
+ */
+public abstract class ManagedEntity {
+ protected static final String CONF_ID = "id";
+ protected static final String CONF_ALIASES = "aliases";
+ protected static final String CONF_TYPE = "type";
+
+ protected final Set aliases;
+ protected final Date createDateTime;
+ protected final String digest;
+ protected final String id;
+ protected final String type;
+
+ /**
+ * Constructor of configurable entity.
+ *
+ * @param id id of the entity
+ * @param config configuration in JSON format, {@code id}, {@code type} and
+ * {@code aliases} properties are reserved for instantiation
+ */
+ protected ManagedEntity(String id, JsonObject config) {
+ this.aliases = new LinkedHashSet<>();
+ this.createDateTime = new Date();
+ this.digest = Utils.digest(config);
+ this.id = id;
+
+ String defaultType = getClass().getSimpleName();
+ if (config != null) {
+ this.type = config.getString(CONF_TYPE, defaultType);
+ JsonArray array = config.getJsonArray(CONF_ALIASES);
+ if (array != null) {
+ for (Object item : array) {
+ if ((item instanceof String) && !Utils.EMPTY_STRING.equals(item)) {
+ this.aliases.add((String) item);
+ }
+ }
+
+ this.aliases.remove(id);
+ }
+ } else {
+ this.type = defaultType;
+ }
+ }
+
+ /**
+ * Get id of the entity.
+ *
+ * @return id of the entity
+ */
+ public final String getId() {
+ return id;
+ }
+
+ /**
+ * Get list of aliases of the entity.
+ *
+ * @return aliases of the entity
+ */
+ public final Set getAliases() {
+ return Collections.unmodifiableSet(this.aliases);
+ }
+
+ /**
+ * Get creation datetime of the entity.
+ *
+ * @return creation datetime of the entity
+ */
+ public final Date getCreationDateTime() {
+ return this.createDateTime;
+ }
+
+ /**
+ * Get type of the entity.
+ *
+ * @return type of the entity
+ */
+ public String getType() {
+ return Objects.requireNonNull(type);
+ }
+
+ /**
+ * Check if given configuration is different from current or not.
+ *
+ * @param config configuration in JSON format
+ * @return true if the given configuration is different from current; false
+ * otherwise
+ */
+ public final boolean isDifferentFrom(JsonObject config) {
+ return this.digest == null || this.digest.isEmpty() || !this.digest.equals(Utils.digest(config));
+ }
+
+ public abstract UsageStats getUsage(String idOrAlias);
+}
diff --git a/src/main/java/ru/yandex/clickhouse/jdbcbridge/core/NamedDataSource.java b/src/main/java/ru/yandex/clickhouse/jdbcbridge/core/NamedDataSource.java
index 9c97a0a..32ecd7b 100644
--- a/src/main/java/ru/yandex/clickhouse/jdbcbridge/core/NamedDataSource.java
+++ b/src/main/java/ru/yandex/clickhouse/jdbcbridge/core/NamedDataSource.java
@@ -20,11 +20,11 @@
import java.util.ArrayList;
import java.util.Collections;
import java.util.Date;
+import java.util.LinkedHashSet;
import java.util.List;
import java.util.Objects;
-import java.util.SortedSet;
+import java.util.Set;
import java.util.TimeZone;
-import java.util.TreeSet;
import java.util.Map.Entry;
import java.util.concurrent.TimeUnit;
@@ -43,18 +43,16 @@
*
* @since 2.0
*/
-public class NamedDataSource implements Closeable {
+public class NamedDataSource extends ManagedEntity implements Closeable {
private static final org.slf4j.Logger log = org.slf4j.LoggerFactory.getLogger(NamedDataSource.class);
private static final String DATASOURCE_TYPE = "general";
-
- protected static final String CONF_ID = "id";
+ private static final DataTypeConverter defaultConverter = Utils.loadService(DataTypeConverter.class);
protected static final String CONF_CACHE = "cache";
protected static final String CONF_SIZE = "size";
protected static final String CONF_EXPIRATION = "expiration";
- protected static final String CONF_ALIASES = "aliases";
protected static final String CONF_COLUMNS = "columns";
protected static final String CONF_DEFAULTS = "defaults";
protected static final String CONF_DRIVER_URLS = "driverUrls";
@@ -72,8 +70,6 @@ public class NamedDataSource implements Closeable {
protected static final String COLUMN_PREFIX = "col_";
- protected static final DataTypeConverter converter = Utils.loadService(DataTypeConverter.class);
-
// See all supported values defined in:
// https://github.com/ClickHouse/ClickHouse/blob/master/src/Parsers/IdentifierQuotingStyle.h
public static final String DEFAULT_QUOTE_IDENTIFIER = "`";
@@ -81,10 +77,15 @@ public class NamedDataSource implements Closeable {
public static final String CONF_SCHEMA = "$schema";
public static final String CONF_TYPE = "type";
public static final String CONF_TIMEZONE = "timezone";
- public static final String CONF_DATETIME = "datetime";
public static final String CONF_QUERY_TIMEOUT = "queryTimeout";
public static final String CONF_WRITE_TIMEOUT = "writeTimeout";
public static final String CONF_SEALED = "sealed";
+ public static final String CONF_CONVERTER = "converter";
+ public static final String CONF_CLASS = "class";
+ public static final String CONF_MAPPINGS = "mappings";
+ public static final String CONF_JDBC_TYPE = "jdbcType";
+ public static final String CONF_NATIVE_TYPE = "nativeType";
+ public static final String CONF_TO_TYPE = "to";
protected static final boolean USE_CUSTOM_DRIVER_LOADER = Boolean
.valueOf(Utils.getConfiguration("true", "CUSTOM_DRIVER_LOADER", "jdbc-bridge.driver.loader"));
@@ -98,17 +99,9 @@ public class NamedDataSource implements Closeable {
private final Cache columnsCache;
- private final String id;
- private final SortedSet aliases;
-
- private final Date createDateTime;
-
- private final SortedSet driverUrls;
+ private final Set driverUrls;
private final ClassLoader driverClassLoader;
- private final String digest;
-
- private final boolean dateTime;
private final TimeZone timezone;
private final int queryTimeout;
private final int writeTimeout;
@@ -117,6 +110,8 @@ public class NamedDataSource implements Closeable {
private final DefaultValues defaultValues;
private final QueryParameters queryParameters;
+ protected final DataTypeConverter converter;
+
public static NamedDataSource newInstance(Object... args) {
if (Objects.requireNonNull(args).length < 2) {
throw new IllegalArgumentException(
@@ -124,7 +119,7 @@ public static NamedDataSource newInstance(Object... args) {
}
String id = (String) args[0];
- DataSourceManager manager = (DataSourceManager) Objects.requireNonNull(args[1]);
+ Repository manager = (Repository) Objects.requireNonNull(args[1]);
JsonObject config = args.length > 2 ? (JsonObject) args[2] : null;
return new NamedDataSource(id, manager, config);
@@ -144,10 +139,8 @@ protected boolean isSavedQuery(String file) {
}
private void writeDebugResult(String schema, String originalQuery, String loadedQuery, QueryParameters parameters,
- TableDefinition metaData, ResponseWriter writer) {
- if (metaData == null) {
- metaData = new TableDefinition();
- }
+ ResponseWriter writer) {
+ TableDefinition metaData = TableDefinition.DEBUG_COLUMNS;
ByteBuffer buffer = ByteBuffer.newInstance(loadedQuery.length() * 4);
@@ -183,20 +176,14 @@ protected void writeQueryResult(String schema, String originalQuery, String load
ResponseWriter writer) {
}
- public NamedDataSource(String id, DataSourceManager manager, JsonObject config) {
- if (Objects.requireNonNull(id).isEmpty()) {
+ public NamedDataSource(String id, Repository extends NamedDataSource> repository, JsonObject config) {
+ super(Objects.requireNonNull(id), config);
+
+ if (id.isEmpty()) {
throw new IllegalArgumentException("Non-empty datasource id required.");
}
- Objects.requireNonNull(manager);
-
- this.id = id;
- this.aliases = new TreeSet<>();
-
- this.createDateTime = new Date();
-
- this.driverUrls = new TreeSet<>();
- this.digest = Utils.digest(config);
+ this.driverUrls = new LinkedHashSet<>();
this.customColumns = new ArrayList();
@@ -204,17 +191,16 @@ public NamedDataSource(String id, DataSourceManager manager, JsonObject config)
int cacheExpireMinute = 5;
if (config == null) {
- this.dateTime = false;
this.timezone = null;
this.queryTimeout = -1;
this.writeTimeout = -1;
this.sealed = false;
this.defaultValues = new DefaultValues();
this.queryParameters = new QueryParameters();
+ this.converter = defaultConverter;
} else {
- this.dateTime = config.getBoolean(CONF_DATETIME, false);
- String tz = config.getString(CONF_TIMEZONE);
- this.timezone = tz == null ? null : TimeZone.getTimeZone(tz);
+ String str = config.getString(CONF_TIMEZONE);
+ this.timezone = str == null ? null : TimeZone.getTimeZone(str);
this.queryTimeout = config.getInteger(CONF_QUERY_TIMEOUT, -1);
this.writeTimeout = config.getInteger(CONF_WRITE_TIMEOUT, -1);
this.sealed = config.getBoolean(CONF_SEALED, false);
@@ -238,6 +224,34 @@ public NamedDataSource(String id, DataSourceManager manager, JsonObject config)
}
}
+ DataTypeConverter customConverter = defaultConverter;
+ JsonObject obj = config.getJsonObject(CONF_CONVERTER);
+ if (obj != null) {
+ List<DataTypeMapping> mappings = new ArrayList<>();
+ array = obj.getJsonArray(CONF_MAPPINGS);
+ if (array != null) {
+ for (Object m : array) {
+ if (m instanceof JsonObject) {
+ JsonObject jm = (JsonObject) m;
+ mappings.add(new DataTypeMapping(jm.getString(CONF_JDBC_TYPE),
+ jm.getString(CONF_NATIVE_TYPE), jm.getString(CONF_TO_TYPE)));
+ }
+ }
+ }
+
+ str = obj.getString(CONF_CLASS);
+ if (str == null || str.isEmpty()) {
+ str = defaultConverter.getClass().getName();
+ }
+
+ try {
+ customConverter = (DataTypeConverter) Utils.loadExtension(driverUrls, str).newInstance(mappings);
+ } catch (Exception e) {
+ log.warn("Failed to instantiate custom data type converter [{}] due to: {}", str, e.getMessage());
+ }
+ }
+ this.converter = customConverter;
+
JsonObject cacheConfig = config.getJsonObject(CONF_CACHE);
if (cacheConfig != null) {
for (Entry entry : cacheConfig) {
@@ -252,9 +266,9 @@ public NamedDataSource(String id, DataSourceManager manager, JsonObject config)
}
array = config.getJsonArray(CONF_COLUMNS);
if (array != null) {
- for (Object obj : array) {
- if (obj instanceof JsonObject) {
- this.customColumns.add(ColumnDefinition.fromJson((JsonObject) obj));
+ for (Object o : array) {
+ if (o instanceof JsonObject) {
+ this.customColumns.add(ColumnDefinition.fromJson((JsonObject) o));
}
}
}
@@ -262,9 +276,9 @@ public NamedDataSource(String id, DataSourceManager manager, JsonObject config)
this.queryParameters = new QueryParameters(config.getJsonObject(CONF_PARAMETERS));
}
- this.driverClassLoader = USE_CUSTOM_DRIVER_LOADER
- ? new ExpandedUrlClassLoader(DEFAULT_DRIVER_CLASSLOADER,
- this.driverUrls.toArray(new String[this.driverUrls.size()]))
+ this.driverClassLoader = USE_CUSTOM_DRIVER_LOADER ? (this.driverUrls.isEmpty() ? DEFAULT_DRIVER_CLASSLOADER
+ : new ExpandedUrlClassLoader(DEFAULT_DRIVER_CLASSLOADER,
+ this.driverUrls.toArray(new String[this.driverUrls.size()])))
: null;
this.columnsCache = Caffeine.newBuilder().maximumSize(cacheSize).recordStats()
@@ -291,30 +305,18 @@ public String getPoolUsage() {
return EMPTY_USAGE;
}
- public final String getId() {
- return this.id;
- }
-
- public final SortedSet getAliases() {
- return Collections.unmodifiableSortedSet(this.aliases);
- }
-
public final Date getCreateDateTime() {
return this.createDateTime;
}
- public final SortedSet getDriverUrls() {
- return Collections.unmodifiableSortedSet(this.driverUrls);
+ public final Set getDriverUrls() {
+ return Collections.unmodifiableSet(this.driverUrls);
}
public final ClassLoader getDriverClassLoader() {
return this.driverClassLoader;
}
- public final boolean useDateTime() {
- return this.dateTime;
- }
-
public final TimeZone getTimeZone() {
return this.timezone;
}
@@ -343,7 +345,7 @@ public final String getParametersAsJsonString() {
JsonObject obj = new JsonObject();
obj.put(CONF_ID, this.getId());
- SortedSet aliases = this.getAliases();
+ Set aliases = this.getAliases();
if (aliases.size() > 1) {
JsonArray array = new JsonArray();
for (String a : aliases) {
@@ -352,7 +354,7 @@ public final String getParametersAsJsonString() {
obj.put(CONF_ALIASES, array);
}
- SortedSet driverUrls = this.getDriverUrls();
+ Set driverUrls = this.getDriverUrls();
if (driverUrls.size() > 1) {
JsonArray array = new JsonArray();
for (String a : driverUrls) {
@@ -361,7 +363,6 @@ public final String getParametersAsJsonString() {
obj.put(CONF_DRIVER_URLS, array);
}
- obj.put(CONF_DATETIME, this.useDateTime());
if (this.getTimeZone() != null) {
obj.put(CONF_TIMEZONE, this.getTimeZone().getID());
}
@@ -382,7 +383,9 @@ public final String getParametersAsJsonString() {
}
public final TableDefinition getResultColumns(String schema, String query, QueryParameters params) {
- log.debug("Inferring columns: schema=[{}], query=[{}]", schema, query);
+ if (log.isDebugEnabled()) {
+ log.debug("Inferring columns: schema=[{}], query=[{}]", schema, query);
+ }
final TableDefinition columns;
@@ -406,18 +409,6 @@ public final TableDefinition getResultColumns(String schema, String query, Query
return columns;
}
- public final boolean isDifferentFrom(JsonObject newConfig) {
- String newDigest = Utils.digest(newConfig == null ? null : newConfig.encode());
- boolean isDifferent = this.digest == null || this.digest.length() == 0 || !this.digest.equals(newDigest);
- if (isDifferent) {
- log.info("Datasource configuration of [{}] is changed from [{}] to [{}]", this.id, digest, newDigest);
- } else {
- log.info("Datasource configuration of [{}] remains the same", this.id);
- }
-
- return isDifferent;
- }
-
public final List getCustomColumns() {
return Collections.unmodifiableList(this.customColumns);
}
@@ -484,6 +475,11 @@ public final String loadSavedQueryAsNeeded(String normalizedQuery, QueryParamete
return Utils.applyVariables(normalizedQuery, params == null ? null : params.asVariables());
}
+ @Override
+ public UsageStats getUsage(String idOrAlias) {
+ return new DataSourceStats(idOrAlias, this);
+ }
+
@Override
public void close() {
log.info("Closing datasource[id={}, instance={}]", this.id, this);
@@ -494,7 +490,7 @@ public final void executeQuery(String schema, String originalQuery, String loade
log.info("Executing query(schema=[{}]):\n{}", schema, loadedQuery);
if (params.isDebug()) {
- writeDebugResult(schema, originalQuery, loadedQuery, params, null, writer);
+ writeDebugResult(schema, originalQuery, loadedQuery, params, writer);
} else {
ColumnDefinition[] customColumns = this.customColumns
.toArray(new ColumnDefinition[this.customColumns.size()]);
diff --git a/src/main/java/ru/yandex/clickhouse/jdbcbridge/core/NamedQuery.java b/src/main/java/ru/yandex/clickhouse/jdbcbridge/core/NamedQuery.java
index 08cad3b..ce4ffa8 100644
--- a/src/main/java/ru/yandex/clickhouse/jdbcbridge/core/NamedQuery.java
+++ b/src/main/java/ru/yandex/clickhouse/jdbcbridge/core/NamedQuery.java
@@ -21,67 +21,44 @@
/**
* This class defines a named query, which is composed of query, schema and
- * query parameter.
+ * parameters.
*
* @since 2.0
*/
-public class NamedQuery {
- private static final org.slf4j.Logger log = org.slf4j.LoggerFactory.getLogger(NamedQuery.class);
-
+public class NamedQuery extends NamedSchema {
private static final String CONF_QUERY = "query";
- private static final String CONF_COLUMNS = "columns";
+ private static final String CONF_SCHEMA = "schema";
private static final String CONF_PARAMETERS = "parameters";
- private final String id;
- private final String digest;
private final String query;
- private final TableDefinition columns;
+ private final String schema;
private final QueryParameters parameters;
- public NamedQuery(String id, JsonObject config) {
- Objects.requireNonNull(config);
-
- this.id = id;
- this.digest = Utils.digest(config);
+ public NamedQuery(String id, Repository<? extends NamedQuery> repo, JsonObject config) {
+ super(id, repo, config);
- String namedQuery = config.getString(CONF_QUERY);
- Objects.requireNonNull(namedQuery);
+ String str = config.getString(CONF_QUERY);
+ this.query = Objects.requireNonNull(str);
+ str = config.getString(CONF_SCHEMA);
+ this.schema = str == null ? Utils.EMPTY_STRING : str;
- this.query = namedQuery;
- this.columns = TableDefinition.fromJson(config.getJsonArray(CONF_COLUMNS));
this.parameters = new QueryParameters(config.getJsonObject(CONF_PARAMETERS));
}
- public String getId() {
- return this.id;
- }
-
public String getQuery() {
return this.query;
}
- public boolean hasColumn() {
- return this.columns != null && this.columns.hasColumn();
+ public String getSchema() {
+ return this.schema;
}
public TableDefinition getColumns(QueryParameters params) {
- return this.columns;
+ return this.getColumns();
}
public QueryParameters getParameters() {
return this.parameters;
}
-
- public final boolean isDifferentFrom(JsonObject newConfig) {
- String newDigest = Utils.digest(newConfig == null ? null : newConfig.encode());
- boolean isDifferent = this.digest == null || this.digest.length() == 0 || !this.digest.equals(newDigest);
- if (isDifferent) {
- log.info("Query configuration of [{}] is changed from [{}] to [{}]", this.id, digest, newDigest);
- } else {
- log.debug("Query configuration of [{}] remains the same", this.id);
- }
-
- return isDifferent;
- }
}
\ No newline at end of file
diff --git a/src/main/java/ru/yandex/clickhouse/jdbcbridge/core/NamedSchema.java b/src/main/java/ru/yandex/clickhouse/jdbcbridge/core/NamedSchema.java
index d541b74..4a21952 100644
--- a/src/main/java/ru/yandex/clickhouse/jdbcbridge/core/NamedSchema.java
+++ b/src/main/java/ru/yandex/clickhouse/jdbcbridge/core/NamedSchema.java
@@ -24,28 +24,17 @@
*
* @since 2.0
*/
-public class NamedSchema {
- private static final org.slf4j.Logger log = org.slf4j.LoggerFactory.getLogger(NamedSchema.class);
+public class NamedSchema extends ManagedEntity {
+ protected static final String CONF_COLUMNS = "columns";
- private static final String CONF_COLUMNS = "columns";
-
- private final String id;
- private final String digest;
private final TableDefinition columns;
- public NamedSchema(String id, JsonObject config) {
- Objects.requireNonNull(config);
-
- this.id = id;
- this.digest = Utils.digest(config);
+ public NamedSchema(String id, Repository<? extends NamedSchema> repo, JsonObject config) {
+ super(id, Objects.requireNonNull(config));
this.columns = TableDefinition.fromJson(config.getJsonArray(CONF_COLUMNS));
}
- public String getId() {
- return this.id;
- }
-
public boolean hasColumn() {
return this.columns != null && this.columns.hasColumn();
}
@@ -54,15 +43,8 @@ public TableDefinition getColumns() {
return this.columns;
}
- public final boolean isDifferentFrom(JsonObject newConfig) {
- String newDigest = Utils.digest(newConfig == null ? null : newConfig.encode());
- boolean isDifferent = this.digest == null || this.digest.length() == 0 || !this.digest.equals(newDigest);
- if (isDifferent) {
- log.info("Schema configuration of [{}] is changed from [{}] to [{}]", this.id, digest, newDigest);
- } else {
- log.debug("Schema configuration of [{}] remains the same", this.id);
- }
-
- return isDifferent;
+ @Override
+ public UsageStats getUsage(String idOrAlias) {
+ return null;
}
}
\ No newline at end of file
diff --git a/src/main/java/ru/yandex/clickhouse/jdbcbridge/core/QueryParser.java b/src/main/java/ru/yandex/clickhouse/jdbcbridge/core/QueryParser.java
index 6fcfcfe..4318f59 100644
--- a/src/main/java/ru/yandex/clickhouse/jdbcbridge/core/QueryParser.java
+++ b/src/main/java/ru/yandex/clickhouse/jdbcbridge/core/QueryParser.java
@@ -236,16 +236,16 @@ static String extractTableName(String query) {
return table;
}
- public static String extractConnectionString(RoutingContext ctx, DataSourceManager resolver) {
+ public static String extractConnectionString(RoutingContext ctx, Repository<NamedDataSource> resolver) {
HttpServerRequest req = Objects.requireNonNull(ctx).request();
return Objects.requireNonNull(resolver).resolve(req.getParam(PARAM_CONNECTION_STRING));
}
- public static QueryParser fromRequest(RoutingContext ctx, DataSourceManager resolver) {
+ public static QueryParser fromRequest(RoutingContext ctx, Repository<NamedDataSource> resolver) {
return fromRequest(ctx, resolver, false);
}
- public static QueryParser fromRequest(RoutingContext ctx, DataSourceManager resolver, boolean forWrite) {
+ public static QueryParser fromRequest(RoutingContext ctx, Repository<NamedDataSource> resolver, boolean forWrite) {
HttpServerRequest req = Objects.requireNonNull(ctx).request();
final QueryParser query;
diff --git a/src/main/java/ru/yandex/clickhouse/jdbcbridge/core/Reloadable.java b/src/main/java/ru/yandex/clickhouse/jdbcbridge/core/Reloadable.java
index fd6fcea..c480be5 100644
--- a/src/main/java/ru/yandex/clickhouse/jdbcbridge/core/Reloadable.java
+++ b/src/main/java/ru/yandex/clickhouse/jdbcbridge/core/Reloadable.java
@@ -18,8 +18,8 @@
import io.vertx.core.json.JsonObject;
/**
- * This interface defines the ability to load configuration automatically
- * whenever there's change detected.
+ * This interface defines the ability to load configuration automatically when
+ * there's a change detected.
*
* @since 2.0
*/
diff --git a/src/main/java/ru/yandex/clickhouse/jdbcbridge/core/Repository.java b/src/main/java/ru/yandex/clickhouse/jdbcbridge/core/Repository.java
new file mode 100644
index 0000000..1f1a1d7
--- /dev/null
+++ b/src/main/java/ru/yandex/clickhouse/jdbcbridge/core/Repository.java
@@ -0,0 +1,99 @@
+/**
+ * Copyright 2019-2020, Zhichun Wu
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package ru.yandex.clickhouse.jdbcbridge.core;
+
+import java.util.List;
+
+/**
+ * This interface defines a repository for managing {@link ManagedEntity} like
+ * {@link NamedDataSource}, {@link NamedSchema}, and {@link NamedQuery}.
+ *
+ * @since 2.0
+ */
+public interface Repository<T extends ManagedEntity> {
+ /**
+ * Get class of managed entities.
+ *
+ * @return class of managed entities
+ */
+ Class<T> getEntityClass();
+
+ /**
+ * Check if the given type of entity can be managed by this repository.
+ *
+ * @param clazz class of the entity
+ * @return true if the type of entity can be managed by this repository; false
+ * otherwise
+ */
+ boolean accept(Class<?> clazz);
+
+ /**
+ * Resolve given name. Usually just about DNS SRV record resolving, for example:
+ * {@code jdbc:clickhouse:{{ ch-server.somedomain }}/system} will be resolved to
+ * something like {@code jdbc:clickhouse:127.0.0.1:8123/system}.
+ *
+ * @param name name to resolve
+ * @return resolved name
+ */
+ String resolve(String name);
+
+ /**
+ * Get usage statistics of the repository.
+ *
+ * @return usage statistics
+ */
+ List<UsageStats> getUsageStats();
+
+ /**
+ * Register new type of entity to be managed in this repository.
+ *
+ * @param type type of entity, defaults to extension class name
+ * @param extension extension used to instantiate the new type of entity
+ */
+ void registerType(String type, Extension extension);
+
+ /**
+ * Put a named entity into the repository.
+ *
+ * @param id id of the entity, could be null
+ * @param entity non-null entity to be added
+ */
+ void put(String id, T entity);
+
+ /**
+ * Get entity from repository by id.
+ *
+ * @param id id of the entity
+ * @return desired entity
+ */
+ T get(String id);
+
+ /**
+ * Get or create entity from repository by id.
+ *
+ * @param id id of the entity
+ * @return desired entity
+ */
+ default T getOrCreate(String id) {
+ T entity = get(id);
+
+ if (entity == null) {
+ throw new UnsupportedOperationException("Creating entity is not supported");
+ }
+
+ return entity;
+ }
+}
diff --git a/src/main/java/ru/yandex/clickhouse/jdbcbridge/core/SchemaManager.java b/src/main/java/ru/yandex/clickhouse/jdbcbridge/core/RepositoryManager.java
similarity index 52%
rename from src/main/java/ru/yandex/clickhouse/jdbcbridge/core/SchemaManager.java
rename to src/main/java/ru/yandex/clickhouse/jdbcbridge/core/RepositoryManager.java
index 050a13f..de41b36 100644
--- a/src/main/java/ru/yandex/clickhouse/jdbcbridge/core/SchemaManager.java
+++ b/src/main/java/ru/yandex/clickhouse/jdbcbridge/core/RepositoryManager.java
@@ -15,17 +15,28 @@
*/
package ru.yandex.clickhouse.jdbcbridge.core;
+import java.util.List;
+
/**
- * This interface defines how to manage named schemas.
+ * This interface defines a service for managing entities like
+ * {@link NamedDataSource}, {@link NamedSchema}, and {@link NamedQuery}.
*
* @since 2.0
*/
-public interface SchemaManager extends Reloadable {
+public interface RepositoryManager {
+ /**
+ * Get repository capable of managing given type of entity.
+ *
+ * @param <T> type of entity to be managed
+ * @param clazz class of entity
+ * @return repository capable of managing given type of entity
+ */
+ <T extends ManagedEntity> Repository<T> getRepository(Class<T> clazz);
+
/**
- * Get named schema.
+ * Update repository list using given repositories.
*
- * @param name name of the schema
- * @return desired schema
+ * @param repos repositories
*/
- NamedSchema get(String name);
+ void update(List<Repository<?>> repos);
}
diff --git a/src/main/java/ru/yandex/clickhouse/jdbcbridge/core/ResponseWriter.java b/src/main/java/ru/yandex/clickhouse/jdbcbridge/core/ResponseWriter.java
index 0dcb8ce..2788b08 100644
--- a/src/main/java/ru/yandex/clickhouse/jdbcbridge/core/ResponseWriter.java
+++ b/src/main/java/ru/yandex/clickhouse/jdbcbridge/core/ResponseWriter.java
@@ -41,8 +41,10 @@ public ResponseWriter(HttpServerResponse response, StreamOptions options, int ti
this.response.setWriteQueueMaxSize(this.options.getMaxBlockSize());
- log.debug("Start Time={}, Timeout={}, Max Block Size={}", this.startTime, this.timeout,
- this.options.getMaxBlockSize());
+ if (log.isDebugEnabled()) {
+ log.debug("Start Time={}, Timeout={}, Max Block Size={}", this.startTime, this.timeout,
+ this.options.getMaxBlockSize());
+ }
}
public StreamOptions getOptions() {
diff --git a/src/main/java/ru/yandex/clickhouse/jdbcbridge/core/TypedParameter.java b/src/main/java/ru/yandex/clickhouse/jdbcbridge/core/TypedParameter.java
index 22425b7..6f1b29c 100644
--- a/src/main/java/ru/yandex/clickhouse/jdbcbridge/core/TypedParameter.java
+++ b/src/main/java/ru/yandex/clickhouse/jdbcbridge/core/TypedParameter.java
@@ -192,7 +192,7 @@ public TypedParameter writeValueTo(ByteBuffer buffer, int precision, int scal
buffer.writeDateTime((Long) this.value, timezone);
break;
case DateTime64:
- buffer.writeDateTime64((Long) this.value, timezone);
+ buffer.writeDateTime64((Long) this.value, 0, this.chType.getScale(), timezone);
break;
case UInt64:
buffer.writeUInt64((Long) this.value);
diff --git a/src/main/java/ru/yandex/clickhouse/jdbcbridge/core/QueryManager.java b/src/main/java/ru/yandex/clickhouse/jdbcbridge/core/UsageStats.java
similarity index 72%
rename from src/main/java/ru/yandex/clickhouse/jdbcbridge/core/QueryManager.java
rename to src/main/java/ru/yandex/clickhouse/jdbcbridge/core/UsageStats.java
index b31b30a..45686b9 100644
--- a/src/main/java/ru/yandex/clickhouse/jdbcbridge/core/QueryManager.java
+++ b/src/main/java/ru/yandex/clickhouse/jdbcbridge/core/UsageStats.java
@@ -16,16 +16,11 @@
package ru.yandex.clickhouse.jdbcbridge.core;
/**
- * This interface defines how to manage named queries.
+ * This interface defines usage statistics of a managed entity such as a
+ * datasource.
*
* @since 2.0
*/
-public interface QueryManager extends Reloadable {
- /**
- * Get named query.
- *
- * @param name name of the query
- * @return desired query
- */
- NamedQuery get(String name);
+public interface UsageStats {
+ String getName();
}
diff --git a/src/main/java/ru/yandex/clickhouse/jdbcbridge/core/Utils.java b/src/main/java/ru/yandex/clickhouse/jdbcbridge/core/Utils.java
index dfaf3c0..e9e0cb6 100644
--- a/src/main/java/ru/yandex/clickhouse/jdbcbridge/core/Utils.java
+++ b/src/main/java/ru/yandex/clickhouse/jdbcbridge/core/Utils.java
@@ -23,10 +23,12 @@
import java.security.NoSuchAlgorithmException;
import java.util.ArrayList;
import java.util.Arrays;
+import java.util.Collection;
import java.util.Enumeration;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
+import java.util.Objects;
import java.util.ServiceLoader;
import java.util.concurrent.TimeUnit;
import java.util.function.UnaryOperator;
@@ -594,7 +596,7 @@ public static Extension> loadExtension(String className) {
return loadExtension(null, className);
}
- public static Extension> loadExtension(List libUrls, String className) {
+ public static Extension<?> loadExtension(Collection<String> libUrls, String className) {
Extension> extension = null;
final ClassLoader loader = new ExpandedUrlClassLoader(extClassLoader,
diff --git a/src/main/java/ru/yandex/clickhouse/jdbcbridge/impl/ConfigDataSource.java b/src/main/java/ru/yandex/clickhouse/jdbcbridge/impl/ConfigDataSource.java
index 816da43..de94874 100644
--- a/src/main/java/ru/yandex/clickhouse/jdbcbridge/impl/ConfigDataSource.java
+++ b/src/main/java/ru/yandex/clickhouse/jdbcbridge/impl/ConfigDataSource.java
@@ -18,6 +18,7 @@
import ru.yandex.clickhouse.jdbcbridge.core.ByteBuffer;
import ru.yandex.clickhouse.jdbcbridge.core.ColumnDefinition;
import ru.yandex.clickhouse.jdbcbridge.core.TableDefinition;
+import ru.yandex.clickhouse.jdbcbridge.core.UsageStats;
import ru.yandex.clickhouse.jdbcbridge.core.NamedDataSource;
import ru.yandex.clickhouse.jdbcbridge.core.DataType;
import ru.yandex.clickhouse.jdbcbridge.core.DefaultValues;
@@ -27,8 +28,8 @@
import ru.yandex.clickhouse.jdbcbridge.core.Utils;
import ru.yandex.clickhouse.jdbcbridge.core.DataSourceStats;
import ru.yandex.clickhouse.jdbcbridge.core.DataTableReader;
-import ru.yandex.clickhouse.jdbcbridge.core.DataSourceManager;
import ru.yandex.clickhouse.jdbcbridge.core.QueryParameters;
+import ru.yandex.clickhouse.jdbcbridge.core.Repository;
import static ru.yandex.clickhouse.jdbcbridge.core.DataType.*;
@@ -80,12 +81,12 @@ protected static class ConfigQuery {
}
static class DataSourceStatReader implements DataTableReader {
- private final Iterator stats;
+ private final Iterator<UsageStats> stats;
private DataSourceStats current = null;
- protected DataSourceStatReader(Iterator stats) {
- this.stats = stats;
+ protected DataSourceStatReader(List<UsageStats> stats) {
+ this.stats = stats.iterator();
}
@Override
@@ -93,12 +94,15 @@ public boolean nextRow() {
boolean hasNext = false;
while (stats.hasNext()) {
- current = stats.next();
- if (current.getName().isEmpty()) { // skip this special datasource)
- continue;
- } else {
+ UsageStats usage = stats.next();
+
+ // skip non-supported statistics and ConfigDataSource
+ if (usage instanceof DataSourceStats && !(current = (DataSourceStats) usage).getName().isEmpty()) {
hasNext = true;
break;
+ } else {
+ log.warn("Discard unsupported usage statistics: {}", usage);
+ continue;
}
}
@@ -150,14 +154,14 @@ public void read(int row, int column, ColumnDefinition metadata, ByteBuffer buff
}
public static void initialize(ExtensionManager manager) {
- DataSourceManager dsManager = manager.getDataSourceManager();
+ Repository<NamedDataSource> dsRepo = manager.getRepositoryManager().getRepository(NamedDataSource.class);
Extension thisExtension = manager.getExtension(ConfigDataSource.class);
- dsManager.registerType(EXTENSION_NAME, thisExtension);
- dsManager.put(Utils.EMPTY_STRING, new ConfigDataSource(dsManager));
+ dsRepo.registerType(EXTENSION_NAME, thisExtension);
+ dsRepo.put(Utils.EMPTY_STRING, new ConfigDataSource(dsRepo));
}
- private final DataSourceManager dataSourceManager;
+ private final Repository<NamedDataSource> dataSourceRepo;
protected ConfigQuery parse(String query) {
ConfigQuery cq = new ConfigQuery();
@@ -182,10 +186,10 @@ protected ConfigQuery parse(String query) {
return cq;
}
- protected ConfigDataSource(DataSourceManager dataSourceManager) {
- super(EXTENSION_NAME, dataSourceManager, null);
+ protected ConfigDataSource(Repository<NamedDataSource> dataSourceRepo) {
+ super(EXTENSION_NAME, dataSourceRepo, null);
- this.dataSourceManager = dataSourceManager;
+ this.dataSourceRepo = dataSourceRepo;
}
@Override
@@ -198,8 +202,8 @@ protected void writeQueryResult(String schema, String originalQuery, String load
return;
}
- new DataSourceStatReader(dataSourceManager.getDataSourceStats().iterator()).process(getId(), requestColumns,
- customColumns, DATASOURCE_CONFIG_COLUMNS.getColumns(), defaultValues, getTimeZone(), params, writer);
+ new DataSourceStatReader(dataSourceRepo.getUsageStats()).process(getId(), requestColumns, customColumns,
+ DATASOURCE_CONFIG_COLUMNS.getColumns(), defaultValues, getTimeZone(), params, writer);
}
@Override
diff --git a/src/main/java/ru/yandex/clickhouse/jdbcbridge/impl/DefaultDataSourceManager.java b/src/main/java/ru/yandex/clickhouse/jdbcbridge/impl/DefaultDataSourceManager.java
deleted file mode 100644
index 2855f02..0000000
--- a/src/main/java/ru/yandex/clickhouse/jdbcbridge/impl/DefaultDataSourceManager.java
+++ /dev/null
@@ -1,359 +0,0 @@
-/**
- * Copyright 2019-2020, Zhichun Wu
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package ru.yandex.clickhouse.jdbcbridge.impl;
-
-import java.net.URI;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Objects;
-import java.util.Map.Entry;
-
-import io.vertx.core.json.JsonObject;
-
-import ru.yandex.clickhouse.jdbcbridge.core.NamedDataSource;
-import ru.yandex.clickhouse.jdbcbridge.core.Utils;
-import ru.yandex.clickhouse.jdbcbridge.core.DataSourceStats;
-import ru.yandex.clickhouse.jdbcbridge.core.Extension;
-import ru.yandex.clickhouse.jdbcbridge.core.ExtensionManager;
-import ru.yandex.clickhouse.jdbcbridge.core.DataSourceManager;
-
-import static ru.yandex.clickhouse.jdbcbridge.core.Utils.EMPTY_STRING;
-
-/**
- * This class is the default implmentation of DataSourceManager.
- *
- * @since 2.0
- */
-public class DefaultDataSourceManager implements DataSourceManager {
- private static final org.slf4j.Logger log = org.slf4j.LoggerFactory.getLogger(DefaultDataSourceManager.class);
-
- private final List> types = Collections.synchronizedList(new ArrayList<>(5));
- private final Map mappings = Collections.synchronizedMap(new HashMap<>());
-
- public static void initialize(ExtensionManager manager) {
- DataSourceManager dsManager = manager.getDataSourceManager();
- if (dsManager == null || !(dsManager instanceof DefaultDataSourceManager)) {
- manager.setDataSourceManager(dsManager = new DefaultDataSourceManager());
- }
-
- manager.registerConfigLoader(
- Utils.getConfiguration("datasources", "DATASOURCE_CONFIG_DIR", "jdbc-bridge.datasource.config.dir"),
- dsManager::reload);
- }
-
- private Extension getExtensionByType(String typeName) {
- Extension extension = null;
- boolean isFirst = true;
-
- for (Extension ext : types) {
- if (isFirst) {
- extension = ext;
- isFirst = false;
- }
-
- if (ext.getName().equals(typeName)) {
- extension = ext;
- break;
- }
- }
-
- return extension;
- }
-
- private NamedDataSource createFromType(String uri, String type, boolean nonNullRequired) {
- NamedDataSource ds = null;
-
- Extension extension = getExtensionByType(type);
-
- if (extension != null) {
- try {
- ds = extension.newInstance(uri, this, null);
- } catch (Exception e) {
- log.error("Failed to create data source [" + uri + "]", e);
- }
- }
-
- return ds == null && nonNullRequired ? new NamedDataSource(uri, this, null) : ds;
- }
-
- /**
- * Create datasource object based on given configuration.
- *
- * @param id datasource id
- * @param config configuration in JSON format
- * @return desired datasource
- */
- protected NamedDataSource createFromConfig(String id, JsonObject config) {
- NamedDataSource ds = null;
-
- Extension extension = getExtensionByType(
- config == null ? null : config.getString(NamedDataSource.CONF_TYPE));
- ds = extension == null ? null : extension.newInstance(id, this, config);
-
- // fall back to default implementation
- if (ds == null) {
- ds = new NamedDataSource(id, this, config);
- }
-
- return ds;
- }
-
- protected void remove(String id, NamedDataSource ds) {
- if (ds == null || EMPTY_STRING.equals(id)) {
- return;
- }
-
- if (id != null && id.equals(ds.getId())) {
- log.info("Removing datasource [{}] and all its aliases...", id);
-
- for (String alias : ds.getAliases()) {
- log.info("Removing alias [{}] of datasource [{}]...", alias, id);
- NamedDataSource ref = mappings.get(alias);
- // we don't want to remove a datasource when its id is same as an
- // alias of another datasource
- if (ref == ds) {
- mappings.remove(alias);
- }
- }
- } else { // just an alias
- log.info("Removing alias [{}] of datasource [{}]...", id, ds.getId());
- mappings.remove(id);
- }
-
- try {
- ds.close();
- } catch (Exception e) {
- }
- }
-
- protected void update(String id, JsonObject config) {
- NamedDataSource ds = mappings.get(id);
-
- boolean addDataSource = false;
- if (ds == null) {
- addDataSource = true;
- } else if (ds.isDifferentFrom(config)) {
- remove(id, mappings.remove(id));
- addDataSource = true;
- }
-
- if (addDataSource && config != null) {
- log.info("Adding datasource [{}]...", id);
-
- try {
- ds = createFromConfig(id, config);
- mappings.put(id, ds);
-
- for (String alias : ds.getAliases()) {
- if (mappings.containsKey(alias)) {
- log.warn("Not able to add datasource alias [{}] as it exists already", alias);
- } else {
- mappings.put(alias, ds);
- }
- }
- } catch (Exception e) {
- log.warn("Failed to add datasource [" + id + "]", e);
- }
- }
- }
-
- @Override
- public void registerType(String typeName, Extension extension) {
- String className = Objects.requireNonNull(extension).getProviderClass().getName();
- typeName = typeName == null || typeName.isEmpty() ? extension.getName() : typeName;
-
- Extension registered = null;
- for (Extension ext : this.types) {
- if (ext.getName().equals(typeName)) {
- registered = ext;
- break;
- }
- }
-
- if (registered != null) {
- log.warn("Discard [{}] as type [{}] is reserved by [{}]", className, typeName,
- registered.getClass().getName());
- return;
- }
-
- log.info("Registering new type of datasource: [{}] -> [{}]", typeName, className);
- types.add(extension);
-
- if (types.size() == 1) {
- log.info("Default datasource type is set to [{}]", typeName);
- }
- }
-
- @Override
- public void put(String id, NamedDataSource datasource) {
- if (datasource == null) {
- log.warn("Non-null datasource is required for registration!");
- return;
- }
-
- if (id == null) {
- id = datasource.getId();
- }
-
- NamedDataSource existDs = this.mappings.get(id);
- if (existDs != null) {
- String existId = existDs.getId();
-
- this.mappings.remove(id);
- if (existId == null ? id == null : existId.equals(id)) {
- log.warn("Datasource alias [{}] was overrided", id);
- } else {
- for (String alias : existDs.getAliases()) {
- this.mappings.remove(alias);
- }
-
- log.warn("Datasource [{}] and all its aliases[{}] were removed", existId, existDs.getAliases());
- }
-
- try {
- existDs.close();
- } catch (Exception e) {
- }
- }
-
- this.mappings.put(id, datasource);
-
- // now update aliases...
- for (String alias : datasource.getAliases()) {
- if (alias == null || alias.isEmpty()) {
- continue;
- }
-
- NamedDataSource ds = this.mappings.get(alias);
- if (ds != null && alias.equals(ds.getId())) {
- log.warn("Not going to add datasource alias [{}] as it's been taken by a datasource", alias);
- continue;
- }
-
- if (ds != null) {
- this.mappings.remove(alias);
- log.warn("Datasource alias [{}] will be replaced", alias);
- }
-
- this.mappings.put(alias, datasource);
- }
- }
-
- @Override
- public void reload(JsonObject config) {
- if (config == null || config.fieldNames().size() == 0) {
- log.info("No datasource configuration found");
-
- HashSet keys = new HashSet<>();
- for (String key : mappings.keySet()) {
- keys.add(key);
- }
-
- for (String key : keys) {
- remove(key, mappings.remove(key));
- }
- // mappings.clear();
- } else {
- HashSet keys = new HashSet<>();
- for (Entry entry : config) {
- String key = entry.getKey();
- Object value = entry.getValue();
- if (key != null && value instanceof JsonObject) {
- keys.add(key);
- update(key, (JsonObject) value);
- }
- }
-
- HashSet dsIds = new HashSet<>();
- mappings.entrySet().forEach(entry -> {
- String id = entry.getKey();
- NamedDataSource ds = entry.getValue();
- if (id != null && !id.isEmpty() && ds != null && id.equals(ds.getId())) {
- dsIds.add(id);
- }
- });
-
- for (String id : dsIds) {
- if (!keys.contains(id)) {
- remove(id, mappings.remove(id));
- }
- }
- }
- }
-
- /**
- * Get or create a datasource from given URI.
- *
- * @param uri connection string
- * @param orCreate true to create the datasource anyway
- * @return desired datasource
- */
- public NamedDataSource get(String uri, boolean orCreate) {
- // [:][?]
- String id = uri;
- String type = null;
- if (id != null) {
- // remove query parameters first
- int index = id.indexOf('?');
- if (index >= 0) {
- id = id.substring(0, index);
- }
-
- // and then type prefix
- index = id.indexOf(':');
- if (index >= 0) {
- type = id.substring(0, index);
- id = id.substring(index + 1);
- }
-
- // now try parsing it as URI
- try {
- URI u = new URI(id);
- if (u.getHost() != null) {
- id = u.getHost();
- }
- } catch (Exception e) {
- }
- }
-
- NamedDataSource ds = mappings.get(id);
-
- if (ds == null && (ds = createFromType(uri, type, orCreate)) == null) {
- throw new IllegalArgumentException("Data source [" + uri + "] not found!");
- }
-
- return ds;
- }
-
- @Override
- public final List getDataSourceStats() {
- String[] idOrAlias = this.mappings.keySet().toArray(new String[this.mappings.size()]);
- ArrayList list = new ArrayList<>(idOrAlias.length);
- for (int i = 0; i < idOrAlias.length; i++) {
- String dsName = idOrAlias[i];
- NamedDataSource nds = this.mappings.get(dsName);
-
- if (nds != null) {
- list.add(new DataSourceStats(idOrAlias[i], nds));
- }
- }
- return list;
- }
-}
\ No newline at end of file
diff --git a/src/main/java/ru/yandex/clickhouse/jdbcbridge/impl/DefaultDataTypeConverter.java b/src/main/java/ru/yandex/clickhouse/jdbcbridge/impl/DefaultDataTypeConverter.java
index acd4a12..e77a9d7 100644
--- a/src/main/java/ru/yandex/clickhouse/jdbcbridge/impl/DefaultDataTypeConverter.java
+++ b/src/main/java/ru/yandex/clickhouse/jdbcbridge/impl/DefaultDataTypeConverter.java
@@ -16,9 +16,12 @@
package ru.yandex.clickhouse.jdbcbridge.impl;
import java.sql.JDBCType;
+import java.util.ArrayList;
+import java.util.List;
import ru.yandex.clickhouse.jdbcbridge.core.DataType;
import ru.yandex.clickhouse.jdbcbridge.core.DataTypeConverter;
+import ru.yandex.clickhouse.jdbcbridge.core.DataTypeMapping;
/**
* This class is default implementation of DataTypeConvert.
@@ -28,14 +31,49 @@
public class DefaultDataTypeConverter implements DataTypeConverter {
private static final org.slf4j.Logger log = org.slf4j.LoggerFactory.getLogger(DataTypeConverter.class);
+ private final DataTypeMapping[] mappings;
+
+ public DefaultDataTypeConverter() {
+ this(null);
+ }
+
+ public DefaultDataTypeConverter(List mappings) {
+ if (mappings == null) {
+ this.mappings = new DataTypeMapping[0];
+ } else {
+ this.mappings = new DataTypeMapping[mappings.size()];
+ int index = 0;
+ for (DataTypeMapping m : mappings) {
+ this.mappings[index++] = m;
+ }
+ }
+ }
+
@Override
- public DataType from(JDBCType jdbcType, boolean signed, boolean useDateTime) {
+ public DataType from(JDBCType jdbcType, String typeName, int precision, int scale, boolean signed) {
+ for (int i = 0; i < mappings.length; i++) {
+ if (mappings[i].accept(jdbcType, typeName)) {
+ return mappings[i].getMappedType();
+ }
+ }
+
DataType type = DataType.Str;
switch (jdbcType) {
case BIT:
- case BOOLEAN:
- type = DataType.UInt8;
+ if (precision > 128) {
+ type = DataType.Int256;
+ } else if (precision > 64) {
+ type = DataType.Int128;
+ } else if (precision > 32) {
+ type = DataType.Int64;
+ } else if (precision > 16) {
+ type = DataType.Int32;
+ } else if (precision > 8) {
+ type = DataType.Int16;
+ } else {
+ type = DataType.Int8;
+ }
break;
case TINYINT:
type = signed ? DataType.Int8 : DataType.UInt8;
@@ -60,6 +98,9 @@ public DataType from(JDBCType jdbcType, boolean signed, boolean useDateTime) {
case DECIMAL:
type = DataType.Decimal;
break;
+ case ARRAY:
+ case OTHER:
+ case BOOLEAN:
case CHAR:
case NCHAR:
case VARCHAR:
@@ -70,13 +111,14 @@ public DataType from(JDBCType jdbcType, boolean signed, boolean useDateTime) {
type = DataType.Str;
break;
case DATE:
- type = DataType.Date;
+ type = DataType.Date; // precision should be 10
break;
case TIME:
case TIMESTAMP:
case TIME_WITH_TIMEZONE:
case TIMESTAMP_WITH_TIMEZONE:
- type = useDateTime ? DataType.DateTime : DataType.DateTime64;
+ // type = useDateTime ? DataType.DateTime : DataType.DateTime64;
+ type = scale > 0 ? DataType.DateTime64 : DataType.DateTime;
break;
default:
log.warn("Unsupported JDBC type [{}], which will be treated as [{}]", jdbcType.name(), type.name());
diff --git a/src/main/java/ru/yandex/clickhouse/jdbcbridge/impl/DefaultQueryManager.java b/src/main/java/ru/yandex/clickhouse/jdbcbridge/impl/DefaultQueryManager.java
deleted file mode 100644
index 6cd6319..0000000
--- a/src/main/java/ru/yandex/clickhouse/jdbcbridge/impl/DefaultQueryManager.java
+++ /dev/null
@@ -1,104 +0,0 @@
-/**
- * Copyright 2019-2020, Zhichun Wu
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package ru.yandex.clickhouse.jdbcbridge.impl;
-
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.Map;
-
-import ru.yandex.clickhouse.jdbcbridge.core.ExtensionManager;
-import ru.yandex.clickhouse.jdbcbridge.core.NamedQuery;
-import ru.yandex.clickhouse.jdbcbridge.core.QueryManager;
-import ru.yandex.clickhouse.jdbcbridge.core.Utils;
-import io.vertx.core.json.JsonObject;
-
-/**
- * This class is default implementation of QueryManager. Basically, it loads
- * named queries from JSON files under /config/queries, and then
- * later used to retrieve named query by its name.
- *
- * @since 2.0
- */
-public class DefaultQueryManager implements QueryManager {
- private static final org.slf4j.Logger log = org.slf4j.LoggerFactory.getLogger(DefaultQueryManager.class);
-
- private final Map mappings = Collections.synchronizedMap(new HashMap<>());
-
- public static void initialize(ExtensionManager manager) {
- QueryManager qManager = manager.getQueryManager();
- if (qManager == null || !(qManager instanceof DefaultQueryManager)) {
- manager.setQueryManager(qManager = new DefaultQueryManager());
- }
-
- manager.registerConfigLoader(
- Utils.getConfiguration("queries", "QUERY_CONFIG_DIR", "jdbc-bridge.query.config.dir"),
- qManager::reload);
- }
-
- protected void update(String id, JsonObject config) {
- NamedQuery query = mappings.get(id);
-
- boolean addQuery = false;
- if (query == null) {
- addQuery = true;
- } else if (query.isDifferentFrom(config)) {
- mappings.remove(id);
- addQuery = true;
- }
-
- if (addQuery && config != null) {
- log.info("Adding query [{}]...", id);
- try {
- mappings.put(id, new NamedQuery(id, config));
- } catch (Exception e) {
- log.error("Failed to add query", e);
- }
- }
- }
-
- @Override
- public void reload(JsonObject config) {
- if (config == null || config.fieldNames().size() == 0) {
- log.info("No query configuration found");
- mappings.clear();
- } else {
- HashSet keys = new HashSet<>();
- config.forEach(action -> {
- String id = action.getKey();
- if (id != null) {
- keys.add(id);
- update(id, action.getValue() instanceof JsonObject ? (JsonObject) action.getValue() : null);
- }
- });
-
- mappings.entrySet().removeIf(entry -> {
- boolean shouldRemove = !keys.contains(entry.getKey());
-
- if (shouldRemove) {
- log.info("Removing query [{}]...", entry.getKey());
- }
-
- return shouldRemove;
- });
- }
- }
-
- @Override
- public NamedQuery get(String query) {
- return mappings.get(query);
- }
-}
\ No newline at end of file
diff --git a/src/main/java/ru/yandex/clickhouse/jdbcbridge/impl/DefaultRepositoryManager.java b/src/main/java/ru/yandex/clickhouse/jdbcbridge/impl/DefaultRepositoryManager.java
new file mode 100644
index 0000000..4fda8cc
--- /dev/null
+++ b/src/main/java/ru/yandex/clickhouse/jdbcbridge/impl/DefaultRepositoryManager.java
@@ -0,0 +1,72 @@
+/**
+ * Copyright 2019-2020, Zhichun Wu
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package ru.yandex.clickhouse.jdbcbridge.impl;
+
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+import java.util.Objects;
+
+import ru.yandex.clickhouse.jdbcbridge.core.ManagedEntity;
+import ru.yandex.clickhouse.jdbcbridge.core.Repository;
+import ru.yandex.clickhouse.jdbcbridge.core.RepositoryManager;
+
+/**
+ * Default implementation of
+ * {@link ru.yandex.clickhouse.jdbcbridge.core.RepositoryManager}.
+ *
+ * @since 2.0
+ */
+public class DefaultRepositoryManager implements RepositoryManager {
+ private final List> repos = Collections.synchronizedList(new ArrayList<>());
+
+ @Override
+ public Repository getRepository(Class clazz) {
+ Objects.requireNonNull(clazz);
+
+ for (Repository> repo : repos) {
+ if (repo.accept(clazz)) {
+ return (Repository) repo;
+ }
+ }
+
+ throw new IllegalArgumentException("No repository available for " + clazz.getName());
+ }
+
+ @Override
+ public void update(List> repos) {
+ if (repos == null) {
+ return;
+ }
+
+ for (Repository> repo : repos) {
+ boolean replaced = false;
+ for (int i = 0, len = this.repos.size(); i < len; i++) {
+ Repository> current = this.repos.get(i);
+ if (!current.getClass().equals(repo.getClass())
+ && !current.getEntityClass().equals(repo.getEntityClass())) {
+ this.repos.set(i, repo);
+ replaced = true;
+ break;
+ }
+ }
+
+ if (!replaced) {
+ this.repos.add(repo);
+ }
+ }
+ }
+}
diff --git a/src/main/java/ru/yandex/clickhouse/jdbcbridge/impl/DefaultSchemaManager.java b/src/main/java/ru/yandex/clickhouse/jdbcbridge/impl/DefaultSchemaManager.java
deleted file mode 100644
index fdadf50..0000000
--- a/src/main/java/ru/yandex/clickhouse/jdbcbridge/impl/DefaultSchemaManager.java
+++ /dev/null
@@ -1,105 +0,0 @@
-/**
- * Copyright 2019-2020, Zhichun Wu
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package ru.yandex.clickhouse.jdbcbridge.impl;
-
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.Map;
-
-import ru.yandex.clickhouse.jdbcbridge.core.ExtensionManager;
-import ru.yandex.clickhouse.jdbcbridge.core.NamedSchema;
-import ru.yandex.clickhouse.jdbcbridge.core.SchemaManager;
-import ru.yandex.clickhouse.jdbcbridge.core.Utils;
-import io.vertx.core.json.JsonObject;
-
-/**
- * This class is default implementation of
- * {@link ru.yandex.clickhouse.jdbcbridge.core.QueryManager}. Basically, it
- * loads named schemas from JSON files under /config/schemas,
- * and then later used to retrieve named schema by its name.
- *
- * @since 2.0
- */
-public class DefaultSchemaManager implements SchemaManager {
- private static final org.slf4j.Logger log = org.slf4j.LoggerFactory.getLogger(DefaultQueryManager.class);
-
- private final Map mappings = Collections.synchronizedMap(new HashMap<>());
-
- public static void initialize(ExtensionManager manager) {
- SchemaManager sManager = manager.getSchemaManager();
- if (sManager == null || !(sManager instanceof DefaultSchemaManager)) {
- manager.setSchemaManager(sManager = new DefaultSchemaManager());
- }
-
- manager.registerConfigLoader(
- Utils.getConfiguration("schemas", "SCHEMA_CONFIG_DIR", "jdbc-bridge.schema.config.dir"),
- sManager::reload);
- }
-
- protected void update(String id, JsonObject config) {
- NamedSchema schema = mappings.get(id);
-
- boolean addSchema = false;
- if (schema == null) {
- addSchema = true;
- } else if (schema.isDifferentFrom(config)) {
- mappings.remove(id);
- addSchema = true;
- }
-
- if (addSchema && config != null) {
- log.info("Adding schema [{}]...", id);
- try {
- mappings.put(id, new NamedSchema(id, config));
- } catch (Exception e) {
- log.error("Failed to add schema", e);
- }
- }
- }
-
- @Override
- public void reload(JsonObject config) {
- if (config == null || config.fieldNames().size() == 0) {
- log.info("No schema configuration found");
- mappings.clear();
- } else {
- HashSet keys = new HashSet<>();
- config.forEach(action -> {
- String id = action.getKey();
- if (id != null) {
- keys.add(id);
- update(id, action.getValue() instanceof JsonObject ? (JsonObject) action.getValue() : null);
- }
- });
-
- mappings.entrySet().removeIf(entry -> {
- boolean shouldRemove = !keys.contains(entry.getKey());
-
- if (shouldRemove) {
- log.info("Removing schema [{}]...", entry.getKey());
- }
-
- return shouldRemove;
- });
- }
- }
-
- @Override
- public NamedSchema get(String query) {
- return mappings.get(query);
- }
-}
\ No newline at end of file
diff --git a/src/main/java/ru/yandex/clickhouse/jdbcbridge/impl/JdbcDataSource.java b/src/main/java/ru/yandex/clickhouse/jdbcbridge/impl/JdbcDataSource.java
index a19411c..c09186a 100644
--- a/src/main/java/ru/yandex/clickhouse/jdbcbridge/impl/JdbcDataSource.java
+++ b/src/main/java/ru/yandex/clickhouse/jdbcbridge/impl/JdbcDataSource.java
@@ -36,7 +36,6 @@
import ru.yandex.clickhouse.jdbcbridge.core.ResponseWriter;
import ru.yandex.clickhouse.jdbcbridge.core.DataTableReader;
import ru.yandex.clickhouse.jdbcbridge.core.Utils;
-import ru.yandex.clickhouse.jdbcbridge.core.DataSourceManager;
import ru.yandex.clickhouse.jdbcbridge.core.ByteBuffer;
import ru.yandex.clickhouse.jdbcbridge.core.DataAccessException;
import ru.yandex.clickhouse.jdbcbridge.core.ColumnDefinition;
@@ -47,6 +46,8 @@
import ru.yandex.clickhouse.jdbcbridge.core.Extension;
import ru.yandex.clickhouse.jdbcbridge.core.ExtensionManager;
import ru.yandex.clickhouse.jdbcbridge.core.QueryParameters;
+import ru.yandex.clickhouse.jdbcbridge.core.Repository;
+
import com.zaxxer.hikari.HikariConfig;
import com.zaxxer.hikari.HikariDataSource;
@@ -60,8 +61,8 @@ public class JdbcDataSource extends NamedDataSource {
private static final org.slf4j.Logger log = org.slf4j.LoggerFactory.getLogger(JdbcDataSource.class);
private static final Set PRIVATE_PROPS = Collections
- .unmodifiableSet(new HashSet<>(Arrays.asList(CONF_SCHEMA, CONF_TYPE, CONF_DATETIME, CONF_TIMEZONE,
- CONF_CACHE, CONF_ALIASES, CONF_DRIVER_URLS, CONF_QUERY_TIMEOUT, CONF_WRITE_TIMEOUT, CONF_SEALED)));
+ .unmodifiableSet(new HashSet<>(Arrays.asList(CONF_SCHEMA, CONF_TYPE, CONF_TIMEZONE, CONF_CACHE,
+ CONF_ALIASES, CONF_DRIVER_URLS, CONF_QUERY_TIMEOUT, CONF_WRITE_TIMEOUT, CONF_SEALED)));
private static final Properties DEFAULT_DATASOURCE_PROPERTIES = new Properties();
@@ -102,6 +103,35 @@ protected ResultSetReader(String id, ResultSet rs, QueryParameters params) {
this.params = params;
}
+ @Override
+ public int skipRows(QueryParameters parameters) {
+ int rowCount = 0;
+
+ if (rs == null || parameters == null) {
+ return rowCount;
+ }
+
+ int position = parameters.getPosition();
+ int offset = parameters.getOffset();
+
+ // absolute position takes priority
+ if (position != 0 || (position = offset) < 0) {
+ try {
+ rs.absolute(position);
+ // many JDBC drivers didn't validate position
+ // if you have only two rows in database, you can still use rs.position(100)...
+ // FIXME inaccurate row count here
+ rowCount = position;
+ } catch (SQLException e) {
+ throw new IllegalStateException("Not able to move cursor to row #" + position, e);
+ }
+ } else if (offset != 0) {
+ DataTableReader.super.skipRows(parameters);
+ }
+
+ return rowCount;
+ }
+
@Override
public boolean nextRow() {
try {
@@ -141,6 +171,12 @@ public void read(int row, int column, ColumnDefinition metadata, ByteBuffer buff
case Int64:
buffer.writeInt64(rs.getLong(column));
break;
+ case Int128:
+ buffer.writeInt128(rs.getObject(column, java.math.BigInteger.class));
+ break;
+ case Int256:
+ buffer.writeInt256(rs.getObject(column, java.math.BigInteger.class));
+ break;
case UInt8:
buffer.writeUInt8(rs.getInt(column));
break;
@@ -153,6 +189,12 @@ public void read(int row, int column, ColumnDefinition metadata, ByteBuffer buff
case UInt64:
buffer.writeUInt64(rs.getLong(column));
break;
+ case UInt128:
+ buffer.writeUInt128(rs.getObject(column, java.math.BigInteger.class));
+ break;
+ case UInt256:
+ buffer.writeUInt256(rs.getObject(column, java.math.BigInteger.class));
+ break;
case Float32:
buffer.writeFloat32(rs.getFloat(column));
break;
@@ -166,7 +208,7 @@ public void read(int row, int column, ColumnDefinition metadata, ByteBuffer buff
buffer.writeDateTime(rs.getTimestamp(column), metadata.getTimeZone());
break;
case DateTime64:
- buffer.writeDateTime64(rs.getTimestamp(column), metadata.getTimeZone());
+ buffer.writeDateTime64(rs.getTimestamp(column), metadata.getScale(), metadata.getTimeZone());
break;
case Decimal:
buffer.writeDecimal(rs.getBigDecimal(column), metadata.getPrecision(), metadata.getScale());
@@ -183,6 +225,9 @@ public void read(int row, int column, ColumnDefinition metadata, ByteBuffer buff
case Decimal256:
buffer.writeDecimal256(rs.getBigDecimal(column), metadata.getScale());
break;
+ case Enum:
+ case Enum8:
+ case Enum16:
case Str:
default:
buffer.writeString(rs.getString(column), params.nullAsDefault());
@@ -210,7 +255,7 @@ protected static void deregisterJdbcDriver(String driverClassName) {
public static void initialize(ExtensionManager manager) {
Extension thisExtension = manager.getExtension(JdbcDataSource.class);
- manager.getDataSourceManager().registerType(EXTENSION_NAME, thisExtension);
+ manager.getRepositoryManager().getRepository(NamedDataSource.class).registerType(EXTENSION_NAME, thisExtension);
}
public static JdbcDataSource newInstance(Object... args) {
@@ -220,7 +265,7 @@ public static JdbcDataSource newInstance(Object... args) {
}
String id = (String) args[0];
- DataSourceManager manager = (DataSourceManager) Objects.requireNonNull(args[1]);
+ Repository manager = (Repository) Objects.requireNonNull(args[1]);
JsonObject config = args.length > 2 ? (JsonObject) args[2] : null;
return new JdbcDataSource(id, manager, config);
@@ -258,13 +303,21 @@ private String buildErrorMessage(Throwable t) {
}
err.append("VendorCode(").append(code).append(')').append(' ').append(exp.getMessage());
} else {
- err.append(t == null ? "Unknown error" : t.getMessage());
+ err.append(t == null ? "Unknown error: " : t.getMessage());
+ }
+
+ Throwable rootCause = t;
+ while (rootCause.getCause() != null && rootCause.getCause() != rootCause) {
+ rootCause = rootCause.getCause();
+ }
+ if (rootCause != t) {
+ err.append('\n').append("Root cause: ").append(rootCause.getMessage());
}
return err.toString();
}
- protected JdbcDataSource(String id, DataSourceManager resolver, JsonObject config) {
+ protected JdbcDataSource(String id, Repository resolver, JsonObject config) {
super(id, resolver, config);
Properties props = new Properties();
@@ -315,13 +368,13 @@ protected JdbcDataSource(String id, DataSourceManager resolver, JsonObject confi
if (USE_CUSTOM_DRIVER_LOADER) {
String driverClassName = props.getProperty(PROP_DRIVER_CLASS);
- if (driverClassName == null || driverClassName.isEmpty()) {
- throw new IllegalArgumentException("Missing driverClassName in named datasource: " + id);
+ if (driverClassName != null && !driverClassName.isEmpty()) {
+ // throw new IllegalArgumentException("Missing driverClassName in named
+ // datasource: " + id);
+ // respect driver declared in datasource configuration
+ deregisterJdbcDriver(driverClassName);
}
- // respect driver declared in datasource configuration
- deregisterJdbcDriver(driverClassName);
-
Thread currentThread = Thread.currentThread();
ClassLoader currentContextClassLoader = currentThread.getContextClassLoader();
@@ -417,38 +470,6 @@ protected final void setTimeout(Statement stmt, int expectedTimeout) {
}
}
- protected final void skipRows(ResultSet rs, QueryParameters parameters) throws SQLException {
- if (rs != null && parameters != null) {
- int position = parameters.getPosition();
- // absolute position takes priority
- if (position != 0) {
- log.trace("Move cursor position to row #{}...", position);
- rs.absolute(position);
- log.trace("Now resume reading...");
- } else {
- int offset = parameters.getOffset();
-
- if (offset > 0) {
- log.trace("Skipping first {} rows...", offset);
- while (rs.next()) {
- if (--offset <= 0) {
- break;
- }
- }
- log.trace("Now resume reading the rest rows...");
- }
- }
- }
- }
-
- protected final DataType convert(int jdbcType, boolean signed, boolean useDateTime) {
- return convert(JDBCType.valueOf(jdbcType), signed, useDateTime);
- }
-
- protected DataType convert(JDBCType jdbcType, boolean signed, boolean useDateTime) {
- return converter.from(jdbcType, signed, useDateTime || this.useDateTime());
- }
-
protected ResultSet getFirstQueryResult(Statement stmt, boolean hasResultSet) throws SQLException {
ResultSet rs = null;
@@ -462,13 +483,20 @@ protected ResultSet getFirstQueryResult(Statement stmt, boolean hasResultSet) th
}
protected String getColumnName(ResultSetMetaData meta, int columnIndex) throws SQLException {
- String columnName;
+ String columnName = null;
+ boolean fallback = true;
try {
columnName = meta.getColumnLabel(columnIndex);
+ if (columnName == null || columnName.isEmpty()) {
+ fallback = false;
+ columnName = meta.getColumnName(columnIndex);
+ }
} catch (RuntimeException e) {
// in case get column label was not supported
- columnName = meta.getColumnName(columnIndex);
+ if (fallback) {
+ columnName = meta.getColumnName(columnIndex);
+ }
}
if (columnName == null || columnName.isEmpty()) {
@@ -487,7 +515,7 @@ protected ColumnDefinition[] getColumnsFromResultSet(ResultSet rs, QueryParamete
boolean isSigned = true;
int nullability = ResultSetMetaData.columnNullable;
int length = 0;
- int precison = 0;
+ int precision = 0;
int scale = 0;
// Why try-catch? Try a not-fully implemented JDBC driver and you'll see...
@@ -507,7 +535,7 @@ protected ColumnDefinition[] getColumnsFromResultSet(ResultSet rs, QueryParamete
}
try {
- precison = meta.getPrecision(i);
+ precision = meta.getPrecision(i);
} catch (Exception e) {
}
@@ -516,9 +544,13 @@ protected ColumnDefinition[] getColumnsFromResultSet(ResultSet rs, QueryParamete
} catch (Exception e) {
}
- columns[i - 1] = new ColumnDefinition(getColumnName(meta, i),
- convert(meta.getColumnType(i), isSigned, params.useDateTime()),
- ResultSetMetaData.columnNoNulls != nullability, length, precison, scale);
+ String name = getColumnName(meta, i);
+ String typeName = meta.getColumnTypeName(i);
+ JDBCType jdbcType = JDBCType.valueOf(meta.getColumnType(i));
+ DataType type = converter.from(jdbcType, typeName, precision, scale, isSigned);
+
+ columns[i - 1] = new ColumnDefinition(name, type, ResultSetMetaData.columnNoNulls != nullability, length,
+ precision, scale);
}
return columns;
@@ -557,6 +589,37 @@ protected boolean isSavedQuery(String file) {
return super.isSavedQuery(file) || file.endsWith(QUERY_FILE_EXT);
}
+ @Override
+ protected void writeMutationResult(String schema, String originalQuery, String loadedQuery, QueryParameters params,
+ ColumnDefinition[] requestColumns, ColumnDefinition[] customColumns, DefaultValues defaultValues,
+ ResponseWriter writer) {
+ try (Connection conn = getConnection(); Statement stmt = createStatement(conn, params)) {
+ setTimeout(stmt, this.getQueryTimeout(params.getTimeout()));
+
+ final ResultSet rs = getFirstQueryResult(stmt, stmt.execute(loadedQuery));
+
+ DataTableReader reader = new ResultSetReader(getId(), rs, params);
+ reader.process(getId(), requestColumns, customColumns, getColumnsFromResultSet(rs, params), defaultValues,
+ getTimeZone(), params, writer);
+
+ /*
+ * if (stmt.execute(loadedQuery)) { // TODO multiple resultsets
+ *
+ * } else if (columns.size() == 1 && columns.getColumn(0).getType() ==
+ * ClickHouseDataType.Int32) {
+ * writer.write(ClickHouseBuffer.newInstance(4).writeInt32(stmt.getUpdateCount()
+ * )); } else { throw new IllegalStateException(
+ * "Not able to handle query result due to incompatible columns: " + columns); }
+ */
+ } catch (SQLException e) {
+ throw new DataAccessException(getId(), buildErrorMessage(e), e);
+ } catch (DataAccessException e) {
+ Throwable cause = e.getCause();
+ throw new IllegalStateException(
+ "Failed to query against [" + this.getId() + "] due to: " + buildErrorMessage(cause), cause);
+ }
+ }
+
@Override
protected void writeQueryResult(String schema, String originalQuery, String loadedQuery, QueryParameters params,
ColumnDefinition[] requestColumns, ColumnDefinition[] customColumns, DefaultValues defaultValues,
diff --git a/src/main/java/ru/yandex/clickhouse/jdbcbridge/impl/JsonFileRepository.java b/src/main/java/ru/yandex/clickhouse/jdbcbridge/impl/JsonFileRepository.java
new file mode 100644
index 0000000..9436f6f
--- /dev/null
+++ b/src/main/java/ru/yandex/clickhouse/jdbcbridge/impl/JsonFileRepository.java
@@ -0,0 +1,113 @@
+/**
+ * Copyright 2019-2020, Zhichun Wu
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package ru.yandex.clickhouse.jdbcbridge.impl;
+
+import java.util.HashSet;
+import java.util.Objects;
+import java.util.Map.Entry;
+
+import io.vertx.core.json.JsonObject;
+import ru.yandex.clickhouse.jdbcbridge.core.BaseRepository;
+import ru.yandex.clickhouse.jdbcbridge.core.ManagedEntity;
+import ru.yandex.clickhouse.jdbcbridge.core.ExtensionManager;
+import ru.yandex.clickhouse.jdbcbridge.core.NamedDataSource;
+import ru.yandex.clickhouse.jdbcbridge.core.NamedQuery;
+import ru.yandex.clickhouse.jdbcbridge.core.NamedSchema;
+import ru.yandex.clickhouse.jdbcbridge.core.Reloadable;
+import ru.yandex.clickhouse.jdbcbridge.core.Utils;
+
+public class JsonFileRepository extends BaseRepository implements Reloadable {
+ private static final org.slf4j.Logger log = org.slf4j.LoggerFactory.getLogger(JsonFileRepository.class);
+
+ public static JsonFileRepository newInstance(Object... args) {
+ if (Objects.requireNonNull(args).length < 2) {
+ throw new IllegalArgumentException(
+ "In order to create a JSON file repository, you need to specify at least ExtensionManager and entity class.");
+ }
+
+ ExtensionManager manager = (ExtensionManager) Objects.requireNonNull(args[0]);
+ Class entityClass = (Class) Objects.requireNonNull(args[1]);
+
+ JsonFileRepository repo = new JsonFileRepository<>(entityClass);
+ String defaultDir = entityClass.getSimpleName().toLowerCase();
+ String defaultEnv = entityClass.getSimpleName().toUpperCase() + "_CONFIG_DIR";
+ String defaultProp = "jdbc-bridge." + defaultDir + ".config.dir";
+ if (NamedDataSource.class.equals(entityClass)) {
+ defaultDir = "datasources";
+ defaultEnv = "DATASOURCE_CONFIG_DIR";
+ defaultProp = "jdbc-bridge.datasource.config.dir";
+ } else if (NamedSchema.class.equals(entityClass)) {
+ defaultDir = "schemas";
+ defaultEnv = "SCHEMA_CONFIG_DIR";
+ defaultProp = "jdbc-bridge.schema.config.dir";
+ } else if (NamedQuery.class.equals(entityClass)) {
+ defaultDir = "queries";
+ defaultEnv = "QUERY_CONFIG_DIR";
+ defaultProp = "jdbc-bridge.query.config.dir";
+ }
+
+ manager.registerConfigLoader(Utils.getConfiguration(defaultDir, defaultEnv, defaultProp), repo::reload);
+
+ return repo;
+ }
+
+ public JsonFileRepository(Class clazz) {
+ super(clazz);
+ }
+
+ @Override
+ public void reload(JsonObject config) {
+ if (config == null || config.fieldNames().size() == 0) {
+ log.info("No {} configuration found", getEntityName());
+
+ HashSet keys = new HashSet<>();
+ for (String key : mappings.keySet()) {
+ keys.add(key);
+ }
+
+ for (String key : keys) {
+ remove(key);
+ }
+ // mappings.clear();
+ } else {
+ log.info("Loading {} configuration...", getEntityName());
+ HashSet keys = new HashSet<>();
+ for (Entry entry : config) {
+ String key = entry.getKey();
+ Object value = entry.getValue();
+ if (key != null && value instanceof JsonObject) {
+ keys.add(key);
+ update(key, (JsonObject) value);
+ }
+ }
+
+ HashSet entityIds = new HashSet<>();
+ mappings.entrySet().forEach(entry -> {
+ String id = entry.getKey();
+ T entity = entry.getValue();
+ if (id != null && !id.isEmpty() && entity != null && id.equals(entity.getId())) {
+ entityIds.add(id);
+ }
+ });
+
+ for (String id : entityIds) {
+ if (!keys.contains(id)) {
+ remove(id);
+ }
+ }
+ }
+ }
+}
diff --git a/src/main/java/ru/yandex/clickhouse/jdbcbridge/impl/ScriptDataSource.java b/src/main/java/ru/yandex/clickhouse/jdbcbridge/impl/ScriptDataSource.java
index d5d6f02..ca5adf0 100644
--- a/src/main/java/ru/yandex/clickhouse/jdbcbridge/impl/ScriptDataSource.java
+++ b/src/main/java/ru/yandex/clickhouse/jdbcbridge/impl/ScriptDataSource.java
@@ -33,13 +33,14 @@
import ru.yandex.clickhouse.jdbcbridge.core.DataAccessException;
import ru.yandex.clickhouse.jdbcbridge.core.ColumnDefinition;
import ru.yandex.clickhouse.jdbcbridge.core.TableDefinition;
-import ru.yandex.clickhouse.jdbcbridge.core.DataSourceManager;
import ru.yandex.clickhouse.jdbcbridge.core.DataTableReader;
+import ru.yandex.clickhouse.jdbcbridge.core.DataTypeConverter;
import ru.yandex.clickhouse.jdbcbridge.core.DefaultValues;
import ru.yandex.clickhouse.jdbcbridge.core.Extension;
import ru.yandex.clickhouse.jdbcbridge.core.ExtensionManager;
import ru.yandex.clickhouse.jdbcbridge.core.NamedDataSource;
import ru.yandex.clickhouse.jdbcbridge.core.QueryParameters;
+import ru.yandex.clickhouse.jdbcbridge.core.Repository;
import ru.yandex.clickhouse.jdbcbridge.core.ResponseWriter;
import ru.yandex.clickhouse.jdbcbridge.core.Utils;
@@ -60,11 +61,13 @@ public class ScriptDataSource extends NamedDataSource {
public static final String FUNC_GET_RESULTS = "__results__";
static class ScriptResultReader implements DataTableReader {
+ private final DataTypeConverter converter;
private final Object[][] values;
private int currentRow = 0;
- protected ScriptResultReader(Object result, String... columnNames) {
+ protected ScriptResultReader(DataTypeConverter converter, Object result, String... columnNames) {
+ this.converter = Objects.requireNonNull(converter);
values = Utils.toObjectArrays(result, columnNames);
}
@@ -128,7 +131,7 @@ public void read(int row, int column, ColumnDefinition metadata, ByteBuffer buff
buffer.writeDateTime(converter.as(Date.class, v), metadata.getTimeZone());
break;
case DateTime64:
- buffer.writeDateTime64(converter.as(Date.class, v), metadata.getTimeZone());
+ buffer.writeDateTime64(converter.as(Date.class, v), metadata.getScale(), metadata.getTimeZone());
break;
case Decimal:
buffer.writeDecimal(converter.as(BigDecimal.class, v), metadata.getPrecision(),
@@ -158,7 +161,7 @@ public static void initialize(ExtensionManager manager) {
ScriptDataSource.vars.putAll(manager.getScriptableObjects());
Extension thisExtension = manager.getExtension(ScriptDataSource.class);
- manager.getDataSourceManager().registerType(EXTENSION_NAME, thisExtension);
+ manager.getRepositoryManager().getRepository(NamedDataSource.class).registerType(EXTENSION_NAME, thisExtension);
}
public static ScriptDataSource newInstance(Object... args) {
@@ -168,7 +171,7 @@ public static ScriptDataSource newInstance(Object... args) {
}
String id = (String) args[0];
- DataSourceManager manager = (DataSourceManager) Objects.requireNonNull(args[1]);
+ Repository manager = (Repository) Objects.requireNonNull(args[1]);
JsonObject config = args.length > 2 ? (JsonObject) args[2] : null;
return new ScriptDataSource(id, manager, config);
@@ -176,7 +179,7 @@ public static ScriptDataSource newInstance(Object... args) {
private final ScriptEngineManager scriptManager;
- protected ScriptDataSource(String id, DataSourceManager manager, JsonObject config) {
+ protected ScriptDataSource(String id, Repository manager, JsonObject config) {
super(id, manager, config);
ClassLoader loader = getDriverClassLoader();
@@ -218,7 +221,15 @@ protected ScriptEngine getScriptEngine(String schema, String query) {
protected TableDefinition guessColumns(ScriptEngine engine, Object result, QueryParameters params) {
TableDefinition columns = TableDefinition.DEFAULT_RESULT_COLUMNS;
+ if (log.isDebugEnabled()) {
+ log.debug("Got result from script engine: [{}]", result == null ? null : result.getClass().getName());
+ }
+
if (result == null) {
+ if (log.isDebugEnabled()) {
+ log.debug("Trying to infer types by calling function [{}] or reading variable with same name",
+ FUNC_INFER_TYPES);
+ }
try {
try {
Invocable i = (Invocable) engine;
@@ -231,12 +242,19 @@ protected TableDefinition guessColumns(ScriptEngine engine, Object result, Query
throw new IllegalStateException("Failed to execute given script", e);
}
} else if (result instanceof ResultSet) {
+ if (log.isDebugEnabled()) {
+ log.debug("Trying to infer types from JDBC ResultSet");
+ }
try (JdbcDataSource jdbc = new JdbcDataSource(JdbcDataSource.EXTENSION_NAME, null, null)) {
jdbc.getColumnsFromResultSet((ResultSet) result, params);
} catch (SQLException e) {
throw new DataAccessException(getId(), e);
}
} else {
+ if (log.isDebugEnabled()) {
+ log.debug("No clue on types so let's go with default");
+ }
+
columns = new TableDefinition(new ColumnDefinition(Utils.DEFAULT_COLUMN_NAME, converter.from(result), true,
DEFAULT_LENGTH, DEFAULT_PRECISION, DEFAULT_SCALE));
}
@@ -302,7 +320,7 @@ protected void writeQueryResult(String schema, String originalQuery, String load
names[i] = resultColumns[i].getName();
}
- DataTableReader reader = new ScriptResultReader(result, names);
+ DataTableReader reader = new ScriptResultReader(converter, result, names);
reader.process(getId(), requestColumns, customColumns, resultColumns, defaultValues, getTimeZone(),
params, writer);
}
diff --git a/src/main/resources/META-INF/services/ru.yandex.clickhouse.jdbcbridge.core.DataSourceManager b/src/main/resources/META-INF/services/ru.yandex.clickhouse.jdbcbridge.core.DataSourceManager
deleted file mode 100644
index e782f87..0000000
--- a/src/main/resources/META-INF/services/ru.yandex.clickhouse.jdbcbridge.core.DataSourceManager
+++ /dev/null
@@ -1 +0,0 @@
-ru.yandex.clickhouse.jdbcbridge.impl.DefaultDataSourceManager
\ No newline at end of file
diff --git a/src/main/resources/META-INF/services/ru.yandex.clickhouse.jdbcbridge.core.QueryManager b/src/main/resources/META-INF/services/ru.yandex.clickhouse.jdbcbridge.core.QueryManager
deleted file mode 100644
index a6162dc..0000000
--- a/src/main/resources/META-INF/services/ru.yandex.clickhouse.jdbcbridge.core.QueryManager
+++ /dev/null
@@ -1 +0,0 @@
-ru.yandex.clickhouse.jdbcbridge.impl.DefaultQueryManager
\ No newline at end of file
diff --git a/src/main/resources/META-INF/services/ru.yandex.clickhouse.jdbcbridge.core.RepositoryManager b/src/main/resources/META-INF/services/ru.yandex.clickhouse.jdbcbridge.core.RepositoryManager
new file mode 100644
index 0000000..c9a478b
--- /dev/null
+++ b/src/main/resources/META-INF/services/ru.yandex.clickhouse.jdbcbridge.core.RepositoryManager
@@ -0,0 +1 @@
+ru.yandex.clickhouse.jdbcbridge.impl.DefaultRepositoryManager
\ No newline at end of file
diff --git a/src/main/resources/META-INF/services/ru.yandex.clickhouse.jdbcbridge.core.SchemaManager b/src/main/resources/META-INF/services/ru.yandex.clickhouse.jdbcbridge.core.SchemaManager
deleted file mode 100644
index 3de48cf..0000000
--- a/src/main/resources/META-INF/services/ru.yandex.clickhouse.jdbcbridge.core.SchemaManager
+++ /dev/null
@@ -1 +0,0 @@
-ru.yandex.clickhouse.jdbcbridge.impl.DefaultSchemaManager
\ No newline at end of file
diff --git a/src/test/java/ru/yandex/clickhouse/jdbcbridge/JdbcBridgeVerticleTest.java b/src/test/java/ru/yandex/clickhouse/jdbcbridge/JdbcBridgeVerticleTest.java
index 4f1068b..0bb92d7 100644
--- a/src/test/java/ru/yandex/clickhouse/jdbcbridge/JdbcBridgeVerticleTest.java
+++ b/src/test/java/ru/yandex/clickhouse/jdbcbridge/JdbcBridgeVerticleTest.java
@@ -19,12 +19,66 @@
import java.util.List;
+import org.testng.annotations.AfterSuite;
+import org.testng.annotations.BeforeSuite;
import org.testng.annotations.Test;
+import io.vertx.core.Vertx;
+import ru.yandex.clickhouse.jdbcbridge.core.BaseRepository;
+import ru.yandex.clickhouse.jdbcbridge.core.ManagedEntity;
import ru.yandex.clickhouse.jdbcbridge.core.Extension;
+import ru.yandex.clickhouse.jdbcbridge.core.ExtensionManager;
+import ru.yandex.clickhouse.jdbcbridge.core.NamedDataSource;
+import ru.yandex.clickhouse.jdbcbridge.core.NamedQuery;
+import ru.yandex.clickhouse.jdbcbridge.core.NamedSchema;
+import ru.yandex.clickhouse.jdbcbridge.core.Repository;
import ru.yandex.clickhouse.jdbcbridge.core.Utils;
+import ru.yandex.clickhouse.jdbcbridge.impl.JsonFileRepository;
public class JdbcBridgeVerticleTest {
+ private Vertx vertx;
+
+ public static class TestRepository<T extends ManagedEntity> extends BaseRepository<T> {
+ public TestRepository(ExtensionManager manager, Class<T> clazz) {
+ super(clazz);
+ }
+ }
+
+ @BeforeSuite(groups = { "unit" })
+ public void beforeSuite() {
+ vertx = Vertx.vertx();
+ }
+
+ @AfterSuite(groups = { "unit" })
+ public void afterSuite() {
+ if (vertx != null) {
+ vertx.close();
+ }
+ }
+
+ @Test(groups = { "unit" })
+ public void testLoadRepositories() {
+ JdbcBridgeVerticle main = new JdbcBridgeVerticle();
+ vertx.deployVerticle(main);
+ List<Repository<?>> repos = main.loadRepositories(null);
+ assertNotNull(repos);
+ assertEquals(repos.size(), 3);
+ assertEquals(repos.get(0).getClass(), JsonFileRepository.class);
+ assertEquals(repos.get(0).getEntityClass(), NamedDataSource.class);
+ assertEquals(repos.get(1).getClass(), JsonFileRepository.class);
+ assertEquals(repos.get(1).getEntityClass(), NamedSchema.class);
+ assertEquals(repos.get(2).getClass(), JsonFileRepository.class);
+ assertEquals(repos.get(2).getEntityClass(), NamedQuery.class);
+
+ repos = main.loadRepositories(Utils.loadJsonFromFile("src/test/resources/server.json"));
+ assertNotNull(repos);
+ assertEquals(repos.size(), 2);
+ assertEquals(repos.get(0).getClass(), JsonFileRepository.class);
+ assertEquals(repos.get(0).getEntityClass(), NamedDataSource.class);
+ assertEquals(repos.get(1).getClass(), TestRepository.class);
+ assertEquals(repos.get(1).getEntityClass(), NamedSchema.class);
+ }
+
@Test(groups = { "unit" })
public void testLoadExtensions() {
JdbcBridgeVerticle main = new JdbcBridgeVerticle();
diff --git a/src/test/java/ru/yandex/clickhouse/jdbcbridge/core/ByteBufferTest.java b/src/test/java/ru/yandex/clickhouse/jdbcbridge/core/ByteBufferTest.java
index 3d18a43..84f2a3b 100644
--- a/src/test/java/ru/yandex/clickhouse/jdbcbridge/core/ByteBufferTest.java
+++ b/src/test/java/ru/yandex/clickhouse/jdbcbridge/core/ByteBufferTest.java
@@ -20,6 +20,7 @@
import java.math.BigDecimal;
import java.math.BigInteger;
import java.util.Date;
+import java.util.TimeZone;
import org.testng.annotations.Test;
@@ -159,9 +160,9 @@ public void testWriteAndRead() {
buffer.writeDate(date1);
buffer.writeDate(date2);
buffer.writeDateTime(dt1);
- buffer.writeDateTime(dt2);
- buffer.writeDateTime64(xdt1);
- buffer.writeDateTime64(xdt2);
+ buffer.writeDateTime(dt2, TimeZone.getTimeZone("UTC"));
+ buffer.writeDateTime64(xdt1, 3);
+ buffer.writeDateTime64(xdt2, 3);
assertEquals(buffer.readNull(), true);
assertEquals(buffer.readNull(), false);
@@ -199,7 +200,7 @@ public void testWriteAndRead() {
assertEquals(buffer.readDate(), date1);
assertEquals(buffer.readDate(), date2);
assertEquals(buffer.readDateTime(), dt1);
- assertEquals(buffer.readDateTime(), dt2);
+ assertEquals(buffer.readDateTime(TimeZone.getTimeZone("UTC")), dt2);
assertEquals(buffer.readDateTime64(), xdt1);
assertEquals(buffer.readDateTime64(), xdt2);
}
diff --git a/src/test/java/ru/yandex/clickhouse/jdbcbridge/core/ColumnDefinitionTest.java b/src/test/java/ru/yandex/clickhouse/jdbcbridge/core/ColumnDefinitionTest.java
index 0bde24c..fd98b15 100644
--- a/src/test/java/ru/yandex/clickhouse/jdbcbridge/core/ColumnDefinitionTest.java
+++ b/src/test/java/ru/yandex/clickhouse/jdbcbridge/core/ColumnDefinitionTest.java
@@ -181,8 +181,8 @@ public void testFromString() {
assertEquals(ColumnDefinition.fromString("d Nullable(DateTime('Asia/Chongqing'))").toString(),
"`d` Nullable(DateTime('Asia/Chongqing'))");
assertEquals(ColumnDefinition.fromString("d Nullable(DateTime64(2, 'Asia/Chongqing')) DEFAULT 1").toString(),
- ColumnDefinition.DEFAULT_VALUE_SUPPORT ? "`d` Nullable(DateTime64(3,'Asia/Chongqing')) DEFAULT 1"
- : "`d` Nullable(DateTime64(3,'Asia/Chongqing'))");
+ ColumnDefinition.DEFAULT_VALUE_SUPPORT ? "`d` Nullable(DateTime64(2,'Asia/Chongqing')) DEFAULT 1"
+ : "`d` Nullable(DateTime64(2,'Asia/Chongqing'))");
assertEquals(ColumnDefinition.fromString("d Nullable(Enum('A'=1, 'B'=2,'C'=3)) DEFAULT 2").toString(),
ColumnDefinition.DEFAULT_VALUE_SUPPORT ? "`d` Nullable(Enum('A'=1,'B'=2,'C'=3)) DEFAULT 2"
diff --git a/src/test/java/ru/yandex/clickhouse/jdbcbridge/core/DataTypeMappingTest.java b/src/test/java/ru/yandex/clickhouse/jdbcbridge/core/DataTypeMappingTest.java
new file mode 100644
index 0000000..a45141b
--- /dev/null
+++ b/src/test/java/ru/yandex/clickhouse/jdbcbridge/core/DataTypeMappingTest.java
@@ -0,0 +1,51 @@
+/**
+ * Copyright 2019-2020, Zhichun Wu
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package ru.yandex.clickhouse.jdbcbridge.core;
+
+import static org.testng.Assert.*;
+
+import java.sql.JDBCType;
+import java.sql.Types;
+
+import org.testng.annotations.Test;
+
+public class DataTypeMappingTest {
+ @Test(groups = { "unit" })
+ public void testConstructor() {
+ DataTypeMapping m = new DataTypeMapping(Types.BOOLEAN, null, DataType.Bool);
+ assertEquals(m.getSourceJdbcType(), JDBCType.BOOLEAN);
+ assertEquals(m.getSourceNativeType(), null);
+ assertTrue(m.accept(JDBCType.BOOLEAN, null));
+ assertEquals(m.getMappedType(), DataType.Bool);
+
+ m = new DataTypeMapping("boolean", "bool", "String");
+ assertEquals(m.getSourceJdbcType(), JDBCType.BOOLEAN);
+ assertEquals(m.getSourceNativeType(), "bool");
+ assertFalse(m.accept(JDBCType.BOOLEAN, null));
+ assertFalse(m.accept(JDBCType.BOOLEAN, "Bool"));
+ assertTrue(m.accept(JDBCType.VARCHAR, "bool"));
+ assertEquals(m.getMappedType(), DataType.Str);
+
+ m = new DataTypeMapping("bit", "*", "Int8");
+ assertEquals(m.getSourceJdbcType(), JDBCType.BIT);
+ assertEquals(m.getSourceNativeType(), "*");
+ assertTrue(m.getSourceNativeType() == DataTypeMapping.ANY_NATIVE_TYPE);
+ assertTrue(m.accept(JDBCType.BOOLEAN, null));
+ assertTrue(m.accept(JDBCType.BIT, "Bool"));
+ assertTrue(m.accept(JDBCType.VARCHAR, "bit"));
+ assertEquals(m.getMappedType(), DataType.Int8);
+ }
+}
\ No newline at end of file
diff --git a/src/test/java/ru/yandex/clickhouse/jdbcbridge/core/ExpandedUrlClassLoaderTest.java b/src/test/java/ru/yandex/clickhouse/jdbcbridge/core/ExpandedUrlClassLoaderTest.java
index ddfb3dc..54a0521 100644
--- a/src/test/java/ru/yandex/clickhouse/jdbcbridge/core/ExpandedUrlClassLoaderTest.java
+++ b/src/test/java/ru/yandex/clickhouse/jdbcbridge/core/ExpandedUrlClassLoaderTest.java
@@ -30,6 +30,8 @@
import org.testng.annotations.Test;
+import static ru.yandex.clickhouse.jdbcbridge.core.ExpandedUrlClassLoader.FILE_URL_PREFIX;
+
public class ExpandedUrlClassLoaderTest {
private static final String TMP_DIR_PREFIX = "jdbc-bridge-test_";
@@ -93,7 +95,7 @@ private ClassLoader testLoadClassAndMethod(ClassLoader parent, String[] urls, St
@Test(groups = { "unit" })
public void testExpandURLs() throws IOException {
// invalid URLs
- URL[] urls = ExpandedUrlClassLoader.expandURLs("a", "b", ".", "..", "", null, "/", "\\");
+ URL[] urls = ExpandedUrlClassLoader.expandURLs("a", "b", ".", "..", "", null, File.separator);
assertNotNull(urls);
assertEquals(urls.length, 5);
@@ -107,7 +109,7 @@ public void testExpandURLs() throws IOException {
assertEquals(urls.length, 3);
// now, local paths
- url1 = "file:///.";
+ url1 = FILE_URL_PREFIX + ".";
urls = ExpandedUrlClassLoader.expandURLs(url1, null, url1);
assertNotNull(urls);
assertEquals(urls.length, 1);
@@ -116,20 +118,20 @@ public void testExpandURLs() throws IOException {
tmpDir.deleteOnExit();
for (String file : new String[] { "a.jar", "b.jar" }) {
- File tmpFile = new File(tmpDir.getPath() + "/" + file);
+ File tmpFile = new File(tmpDir.getPath() + File.separator + file);
tmpFile.deleteOnExit();
tmpFile.createNewFile();
}
- url1 = "file:///" + tmpDir.getPath();
- url2 = "file:///" + tmpDir.getPath() + "/a.jar";
- url3 = "file:///" + tmpDir.getPath() + "/non-exist.jar";
+ url1 = FILE_URL_PREFIX + tmpDir.getPath();
+ url2 = FILE_URL_PREFIX + tmpDir.getPath() + File.separator + "a.jar";
+ url3 = FILE_URL_PREFIX + tmpDir.getPath() + File.separator + "non-exist.jar";
urls = ExpandedUrlClassLoader.expandURLs(url1, url2, url1, url3, url2);
assertNotNull(urls);
assertEquals(urls.length, 4);
- url1 = "test/a";
- url2 = "./test/a";
- url3 = "file:///./test/a";
+ url1 = "test" + File.separator + "a";
+ url2 = "." + File.separator + "test" + File.separator + "a";
+ url3 = FILE_URL_PREFIX + "." + File.separator + "test" + File.separator + "a";
urls = ExpandedUrlClassLoader.expandURLs(url1, url2, url3);
assertNotNull(urls);
assertEquals(urls.length, 2);
@@ -170,38 +172,43 @@ public void testClassLoaderWithOldAndNewClass() throws IOException {
for (String[] pair : new String[][] { new String[] { notRelated, "a.jar" },
new String[] { oldVersion, "b.jar" }, new String[] { newVersion, "c.jar" } }) {
- downloadJar(pair[0], tmpDir.getPath() + "/" + pair[1]);
+ downloadJar(pair[0], tmpDir.getPath() + File.separator + pair[1]);
}
- testLoadClassAndMethod(null, new String[] { "file:///" + tmpDir.getPath() }, className, methodName, true,
+ testLoadClassAndMethod(null, new String[] { FILE_URL_PREFIX + tmpDir.getPath() }, className, methodName, true,
false);
testLoadClassAndMethod(null, new String[] { tmpDir.getPath() }, className, methodName, true, false);
- testLoadClassAndMethod(null,
- new String[] { "file:///" + tmpDir.getPath() + "/c.jar", "file:///" + tmpDir.getPath() }, className,
- methodName, true, true);
+ testLoadClassAndMethod(null, new String[] { FILE_URL_PREFIX + tmpDir.getPath() + File.separator + "c.jar",
+ FILE_URL_PREFIX + tmpDir.getPath() }, className, methodName, true, true);
// try again using relative path
String parentPath = "test-dir";
Paths.get(parentPath).toFile().deleteOnExit();
- String relativePath = parentPath + "/drivers";
+ String relativePath = parentPath + File.separator + "drivers";
tmpDir = Paths.get(relativePath).toFile();
tmpDir.deleteOnExit();
tmpDir.mkdirs();
for (String[] pair : new String[][] { new String[] { notRelated, "a.jar" },
- new String[] { oldVersion, "b.jar" }, new String[] { newVersion, "../c.jar" } }) {
- downloadJar(pair[0], tmpDir.getPath() + "/" + pair[1]);
+ new String[] { oldVersion, "b.jar" }, new String[] { newVersion, ".." + File.separator + "c.jar" } }) {
+ downloadJar(pair[0], tmpDir.getPath() + File.separator + pair[1]);
}
testLoadClassAndMethod(null, new String[] { relativePath }, className, methodName, true, false);
- testLoadClassAndMethod(null, new String[] { "./" + relativePath }, className, methodName, true, false);
- testLoadClassAndMethod(null, new String[] { relativePath + "/../drivers" }, className, methodName, true, false);
- testLoadClassAndMethod(null, new String[] { "./" + relativePath + "/../drivers" }, className, methodName, true,
+ testLoadClassAndMethod(null, new String[] { "." + File.separator + relativePath }, className, methodName, true,
false);
- testLoadClassAndMethod(null, new String[] { relativePath + "/../c.jar" }, className, methodName, true, true);
-
- testLoadClassAndMethod(null, new String[] { "file:///./" + relativePath }, className, methodName, false, false);
+ testLoadClassAndMethod(null, new String[] { relativePath + File.separator + ".." + File.separator + "drivers" },
+ className, methodName, true, false);
+ testLoadClassAndMethod(null,
+ new String[] {
+ "." + File.separator + relativePath + File.separator + ".." + File.separator + "drivers" },
+ className, methodName, true, false);
+ testLoadClassAndMethod(null, new String[] { relativePath + File.separator + ".." + File.separator + "c.jar" },
+ className, methodName, true, true);
+
+ testLoadClassAndMethod(null, new String[] { FILE_URL_PREFIX + "." + File.separator + relativePath }, className,
+ methodName, false, false);
}
}
diff --git a/src/test/java/ru/yandex/clickhouse/jdbcbridge/core/NamedDataSourceTest.java b/src/test/java/ru/yandex/clickhouse/jdbcbridge/core/NamedDataSourceTest.java
index b6685bf..986501f 100644
--- a/src/test/java/ru/yandex/clickhouse/jdbcbridge/core/NamedDataSourceTest.java
+++ b/src/test/java/ru/yandex/clickhouse/jdbcbridge/core/NamedDataSourceTest.java
@@ -25,32 +25,25 @@
import io.vertx.core.json.JsonObject;
public class NamedDataSourceTest {
- static class DummyDataSourceManager implements DataSourceManager {
- @Override
- public String resolve(String uri) {
- return uri;
- }
+ static class TestRepository<T extends ManagedEntity> extends BaseRepository<T> {
+ private int counter = 0;
- @Override
- public List getDataSourceStats() {
- return new ArrayList<>();
+ public TestRepository(Class<T> clazz) {
+ super(clazz);
}
@Override
- public void reload(JsonObject config) {
+ protected void atomicAdd(T entity) {
+ counter++;
}
@Override
- public NamedDataSource get(String uri, boolean orCreate) {
- return null;
+ protected void atomicRemove(T entity) {
+ counter--;
}
- @Override
- public void registerType(String typeName, Extension extension) {
- }
-
- @Override
- public void put(String id, NamedDataSource ds) {
+ public int getCounter() {
+ return this.counter;
}
}
@@ -59,7 +52,7 @@ public void testConstructor() {
String dataSourceId = "test-datasource";
JsonObject config = Utils.loadJsonFromFile("src/test/resources/datasources/test-datasource.json");
- NamedDataSource ds = new NamedDataSource(dataSourceId, new DummyDataSourceManager(),
+ NamedDataSource ds = new NamedDataSource(dataSourceId, new TestRepository<>(NamedDataSource.class),
config.getJsonObject(dataSourceId));
assertEquals(ds.getId(), dataSourceId);
for (ColumnDefinition col : ds.getCustomColumns()) {
@@ -110,7 +103,7 @@ public void testGetColumns() {
String dataSourceId = "test-datasource";
JsonObject config = Utils.loadJsonFromFile("src/test/resources/datasources/test-datasource.json");
- NamedDataSource ds = new NamedDataSource(dataSourceId, new DummyDataSourceManager(),
+ NamedDataSource ds = new NamedDataSource(dataSourceId, new TestRepository<>(NamedDataSource.class),
config.getJsonObject(dataSourceId));
ds.getResultColumns("", "src/test/resources/simple.query", new QueryParameters());
assertEquals(ds.getId(), dataSourceId);
diff --git a/src/test/java/ru/yandex/clickhouse/jdbcbridge/impl/DefaultDataSourceManagerTest.java b/src/test/java/ru/yandex/clickhouse/jdbcbridge/impl/DefaultDataSourceManagerTest.java
deleted file mode 100644
index 647c863..0000000
--- a/src/test/java/ru/yandex/clickhouse/jdbcbridge/impl/DefaultDataSourceManagerTest.java
+++ /dev/null
@@ -1,151 +0,0 @@
-/**
- * Copyright 2019-2020, Zhichun Wu
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package ru.yandex.clickhouse.jdbcbridge.impl;
-
-import static org.testng.Assert.*;
-
-import ru.yandex.clickhouse.jdbcbridge.core.DataSourceManager;
-import ru.yandex.clickhouse.jdbcbridge.core.Extension;
-import ru.yandex.clickhouse.jdbcbridge.core.NamedDataSource;
-import ru.yandex.clickhouse.jdbcbridge.core.Utils;
-
-import org.testng.annotations.Test;
-
-import io.vertx.core.json.JsonObject;
-
-public class DefaultDataSourceManagerTest {
- @Test(groups = { "unit" })
- public void testGet() {
- DefaultDataSourceManager manager = new DefaultDataSourceManager();
- assertNotNull(manager.get("non-existent-ds", true));
- assertThrows(IllegalArgumentException.class, new ThrowingRunnable() {
- @Override
- public void run() throws Throwable {
- manager.get("non-existent-ds", false);
- }
- });
- assertNotNull(manager.get("invalid-type:non-existent-ds", true));
- assertThrows(IllegalArgumentException.class, new ThrowingRunnable() {
- @Override
- public void run() throws Throwable {
- manager.get("invalid-type:non-existent-ds", false);
- }
- });
-
- manager.registerType(JdbcDataSource.EXTENSION_NAME, new Extension(JdbcDataSource.class));
-
- String uri = "some invalid uri";
- NamedDataSource ds = manager.get(uri, true);
- assertNotNull(ds);
- assertEquals(ds.getId(), uri);
-
- uri = "jdbc:mysql://localhost:3306/test?useSSL=false";
- ds = manager.get(uri, true);
- assertNotNull(ds);
- assertEquals(ds.getId(), uri);
-
- uri = "jdbc:weird:vendor:hostname:1234?database=test";
- ds = manager.get(uri, true);
- assertNotNull(ds);
- assertEquals(ds.getId(), uri);
-
- uri = "jenkins:https://my.ci-server.org/internal/";
- ds = manager.get(uri, true);
- assertNotNull(ds);
- assertEquals(ds.getId(), uri);
- }
-
- @Test(groups = { "unit" })
- public void testPutAndGet() {
- DataSourceManager manager = new DefaultDataSourceManager();
-
- manager.put(null, null);
- manager.put("random", null);
-
- String id = "nds";
- JsonObject config = Utils.loadJsonFromFile("src/test/resources/datasources/test-nds.json")
- .getJsonObject("test-nds");
-
- NamedDataSource nds1 = new NamedDataSource("nds1", manager, config);
- manager.put(id, nds1);
- assertThrows(IllegalArgumentException.class, new ThrowingRunnable() {
- @Override
- public void run() throws Throwable {
- manager.get(nds1.getId(), false);
- }
- });
- assertEquals(manager.get(id, false), nds1);
- assertEquals(manager.get("nds01", false), nds1);
- assertEquals(manager.get("nds001", false), nds1);
-
- NamedDataSource nds2 = new NamedDataSource("nds02", manager, config);
- manager.put("nds02", nds2);
- assertEquals(manager.get(id, false), nds1);
- assertEquals(manager.get("nds02", false), nds2);
- assertEquals(manager.get("nds01", false), nds2);
- assertEquals(manager.get("nds001", false), nds2);
-
- NamedDataSource nds3 = new NamedDataSource(id, manager, config);
- manager.put(id, nds3);
- assertEquals(manager.get(id, false), nds3);
- assertEquals(manager.get("nds02", false), nds2);
- assertEquals(manager.get("nds01", false), nds3);
- assertEquals(manager.get("nds001", false), nds3);
-
- NamedDataSource nds4 = new NamedDataSource("nds04", manager,
- Utils.loadJsonFromFile("src/test/resources/datasources/test-datasource.json")
- .getJsonObject("test-datasource"));
- manager.put("nds01", nds4);
- assertEquals(manager.get(id, false), nds3);
- assertEquals(manager.get("nds02", false), nds2);
- assertEquals(manager.get("nds01", false), nds4);
- assertThrows(IllegalArgumentException.class, new ThrowingRunnable() {
- @Override
- public void run() throws Throwable {
- manager.get("nds001", false);
- }
- });
-
- manager.put(id, nds1);
- manager.put(id, nds4);
- assertEquals(manager.get(id, false), nds4);
- assertEquals(manager.get("nds02", false), nds2);
- assertThrows(IllegalArgumentException.class, new ThrowingRunnable() {
- @Override
- public void run() throws Throwable {
- manager.get("nds01", false);
- }
- });
- assertThrows(IllegalArgumentException.class, new ThrowingRunnable() {
- @Override
- public void run() throws Throwable {
- manager.get("nds001", false);
- }
- });
- }
-
- @Test(groups = { "sit" })
- public void testSrvRecordSupport() {
- DefaultDataSourceManager manager = new DefaultDataSourceManager();
-
- String host = "_sip._udp.sip.voice.google.com";
- String port = "5060";
- String hostAndPort = host + ":" + port;
-
- assertEquals(manager.resolve("jdbc://{{ _sip._udp.sip.voice.google.com }}/aaa"),
- "jdbc://" + hostAndPort + "/aaa");
- }
-}
\ No newline at end of file
diff --git a/src/test/java/ru/yandex/clickhouse/jdbcbridge/impl/JsonFileRepositoryTest.java b/src/test/java/ru/yandex/clickhouse/jdbcbridge/impl/JsonFileRepositoryTest.java
new file mode 100644
index 0000000..5c74467
--- /dev/null
+++ b/src/test/java/ru/yandex/clickhouse/jdbcbridge/impl/JsonFileRepositoryTest.java
@@ -0,0 +1,167 @@
+/**
+ * Copyright 2019-2020, Zhichun Wu
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package ru.yandex.clickhouse.jdbcbridge.impl;
+
+import static org.testng.Assert.*;
+
+import java.util.HashMap;
+import java.util.Map;
+import java.util.function.Consumer;
+
+import ru.yandex.clickhouse.jdbcbridge.core.Extension;
+import ru.yandex.clickhouse.jdbcbridge.core.ExtensionManager;
+import ru.yandex.clickhouse.jdbcbridge.core.NamedDataSource;
+import ru.yandex.clickhouse.jdbcbridge.core.Repository;
+import ru.yandex.clickhouse.jdbcbridge.core.RepositoryManager;
+import ru.yandex.clickhouse.jdbcbridge.core.Utils;
+
+import org.testng.annotations.Test;
+
+import io.vertx.core.json.JsonObject;
+
+public class JsonFileRepositoryTest {
+ static class TestExtensionManager implements ExtensionManager {
+ private final RepositoryManager repoManager = new DefaultRepositoryManager();
+
+ @Override
+ public <T> Extension<T> getExtension(Class<? extends T> clazz) {
+ // TODO Auto-generated method stub
+ return null;
+ }
+
+ @Override
+ public RepositoryManager getRepositoryManager() {
+ return repoManager;
+ }
+
+ @Override
+ public void registerConfigLoader(String configPath, Consumer<JsonObject> consumer) {
+ }
+
+ @Override
+ public Map<String, Object> getScriptableObjects() {
+ return new HashMap<>();
+ }
+ }
+
+ @Test(groups = { "unit" })
+ public void testGet() {
+ Repository<NamedDataSource> repo = new JsonFileRepository<>(NamedDataSource.class);
+ assertNull(repo.get("non-existent-ds"));
+ assertNull(repo.get("invalid-type:non-existent-ds"));
+
+ repo.registerType("jdbc", new Extension(NamedDataSource.class));
+ assertThrows(IllegalArgumentException.class, new ThrowingRunnable() {
+ @Override
+ public void run() throws Throwable {
+ repo.get("non-existent-ds");
+ }
+ });
+ assertThrows(IllegalArgumentException.class, new ThrowingRunnable() {
+ @Override
+ public void run() throws Throwable {
+ repo.get("invalid-type:non-existent-ds");
+ }
+ });
+ assertThrows(IllegalArgumentException.class, new ThrowingRunnable() {
+ @Override
+ public void run() throws Throwable {
+ repo.get("jenkins:https://my.ci-server.org/internal/");
+ }
+ });
+
+ String uri = "jdbc:mysql://localhost:3306/test?useSSL=false";
+ NamedDataSource ds = repo.get(uri);
+ assertNotNull(ds);
+ assertEquals(ds.getId(), uri);
+
+ uri = "jdbc:weird:vendor:hostname:1234?database=test";
+ ds = repo.get(uri);
+ assertNotNull(ds);
+ assertEquals(ds.getId(), uri);
+ }
+
+ @Test(groups = { "unit" })
+ public void testPutAndGet() {
+ Repository<NamedDataSource> repo = new JsonFileRepository<>(NamedDataSource.class);
+
+ assertThrows(NullPointerException.class, new ThrowingRunnable() {
+ @Override
+ public void run() throws Throwable {
+ repo.put(null, null);
+ }
+ });
+ assertThrows(NullPointerException.class, new ThrowingRunnable() {
+ @Override
+ public void run() throws Throwable {
+ repo.put("random", null);
+ }
+ });
+
+ String id = "nds";
+ JsonObject config = Utils.loadJsonFromFile("src/test/resources/datasources/test-nds.json")
+ .getJsonObject("test-nds");
+
+ NamedDataSource nds1 = new NamedDataSource("nds1", repo, config);
+ repo.put(id, nds1);
+ assertNull(repo.get(nds1.getId()));
+ assertEquals(repo.get(id), nds1);
+ assertEquals(repo.get("nds01"), nds1);
+ assertEquals(repo.get("nds001"), nds1);
+
+ NamedDataSource nds2 = new NamedDataSource("nds02", repo, config);
+ repo.put("nds02", nds2);
+ assertEquals(repo.get(id), nds1);
+ assertEquals(repo.get("nds02"), nds2);
+ assertEquals(repo.get("nds01"), nds2);
+ assertEquals(repo.get("nds001"), nds2);
+
+ NamedDataSource nds3 = new NamedDataSource(id, repo, config);
+ repo.put(id, nds3);
+ assertEquals(repo.get(id), nds3);
+ assertEquals(repo.get("nds02"), nds2);
+ assertEquals(repo.get("nds01"), nds3);
+ assertEquals(repo.get("nds001"), nds3);
+
+ NamedDataSource nds4 = new NamedDataSource("nds04", repo,
+ Utils.loadJsonFromFile("src/test/resources/datasources/test-datasource.json")
+ .getJsonObject("test-datasource"));
+ repo.put("nds01", nds4);
+ assertEquals(repo.get(id), nds3);
+ assertEquals(repo.get("nds02"), nds2);
+ assertEquals(repo.get("nds01"), nds4);
+ assertEquals(repo.get("nds001"), nds3);
+
+ NamedDataSource nds5 = new NamedDataSource(id, repo, config);
+ repo.put(id, nds5);
+ repo.put(id, nds4);
+ assertEquals(repo.get(id), nds4);
+ assertEquals(repo.get("nds02"), nds2);
+ assertNull(repo.get("nds01"));
+ assertNull(repo.get("nds001"));
+ }
+
+ @Test(groups = { "sit" })
+ public void testSrvRecordSupport() {
+ Repository<NamedDataSource> repo = new JsonFileRepository<>(NamedDataSource.class);
+
+ String host = "_sip._udp.sip.voice.google.com";
+ String port = "5060";
+ String hostAndPort = host + ":" + port;
+
+ assertEquals(repo.resolve("jdbc://{{ _sip._udp.sip.voice.google.com }}/aaa"), "jdbc://" + hostAndPort + "/aaa");
+ }
+}
\ No newline at end of file
diff --git a/src/test/resources/server.json b/src/test/resources/server.json
index bb7d6d3..384cc64 100644
--- a/src/test/resources/server.json
+++ b/src/test/resources/server.json
@@ -2,6 +2,16 @@
"requestTimeout": 5000,
"queryTimeout": 30000,
"configScanPeriod": 5000,
+ "repositories": [
+ {
+ "entity": "ru.yandex.clickhouse.jdbcbridge.core.NamedDataSource",
+ "repository": "ru.yandex.clickhouse.jdbcbridge.impl.JsonFileRepository"
+ },
+ {
+ "entity": "ru.yandex.clickhouse.jdbcbridge.core.NamedSchema",
+ "repository": "ru.yandex.clickhouse.jdbcbridge.JdbcBridgeVerticleTest$TestRepository"
+ }
+ ],
"extensions": [
{
"class": "ru.yandex.clickhouse.jdbcbridge.impl.JdbcDataSource"