diff --git a/integration-test/src/test/java/org/apache/iotdb/relational/it/query/recent/scalar/IoTDBCRC32FunctionIT.java b/integration-test/src/test/java/org/apache/iotdb/relational/it/query/recent/scalar/IoTDBCRC32FunctionIT.java
new file mode 100644
index 000000000000..53a9efb85e0f
--- /dev/null
+++ b/integration-test/src/test/java/org/apache/iotdb/relational/it/query/recent/scalar/IoTDBCRC32FunctionIT.java
@@ -0,0 +1,149 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.iotdb.relational.it.query.recent.scalar;
+
+import org.apache.iotdb.it.env.EnvFactory;
+import org.apache.iotdb.it.framework.IoTDBTestRunner;
+import org.apache.iotdb.itbase.category.TableClusterIT;
+import org.apache.iotdb.itbase.category.TableLocalStandaloneIT;
+import org.apache.iotdb.rpc.TSStatusCode;
+
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+import org.junit.runner.RunWith;
+
+import static org.apache.iotdb.db.it.utils.TestUtils.prepareTableData;
+import static org.apache.iotdb.db.it.utils.TestUtils.tableAssertTestFail;
+import static org.apache.iotdb.db.it.utils.TestUtils.tableResultSetEqualTest;
+
+@RunWith(IoTDBTestRunner.class)
+@Category({TableLocalStandaloneIT.class, TableClusterIT.class})
+public class IoTDBCRC32FunctionIT {
+
+ private static final String DATABASE_NAME = "test_crc32_function";
+ private static final String[] createSqls =
+ new String[] {
+ "CREATE DATABASE " + DATABASE_NAME,
+ "USE " + DATABASE_NAME,
+ "CREATE TABLE table1(c_text TEXT FIELD, c_string STRING FIELD, c_blob BLOB FIELD, c_int INT32 FIELD)",
+ // 1. Test standard string 'hello'
+ "INSERT INTO table1(time, c_text, c_string) VALUES (1, 'hello', 'hello')",
+ // 2. Test Chinese characters '你好' (UTF-8)
+ "INSERT INTO table1(time, c_text, c_string) VALUES (2, '你好', '你好')",
+ // 3. Test empty string
+ "INSERT INTO table1(time, c_text, c_string) VALUES (3, '', '')",
+ // 4. Test null values
+ "INSERT INTO table1(time, c_int) VALUES (4, 404)",
+ // 5. Test special characters and spaces
+ "INSERT INTO table1(time, c_text, c_string) VALUES (5, 'Hello, World!', 'Hello, World!')",
+ // 6. Test blob data (hex representation of 'test')
+ "INSERT INTO table1(time, c_blob) VALUES (6, x'74657374')",
+ // 7. Test standard benchmark string '123456789'
+ "INSERT INTO table1(time, c_text) VALUES (7, '123456789')"
+ };
+
+ @BeforeClass
+ public static void setUp() throws Exception {
+ EnvFactory.getEnv().initClusterEnvironment();
+ prepareTableData(createSqls);
+ }
+
+ @AfterClass
+ public static void tearDown() throws Exception {
+ EnvFactory.getEnv().cleanClusterEnvironment();
+ }
+
+ /** Validate the CRC32 checksum for TEXT/STRING types */
+ @Test
+ public void testCrc32OnTextString() {
+
+ String[] expectedHeader = new String[] {"time", "crc32(c_text)", "crc32(c_string)"};
+ String[] retArray =
+ new String[] {
+ // 'hello' -> 907060870
+ "1970-01-01T00:00:00.001Z,907060870,907060870,",
+ // '你好' (UTF-8 bytes e4bda0e5a5bd) -> 1352841281
+ "1970-01-01T00:00:00.002Z,1352841281,1352841281,",
+ // '' -> 0
+ "1970-01-01T00:00:00.003Z,0,0,",
+ // null -> null
+ "1970-01-01T00:00:00.004Z,null,null,",
+ // 'Hello, World!' -> 3964322768 (CRC32 0xEC4AC3D0)
+ "1970-01-01T00:00:00.005Z,3964322768,3964322768,"
+ };
+ tableResultSetEqualTest(
+ "SELECT time, crc32(c_text) as \"crc32(c_text)\", crc32(c_string) as \"crc32(c_string)\" FROM table1 where time < 6",
+ expectedHeader,
+ retArray,
+ DATABASE_NAME);
+ }
+
+ /** Validate the CRC32 checksum for BLOB type */
+ @Test
+ public void testCrc32OnBlob() {
+ String[] expectedHeader = new String[] {"time", "crc32(c_blob)"};
+ String[] retArray =
+ new String[] {
+ // blob x'74657374' ('test') -> 3632233996
+ "1970-01-01T00:00:00.006Z,3632233996,"
+ };
+ tableResultSetEqualTest(
+ "SELECT time, crc32(c_blob) as \"crc32(c_blob)\" FROM table1 where time = 6",
+ expectedHeader,
+ retArray,
+ DATABASE_NAME);
+ }
+
+ /** Validate against a known industry-standard value */
+ @Test
+ public void testCrc32KnownValue() {
+ String[] expectedHeader = new String[] {"time", "crc32(c_text)"};
+ String[] retArray =
+ new String[] {
+ // '123456789' -> 3421780262
+ "1970-01-01T00:00:00.007Z,3421780262,"
+ };
+ tableResultSetEqualTest(
+ "SELECT time, crc32(c_text) as \"crc32(c_text)\" FROM table1 where time = 7",
+ expectedHeader,
+ retArray,
+ DATABASE_NAME);
+ }
+
+ /** Test that invalid input types or number of arguments are rejected */
+ @Test
+ public void testCrc32FunctionOnInvalidInputs() {
+ String expectedErrorMessage =
+ TSStatusCode.SEMANTIC_ERROR.getStatusCode()
+ + ": Scalar function crc32 only accepts one argument and it must be TEXT, STRING, or BLOB data type.";
+
+ // Test with invalid data type (INT32)
+ tableAssertTestFail("SELECT crc32(c_int) FROM table1", expectedErrorMessage, DATABASE_NAME);
+
+ // Test with multiple arguments
+ tableAssertTestFail(
+ "SELECT crc32(c_text, 'another_arg') FROM table1", expectedErrorMessage, DATABASE_NAME);
+
+ // Test with no arguments
+ tableAssertTestFail("SELECT crc32() FROM table1", expectedErrorMessage, DATABASE_NAME);
+ }
+}
diff --git a/integration-test/src/test/java/org/apache/iotdb/relational/it/query/recent/scalar/IoTDBFromBase32ColumnFunctionIT.java b/integration-test/src/test/java/org/apache/iotdb/relational/it/query/recent/scalar/IoTDBFromBase32ColumnFunctionIT.java
new file mode 100644
index 000000000000..3c916279258a
--- /dev/null
+++ b/integration-test/src/test/java/org/apache/iotdb/relational/it/query/recent/scalar/IoTDBFromBase32ColumnFunctionIT.java
@@ -0,0 +1,197 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.iotdb.relational.it.query.recent.scalar;
+
+import org.apache.iotdb.it.env.EnvFactory;
+import org.apache.iotdb.it.framework.IoTDBTestRunner;
+import org.apache.iotdb.itbase.category.TableClusterIT;
+import org.apache.iotdb.itbase.category.TableLocalStandaloneIT;
+import org.apache.iotdb.rpc.TSStatusCode;
+
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+import org.junit.runner.RunWith;
+
+import static org.apache.iotdb.db.it.utils.TestUtils.prepareTableData;
+import static org.apache.iotdb.db.it.utils.TestUtils.tableAssertTestFail;
+import static org.apache.iotdb.db.it.utils.TestUtils.tableResultSetEqualTest;
+
+@RunWith(IoTDBTestRunner.class)
+@Category({TableLocalStandaloneIT.class, TableClusterIT.class})
+public class IoTDBFromBase32ColumnFunctionIT {
+
+ private static final String DATABASE_NAME = "test_from_base32_function";
+
+ private static final String[] createSqls =
+ new String[] {
+ "CREATE DATABASE " + DATABASE_NAME,
+ "USE " + DATABASE_NAME,
+ "CREATE TABLE table1(c_text TEXT, c_string STRING, c_int INT32)",
+
+ // Case 1: Basic ASCII string
+ // Base32 'JBCGC3LFN5ZGIIDUNBSSA3LSEI======' -> BLOB: X'4844616d656f726420746865206d7222'
+ "INSERT INTO table1(time, c_text, c_string) VALUES (1, 'JBCGC3LFN5ZGIIDUNBSSA3LSEI======', 'JBCGC3LFN5ZGIIDUNBSSA3LSEI======')",
+
+ // Case 2: Non-ASCII binary payload
+ // Base32 '2W4V625443J56W4S4E73H5BUMM4A======' -> BLOB:
+ // X'd5b95f6bbce6d3df5b92e13fb3f4346338'
+ "INSERT INTO table1(time, c_text, c_string) VALUES (2, '2W4V625443J56W4S4E73H5BUMM4A======', '2W4V625443J56W4S4E73H5BUMM4A======')",
+
+ // Case 3: Empty string
+ // '' -> Base32: '' -> BLOB: X''
+ "INSERT INTO table1(time, c_text, c_string) VALUES (3, '', '')",
+
+ // Case 4: Null value
+ "INSERT INTO table1(time, c_int) VALUES (4, 100)",
+
+ // Case 5: Padding scenarios for 1-4 bytes
+ // 'f' (0x66) -> Base32: MY======
+ "INSERT INTO table1(time, c_text, c_string) VALUES (5, 'MY======', 'MY======')",
+ // 'fo' (0x666f) -> Base32: MZXQ====
+ "INSERT INTO table1(time, c_text, c_string) VALUES (6, 'MZXQ====', 'MZXQ====')",
+ // 'foo' (0x666f6f) -> Base32: MZXW6===
+ "INSERT INTO table1(time, c_text, c_string) VALUES (7, 'MZXW6===', 'MZXW6===')",
+ // 'foob' (0x666f6f62) -> Base32: MZXW6YQ=
+ "INSERT INTO table1(time, c_text, c_string) VALUES (8, 'MZXW6YQ=', 'MZXW6YQ=')",
+
+ // Case 9: No padding needed (5 bytes)
+ // bytes 0x666f6f6201 -> Base32: MZXW6YQB
+ "INSERT INTO table1(time, c_text, c_string) VALUES (9, 'MZXW6YQB', 'MZXW6YQB')",
+
+ // Case 10: Optional padding test (decoder should handle missing padding)
+ // 'foo' (0x666f6f) -> Base32 without padding: MZXW6
+ "INSERT INTO table1(time, c_text, c_string) VALUES (10, 'MZXW6', 'MZXW6')",
+ };
+
+ @BeforeClass
+ public static void setUp() throws Exception {
+ EnvFactory.getEnv().initClusterEnvironment();
+ prepareTableData(createSqls);
+ }
+
+ @AfterClass
+ public static void tearDown() throws Exception {
+ EnvFactory.getEnv().cleanClusterEnvironment();
+ }
+
+ /** Validates the from_base32() function on various supported and valid inputs. */
+ @Test
+ public void testFromBase32OnValidInputs() {
+ String[] expectedHeader = new String[] {"time", "from_base32(c_text)", "from_base32(c_string)"};
+ String[] retArray =
+ new String[] {
+ // 1. Basic ASCII
+ "1970-01-01T00:00:00.001Z,0x4844616d656f726420746865206d7222,0x4844616d656f726420746865206d7222,",
+ // 2. UTF-8
+ "1970-01-01T00:00:00.002Z,0xd5b95f6bbce6d3df5b92e13fb3f4346338,0xd5b95f6bbce6d3df5b92e13fb3f4346338,",
+ // 3. Empty string
+ "1970-01-01T00:00:00.003Z,0x,0x,",
+ // 4. Null input
+ "1970-01-01T00:00:00.004Z,null,null,",
+ // 5. 1 byte with padding
+ "1970-01-01T00:00:00.005Z,0x66,0x66,",
+ // 6. 2 bytes with padding
+ "1970-01-01T00:00:00.006Z,0x666f,0x666f,",
+ // 7. 3 bytes with padding
+ "1970-01-01T00:00:00.007Z,0x666f6f,0x666f6f,",
+ // 8. 4 bytes with padding
+ "1970-01-01T00:00:00.008Z,0x666f6f62,0x666f6f62,",
+ // 9. 5 bytes, no padding
+ "1970-01-01T00:00:00.009Z,0x666f6f6201,0x666f6f6201,",
+ // 10. Optional padding
+ "1970-01-01T00:00:00.010Z,0x666f6f,0x666f6f,",
+ };
+
+ tableResultSetEqualTest(
+ "SELECT time, from_base32(c_text) as \"from_base32(c_text)\", from_base32(c_string) as \"from_base32(c_string)\" FROM table1",
+ expectedHeader,
+ retArray,
+ DATABASE_NAME);
+ }
+
+ /** Tests for invalid arguments passed to the from_base32() function. */
+ @Test
+ public void testFromBase32FunctionOnInvalidArguments() {
+ String expectedErrorMessage =
+ TSStatusCode.SEMANTIC_ERROR.getStatusCode()
+ + ": Scalar function from_base32 only accepts one argument and it must be TEXT or STRING data type.";
+
+ // Test with invalid parameter type (INT32)
+ tableAssertTestFail(
+ "SELECT from_base32(c_int) FROM table1", expectedErrorMessage, DATABASE_NAME);
+
+ // Test with invalid parameter count (0)
+ tableAssertTestFail("SELECT from_base32() FROM table1", expectedErrorMessage, DATABASE_NAME);
+
+ // Test with invalid parameter count (>1)
+ tableAssertTestFail(
+ "SELECT from_base32(c_text, c_string) FROM table1", expectedErrorMessage, DATABASE_NAME);
+ }
+
+ /** Validates that from_base32() fails when given incorrectly formatted Base32 strings. */
+ @Test
+ public void testFromBase32FunctionOnInvalidDataFormat() {
+ String baseErrorMessage =
+ TSStatusCode.SEMANTIC_ERROR.getStatusCode()
+ + ": Failed to execute function 'from_base32' due to an invalid input format. Problematic value:";
+
+ // Invalid character: '0' (not in Base32 alphabet)
+ tableAssertTestFail(
+ "SELECT from_base32('JBCGC3LFN5ZGIIDUNBSSA3LSE0==') FROM table1",
+ // error message must echo the offending input
+ baseErrorMessage + " JBCGC3LFN5ZGIIDUNBSSA3LSE0==",
+ DATABASE_NAME);
+
+ // Invalid character: '1' (not in Base32 alphabet)
+ tableAssertTestFail(
+ "SELECT from_base32('JBCGC3LFN5ZGIIDUNBSSA3LSE1==') FROM table1",
+ // error message must echo the offending input
+ baseErrorMessage + " JBCGC3LFN5ZGIIDUNBSSA3LSE1==",
+ DATABASE_NAME);
+
+ // Invalid character: '8' (not in Base32 alphabet)
+ tableAssertTestFail(
+ "SELECT from_base32('JBCGC3LFN5ZGIIDUNBSSA3LSE8==') FROM table1",
+ // error message must echo the offending input
+ baseErrorMessage + " JBCGC3LFN5ZGIIDUNBSSA3LSE8==",
+ DATABASE_NAME);
+
+ // Invalid character: '9' (not in Base32 alphabet)
+ tableAssertTestFail(
+ "SELECT from_base32('JBCGC3LFN5ZGIIDUNBSSA3LSE9==') FROM table1",
+ // error message must echo the offending input
+ baseErrorMessage + " JBCGC3LFN5ZGIIDUNBSSA3LSE9==",
+ DATABASE_NAME);
+
+ // Invalid character: '-' (from Base64URL)
+ tableAssertTestFail(
+ "SELECT from_base32('MZXW6YQ-') FROM table1",
+ baseErrorMessage + " MZXW6YQ-",
+ DATABASE_NAME);
+
+ // Invalid padding: padding character in the middle
+ tableAssertTestFail(
+ "SELECT from_base32('MZXW6=Q=') FROM table1",
+ baseErrorMessage + " MZXW6=Q=",
+ DATABASE_NAME);
+ }
+}
diff --git a/integration-test/src/test/java/org/apache/iotdb/relational/it/query/recent/scalar/IoTDBFromBase64ColumnFunctionIT.java b/integration-test/src/test/java/org/apache/iotdb/relational/it/query/recent/scalar/IoTDBFromBase64ColumnFunctionIT.java
new file mode 100644
index 000000000000..653d045d580e
--- /dev/null
+++ b/integration-test/src/test/java/org/apache/iotdb/relational/it/query/recent/scalar/IoTDBFromBase64ColumnFunctionIT.java
@@ -0,0 +1,117 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.iotdb.relational.it.query.recent.scalar;
+
+import org.apache.iotdb.it.env.EnvFactory;
+import org.apache.iotdb.it.framework.IoTDBTestRunner;
+import org.apache.iotdb.itbase.category.TableClusterIT;
+import org.apache.iotdb.itbase.category.TableLocalStandaloneIT;
+import org.apache.iotdb.rpc.TSStatusCode;
+
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+import org.junit.runner.RunWith;
+
+import static org.apache.iotdb.db.it.utils.TestUtils.prepareTableData;
+import static org.apache.iotdb.db.it.utils.TestUtils.tableAssertTestFail;
+import static org.apache.iotdb.db.it.utils.TestUtils.tableResultSetEqualTest;
+
+@RunWith(IoTDBTestRunner.class)
+@Category({TableLocalStandaloneIT.class, TableClusterIT.class})
+public class IoTDBFromBase64ColumnFunctionIT {
+
+ private static final String DATABASE_NAME = "test_frombase64_function";
+ private static final String[] createSqls =
+ new String[] {
+ "CREATE DATABASE " + DATABASE_NAME,
+ "USE " + DATABASE_NAME,
+ "CREATE TABLE table1(c_text TEXT FIELD, c_string STRING FIELD, c_int INT32 FIELD)",
+
+ // 'hello' base64: aGVsbG8=
+ "INSERT INTO table1(time, c_text, c_string) VALUES (1, 'aGVsbG8=', 'aGVsbG8=')",
+ // '你好' base64: 5L2g5aW9
+ "INSERT INTO table1(time, c_text, c_string) VALUES (2, '5L2g5aW9', '5L2g5aW9')",
+ // '' base64: ''
+ "INSERT INTO table1(time, c_text, c_string) VALUES (3, '', '')",
+ "INSERT INTO table1(time, c_int) VALUES (4, 404)",
+ // invalid base64
+ "INSERT INTO table1(time, c_text, c_string) VALUES (5, 'not_base64', 'not_base64')"
+ };
+
+ @BeforeClass
+ public static void setUp() throws Exception {
+ EnvFactory.getEnv().initClusterEnvironment();
+ prepareTableData(createSqls);
+ }
+
+ @AfterClass
+ public static void tearDown() throws Exception {
+ EnvFactory.getEnv().cleanClusterEnvironment();
+ }
+
+ /** validate the normal decoding of from_base64() for TEXT/STRING type */
+ @Test
+ public void testFromBase64OnTextString() {
+ String[] expectedHeader = new String[] {"time", "from_base64(c_text)", "from_base64(c_string)"};
+ String[] retArray =
+ new String[] {
+ // 'hello', 'hello'
+ "1970-01-01T00:00:00.001Z,0x68656c6c6f,0x68656c6c6f,",
+ // '你好', '你好'
+ "1970-01-01T00:00:00.002Z,0xe4bda0e5a5bd,0xe4bda0e5a5bd,",
+ // '', ''
+ "1970-01-01T00:00:00.003Z,0x,0x,",
+ // null, null
+ "1970-01-01T00:00:00.004Z,null,null,"
+ };
+ tableResultSetEqualTest(
+ "SELECT time, from_base64(c_text) as \"from_base64(c_text)\", from_base64(c_string) as \"from_base64(c_string)\" FROM table1 where time < 5",
+ expectedHeader,
+ retArray,
+ DATABASE_NAME);
+ }
+
+ /** invalid base64 string should throw exception */
+ @Test
+ public void testFromBase64OnInvalidBase64() {
+ String expectedErrorMessage =
+ TSStatusCode.SEMANTIC_ERROR.getStatusCode()
+ + ": Failed to execute function 'from_base64' due to an invalid input format. Problematic value: not_base64";
+ tableAssertTestFail(
+ "SELECT from_base64(c_text) FROM table1 WHERE time = 5",
+ expectedErrorMessage,
+ DATABASE_NAME);
+ }
+
+ /** invalid input type or number of arguments should be rejected */
+ @Test
+ public void testFromBase64FunctionOnInvalidInputs() {
+ String expectedErrorMessage =
+ TSStatusCode.SEMANTIC_ERROR.getStatusCode()
+ + ": Scalar function from_base64 only accepts one argument and it must be TEXT or STRING data type.";
+
+ tableAssertTestFail(
+ "SELECT from_base64(c_int) FROM table1", expectedErrorMessage, DATABASE_NAME);
+ tableAssertTestFail(
+ "SELECT from_base64(c_text, 1) FROM table1", expectedErrorMessage, DATABASE_NAME);
+ }
+}
diff --git a/integration-test/src/test/java/org/apache/iotdb/relational/it/query/recent/scalar/IoTDBFromBase64UrlColumnFunctionIT.java b/integration-test/src/test/java/org/apache/iotdb/relational/it/query/recent/scalar/IoTDBFromBase64UrlColumnFunctionIT.java
new file mode 100644
index 000000000000..56c58317fc9b
--- /dev/null
+++ b/integration-test/src/test/java/org/apache/iotdb/relational/it/query/recent/scalar/IoTDBFromBase64UrlColumnFunctionIT.java
@@ -0,0 +1,186 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.iotdb.relational.it.query.recent.scalar;
+
+import org.apache.iotdb.it.env.EnvFactory;
+import org.apache.iotdb.it.framework.IoTDBTestRunner;
+import org.apache.iotdb.itbase.category.TableClusterIT;
+import org.apache.iotdb.itbase.category.TableLocalStandaloneIT;
+import org.apache.iotdb.rpc.TSStatusCode;
+
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+import org.junit.runner.RunWith;
+
+import static org.apache.iotdb.db.it.utils.TestUtils.prepareTableData;
+import static org.apache.iotdb.db.it.utils.TestUtils.tableAssertTestFail;
+import static org.apache.iotdb.db.it.utils.TestUtils.tableResultSetEqualTest;
+
+/** Integration tests for the from_base64url() scalar function. */
+@RunWith(IoTDBTestRunner.class)
+@Category({TableLocalStandaloneIT.class, TableClusterIT.class})
+public class IoTDBFromBase64UrlColumnFunctionIT {
+
+ private static final String DATABASE_NAME = "test_frombase64url_function_v2";
+
+ // Redesigned, more comprehensive test data set
+ private static final String[] createSqls =
+ new String[] {
+ "CREATE DATABASE " + DATABASE_NAME,
+ "USE " + DATABASE_NAME,
+ "CREATE TABLE table1(c_text TEXT, c_string STRING, c_int INT32)",
+
+ // 'IoTDB is fun!' -> Base64URL: SW9UREIgaXMgZnVuIQ -> BLOB: X'496f5444422069732066756e21'
+ "INSERT INTO table1(time, c_text, c_string) VALUES (1, 'SW9UREIgaXMgZnVuIQ', 'SW9UREIgaXMgZnVuIQ')",
+
+ // Base64URL '5L2g5aW9LCBll-OCll-OCoSE' -> BLOB:
+ // X'e4bda0e5a5bd2c206597e382965f8e0a8484'
+ "INSERT INTO table1(time, c_text, c_string) VALUES (2, '5L2g5aW9LCBll-OCll-OCoSE', '5L2g5aW9LCBll-OCll-OCoSE')",
+
+ // Bytes [251, 255, 191] (0xfbffbf) -> Standard Base64: +/+/ -> Base64URL: -_-_
+ "INSERT INTO table1(time, c_text, c_string) VALUES (3, '-_-_', '-_-_')",
+
+ // '' -> Base64URL: '' -> BLOB: X''
+ "INSERT INTO table1(time, c_text, c_string) VALUES (4, '', '')",
+ "INSERT INTO table1(time, c_int) VALUES (5, 100)",
+
+ // 'a' (0x61) -> Base64URL: YQ
+ "INSERT INTO table1(time, c_text, c_string) VALUES (6, 'YQ', 'YQ')",
+
+ // 'ab' (0x6162) -> Base64URL: YWI
+ "INSERT INTO table1(time, c_text, c_string) VALUES (7, 'YWI', 'YWI')",
+
+ // 'abc' (0x616263) -> Base64URL: YWJj
+ "INSERT INTO table1(time, c_text, c_string) VALUES (8, 'YWJj', 'YWJj')",
+
+ // 'Apache IoTDB is an IoT native database with high performance for data management and
+ // analysis.'
+ // Base64URL:
+ // QXBhY2hlIElvVERCIGlzIGFuIElvVCBuYXRpdmUgZGF0YWJhc2Ugd2l0aCBoaWdoIHBlcmZvcm1hbmNlIGZvciBkYXRhIG1hbmFnZW1lbnQgYW5kIGFuYWx5c2lzLg
+ "INSERT INTO table1(time, c_text, c_string) VALUES (9, 'QXBhY2hlIElvVERCIGlzIGFuIElvVCBuYXRpdmUgZGF0YWJhc2Ugd2l0aCBoaWdoIHBlcmZvcm1hbmNlIGZvciBkYXRhIG1hbmFnZW1lbnQgYW5kIGFuYWx5c2lzLg', 'QXBhY2hlIElvVERCIGlzIGFuIElvVCBuYXRpdmUgZGF0YWJhc2Ugd2l0aCBoaWdoIHBlcmZvcm1hbmNlIGZvciBkYXRhIG1hbmFnZW1lbnQgYW5kIGFuYWx5c2lzLg')",
+
+ // All Base64URL characters (A-Z, a-z, 0-9, -, _)
+ // the 64-character alphabet string decodes to these 48 bytes:
+ // ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-_
+ "INSERT INTO table1(time, c_text, c_string) VALUES (10, 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-_', 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-_')"
+ };
+
+ @BeforeClass
+ public static void setUp() throws Exception {
+ EnvFactory.getEnv().initClusterEnvironment();
+ prepareTableData(createSqls);
+ }
+
+ @AfterClass
+ public static void tearDown() throws Exception {
+ EnvFactory.getEnv().cleanClusterEnvironment();
+ }
+
+ /** validate the from_base64url() function on valid inputs. */
+ @Test
+ public void testFromBase64UrlOnValidInputs() {
+ String[] expectedHeader =
+ new String[] {"time", "from_base64url(c_text)", "from_base64url(c_string)"};
+ String[] retArray =
+ new String[] {
+ "1970-01-01T00:00:00.001Z,0x496f5444422069732066756e21,0x496f5444422069732066756e21,",
+ "1970-01-01T00:00:00.002Z,0xe4bda0e5a5bd2c206597e382965f8e0a8484,0xe4bda0e5a5bd2c206597e382965f8e0a8484,",
+ "1970-01-01T00:00:00.003Z,0xfbffbf,0xfbffbf,",
+ "1970-01-01T00:00:00.004Z,0x,0x,",
+ "1970-01-01T00:00:00.005Z,null,null,",
+ "1970-01-01T00:00:00.006Z,0x61,0x61,",
+ "1970-01-01T00:00:00.007Z,0x6162,0x6162,",
+ "1970-01-01T00:00:00.008Z,0x616263,0x616263,",
+ "1970-01-01T00:00:00.009Z,0x41706163686520496f54444220697320616e20496f54206e61746976652064617461626173652077697468206869676820706572666f726d616e636520666f722064617461206d616e6167656d656e7420616e6420616e616c797369732e,0x41706163686520496f54444220697320616e20496f54206e61746976652064617461626173652077697468206869676820706572666f726d616e636520666f722064617461206d616e6167656d656e7420616e6420616e616c797369732e,",
+ "1970-01-01T00:00:00.010Z,0x00108310518720928b30d38f41149351559761969b71d79f8218a39259a7a29aabb2dbafc31cb3d35db7e39ebbf3dfbf,0x00108310518720928b30d38f41149351559761969b71d79f8218a39259a7a29aabb2dbafc31cb3d35db7e39ebbf3dfbf,"
+ };
+
+ tableResultSetEqualTest(
+ "SELECT time, from_base64url(c_text) as \"from_base64url(c_text)\", from_base64url(c_string) as \"from_base64url(c_string)\" FROM table1",
+ expectedHeader,
+ retArray,
+ DATABASE_NAME);
+ }
+
+ /** test the invalid arguments for from_base64url() function. */
+ @Test
+ public void testFromBase64UrlFunctionOnInvalidArguments() {
+ String expectedErrorMessage =
+ TSStatusCode.SEMANTIC_ERROR.getStatusCode()
+ + ": Scalar function from_base64url only accepts one argument and it must be TEXT or STRING data type.";
+
+ // test the invalid parameter type (INT32)
+ tableAssertTestFail(
+ "SELECT from_base64url(c_int) FROM table1", expectedErrorMessage, DATABASE_NAME);
+
+ // test the invalid parameter type (no parameter)
+ tableAssertTestFail("SELECT from_base64url() FROM table1", expectedErrorMessage, DATABASE_NAME);
+
+ // test the invalid parameter type (two parameters)
+ tableAssertTestFail(
+ "SELECT from_base64url(c_text, c_string) FROM table1", expectedErrorMessage, DATABASE_NAME);
+ }
+
+ /**
+ * validate the from_base64url() function fails when given invalid Base64URL formatted strings.
+ */
+ @Test
+ public void testFromBase64UrlFunctionOnInvalidDataFormat() {
+ String expectedErrorMessage =
+ TSStatusCode.SEMANTIC_ERROR.getStatusCode()
+ + ": Failed to execute function 'from_base64url' due to an invalid input format. Problematic value:";
+
+ // invalid character: '+' (standard Base64 character)
+ tableAssertTestFail(
+ "SELECT from_base64url('aGVsbG8gd29ybG+') FROM table1",
+ expectedErrorMessage + " aGVsbG8gd29ybG+",
+ DATABASE_NAME);
+
+ // invalid character: '/' (standard Base64 character)
+ tableAssertTestFail(
+ "SELECT from_base64url('aGVsbG8gd29ybG/') FROM table1",
+ expectedErrorMessage + " aGVsbG8gd29ybG/",
+ DATABASE_NAME);
+
+ // invalid character: '=' (padding character, not used in Base64URL)
+ tableAssertTestFail(
+ "SELECT from_base64url('YWJj=') FROM table1",
+ expectedErrorMessage + " YWJj=",
+ DATABASE_NAME);
+
+ // invalid character: '$'
+ tableAssertTestFail(
+ "SELECT from_base64url('aGVsbG8gd29ybG$') FROM table1",
+ expectedErrorMessage + " aGVsbG8gd29ybG$",
+ DATABASE_NAME);
+
+ // invalid character: '.'
+ tableAssertTestFail(
+ "SELECT from_base64url('a.b') FROM table1", expectedErrorMessage + " a.b", DATABASE_NAME);
+
+ // invalid length: 5 (Base64URL encoded strings must have a length that is a multiple of 4)
+ tableAssertTestFail(
+ "SELECT from_base64url('YWJjY') FROM table1",
+ expectedErrorMessage + " YWJjY",
+ DATABASE_NAME);
+ }
+}
diff --git a/integration-test/src/test/java/org/apache/iotdb/relational/it/query/recent/scalar/IoTDBFromBigEndian32FunctionIT.java b/integration-test/src/test/java/org/apache/iotdb/relational/it/query/recent/scalar/IoTDBFromBigEndian32FunctionIT.java
new file mode 100644
index 000000000000..9b5b91d55721
--- /dev/null
+++ b/integration-test/src/test/java/org/apache/iotdb/relational/it/query/recent/scalar/IoTDBFromBigEndian32FunctionIT.java
@@ -0,0 +1,170 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.iotdb.relational.it.query.recent.scalar;
+
+import org.apache.iotdb.it.env.EnvFactory;
+import org.apache.iotdb.it.framework.IoTDBTestRunner;
+import org.apache.iotdb.itbase.category.TableClusterIT;
+import org.apache.iotdb.itbase.category.TableLocalStandaloneIT;
+import org.apache.iotdb.rpc.TSStatusCode;
+
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+import org.junit.runner.RunWith;
+
+import static org.apache.iotdb.db.it.utils.TestUtils.prepareTableData;
+import static org.apache.iotdb.db.it.utils.TestUtils.tableAssertTestFail;
+import static org.apache.iotdb.db.it.utils.TestUtils.tableResultSetEqualTest;
+
+@RunWith(IoTDBTestRunner.class)
+@Category({TableLocalStandaloneIT.class, TableClusterIT.class})
+public class IoTDBFromBigEndian32FunctionIT {
+
+ private static final String DATABASE_NAME = "test_from_big_endian_32";
+
+ // SQL statements to set up the database and table for testing
+ private static final String[] createSqls =
+ new String[] {
+ "CREATE DATABASE " + DATABASE_NAME,
+ "USE " + DATABASE_NAME,
+ "CREATE TABLE table1(c_blob BLOB, c_int32 INT32, c_text TEXT)",
+
+ // Case 1: BLOB for a common positive integer (16909060)
+ "INSERT INTO table1(time, c_blob) VALUES (1, X'01020304')",
+
+ // Case 2: BLOB for a negative integer (-1)
+ "INSERT INTO table1(time, c_blob) VALUES (2, X'FFFFFFFF')",
+
+ // Case 3: BLOB for zero
+ "INSERT INTO table1(time, c_blob) VALUES (3, X'00000000')",
+
+ // Case 4: BLOB for the maximum INT32 value
+ "INSERT INTO table1(time, c_blob) VALUES (4, X'7FFFFFFF')",
+
+ // Case 5: BLOB for the minimum INT32 value
+ "INSERT INTO table1(time, c_blob) VALUES (5, X'80000000')",
+
+ // Case 6: Null BLOB input
+ "INSERT INTO table1(time, c_blob) VALUES (6, null)",
+
+ // Case 7: BLOB with invalid length (< 4 bytes) for error testing
+ "INSERT INTO table1(time, c_blob) VALUES (7, X'010203')",
+
+ // Case 8: BLOB with invalid length (> 4 bytes) for error testing
+ "INSERT INTO table1(time, c_blob) VALUES (8, X'0102030405')",
+
+ // Case 9: Data for invalid type testing
+ "INSERT INTO table1(time, c_int32, c_text) VALUES (9, 100, 'some_text')",
+ };
+
+ @BeforeClass
+ public static void setUp() throws Exception {
+ EnvFactory.getEnv().initClusterEnvironment();
+ prepareTableData(createSqls);
+ }
+
+ @AfterClass
+ public static void tearDown() throws Exception {
+ EnvFactory.getEnv().cleanClusterEnvironment();
+ }
+
+ /**
+ * Validates the from_big_endian_32() function on various valid 4-byte BLOB inputs. This test
+ * covers positive, negative, zero, min/max, and null values.
+ */
+ @Test
+ public void testFromBigEndian32OnValidInputs() {
+ String[] expectedHeader =
+ new String[] {
+ "time", "from_big_endian_32(c_blob)",
+ };
+ String[] retArray =
+ new String[] {
+ // 1. X'01020304' -> 16909060
+ "1970-01-01T00:00:00.001Z,16909060,",
+ // 2. X'FFFFFFFF' -> -1
+ "1970-01-01T00:00:00.002Z,-1,",
+ // 3. X'00000000' -> 0
+ "1970-01-01T00:00:00.003Z,0,",
+ // 4. X'7FFFFFFF' -> 2147483647
+ "1970-01-01T00:00:00.004Z,2147483647,",
+ // 5. X'80000000' -> -2147483648
+ "1970-01-01T00:00:00.005Z,-2147483648,",
+ // 6. Null input -> null output
+ "1970-01-01T00:00:00.006Z,null,",
+ // The rest are for invalid tests, so they won't appear in a valid query result
+ };
+
+ tableResultSetEqualTest(
+ "SELECT time, from_big_endian_32(c_blob) as \"from_big_endian_32(c_blob)\" FROM table1 WHERE time <= 6",
+ expectedHeader,
+ retArray,
+ DATABASE_NAME);
+ }
+
+ /**
+ * Tests for invalid arguments passed to the from_big_endian_32() function. This includes wrong
+ * argument count, wrong data types, and wrong BLOB length.
+ */
+ @Test
+ public void testFromBigEndian32OnInvalidInputs() {
+ // Define the expected error message for wrong argument count or type
+ String typeAndCountErrorMessage =
+ TSStatusCode.SEMANTIC_ERROR.getStatusCode()
+ + ": Scalar function from_big_endian_32 only accepts one argument and it must be BLOB data type.";
+
+ // Test with invalid parameter type (INT32)
+ tableAssertTestFail(
+ "SELECT from_big_endian_32(c_int32) FROM table1", typeAndCountErrorMessage, DATABASE_NAME);
+
+ // Test with invalid parameter type (TEXT)
+ tableAssertTestFail(
+ "SELECT from_big_endian_32(c_text) FROM table1", typeAndCountErrorMessage, DATABASE_NAME);
+
+ // Test with invalid parameter count (0 arguments)
+ tableAssertTestFail(
+ "SELECT from_big_endian_32() FROM table1", typeAndCountErrorMessage, DATABASE_NAME);
+
+ // Test with invalid parameter count (>1 arguments)
+ tableAssertTestFail(
+ "SELECT from_big_endian_32(c_blob, c_blob) FROM table1",
+ typeAndCountErrorMessage,
+ DATABASE_NAME);
+
+ // Define the expected error message for wrong BLOB length
+ String lengthErrorMessage =
+ TSStatusCode.SEMANTIC_ERROR.getStatusCode()
+ + ": Failed to execute function 'from_big_endian_32' due to an invalid input format. Problematic value: ";
+
+ // Test with BLOB length < 4
+ tableAssertTestFail(
+ "SELECT from_big_endian_32(c_blob) FROM table1 WHERE time = 7",
+ lengthErrorMessage + "0x010203",
+ DATABASE_NAME);
+
+ // Test with BLOB length > 4
+ tableAssertTestFail(
+ "SELECT from_big_endian_32(c_blob) FROM table1 WHERE time = 8",
+ lengthErrorMessage + "0x0102030405",
+ DATABASE_NAME);
+ }
+}
diff --git a/integration-test/src/test/java/org/apache/iotdb/relational/it/query/recent/scalar/IoTDBFromBigEndian64FunctionIT.java b/integration-test/src/test/java/org/apache/iotdb/relational/it/query/recent/scalar/IoTDBFromBigEndian64FunctionIT.java
new file mode 100644
index 000000000000..c966f2e9316c
--- /dev/null
+++ b/integration-test/src/test/java/org/apache/iotdb/relational/it/query/recent/scalar/IoTDBFromBigEndian64FunctionIT.java
@@ -0,0 +1,169 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.iotdb.relational.it.query.recent.scalar;
+
+import org.apache.iotdb.it.env.EnvFactory;
+import org.apache.iotdb.it.framework.IoTDBTestRunner;
+import org.apache.iotdb.itbase.category.TableClusterIT;
+import org.apache.iotdb.itbase.category.TableLocalStandaloneIT;
+import org.apache.iotdb.rpc.TSStatusCode;
+
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+import org.junit.runner.RunWith;
+
+import static org.apache.iotdb.db.it.utils.TestUtils.prepareTableData;
+import static org.apache.iotdb.db.it.utils.TestUtils.tableAssertTestFail;
+import static org.apache.iotdb.db.it.utils.TestUtils.tableResultSetEqualTest;
+
+@RunWith(IoTDBTestRunner.class)
+@Category({TableLocalStandaloneIT.class, TableClusterIT.class})
+public class IoTDBFromBigEndian64FunctionIT {
+
+ private static final String DATABASE_NAME = "test_from_big_endian_64";
+
+ // SQL statements to set up the database and table for testing
+ private static final String[] createSqls =
+ new String[] {
+ "CREATE DATABASE " + DATABASE_NAME,
+ "USE " + DATABASE_NAME,
+ "CREATE TABLE table1(c_blob BLOB, c_int64 INT64, c_text TEXT)",
+
+ // Case 1: BLOB for a common positive integer (72623859790382856)
+ "INSERT INTO table1(time, c_blob) VALUES (1, X'0102030405060708')",
+
+ // Case 2: BLOB for a negative integer (-1)
+ "INSERT INTO table1(time, c_blob) VALUES (2, X'FFFFFFFFFFFFFFFF')",
+
+ // Case 3: BLOB for zero
+ "INSERT INTO table1(time, c_blob) VALUES (3, X'0000000000000000')",
+
+ // Case 4: BLOB for the maximum INT64 value
+ "INSERT INTO table1(time, c_blob) VALUES (4, X'7FFFFFFFFFFFFFFF')",
+
+ // Case 5: BLOB for the minimum INT64 value
+ "INSERT INTO table1(time, c_blob) VALUES (5, X'8000000000000000')",
+
+ // Case 6: Null BLOB input
+ "INSERT INTO table1(time, c_blob) VALUES (6, null)",
+
+ // Case 7: BLOB with invalid length (< 8 bytes) for error testing
+ "INSERT INTO table1(time, c_blob) VALUES (7, X'01020304050607')",
+
+ // Case 8: BLOB with invalid length (> 8 bytes) for error testing
+ "INSERT INTO table1(time, c_blob) VALUES (8, X'010203040506070809')",
+
+ // Case 9: Data for invalid type testing
+ "INSERT INTO table1(time, c_int64, c_text) VALUES (9, 100, 'some_text')",
+ };
+
+ @BeforeClass
+ public static void setUp() throws Exception {
+ EnvFactory.getEnv().initClusterEnvironment();
+ prepareTableData(createSqls);
+ }
+
+ @AfterClass
+ public static void tearDown() throws Exception {
+ EnvFactory.getEnv().cleanClusterEnvironment();
+ }
+
+ /**
+ * Validates the from_big_endian_64() function on various valid 8-byte BLOB inputs. This test
+ * covers positive, negative, zero, min/max, and null values.
+ */
+ @Test
+ public void testFromBigEndian64OnValidInputs() {
+ String[] expectedHeader =
+ new String[] {
+ "time", "from_big_endian_64(c_blob)",
+ };
+ String[] retArray =
+ new String[] {
+ // 1. X'0102030405060708' -> 72623859790382856
+ "1970-01-01T00:00:00.001Z,72623859790382856,",
+ // 2. X'FFFFFFFFFFFFFFFF' -> -1
+ "1970-01-01T00:00:00.002Z,-1,",
+ // 3. X'0000000000000000' -> 0
+ "1970-01-01T00:00:00.003Z,0,",
+ // 4. X'7FFFFFFFFFFFFFFF' -> 9223372036854775807
+ "1970-01-01T00:00:00.004Z,9223372036854775807,",
+ // 5. X'8000000000000000' -> -9223372036854775808
+ "1970-01-01T00:00:00.005Z,-9223372036854775808,",
+ // 6. Null input -> null output
+ "1970-01-01T00:00:00.006Z,null,",
+ };
+
+ tableResultSetEqualTest(
+ "SELECT time, from_big_endian_64(c_blob) as \"from_big_endian_64(c_blob)\" FROM table1 WHERE time <= 6",
+ expectedHeader,
+ retArray,
+ DATABASE_NAME);
+ }
+
+ /**
+ * Tests for invalid arguments passed to the from_big_endian_64() function. This includes wrong
+ * argument count, wrong data types, and wrong BLOB length.
+ */
+ @Test
+ public void testFromBigEndian64OnInvalidInputs() {
+ // Define the expected error message for wrong argument count or type
+ String typeAndCountErrorMessage =
+ TSStatusCode.SEMANTIC_ERROR.getStatusCode()
+ + ": Scalar function from_big_endian_64 only accepts one argument and it must be BLOB data type.";
+
+ // Test with invalid parameter type (INT64)
+ tableAssertTestFail(
+ "SELECT from_big_endian_64(c_int64) FROM table1", typeAndCountErrorMessage, DATABASE_NAME);
+
+ // Test with invalid parameter type (TEXT)
+ tableAssertTestFail(
+ "SELECT from_big_endian_64(c_text) FROM table1", typeAndCountErrorMessage, DATABASE_NAME);
+
+ // Test with invalid parameter count (0 arguments)
+ tableAssertTestFail(
+ "SELECT from_big_endian_64() FROM table1", typeAndCountErrorMessage, DATABASE_NAME);
+
+ // Test with invalid parameter count (>1 arguments)
+ tableAssertTestFail(
+ "SELECT from_big_endian_64(c_blob, c_blob) FROM table1",
+ typeAndCountErrorMessage,
+ DATABASE_NAME);
+
+ // Define the expected error message for wrong BLOB length
+ String lengthErrorMessage =
+ TSStatusCode.SEMANTIC_ERROR.getStatusCode()
+ + ": Failed to execute function 'from_big_endian_64' due to an invalid input format. Problematic value: ";
+
+ // Test with BLOB length < 8
+ tableAssertTestFail(
+ "SELECT from_big_endian_64(c_blob) FROM table1 WHERE time = 7",
+ lengthErrorMessage + "0x01020304050607",
+ DATABASE_NAME);
+
+ // Test with BLOB length > 8
+ tableAssertTestFail(
+ "SELECT from_big_endian_64(c_blob) FROM table1 WHERE time = 8",
+ lengthErrorMessage + "0x010203040506070809",
+ DATABASE_NAME);
+ }
+}
diff --git a/integration-test/src/test/java/org/apache/iotdb/relational/it/query/recent/scalar/IoTDBFromHexColumnFunctionIT.java b/integration-test/src/test/java/org/apache/iotdb/relational/it/query/recent/scalar/IoTDBFromHexColumnFunctionIT.java
new file mode 100644
index 000000000000..57b4687f91a2
--- /dev/null
+++ b/integration-test/src/test/java/org/apache/iotdb/relational/it/query/recent/scalar/IoTDBFromHexColumnFunctionIT.java
@@ -0,0 +1,133 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.iotdb.relational.it.query.recent.scalar;
+
+import org.apache.iotdb.it.env.EnvFactory;
+import org.apache.iotdb.it.framework.IoTDBTestRunner;
+import org.apache.iotdb.itbase.category.TableClusterIT;
+import org.apache.iotdb.itbase.category.TableLocalStandaloneIT;
+import org.apache.iotdb.rpc.TSStatusCode;
+
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+import org.junit.runner.RunWith;
+
+import static org.apache.iotdb.db.it.utils.TestUtils.prepareTableData;
+import static org.apache.iotdb.db.it.utils.TestUtils.tableAssertTestFail;
+import static org.apache.iotdb.db.it.utils.TestUtils.tableResultSetEqualTest;
+
+@RunWith(IoTDBTestRunner.class)
+@Category({TableLocalStandaloneIT.class, TableClusterIT.class})
+public class IoTDBFromHexColumnFunctionIT {
+
+ private static final String DATABASE_NAME = "test_fromhex_function";
+ private static final String[] createSqls =
+ new String[] {
+ "CREATE DATABASE " + DATABASE_NAME,
+ "USE " + DATABASE_NAME,
+ "CREATE TABLE table1(c_text TEXT FIELD, c_string STRING FIELD, c_int INT32 FIELD)",
+
+ // 'hello' hex: 68656c6c6f
+ "INSERT INTO table1(time, c_text, c_string) VALUES (1, '68656c6c6f', '68656c6c6f')",
+ // '你好' hex: e4bda0e5a5bd
+ "INSERT INTO table1(time, c_text, c_string) VALUES (2, 'e4bda0e5a5bd', 'e4bda0e5a5bd')",
+ // '' hex: ''
+ "INSERT INTO table1(time, c_text, c_string) VALUES (3, '', '')",
+ // for null test
+ "INSERT INTO table1(time, c_int) VALUES (4, 404)",
+ // invalid hex string (non-hex characters)
+ "INSERT INTO table1(time, c_text, c_string) VALUES (5, 'not_hex', 'not_hex')",
+ // invalid hex string (odd length)
+ "INSERT INTO table1(time, c_text, c_string) VALUES (6, '123', '123')"
+ };
+
+ @BeforeClass
+ public static void setUp() throws Exception {
+ EnvFactory.getEnv().initClusterEnvironment();
+ prepareTableData(createSqls);
+ }
+
+ @AfterClass
+ public static void tearDown() throws Exception {
+ EnvFactory.getEnv().cleanClusterEnvironment();
+ }
+
+ /** validate the normal decoding of from_hex() for TEXT/STRING type */
+ @Test
+ public void testFromHexOnTextString() {
+ String[] expectedHeader = new String[] {"time", "from_hex(c_text)", "from_hex(c_string)"};
+ String[] retArray =
+ new String[] {
+ // 'hello'
+ "1970-01-01T00:00:00.001Z,0x68656c6c6f,0x68656c6c6f,",
+ // '你好'
+ "1970-01-01T00:00:00.002Z,0xe4bda0e5a5bd,0xe4bda0e5a5bd,",
+ // ''
+ "1970-01-01T00:00:00.003Z,0x,0x,",
+ // null
+ "1970-01-01T00:00:00.004Z,null,null,"
+ };
+ tableResultSetEqualTest(
+ "SELECT time, from_hex(c_text) as \"from_hex(c_text)\", from_hex(c_string) as \"from_hex(c_string)\" FROM table1 where time < 5",
+ expectedHeader,
+ retArray,
+ DATABASE_NAME);
+ }
+
+ /** invalid hex string (with non-hex characters) should throw exception */
+ @Test
+ public void testFromHexOnInvalidHexChars() {
+ String expectedErrorMessage =
+ TSStatusCode.SEMANTIC_ERROR.getStatusCode()
+ + ": Failed to execute function 'from_hex' due to an invalid input format. Problematic value: not_hex";
+ tableAssertTestFail(
+ "SELECT from_hex(c_text) FROM table1 WHERE time = 5", expectedErrorMessage, DATABASE_NAME);
+ }
+
+ /** invalid hex string (with odd length) should throw exception */
+ @Test
+ public void testFromHexOnInvalidHexLength() {
+ String expectedErrorMessage =
+ TSStatusCode.SEMANTIC_ERROR.getStatusCode()
+ + ": Failed to execute function 'from_hex' due to an invalid input format. Problematic value: 123";
+ tableAssertTestFail(
+ "SELECT from_hex(c_text) FROM table1 WHERE time = 6", expectedErrorMessage, DATABASE_NAME);
+ }
+
+ /** invalid input type or number of arguments should be rejected */
+ @Test
+ public void testFromHexFunctionOnInvalidInputs() {
+ String expectedErrorMessage =
+ TSStatusCode.SEMANTIC_ERROR.getStatusCode()
+ + ": Scalar function from_hex only accepts one argument and it must be TEXT or STRING data type.";
+
+ // Test with invalid data type
+ tableAssertTestFail("SELECT from_hex(c_int) FROM table1", expectedErrorMessage, DATABASE_NAME);
+
+ // Test with multiple arguments
+ tableAssertTestFail(
+ "SELECT from_hex(c_text, 1) FROM table1", expectedErrorMessage, DATABASE_NAME);
+
+ // Test with no arguments
+ tableAssertTestFail("SELECT from_hex() FROM table1", expectedErrorMessage, DATABASE_NAME);
+ }
+}
diff --git a/integration-test/src/test/java/org/apache/iotdb/relational/it/query/recent/scalar/IoTDBFromIEEE754_32FunctionIT.java b/integration-test/src/test/java/org/apache/iotdb/relational/it/query/recent/scalar/IoTDBFromIEEE754_32FunctionIT.java
new file mode 100644
index 000000000000..bffd4333a62e
--- /dev/null
+++ b/integration-test/src/test/java/org/apache/iotdb/relational/it/query/recent/scalar/IoTDBFromIEEE754_32FunctionIT.java
@@ -0,0 +1,154 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.iotdb.relational.it.query.recent.scalar;
+
+import org.apache.iotdb.it.env.EnvFactory;
+import org.apache.iotdb.it.framework.IoTDBTestRunner;
+import org.apache.iotdb.itbase.category.TableClusterIT;
+import org.apache.iotdb.itbase.category.TableLocalStandaloneIT;
+import org.apache.iotdb.rpc.TSStatusCode;
+
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+import org.junit.runner.RunWith;
+
+import static org.apache.iotdb.db.it.utils.TestUtils.prepareTableData;
+import static org.apache.iotdb.db.it.utils.TestUtils.tableAssertTestFail;
+import static org.apache.iotdb.db.it.utils.TestUtils.tableResultSetEqualTest;
+
+@RunWith(IoTDBTestRunner.class)
+@Category({TableLocalStandaloneIT.class, TableClusterIT.class})
+public class IoTDBFromIEEE754_32FunctionIT {
+
+ private static final String DATABASE_NAME = "test_from_ieee754_32_big_endian";
+
+ // Test data: Insert big-endian IEEE 754 BLOBs to verify FLOAT parsing
+ private static final String[] createSqls =
+ new String[] {
+ "CREATE DATABASE " + DATABASE_NAME,
+ "USE " + DATABASE_NAME,
+ "CREATE TABLE table1(c_blob BLOB, c_int32 INT32, c_text TEXT)",
+
+ // Case 1: Big-endian BLOB 0x3FA00000 → 1.25f
+ "INSERT INTO table1(time, c_blob) VALUES (1, X'3FA00000')",
+ // Case 2: Big-endian BLOB 0xC0200000 → -2.5f
+ "INSERT INTO table1(time, c_blob) VALUES (2, X'C0200000')",
+ // Case 3: Big-endian BLOB 0x00000000 → 0.0f
+ "INSERT INTO table1(time, c_blob) VALUES (3, X'00000000')",
+ // Case 4: Big-endian BLOB 0x40490E56 → 3.1415f
+ "INSERT INTO table1(time, c_blob) VALUES (4, X'40490E56')",
+ // Case 5: Big-endian BLOB 0x7FC00000 → NaN (special value)
+ "INSERT INTO table1(time, c_blob) VALUES (5, X'7FC00000')",
+ // Case 6: Big-endian BLOB 0x7F800000 → INF (positive infinity)
+ "INSERT INTO table1(time, c_blob) VALUES (6, X'7F800000')",
+ // Case 7: Null BLOB → null output
+ "INSERT INTO table1(time, c_blob) VALUES (7, null)",
+ // Invalid BLOB length (<4 bytes)
+ "INSERT INTO table1(time, c_blob) VALUES (8, X'3F40')",
+ // Invalid BLOB length (>4 bytes)
+ "INSERT INTO table1(time, c_blob) VALUES (9, X'3F40000000')",
+ // Invalid type test data
+ "INSERT INTO table1(time, c_int32, c_text) VALUES (10, 100, 'invalid_type')",
+ };
+
+ @BeforeClass
+ public static void setUp() throws Exception {
+ EnvFactory.getEnv().initClusterEnvironment();
+ prepareTableData(createSqls);
+ }
+
+ @AfterClass
+ public static void tearDown() throws Exception {
+ EnvFactory.getEnv().cleanClusterEnvironment();
+ }
+
+ /**
+ * Verify valid big-endian BLOB parsing: correct conversion to FLOAT (including special values)
+ */
+ @Test
+ public void testFromIEEE754_32OnValidInputs() {
+ String[] expectedHeader = {"time", "from_ieee754_32(c_blob)"};
+ String[] retArray = {
+ "1970-01-01T00:00:00.001Z,1.25,", // X'3FA00000' → 1.25f
+ "1970-01-01T00:00:00.002Z,-2.5,", // X'C0200000' → -2.5f
+ "1970-01-01T00:00:00.003Z,0.0,", // X'00000000' → 0.0f
+ "1970-01-01T00:00:00.004Z,3.1415,", // X'40490E56' → 3.1415f
+ "1970-01-01T00:00:00.005Z,NaN,", // X'7FC00000' → NaN
+ "1970-01-01T00:00:00.006Z,Infinity,", // X'7F800000' → Infinity
+ "1970-01-01T00:00:00.007Z,null," // Null BLOB → null
+ };
+
+ tableResultSetEqualTest(
+ "SELECT time, from_ieee754_32(c_blob) AS \"from_ieee754_32(c_blob)\" FROM table1 WHERE time <= 7",
+ expectedHeader,
+ retArray,
+ DATABASE_NAME);
+ }
+
+ /** Verify error handling for invalid inputs (wrong type/count, invalid BLOB length) */
+ @Test
+ public void testFromIEEE754_32OnInvalidInputs() {
+ // Error msg for wrong type/argument count
+ String typeCountErrorMsg =
+ TSStatusCode.SEMANTIC_ERROR.getStatusCode()
+ + ": Scalar function from_ieee754_32 only accepts one argument and it must be BLOB data type.";
+
+ // Test non-BLOB input (INT32)
+ tableAssertTestFail(
+ "SELECT from_ieee754_32(c_int32) FROM table1 WHERE time = 10",
+ typeCountErrorMsg,
+ DATABASE_NAME);
+
+ // Test non-BLOB input (TEXT)
+ tableAssertTestFail(
+ "SELECT from_ieee754_32(c_text) FROM table1 WHERE time = 10",
+ typeCountErrorMsg,
+ DATABASE_NAME);
+
+ // Test no arguments
+ tableAssertTestFail("SELECT from_ieee754_32() FROM table1", typeCountErrorMsg, DATABASE_NAME);
+
+ // Test multiple arguments
+ tableAssertTestFail(
+ "SELECT from_ieee754_32(c_blob, c_blob) FROM table1 WHERE time = 1",
+ typeCountErrorMsg,
+ DATABASE_NAME);
+
+ // Error msg for invalid BLOB length
+ String lengthErrorMsg =
+ TSStatusCode.SEMANTIC_ERROR.getStatusCode()
+ + ": Failed to execute function 'from_ieee754_32' due to an invalid input format. "
+ + "Problematic value: ";
+
+ // Test BLOB length <4 bytes
+ tableAssertTestFail(
+ "SELECT from_ieee754_32(c_blob) FROM table1 WHERE time = 8",
+ lengthErrorMsg + "0x3f40",
+ DATABASE_NAME);
+
+ // Test BLOB length >4 bytes
+ tableAssertTestFail(
+ "SELECT from_ieee754_32(c_blob) FROM table1 WHERE time = 9",
+ lengthErrorMsg + "0x3f40000000",
+ DATABASE_NAME);
+ }
+}
diff --git a/integration-test/src/test/java/org/apache/iotdb/relational/it/query/recent/scalar/IoTDBFromIEEE754_64FunctionIT.java b/integration-test/src/test/java/org/apache/iotdb/relational/it/query/recent/scalar/IoTDBFromIEEE754_64FunctionIT.java
new file mode 100644
index 000000000000..5fcff08e8ac7
--- /dev/null
+++ b/integration-test/src/test/java/org/apache/iotdb/relational/it/query/recent/scalar/IoTDBFromIEEE754_64FunctionIT.java
@@ -0,0 +1,154 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.iotdb.relational.it.query.recent.scalar;
+
+import org.apache.iotdb.it.env.EnvFactory;
+import org.apache.iotdb.it.framework.IoTDBTestRunner;
+import org.apache.iotdb.itbase.category.TableClusterIT;
+import org.apache.iotdb.itbase.category.TableLocalStandaloneIT;
+import org.apache.iotdb.rpc.TSStatusCode;
+
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+import org.junit.runner.RunWith;
+
+import static org.apache.iotdb.db.it.utils.TestUtils.prepareTableData;
+import static org.apache.iotdb.db.it.utils.TestUtils.tableAssertTestFail;
+import static org.apache.iotdb.db.it.utils.TestUtils.tableResultSetEqualTest;
+
+@RunWith(IoTDBTestRunner.class)
+@Category({TableLocalStandaloneIT.class, TableClusterIT.class})
+public class IoTDBFromIEEE754_64FunctionIT {
+
+ private static final String DATABASE_NAME = "test_from_ieee754_64_big_endian";
+
+ // Test data: Insert big-endian IEEE 754 64-bit BLOBs to verify DOUBLE parsing
+ private static final String[] createSqls =
+ new String[] {
+ "CREATE DATABASE " + DATABASE_NAME,
+ "USE " + DATABASE_NAME,
+ "CREATE TABLE table1(c_blob BLOB, c_int64 INT64, c_text TEXT)",
+
+ // Case 1: Big-endian BLOB 0x3FF4000000000000 → 1.25d
+ "INSERT INTO table1(time, c_blob) VALUES (1, X'3FF4000000000000')",
+ // Case 2: Big-endian BLOB 0xC004000000000000 → -2.5d
+ "INSERT INTO table1(time, c_blob) VALUES (2, X'C004000000000000')",
+ // Case 3: Big-endian BLOB 0x0000000000000000 → 0.0d
+ "INSERT INTO table1(time, c_blob) VALUES (3, X'0000000000000000')",
+ // Case 4: Big-endian BLOB 0x400921FB54442D18 → ~3.1415926535d
+ "INSERT INTO table1(time, c_blob) VALUES (4, X'400921FB54442D18')",
+ // Case 5: Big-endian BLOB 0x7FF8000000000000 → NaN (special value)
+ "INSERT INTO table1(time, c_blob) VALUES (5, X'7FF8000000000000')",
+ // Case 6: Big-endian BLOB 0x7FF0000000000000 → INF (positive infinity)
+ "INSERT INTO table1(time, c_blob) VALUES (6, X'7FF0000000000000')",
+ // Case 7: Null BLOB → null output
+ "INSERT INTO table1(time, c_blob) VALUES (7, null)",
+ // Invalid BLOB length (<8 bytes)
+ "INSERT INTO table1(time, c_blob) VALUES (8, X'3FF40000')",
+ // Invalid BLOB length (>8 bytes)
+ "INSERT INTO table1(time, c_blob) VALUES (9, X'3FF400000000000000')",
+ // Invalid type test data
+ "INSERT INTO table1(time, c_int64, c_text) VALUES (10, 1000, 'invalid_type')",
+ };
+
+ @BeforeClass
+ public static void setUp() throws Exception {
+ EnvFactory.getEnv().initClusterEnvironment();
+ prepareTableData(createSqls);
+ }
+
+ @AfterClass
+ public static void tearDown() throws Exception {
+ EnvFactory.getEnv().cleanClusterEnvironment();
+ }
+
+ /**
+ * Verify valid big-endian BLOB parsing: correct conversion to DOUBLE (including special values)
+ */
+ @Test
+ public void testFromIEEE754_64OnValidInputs() {
+ String[] expectedHeader = {"time", "from_ieee754_64(c_blob)"};
+ String[] retArray = {
+ "1970-01-01T00:00:00.001Z,1.25,", // X'3FF4000000000000' → 1.25d
+ "1970-01-01T00:00:00.002Z,-2.5,", // X'C004000000000000' → -2.5d
+ "1970-01-01T00:00:00.003Z,0.0,", // X'0000000000000000' → 0.0d
+ "1970-01-01T00:00:00.004Z,3.141592653589793,", // X'400921FB54442D18' → 3.141592653589793
+ "1970-01-01T00:00:00.005Z,NaN,", // X'7FF8000000000000' → NaN
+ "1970-01-01T00:00:00.006Z,Infinity,", // X'7FF0000000000000' → Infinity
+ "1970-01-01T00:00:00.007Z,null," // Null BLOB → null
+ };
+
+ tableResultSetEqualTest(
+ "SELECT time, from_ieee754_64(c_blob) AS \"from_ieee754_64(c_blob)\" FROM table1 WHERE time <= 7",
+ expectedHeader,
+ retArray,
+ DATABASE_NAME);
+ }
+
+ /** Verify error handling for invalid inputs (wrong type/count, invalid BLOB length) */
+ @Test
+ public void testFromIEEE754_64OnInvalidInputs() {
+ // Error msg for wrong type/argument count
+ String typeCountErrorMsg =
+ TSStatusCode.SEMANTIC_ERROR.getStatusCode()
+ + ": Scalar function from_ieee754_64 only accepts one argument and it must be BLOB data type.";
+
+ // Test non-BLOB input (INT64)
+ tableAssertTestFail(
+ "SELECT from_ieee754_64(c_int64) FROM table1 WHERE time = 10",
+ typeCountErrorMsg,
+ DATABASE_NAME);
+
+ // Test non-BLOB input (TEXT)
+ tableAssertTestFail(
+ "SELECT from_ieee754_64(c_text) FROM table1 WHERE time = 10",
+ typeCountErrorMsg,
+ DATABASE_NAME);
+
+ // Test no arguments
+ tableAssertTestFail("SELECT from_ieee754_64() FROM table1", typeCountErrorMsg, DATABASE_NAME);
+
+ // Test multiple arguments
+ tableAssertTestFail(
+ "SELECT from_ieee754_64(c_blob, c_blob) FROM table1 WHERE time = 1",
+ typeCountErrorMsg,
+ DATABASE_NAME);
+
+ // Error msg for invalid BLOB length
+ String lengthErrorMsg =
+ TSStatusCode.SEMANTIC_ERROR.getStatusCode()
+ + ": Failed to execute function 'from_ieee754_64' due to an invalid input format. "
+ + "Problematic value: ";
+
+ // Test BLOB length <8 bytes
+ tableAssertTestFail(
+ "SELECT from_ieee754_64(c_blob) FROM table1 WHERE time = 8",
+ lengthErrorMsg + "0x3ff40000",
+ DATABASE_NAME);
+
+ // Test BLOB length >8 bytes
+ tableAssertTestFail(
+ "SELECT from_ieee754_64(c_blob) FROM table1 WHERE time = 9",
+ lengthErrorMsg + "0x3ff400000000000000",
+ DATABASE_NAME);
+ }
+}
diff --git a/integration-test/src/test/java/org/apache/iotdb/relational/it/query/recent/scalar/IoTDBFromLittleEndian32FunctionIT.java b/integration-test/src/test/java/org/apache/iotdb/relational/it/query/recent/scalar/IoTDBFromLittleEndian32FunctionIT.java
new file mode 100644
index 000000000000..4472af8880cb
--- /dev/null
+++ b/integration-test/src/test/java/org/apache/iotdb/relational/it/query/recent/scalar/IoTDBFromLittleEndian32FunctionIT.java
@@ -0,0 +1,173 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.iotdb.relational.it.query.recent.scalar;
+
+import org.apache.iotdb.it.env.EnvFactory;
+import org.apache.iotdb.it.framework.IoTDBTestRunner;
+import org.apache.iotdb.itbase.category.TableClusterIT;
+import org.apache.iotdb.itbase.category.TableLocalStandaloneIT;
+import org.apache.iotdb.rpc.TSStatusCode;
+
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+import org.junit.runner.RunWith;
+
+import static org.apache.iotdb.db.it.utils.TestUtils.prepareTableData;
+import static org.apache.iotdb.db.it.utils.TestUtils.tableAssertTestFail;
+import static org.apache.iotdb.db.it.utils.TestUtils.tableResultSetEqualTest;
+
+@RunWith(IoTDBTestRunner.class)
+@Category({TableLocalStandaloneIT.class, TableClusterIT.class})
+public class IoTDBFromLittleEndian32FunctionIT {
+
+ private static final String DATABASE_NAME = "test_from_little_endian_32";
+
+ // SQL statements to set up the database and table for testing
+ private static final String[] createSqls =
+ new String[] {
+ "CREATE DATABASE " + DATABASE_NAME,
+ "USE " + DATABASE_NAME,
+ "CREATE TABLE table1(c_blob BLOB, c_int32 INT32, c_text TEXT)",
+
+ // Case 1: BLOB for a common positive integer (16909060) in little-endian
+ "INSERT INTO table1(time, c_blob) VALUES (1, X'04030201')",
+
+ // Case 2: BLOB for a negative integer (-1) in little-endian
+ "INSERT INTO table1(time, c_blob) VALUES (2, X'FFFFFFFF')",
+
+ // Case 3: BLOB for zero in little-endian
+ "INSERT INTO table1(time, c_blob) VALUES (3, X'00000000')",
+
+ // Case 4: BLOB for the maximum INT32 value in little-endian
+ "INSERT INTO table1(time, c_blob) VALUES (4, X'FFFFFF7F')",
+
+ // Case 5: BLOB for the minimum INT32 value in little-endian
+ "INSERT INTO table1(time, c_blob) VALUES (5, X'00000080')",
+
+ // Case 6: Null BLOB input
+ "INSERT INTO table1(time, c_blob) VALUES (6, null)",
+
+ // Case 7: BLOB with invalid length (< 4 bytes) for error testing
+ "INSERT INTO table1(time, c_blob) VALUES (7, X'010203')",
+
+ // Case 8: BLOB with invalid length (> 4 bytes) for error testing
+ "INSERT INTO table1(time, c_blob) VALUES (8, X'0102030405')",
+
+ // Case 9: Data for invalid type testing
+ "INSERT INTO table1(time, c_int32, c_text) VALUES (9, 100, 'some_text')",
+ };
+
+ @BeforeClass
+ public static void setUp() throws Exception {
+ EnvFactory.getEnv().initClusterEnvironment();
+ prepareTableData(createSqls);
+ }
+
+ @AfterClass
+ public static void tearDown() throws Exception {
+ EnvFactory.getEnv().cleanClusterEnvironment();
+ }
+
+ /**
+ * Validates the from_little_endian_32() function on various valid 4-byte BLOB inputs. This test
+ * covers positive, negative, zero, min/max, and null values.
+ */
+ @Test
+ public void testFromLittleEndian32OnValidInputs() {
+ String[] expectedHeader =
+ new String[] {
+ "time", "from_little_endian_32(c_blob)",
+ };
+ String[] retArray =
+ new String[] {
+ // 1. X'04030201' -> 16909060
+ "1970-01-01T00:00:00.001Z,16909060,",
+ // 2. X'FFFFFFFF' -> -1
+ "1970-01-01T00:00:00.002Z,-1,",
+ // 3. X'00000000' -> 0
+ "1970-01-01T00:00:00.003Z,0,",
+ // 4. X'FFFFFF7F' -> 2147483647
+ "1970-01-01T00:00:00.004Z,2147483647,",
+ // 5. X'00000080' -> -2147483648
+ "1970-01-01T00:00:00.005Z,-2147483648,",
+ // 6. Null input -> null output
+ "1970-01-01T00:00:00.006Z,null,",
+ };
+
+ tableResultSetEqualTest(
+ "SELECT time, from_little_endian_32(c_blob) as \"from_little_endian_32(c_blob)\" FROM table1 WHERE time <= 6",
+ expectedHeader,
+ retArray,
+ DATABASE_NAME);
+ }
+
+ /**
+ * Tests for invalid arguments passed to the from_little_endian_32() function. This includes wrong
+ * argument count, wrong data types, and wrong BLOB length.
+ */
+ @Test
+ public void testFromLittleEndian32OnInvalidInputs() {
+ // Define the expected error message for wrong argument count or type
+ String typeAndCountErrorMessage =
+ TSStatusCode.SEMANTIC_ERROR.getStatusCode()
+ + ": Scalar function from_little_endian_32 only accepts one argument and it must be BLOB data type.";
+
+ // Test with invalid parameter type (INT32)
+ tableAssertTestFail(
+ "SELECT from_little_endian_32(c_int32) FROM table1",
+ typeAndCountErrorMessage,
+ DATABASE_NAME);
+
+ // Test with invalid parameter type (TEXT)
+ tableAssertTestFail(
+ "SELECT from_little_endian_32(c_text) FROM table1",
+ typeAndCountErrorMessage,
+ DATABASE_NAME);
+
+ // Test with invalid parameter count (0 arguments)
+ tableAssertTestFail(
+ "SELECT from_little_endian_32() FROM table1", typeAndCountErrorMessage, DATABASE_NAME);
+
+ // Test with invalid parameter count (>1 arguments)
+ tableAssertTestFail(
+ "SELECT from_little_endian_32(c_blob, c_blob) FROM table1",
+ typeAndCountErrorMessage,
+ DATABASE_NAME);
+
+ // Define the expected error message for wrong BLOB length
+ String lengthErrorMessage =
+ TSStatusCode.SEMANTIC_ERROR.getStatusCode()
+ + ": Failed to execute function 'from_little_endian_32' due to an invalid input format. Problematic value: ";
+
+ // Test with BLOB length < 4
+ tableAssertTestFail(
+ "SELECT from_little_endian_32(c_blob) FROM table1 WHERE time = 7",
+ lengthErrorMessage + "0x010203",
+ DATABASE_NAME);
+
+ // Test with BLOB length > 4
+ tableAssertTestFail(
+ "SELECT from_little_endian_32(c_blob) FROM table1 WHERE time = 8",
+ lengthErrorMessage + "0x0102030405",
+ DATABASE_NAME);
+ }
+}
diff --git a/integration-test/src/test/java/org/apache/iotdb/relational/it/query/recent/scalar/IoTDBFromLittleEndian64FunctionIT.java b/integration-test/src/test/java/org/apache/iotdb/relational/it/query/recent/scalar/IoTDBFromLittleEndian64FunctionIT.java
new file mode 100644
index 000000000000..35b209f25f93
--- /dev/null
+++ b/integration-test/src/test/java/org/apache/iotdb/relational/it/query/recent/scalar/IoTDBFromLittleEndian64FunctionIT.java
@@ -0,0 +1,173 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.iotdb.relational.it.query.recent.scalar;
+
+import org.apache.iotdb.it.env.EnvFactory;
+import org.apache.iotdb.it.framework.IoTDBTestRunner;
+import org.apache.iotdb.itbase.category.TableClusterIT;
+import org.apache.iotdb.itbase.category.TableLocalStandaloneIT;
+import org.apache.iotdb.rpc.TSStatusCode;
+
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+import org.junit.runner.RunWith;
+
+import static org.apache.iotdb.db.it.utils.TestUtils.prepareTableData;
+import static org.apache.iotdb.db.it.utils.TestUtils.tableAssertTestFail;
+import static org.apache.iotdb.db.it.utils.TestUtils.tableResultSetEqualTest;
+
+@RunWith(IoTDBTestRunner.class)
+@Category({TableLocalStandaloneIT.class, TableClusterIT.class})
+public class IoTDBFromLittleEndian64FunctionIT {
+
+ private static final String DATABASE_NAME = "test_from_little_endian_64";
+
+ // SQL statements to set up the database and table for testing
+ private static final String[] createSqls =
+ new String[] {
+ "CREATE DATABASE " + DATABASE_NAME,
+ "USE " + DATABASE_NAME,
+ "CREATE TABLE table1(c_blob BLOB, c_int64 INT64, c_text TEXT)",
+
+ // Case 1: BLOB for a common positive integer (72623859790382856) in little-endian
+ "INSERT INTO table1(time, c_blob) VALUES (1, X'0807060504030201')",
+
+ // Case 2: BLOB for a negative integer (-1) in little-endian
+ "INSERT INTO table1(time, c_blob) VALUES (2, X'FFFFFFFFFFFFFFFF')",
+
+ // Case 3: BLOB for zero in little-endian
+ "INSERT INTO table1(time, c_blob) VALUES (3, X'0000000000000000')",
+
+ // Case 4: BLOB for the maximum INT64 value in little-endian
+ "INSERT INTO table1(time, c_blob) VALUES (4, X'FFFFFFFFFFFFFF7F')",
+
+ // Case 5: BLOB for the minimum INT64 value in little-endian
+ "INSERT INTO table1(time, c_blob) VALUES (5, X'0000000000000080')",
+
+ // Case 6: Null BLOB input
+ "INSERT INTO table1(time, c_blob) VALUES (6, null)",
+
+ // Case 7: BLOB with invalid length (< 8 bytes) for error testing
+ "INSERT INTO table1(time, c_blob) VALUES (7, X'01020304050607')",
+
+ // Case 8: BLOB with invalid length (> 8 bytes) for error testing
+ "INSERT INTO table1(time, c_blob) VALUES (8, X'010203040506070809')",
+
+ // Case 9: Data for invalid type testing
+ "INSERT INTO table1(time, c_int64, c_text) VALUES (9, 100, 'some_text')",
+ };
+
+ @BeforeClass
+ public static void setUp() throws Exception {
+ EnvFactory.getEnv().initClusterEnvironment();
+ prepareTableData(createSqls);
+ }
+
+ @AfterClass
+ public static void tearDown() throws Exception {
+ EnvFactory.getEnv().cleanClusterEnvironment();
+ }
+
+ /**
+ * Validates the from_little_endian_64() function on various valid 8-byte BLOB inputs. This test
+ * covers positive, negative, zero, min/max, and null values.
+ */
+ @Test
+ public void testFromLittleEndian64OnValidInputs() {
+ String[] expectedHeader =
+ new String[] {
+ "time", "from_little_endian_64(c_blob)",
+ };
+ String[] retArray =
+ new String[] {
+ // 1. X'0807060504030201' -> 72623859790382856
+ "1970-01-01T00:00:00.001Z,72623859790382856,",
+ // 2. X'FFFFFFFFFFFFFFFF' -> -1
+ "1970-01-01T00:00:00.002Z,-1,",
+ // 3. X'0000000000000000' -> 0
+ "1970-01-01T00:00:00.003Z,0,",
+ // 4. X'FFFFFFFFFFFFFF7F' -> 9223372036854775807
+ "1970-01-01T00:00:00.004Z,9223372036854775807,",
+ // 5. X'0000000000000080' -> -9223372036854775808
+ "1970-01-01T00:00:00.005Z,-9223372036854775808,",
+ // 6. Null input -> null output
+ "1970-01-01T00:00:00.006Z,null,",
+ };
+
+ tableResultSetEqualTest(
+ "SELECT time, from_little_endian_64(c_blob) as \"from_little_endian_64(c_blob)\" FROM table1 WHERE time <= 6",
+ expectedHeader,
+ retArray,
+ DATABASE_NAME);
+ }
+
+ /**
+ * Tests for invalid arguments passed to the from_little_endian_64() function. This includes wrong
+ * argument count, wrong data types, and wrong BLOB length.
+ */
+ @Test
+ public void testFromLittleEndian64OnInvalidInputs() {
+ // Define the expected error message for wrong argument count or type
+ String typeAndCountErrorMessage =
+ TSStatusCode.SEMANTIC_ERROR.getStatusCode()
+ + ": Scalar function from_little_endian_64 only accepts one argument and it must be BLOB data type.";
+
+ // Test with invalid parameter type (INT64)
+ tableAssertTestFail(
+ "SELECT from_little_endian_64(c_int64) FROM table1",
+ typeAndCountErrorMessage,
+ DATABASE_NAME);
+
+ // Test with invalid parameter type (TEXT)
+ tableAssertTestFail(
+ "SELECT from_little_endian_64(c_text) FROM table1",
+ typeAndCountErrorMessage,
+ DATABASE_NAME);
+
+ // Test with invalid parameter count (0 arguments)
+ tableAssertTestFail(
+ "SELECT from_little_endian_64() FROM table1", typeAndCountErrorMessage, DATABASE_NAME);
+
+ // Test with invalid parameter count (>1 arguments)
+ tableAssertTestFail(
+ "SELECT from_little_endian_64(c_blob, c_blob) FROM table1",
+ typeAndCountErrorMessage,
+ DATABASE_NAME);
+
+ // Define the expected error message for wrong BLOB length, as per the reference example
+ String lengthErrorMessage =
+ TSStatusCode.SEMANTIC_ERROR.getStatusCode()
+ + ": Failed to execute function 'from_little_endian_64' due to an invalid input format. Problematic value: ";
+
+ // Test with BLOB length < 8
+ tableAssertTestFail(
+ "SELECT from_little_endian_64(c_blob) FROM table1 WHERE time = 7",
+ lengthErrorMessage + "0x01020304050607",
+ DATABASE_NAME);
+
+ // Test with BLOB length > 8
+ tableAssertTestFail(
+ "SELECT from_little_endian_64(c_blob) FROM table1 WHERE time = 8",
+ lengthErrorMessage + "0x010203040506070809",
+ DATABASE_NAME);
+ }
+}
diff --git a/integration-test/src/test/java/org/apache/iotdb/relational/it/query/recent/scalar/IoTDBHmacMd5FunctionIT.java b/integration-test/src/test/java/org/apache/iotdb/relational/it/query/recent/scalar/IoTDBHmacMd5FunctionIT.java
new file mode 100644
index 000000000000..6e1041de6065
--- /dev/null
+++ b/integration-test/src/test/java/org/apache/iotdb/relational/it/query/recent/scalar/IoTDBHmacMd5FunctionIT.java
@@ -0,0 +1,211 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.iotdb.relational.it.query.recent.scalar;
+
+import org.apache.iotdb.it.env.EnvFactory;
+import org.apache.iotdb.it.framework.IoTDBTestRunner;
+import org.apache.iotdb.itbase.category.TableClusterIT;
+import org.apache.iotdb.itbase.category.TableLocalStandaloneIT;
+import org.apache.iotdb.rpc.TSStatusCode;
+
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+import org.junit.runner.RunWith;
+
+import static org.apache.iotdb.db.it.utils.TestUtils.prepareTableData;
+import static org.apache.iotdb.db.it.utils.TestUtils.tableAssertTestFail;
+import static org.apache.iotdb.db.it.utils.TestUtils.tableResultSetEqualTest;
+
+@RunWith(IoTDBTestRunner.class)
+@Category({TableLocalStandaloneIT.class, TableClusterIT.class})
+public class IoTDBHmacMd5FunctionIT {
+
+ private static final String DATABASE_NAME = "test_hmac_md5_function";
+
+ // Comprehensive data for testing the HMAC_MD5 function
+ private static final String[] createSqls =
+ new String[] {
+ "CREATE DATABASE " + DATABASE_NAME,
+ "USE " + DATABASE_NAME,
+ "CREATE TABLE HmacTestTable("
+ + "data_string STRING, "
+ + "data_text TEXT, "
+ + "data_blob BLOB, "
+ + "key_string STRING, "
+ + "key_text TEXT, "
+ + "key_blob BLOB, "
+ + "int_col INT32"
+ + ")",
+ // 1. Standard STRING data and STRING key
+ "INSERT INTO HmacTestTable(time, data_string, key_string) VALUES(1, 'Hello IoTDB', 'secret_key')",
+ // 2. Standard TEXT data and TEXT key
+ "INSERT INTO HmacTestTable(time, data_text, key_text) VALUES(2, 'Another test message', 'another_key')",
+ // 3. BLOB data
+ "INSERT INTO HmacTestTable(time, data_blob) VALUES(3, X'48656C6C6F20496F544442')", // 'Hello
+ // IoTDB'
+ // 4. Unicode data and key
+ "INSERT INTO HmacTestTable(time, data_text, key_text) VALUES(4, '你好世界', '这是一个密钥')",
+ // 5. Empty string data (valid case)
+ "INSERT INTO HmacTestTable(time, data_string, key_string) VALUES(5, '', 'some_key')",
+ // 6. Empty string key (invalid case, for failure testing)
+ "INSERT INTO HmacTestTable(time, data_string, key_string) VALUES(6, 'some_data', '')",
+ // 7. Null data (valid case)
+ "INSERT INTO HmacTestTable(time, data_string, key_string) VALUES(7, null, 'some_key')",
+ // 8. Null key (valid case)
+ "INSERT INTO HmacTestTable(time, data_string, key_string) VALUES(8, 'some_data', null)",
+ // 9. Data for invalid type testing
+ "INSERT INTO HmacTestTable(time, int_col, key_string, key_blob) VALUES (9, 123, 'key_for_int', X'deadbeef')",
+ "INSERT INTO HmacTestTable(time, data_string, int_col) VALUES (10, 'data_for_int_key', 456)",
+ };
+
+ @BeforeClass
+ public static void setUp() throws Exception {
+ EnvFactory.getEnv().initClusterEnvironment();
+ prepareTableData(createSqls);
+ }
+
+ @AfterClass
+ public static void tearDown() throws Exception {
+ EnvFactory.getEnv().cleanClusterEnvironment();
+ }
+
+ /** Test hmac_md5 where inputs are column references. */
+ @Test
+ public void testHmacMd5WithColumnInputs() {
+ // TC-P1: STRING data, STRING key
+ String[] expectedHeader1 = {"time", "hmac_md5(data_string, key_string)"};
+ String[] retArray1 = {"1970-01-01T00:00:00.001Z,0x39cc932e8ee74450ca31422ea48858c7,"};
+ tableResultSetEqualTest(
+ "SELECT time, hmac_md5(data_string, key_string) as \"hmac_md5(data_string, key_string)\" FROM HmacTestTable WHERE time = 1",
+ expectedHeader1,
+ retArray1,
+ DATABASE_NAME);
+
+ // TC-P2: TEXT data, TEXT key
+ String[] expectedHeader2 = {"time", "hmac_md5(data_text, key_text)"};
+ String[] retArray2 = {"1970-01-01T00:00:00.002Z,0x43ddac9f669670c3554f742e4c8b6279,"};
+ tableResultSetEqualTest(
+ "SELECT time, hmac_md5(data_text, key_text) as \"hmac_md5(data_text, key_text)\" FROM HmacTestTable WHERE time = 2",
+ expectedHeader2,
+ retArray2,
+ DATABASE_NAME);
+
+ // TC-P3: Unicode TEXT data, Unicode TEXT key
+ String[] retArray3 = {"1970-01-01T00:00:00.004Z,0xc6b5824c2d9846e89a8f1d340a560df4,"};
+ tableResultSetEqualTest(
+ "SELECT time, hmac_md5(data_text, key_text) as \"hmac_md5(data_text, key_text)\" FROM HmacTestTable WHERE time = 4",
+ expectedHeader2, // Reusing header as the alias format is the same
+ retArray3,
+ DATABASE_NAME);
+ }
+
+ /** Test hmac_md5 where inputs are literals (constants). */
+ @Test
+ public void testHmacMd5WithLiteralInputs() {
+ // TC-L1: STRING literal data, STRING literal key
+ String[] expectedHeader1 = {"hmac_md5('Hello IoTDB', 'secret_key')"};
+ String[] retArray1 = {"0x39cc932e8ee74450ca31422ea48858c7,"};
+ tableResultSetEqualTest(
+ "SELECT hmac_md5('Hello IoTDB', 'secret_key') as \"hmac_md5('Hello IoTDB', 'secret_key')\" from HmacTestTable where time = 1",
+ expectedHeader1,
+ retArray1,
+ DATABASE_NAME);
+
+ // TC-L2: BLOB data (from column), STRING literal key
+ String[] expectedHeader2 = {"hmac_md5(data_blob, 'secret_key')"};
+ String[] retArray2 = {"0x39cc932e8ee74450ca31422ea48858c7,"};
+ tableResultSetEqualTest(
+ "SELECT hmac_md5(data_blob, 'secret_key') as \"hmac_md5(data_blob, 'secret_key')\" FROM HmacTestTable WHERE time = 3",
+ expectedHeader2,
+ retArray2,
+ DATABASE_NAME);
+ }
+
+ /** Test hmac_md5 on edge cases like empty data strings and NULL inputs. */
+ @Test
+ public void testHmacMd5OnEdgeCases() {
+ String[] expectedHeader = {"time", "hmac_md5(data_string, key_string)"};
+ String[] retArray = {
+ // time=5, data='', key='some_key' -> VALID
+ "1970-01-01T00:00:00.005Z,0x3668b7560bcd8a938bdb83de73d6f76b,",
+ // time=7, data=null, key='some_key' -> NULL
+ "1970-01-01T00:00:00.007Z,null,",
+ // time=8, data='some_data', key=null -> NULL
+ "1970-01-01T00:00:00.008Z,null,",
+ };
+ tableResultSetEqualTest(
+ "SELECT time, hmac_md5(data_string, key_string) as \"hmac_md5(data_string, key_string)\" FROM HmacTestTable WHERE time IN (5, 7, 8) ORDER BY time",
+ expectedHeader,
+ retArray,
+ DATABASE_NAME);
+ }
+
+ /**
+ * Verify error handling for invalid inputs, such as wrong argument count, incorrect data types,
+ * or an empty string key.
+ */
+ @Test
+ public void testHmacMd5OnInvalidInputs() {
+ String generalErrorMsg =
+ TSStatusCode.SEMANTIC_ERROR.getStatusCode()
+ + ": Scalar function hmac_md5 only accepts two arguments, first argument must be TEXT, STRING, or BlOB type, second argument must be STRING OR TEXT type.";
+
+ String emptyKeyErrorMsg =
+ TSStatusCode.SEMANTIC_ERROR.getStatusCode()
+ + ": Failed to execute function 'hmac_md5' due to an invalid input format. the value 'some_data' corresponding to a empty key, the empty key is not allowed in HMAC operation.";
+
+ String emptyLiteralKeyErrorMsg =
+ TSStatusCode.SEMANTIC_ERROR.getStatusCode()
+ + ": Failed to execute function hmac_md5 due to an invalid input format, the empty key is not allowed in HMAC operation";
+
+ // Case 1: Wrong argument count
+ tableAssertTestFail("SELECT hmac_md5() from HmacTestTable ", generalErrorMsg, DATABASE_NAME);
+ tableAssertTestFail(
+ "SELECT hmac_md5(data_string) from HmacTestTable", generalErrorMsg, DATABASE_NAME);
+ tableAssertTestFail(
+ "SELECT hmac_md5(data_string, key_string, 'extra') from HmacTestTable",
+ generalErrorMsg,
+ DATABASE_NAME);
+
+ // Case 2: Invalid data types
+ tableAssertTestFail(
+ "SELECT hmac_md5(int_col, key_string) FROM HmacTestTable WHERE time = 9",
+ generalErrorMsg,
+ DATABASE_NAME);
+ tableAssertTestFail(
+ "SELECT hmac_md5(data_string, int_col) FROM HmacTestTable WHERE time = 10",
+ generalErrorMsg,
+ DATABASE_NAME);
+
+ // Case 3: CRITICAL - Empty string key is invalid
+ tableAssertTestFail(
+ "SELECT hmac_md5(data_string, key_string) FROM HmacTestTable WHERE time = 6",
+ emptyKeyErrorMsg,
+ DATABASE_NAME);
+
+ // Also test with an empty literal key
+ tableAssertTestFail(
+ "SELECT hmac_md5('some_data', '') FROM HmacTestTable",
+ emptyLiteralKeyErrorMsg,
+ DATABASE_NAME);
+ }
+}
diff --git a/integration-test/src/test/java/org/apache/iotdb/relational/it/query/recent/scalar/IoTDBHmacSha1FunctionIT.java b/integration-test/src/test/java/org/apache/iotdb/relational/it/query/recent/scalar/IoTDBHmacSha1FunctionIT.java
new file mode 100644
index 000000000000..390229969c01
--- /dev/null
+++ b/integration-test/src/test/java/org/apache/iotdb/relational/it/query/recent/scalar/IoTDBHmacSha1FunctionIT.java
@@ -0,0 +1,211 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.iotdb.relational.it.query.recent.scalar;
+
+import org.apache.iotdb.it.env.EnvFactory;
+import org.apache.iotdb.it.framework.IoTDBTestRunner;
+import org.apache.iotdb.itbase.category.TableClusterIT;
+import org.apache.iotdb.itbase.category.TableLocalStandaloneIT;
+import org.apache.iotdb.rpc.TSStatusCode;
+
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+import org.junit.runner.RunWith;
+
+import static org.apache.iotdb.db.it.utils.TestUtils.prepareTableData;
+import static org.apache.iotdb.db.it.utils.TestUtils.tableAssertTestFail;
+import static org.apache.iotdb.db.it.utils.TestUtils.tableResultSetEqualTest;
+
+@RunWith(IoTDBTestRunner.class)
+@Category({TableLocalStandaloneIT.class, TableClusterIT.class})
+public class IoTDBHmacSha1FunctionIT {
+
+ private static final String DATABASE_NAME = "test_hmac_sha1_function";
+
+ // Comprehensive data for testing the HMAC_SHA1 function
+ private static final String[] createSqls =
+ new String[] {
+ "CREATE DATABASE " + DATABASE_NAME,
+ "USE " + DATABASE_NAME,
+ "CREATE TABLE HmacTestTable("
+ + "data_string STRING, "
+ + "data_text TEXT, "
+ + "data_blob BLOB, "
+ + "key_string STRING, "
+ + "key_text TEXT, "
+ + "key_blob BLOB, "
+ + "int_col INT32"
+ + ")",
+ // 1. Standard STRING data and STRING key
+ "INSERT INTO HmacTestTable(time, data_string, key_string) VALUES(1, 'Hello IoTDB', 'secret_key')",
+ // 2. Standard TEXT data and TEXT key
+ "INSERT INTO HmacTestTable(time, data_text, key_text) VALUES(2, 'Another test message', 'another_key')",
+ // 3. BLOB data
+ "INSERT INTO HmacTestTable(time, data_blob) VALUES(3, X'48656C6C6F20496F544442')", // 'Hello
+ // IoTDB'
+ // 4. Unicode data and key
+ "INSERT INTO HmacTestTable(time, data_text, key_text) VALUES(4, '你好世界', '这是一个密钥')",
+ // 5. Empty string data (valid case)
+ "INSERT INTO HmacTestTable(time, data_string, key_string) VALUES(5, '', 'some_key')",
+ // 6. Empty string key (invalid case, for failure testing)
+ "INSERT INTO HmacTestTable(time, data_string, key_string) VALUES(6, 'some_data', '')",
+ // 7. Null data (valid case)
+ "INSERT INTO HmacTestTable(time, data_string, key_string) VALUES(7, null, 'some_key')",
+ // 8. Null key (valid case)
+ "INSERT INTO HmacTestTable(time, data_string, key_string) VALUES(8, 'some_data', null)",
+ // 9. Data for invalid type testing
+ "INSERT INTO HmacTestTable(time, int_col, key_string, key_blob) VALUES (9, 123, 'key_for_int', X'deadbeef')",
+ "INSERT INTO HmacTestTable(time, data_string, int_col) VALUES (10, 'data_for_int_key', 456)",
+ };
+
+ @BeforeClass
+ public static void setUp() throws Exception {
+ EnvFactory.getEnv().initClusterEnvironment();
+ prepareTableData(createSqls);
+ }
+
+ @AfterClass
+ public static void tearDown() throws Exception {
+ EnvFactory.getEnv().cleanClusterEnvironment();
+ }
+
+ /** Test hmac_sha1 where inputs are column references. */
+ @Test
+ public void testHmacSha1WithColumnInputs() {
+ // TC-P1: STRING data, STRING key
+ String[] expectedHeader1 = {"time", "hmac_sha1(data_string, key_string)"};
+ String[] retArray1 = {"1970-01-01T00:00:00.001Z,0xd307d46d57a553db1b59c28d2d683a155a16eca3,"};
+ tableResultSetEqualTest(
+ "SELECT time, hmac_sha1(data_string, key_string) as \"hmac_sha1(data_string, key_string)\" FROM HmacTestTable WHERE time = 1",
+ expectedHeader1,
+ retArray1,
+ DATABASE_NAME);
+
+ // TC-P2: TEXT data, TEXT key
+ String[] expectedHeader2 = {"time", "hmac_sha1(data_text, key_text)"};
+ String[] retArray2 = {"1970-01-01T00:00:00.002Z,0x2cdd76b9e5784223528c892bc28ffb4c3175438e,"};
+ tableResultSetEqualTest(
+ "SELECT time, hmac_sha1(data_text, key_text) as \"hmac_sha1(data_text, key_text)\" FROM HmacTestTable WHERE time = 2",
+ expectedHeader2,
+ retArray2,
+ DATABASE_NAME);
+
+ // TC-P3: Unicode TEXT data, Unicode TEXT key
+ String[] retArray3 = {"1970-01-01T00:00:00.004Z,0x0660d7047fa2e1a09d3b285e308439e285d2fd3d,"};
+ tableResultSetEqualTest(
+ "SELECT time, hmac_sha1(data_text, key_text) as \"hmac_sha1(data_text, key_text)\" FROM HmacTestTable WHERE time = 4",
+ expectedHeader2, // Reusing header as the alias format is the same
+ retArray3,
+ DATABASE_NAME);
+ }
+
+ /** Test hmac_sha1 where inputs are literals (constants). */
+ @Test
+ public void testHmacSha1WithLiteralInputs() {
+ // TC-L1: STRING literal data, STRING literal key
+ String[] expectedHeader1 = {"hmac_sha1('Hello IoTDB', 'secret_key')"};
+ String[] retArray1 = {"0xd307d46d57a553db1b59c28d2d683a155a16eca3,"};
+ tableResultSetEqualTest(
+ "SELECT hmac_sha1('Hello IoTDB', 'secret_key') as \"hmac_sha1('Hello IoTDB', 'secret_key')\" from HmacTestTable where time = 1",
+ expectedHeader1,
+ retArray1,
+ DATABASE_NAME);
+
+ // TC-L2: BLOB data (from column), STRING literal key
+ String[] expectedHeader2 = {"hmac_sha1(data_blob, 'secret_key')"};
+ String[] retArray2 = {"0xd307d46d57a553db1b59c28d2d683a155a16eca3,"};
+ tableResultSetEqualTest(
+ "SELECT hmac_sha1(data_blob, 'secret_key') as \"hmac_sha1(data_blob, 'secret_key')\" FROM HmacTestTable WHERE time = 3",
+ expectedHeader2,
+ retArray2,
+ DATABASE_NAME);
+ }
+
+ /** Test hmac_sha1 on edge cases like empty data strings and NULL inputs. */
+ @Test
+ public void testHmacSha1OnEdgeCases() {
+ String[] expectedHeader = {"time", "hmac_sha1(data_string, key_string)"};
+ String[] retArray = {
+ // time=5, data='', key='some_key' -> VALID
+ "1970-01-01T00:00:00.005Z,0xe2b59121f73f8695441d7862f191bd58602d797a,",
+ // time=7, data=null, key='some_key' -> NULL
+ "1970-01-01T00:00:00.007Z,null,",
+ // time=8, data='some_data', key=null -> NULL
+ "1970-01-01T00:00:00.008Z,null,",
+ };
+ tableResultSetEqualTest(
+ "SELECT time, hmac_sha1(data_string, key_string) as \"hmac_sha1(data_string, key_string)\" FROM HmacTestTable WHERE time IN (5, 7, 8) ORDER BY time",
+ expectedHeader,
+ retArray,
+ DATABASE_NAME);
+ }
+
+ /**
+ * Verify error handling for invalid inputs, such as wrong argument count, incorrect data types,
+ * or an empty string key.
+ */
+ @Test
+ public void testHmacSha1OnInvalidInputs() {
+ String generalErrorMsg =
+ TSStatusCode.SEMANTIC_ERROR.getStatusCode()
+ + ": Scalar function hmac_sha1 only accepts two arguments, first argument must be TEXT, STRING, or BlOB type, second argument must be STRING OR TEXT type.";
+
+ String emptyKeyErrorMsg =
+ TSStatusCode.SEMANTIC_ERROR.getStatusCode()
+ + ": Failed to execute function 'hmac_sha1' due to an invalid input format. the value 'some_data' corresponding to a empty key, the empty key is not allowed in HMAC operation.";
+
+ String emptyLiteralKeyErrorMsg =
+ TSStatusCode.SEMANTIC_ERROR.getStatusCode()
+ + ": Failed to execute function hmac_sha1 due to an invalid input format, the empty key is not allowed in HMAC operation";
+
+ // Case 1: Wrong argument count
+ tableAssertTestFail("SELECT hmac_sha1() from HmacTestTable ", generalErrorMsg, DATABASE_NAME);
+ tableAssertTestFail(
+ "SELECT hmac_sha1(data_string) from HmacTestTable", generalErrorMsg, DATABASE_NAME);
+ tableAssertTestFail(
+ "SELECT hmac_sha1(data_string, key_string, 'extra') from HmacTestTable",
+ generalErrorMsg,
+ DATABASE_NAME);
+
+ // Case 2: Invalid data types
+ tableAssertTestFail(
+ "SELECT hmac_sha1(int_col, key_string) FROM HmacTestTable WHERE time = 9",
+ generalErrorMsg,
+ DATABASE_NAME);
+ tableAssertTestFail(
+ "SELECT hmac_sha1(data_string, int_col) FROM HmacTestTable WHERE time = 10",
+ generalErrorMsg,
+ DATABASE_NAME);
+
+ // Case 3: CRITICAL - Empty string key is invalid
+ tableAssertTestFail(
+ "SELECT hmac_sha1(data_string, key_string) FROM HmacTestTable WHERE time = 6",
+ emptyKeyErrorMsg,
+ DATABASE_NAME);
+
+ // Also test with an empty literal key
+ tableAssertTestFail(
+ "SELECT hmac_sha1('some_data', '') FROM HmacTestTable",
+ emptyLiteralKeyErrorMsg,
+ DATABASE_NAME);
+ }
+}
diff --git a/integration-test/src/test/java/org/apache/iotdb/relational/it/query/recent/scalar/IoTDBHmacSha256FunctionIT.java b/integration-test/src/test/java/org/apache/iotdb/relational/it/query/recent/scalar/IoTDBHmacSha256FunctionIT.java
new file mode 100644
index 000000000000..33c851ee7607
--- /dev/null
+++ b/integration-test/src/test/java/org/apache/iotdb/relational/it/query/recent/scalar/IoTDBHmacSha256FunctionIT.java
@@ -0,0 +1,217 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.iotdb.relational.it.query.recent.scalar;
+
+import org.apache.iotdb.it.env.EnvFactory;
+import org.apache.iotdb.it.framework.IoTDBTestRunner;
+import org.apache.iotdb.itbase.category.TableClusterIT;
+import org.apache.iotdb.itbase.category.TableLocalStandaloneIT;
+import org.apache.iotdb.rpc.TSStatusCode;
+
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+import org.junit.runner.RunWith;
+
+import static org.apache.iotdb.db.it.utils.TestUtils.prepareTableData;
+import static org.apache.iotdb.db.it.utils.TestUtils.tableAssertTestFail;
+import static org.apache.iotdb.db.it.utils.TestUtils.tableResultSetEqualTest;
+
+@RunWith(IoTDBTestRunner.class)
+@Category({TableLocalStandaloneIT.class, TableClusterIT.class})
+public class IoTDBHmacSha256FunctionIT {
+
+ private static final String DATABASE_NAME = "test_hmac_sha256_function";
+
+ // Comprehensive data for testing the HMAC_SHA256 function
+ private static final String[] createSqls =
+ new String[] {
+ "CREATE DATABASE " + DATABASE_NAME,
+ "USE " + DATABASE_NAME,
+ "CREATE TABLE HmacTestTable("
+ + "data_string STRING, "
+ + "data_text TEXT, "
+ + "data_blob BLOB, "
+ + "key_string STRING, "
+ + "key_text TEXT, "
+ + "key_blob BLOB, "
+ + "int_col INT32"
+ + ")",
+ // 1. Standard STRING data and STRING key
+ "INSERT INTO HmacTestTable(time, data_string, key_string) VALUES(1, 'Hello IoTDB', 'secret_key')",
+ // 2. Standard TEXT data and TEXT key
+ "INSERT INTO HmacTestTable(time, data_text, key_text) VALUES(2, 'Another test message', 'another_key')",
+ // 3. BLOB data
+ "INSERT INTO HmacTestTable(time, data_blob) VALUES(3, X'48656C6C6F20496F544442')", // 'Hello
+ // IoTDB'
+ // 4. Unicode data and key
+ "INSERT INTO HmacTestTable(time, data_text, key_text) VALUES(4, '你好世界', '这是一个密钥')",
+ // 5. Empty string data (valid case)
+ "INSERT INTO HmacTestTable(time, data_string, key_string) VALUES(5, '', 'some_key')",
+ // 6. Empty string key (invalid case, for failure testing)
+ "INSERT INTO HmacTestTable(time, data_string, key_string) VALUES(6, 'some_data', '')",
+ // 7. Null data (valid case)
+ "INSERT INTO HmacTestTable(time, data_string, key_string) VALUES(7, null, 'some_key')",
+ // 8. Null key (valid case)
+ "INSERT INTO HmacTestTable(time, data_string, key_string) VALUES(8, 'some_data', null)",
+ // 9. Data for invalid type testing
+ "INSERT INTO HmacTestTable(time, int_col, key_string, key_blob) VALUES (9, 123, 'key_for_int', X'deadbeef')",
+ "INSERT INTO HmacTestTable(time, data_string, int_col) VALUES (10, 'data_for_int_key', 456)",
+ };
+
+ @BeforeClass
+ public static void setUp() throws Exception {
+ EnvFactory.getEnv().initClusterEnvironment();
+ prepareTableData(createSqls);
+ }
+
+ @AfterClass
+ public static void tearDown() throws Exception {
+ EnvFactory.getEnv().cleanClusterEnvironment();
+ }
+
+ /** Test hmac_sha256 where inputs are column references. */
+ @Test
+ public void testHmacSha256WithColumnInputs() {
+ // TC-P1: STRING data, STRING key
+ String[] expectedHeader1 = {"time", "hmac_sha256(data_string, key_string)"};
+ String[] retArray1 = {
+ "1970-01-01T00:00:00.001Z,0x28c30efa3fd5c6a086dfb3d69a2c993a4febdb98143ed6fa337c80d74616b552,"
+ };
+ tableResultSetEqualTest(
+ "SELECT time, hmac_sha256(data_string, key_string) as \"hmac_sha256(data_string, key_string)\" FROM HmacTestTable WHERE time = 1",
+ expectedHeader1,
+ retArray1,
+ DATABASE_NAME);
+
+ // TC-P2: TEXT data, TEXT key
+ String[] expectedHeader2 = {"time", "hmac_sha256(data_text, key_text)"};
+ String[] retArray2 = {
+ "1970-01-01T00:00:00.002Z,0xbcd034e2fcd3c2817d22aead831192c2b3510a4ddb8ff426f1b046019668fc0c,"
+ };
+ tableResultSetEqualTest(
+ "SELECT time, hmac_sha256(data_text, key_text) as \"hmac_sha256(data_text, key_text)\" FROM HmacTestTable WHERE time = 2",
+ expectedHeader2,
+ retArray2,
+ DATABASE_NAME);
+
+ // TC-P3: Unicode TEXT data, Unicode TEXT key
+ String[] retArray3 = {
+ "1970-01-01T00:00:00.004Z,0x97bcb4378afbbe9b7aafe895152d9a2e7e29f32a294aea397f691d2990d81702,"
+ };
+ tableResultSetEqualTest(
+ "SELECT time, hmac_sha256(data_text, key_text) as \"hmac_sha256(data_text, key_text)\" FROM HmacTestTable WHERE time = 4",
+ expectedHeader2, // Reusing header as the alias format is the same
+ retArray3,
+ DATABASE_NAME);
+ }
+
+ /** Test hmac_sha256 where inputs are literals (constants). */
+ @Test
+ public void testHmacSha256WithLiteralInputs() {
+ // TC-L1: STRING literal data, STRING literal key
+ String[] expectedHeader1 = {"hmac_sha256('Hello IoTDB', 'secret_key')"};
+ String[] retArray1 = {"0x28c30efa3fd5c6a086dfb3d69a2c993a4febdb98143ed6fa337c80d74616b552,"};
+ tableResultSetEqualTest(
+ "SELECT hmac_sha256('Hello IoTDB', 'secret_key') as \"hmac_sha256('Hello IoTDB', 'secret_key')\" from HmacTestTable where time = 1",
+ expectedHeader1,
+ retArray1,
+ DATABASE_NAME);
+
+ // TC-L2: BLOB data (from column), STRING literal key
+ String[] expectedHeader2 = {"hmac_sha256(data_blob, 'secret_key')"};
+ String[] retArray2 = {"0x28c30efa3fd5c6a086dfb3d69a2c993a4febdb98143ed6fa337c80d74616b552,"};
+ tableResultSetEqualTest(
+ "SELECT hmac_sha256(data_blob, 'secret_key') as \"hmac_sha256(data_blob, 'secret_key')\" FROM HmacTestTable WHERE time = 3",
+ expectedHeader2,
+ retArray2,
+ DATABASE_NAME);
+ }
+
+ /** Test hmac_sha256 on edge cases like empty data strings and NULL inputs. */
+ @Test
+ public void testHmacSha256OnEdgeCases() {
+ String[] expectedHeader = {"time", "hmac_sha256(data_string, key_string)"};
+ String[] retArray = {
+ // time=5, data='', key='some_key' -> VALID
+ "1970-01-01T00:00:00.005Z,0xf0c70ec541a25501f5bc61da93d421490cc1278fb9c97f00d704b16e1501c71f,",
+ // time=7, data=null, key='some_key' -> NULL
+ "1970-01-01T00:00:00.007Z,null,",
+ // time=8, data='some_data', key=null -> NULL
+ "1970-01-01T00:00:00.008Z,null,",
+ };
+ tableResultSetEqualTest(
+ "SELECT time, hmac_sha256(data_string, key_string) as \"hmac_sha256(data_string, key_string)\" FROM HmacTestTable WHERE time IN (5, 7, 8) ORDER BY time",
+ expectedHeader,
+ retArray,
+ DATABASE_NAME);
+ }
+
+ /**
+ * Verify error handling for invalid inputs, such as wrong argument count, incorrect data types,
+ * or an empty string key.
+ */
+ @Test
+ public void testHmacSha256OnInvalidInputs() {
+ String generalErrorMsg =
+ TSStatusCode.SEMANTIC_ERROR.getStatusCode()
+ + ": Scalar function hmac_sha256 only accepts two arguments, first argument must be TEXT, STRING, or BlOB type, second argument must be STRING OR TEXT type.";
+
+ String emptyKeyErrorMsg =
+ TSStatusCode.SEMANTIC_ERROR.getStatusCode()
+ + ": Failed to execute function 'hmac_sha256' due to an invalid input format. the value 'some_data' corresponding to a empty key, the empty key is not allowed in HMAC operation.";
+
+ String emptyLiteralKeyErrorMsg =
+ TSStatusCode.SEMANTIC_ERROR.getStatusCode()
+ + ": Failed to execute function hmac_sha256 due to an invalid input format, the empty key is not allowed in HMAC operation";
+
+ // Case 1: Wrong argument count
+ tableAssertTestFail("SELECT hmac_sha256() from HmacTestTable ", generalErrorMsg, DATABASE_NAME);
+ tableAssertTestFail(
+ "SELECT hmac_sha256(data_string) from HmacTestTable", generalErrorMsg, DATABASE_NAME);
+ tableAssertTestFail(
+ "SELECT hmac_sha256(data_string, key_string, 'extra') from HmacTestTable",
+ generalErrorMsg,
+ DATABASE_NAME);
+
+ // Case 2: Invalid data types
+ tableAssertTestFail(
+ "SELECT hmac_sha256(int_col, key_string) FROM HmacTestTable WHERE time = 9",
+ generalErrorMsg,
+ DATABASE_NAME);
+ tableAssertTestFail(
+ "SELECT hmac_sha256(data_string, int_col) FROM HmacTestTable WHERE time = 10",
+ generalErrorMsg,
+ DATABASE_NAME);
+
+ // Case 3: CRITICAL - Empty string key is invalid
+ tableAssertTestFail(
+ "SELECT hmac_sha256(data_string, key_string) FROM HmacTestTable WHERE time = 6",
+ emptyKeyErrorMsg,
+ DATABASE_NAME);
+
+ // Also test with an empty literal key
+ tableAssertTestFail(
+ "SELECT hmac_sha256('some_data', '') FROM HmacTestTable",
+ emptyLiteralKeyErrorMsg,
+ DATABASE_NAME);
+ }
+}
diff --git a/integration-test/src/test/java/org/apache/iotdb/relational/it/query/recent/scalar/IoTDBHmacSha512FunctionIT.java b/integration-test/src/test/java/org/apache/iotdb/relational/it/query/recent/scalar/IoTDBHmacSha512FunctionIT.java
new file mode 100644
index 000000000000..71d9e046e9b4
--- /dev/null
+++ b/integration-test/src/test/java/org/apache/iotdb/relational/it/query/recent/scalar/IoTDBHmacSha512FunctionIT.java
@@ -0,0 +1,221 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.iotdb.relational.it.query.recent.scalar;
+
+import org.apache.iotdb.it.env.EnvFactory;
+import org.apache.iotdb.it.framework.IoTDBTestRunner;
+import org.apache.iotdb.itbase.category.TableClusterIT;
+import org.apache.iotdb.itbase.category.TableLocalStandaloneIT;
+import org.apache.iotdb.rpc.TSStatusCode;
+
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+import org.junit.runner.RunWith;
+
+import static org.apache.iotdb.db.it.utils.TestUtils.prepareTableData;
+import static org.apache.iotdb.db.it.utils.TestUtils.tableAssertTestFail;
+import static org.apache.iotdb.db.it.utils.TestUtils.tableResultSetEqualTest;
+
+@RunWith(IoTDBTestRunner.class)
+@Category({TableLocalStandaloneIT.class, TableClusterIT.class})
+public class IoTDBHmacSha512FunctionIT {
+
+ private static final String DATABASE_NAME = "test_hmac_sha512_function";
+
+ // Comprehensive data for testing the HMAC_SHA512 function
+ private static final String[] createSqls =
+ new String[] {
+ "CREATE DATABASE " + DATABASE_NAME,
+ "USE " + DATABASE_NAME,
+ "CREATE TABLE HmacTestTable("
+ + "data_string STRING, "
+ + "data_text TEXT, "
+ + "data_blob BLOB, "
+ + "key_string STRING, "
+ + "key_text TEXT, "
+ + "key_blob BLOB, "
+ + "int_col INT32"
+ + ")",
+ // 1. Standard STRING data and STRING key
+ "INSERT INTO HmacTestTable(time, data_string, key_string) VALUES(1, 'Hello IoTDB', 'secret_key')",
+ // 2. Standard TEXT data and TEXT key
+ "INSERT INTO HmacTestTable(time, data_text, key_text) VALUES(2, 'Another test message', 'another_key')",
+ // 3. BLOB data
+ "INSERT INTO HmacTestTable(time, data_blob) VALUES(3, X'48656C6C6F20496F544442')", // 'Hello
+ // IoTDB'
+ // 4. Unicode data and key
+ "INSERT INTO HmacTestTable(time, data_text, key_text) VALUES(4, '你好世界', '这是一个密钥')",
+ // 5. Empty string data (valid case)
+ "INSERT INTO HmacTestTable(time, data_string, key_string) VALUES(5, '', 'some_key')",
+ // 6. Empty string key (invalid case, for failure testing)
+ "INSERT INTO HmacTestTable(time, data_string, key_string) VALUES(6, 'some_data', '')",
+ // 7. Null data (valid case)
+ "INSERT INTO HmacTestTable(time, data_string, key_string) VALUES(7, null, 'some_key')",
+ // 8. Null key (valid case)
+ "INSERT INTO HmacTestTable(time, data_string, key_string) VALUES(8, 'some_data', null)",
+ // 9. Data for invalid type testing
+ "INSERT INTO HmacTestTable(time, int_col, key_string, key_blob) VALUES (9, 123, 'key_for_int', X'deadbeef')",
+ "INSERT INTO HmacTestTable(time, data_string, int_col) VALUES (10, 'data_for_int_key', 456)",
+ };
+
+ @BeforeClass
+ public static void setUp() throws Exception {
+ EnvFactory.getEnv().initClusterEnvironment();
+ prepareTableData(createSqls);
+ }
+
+ @AfterClass
+ public static void tearDown() throws Exception {
+ EnvFactory.getEnv().cleanClusterEnvironment();
+ }
+
+ /** Test hmac_sha512 where inputs are column references. */
+ @Test
+ public void testHmacSha512WithColumnInputs() {
+ // TC-P1: STRING data, STRING key
+ String[] expectedHeader1 = {"time", "hmac_sha512(data_string, key_string)"};
+ String[] retArray1 = {
+ "1970-01-01T00:00:00.001Z,0xc8cf98e50eb168d0a5de5daeb3020b1bc63b1602884f1291b4a6ee42bee4a52c83bb423e8d024e501c5c285af5925056b9f3d1a07546b54755b31e03295ffd04,"
+ };
+ tableResultSetEqualTest(
+ "SELECT time, hmac_sha512(data_string, key_string) as \"hmac_sha512(data_string, key_string)\" FROM HmacTestTable WHERE time = 1",
+ expectedHeader1,
+ retArray1,
+ DATABASE_NAME);
+
+ // TC-P2: TEXT data, TEXT key
+ String[] expectedHeader2 = {"time", "hmac_sha512(data_text, key_text)"};
+ String[] retArray2 = {
+ "1970-01-01T00:00:00.002Z,0x10f2cc84ee40f95a91711b6ed2f1c7c3c7461c26ea29c80b2727ada19f3a77593c1e8b6381416a37ef912be44ac608693ec4005b3b1f89bef455c1c597ed886c,"
+ };
+ tableResultSetEqualTest(
+ "SELECT time, hmac_sha512(data_text, key_text) as \"hmac_sha512(data_text, key_text)\" FROM HmacTestTable WHERE time = 2",
+ expectedHeader2,
+ retArray2,
+ DATABASE_NAME);
+
+ // TC-P3: Unicode TEXT data, Unicode TEXT key
+ String[] retArray3 = {
+ "1970-01-01T00:00:00.004Z,0x99e703ffa1c4cc600b72bd1bbe0a53e2d8012d06e22cbb5cb2de4bba65354e1f8a4be34c49509677098953a9b6cb5bc4b56eff5e9093f88e5247315f32e40d77,"
+ };
+ tableResultSetEqualTest(
+ "SELECT time, hmac_sha512(data_text, key_text) as \"hmac_sha512(data_text, key_text)\" FROM HmacTestTable WHERE time = 4",
+ expectedHeader2, // Reusing header as the alias format is the same
+ retArray3,
+ DATABASE_NAME);
+ }
+
+ /** Test hmac_sha512 where inputs are literals (constants). */
+ @Test
+ public void testHmacSha512WithLiteralInputs() {
+ // TC-L1: STRING literal data, STRING literal key
+ String[] expectedHeader1 = {"hmac_sha512('Hello IoTDB', 'secret_key')"};
+ String[] retArray1 = {
+ "0xc8cf98e50eb168d0a5de5daeb3020b1bc63b1602884f1291b4a6ee42bee4a52c83bb423e8d024e501c5c285af5925056b9f3d1a07546b54755b31e03295ffd04,"
+ };
+ tableResultSetEqualTest(
+ "SELECT hmac_sha512('Hello IoTDB', 'secret_key') as \"hmac_sha512('Hello IoTDB', 'secret_key')\" from HmacTestTable where time = 1",
+ expectedHeader1,
+ retArray1,
+ DATABASE_NAME);
+
+ // TC-L2: BLOB data (from column), STRING literal key
+ String[] expectedHeader2 = {"hmac_sha512(data_blob, 'secret_key')"};
+ String[] retArray2 = {
+ "0xc8cf98e50eb168d0a5de5daeb3020b1bc63b1602884f1291b4a6ee42bee4a52c83bb423e8d024e501c5c285af5925056b9f3d1a07546b54755b31e03295ffd04,"
+ };
+ tableResultSetEqualTest(
+ "SELECT hmac_sha512(data_blob, 'secret_key') as \"hmac_sha512(data_blob, 'secret_key')\" FROM HmacTestTable WHERE time = 3",
+ expectedHeader2,
+ retArray2,
+ DATABASE_NAME);
+ }
+
+ /** Test hmac_sha512 on edge cases like empty data strings and NULL inputs. */
+ @Test
+ public void testHmacSha512OnEdgeCases() {
+ String[] expectedHeader = {"time", "hmac_sha512(data_string, key_string)"};
+ String[] retArray = {
+ // time=5, data='', key='some_key' -> VALID
+ "1970-01-01T00:00:00.005Z,0xbde90d601f55fc6ddb6faa70f6f20af1a639fa017f4e97db303bd7e70cc6d82797d98b3e38306667e3d00b4909eb3deb135185db6ac2fd5c005914729e172bf4,",
+ // time=7, data=null, key='some_key' -> NULL
+ "1970-01-01T00:00:00.007Z,null,",
+ // time=8, data='some_data', key=null -> NULL
+ "1970-01-01T00:00:00.008Z,null,",
+ };
+ tableResultSetEqualTest(
+ "SELECT time, hmac_sha512(data_string, key_string) as \"hmac_sha512(data_string, key_string)\" FROM HmacTestTable WHERE time IN (5, 7, 8) ORDER BY time",
+ expectedHeader,
+ retArray,
+ DATABASE_NAME);
+ }
+
+ /**
+ * Verify error handling for invalid inputs, such as wrong argument count, incorrect data types,
+ * or an empty string key.
+ */
+ @Test
+ public void testHmacSha512OnInvalidInputs() {
+ String generalErrorMsg =
+ TSStatusCode.SEMANTIC_ERROR.getStatusCode()
+ + ": Scalar function hmac_sha512 only accepts two arguments, first argument must be TEXT, STRING, or BlOB type, second argument must be STRING OR TEXT type.";
+
+ String emptyKeyErrorMsg =
+ TSStatusCode.SEMANTIC_ERROR.getStatusCode()
+ + ": Failed to execute function 'hmac_sha512' due to an invalid input format. the value 'some_data' corresponding to a empty key, the empty key is not allowed in HMAC operation.";
+
+ String emptyLiteralKeyErrorMsg =
+ TSStatusCode.SEMANTIC_ERROR.getStatusCode()
+ + ": Failed to execute function hmac_sha512 due to an invalid input format, the empty key is not allowed in HMAC operation";
+
+ // Case 1: Wrong argument count
+ tableAssertTestFail("SELECT hmac_sha512() from HmacTestTable ", generalErrorMsg, DATABASE_NAME);
+ tableAssertTestFail(
+ "SELECT hmac_sha512(data_string) from HmacTestTable", generalErrorMsg, DATABASE_NAME);
+ tableAssertTestFail(
+ "SELECT hmac_sha512(data_string, key_string, 'extra') from HmacTestTable",
+ generalErrorMsg,
+ DATABASE_NAME);
+
+ // Case 2: Invalid data types
+ tableAssertTestFail(
+ "SELECT hmac_sha512(int_col, key_string) FROM HmacTestTable WHERE time = 9",
+ generalErrorMsg,
+ DATABASE_NAME);
+ tableAssertTestFail(
+ "SELECT hmac_sha512(data_string, int_col) FROM HmacTestTable WHERE time = 10",
+ generalErrorMsg,
+ DATABASE_NAME);
+
+ // Case 3: CRITICAL - Empty string key is invalid
+ tableAssertTestFail(
+ "SELECT hmac_sha512(data_string, key_string) FROM HmacTestTable WHERE time = 6",
+ emptyKeyErrorMsg,
+ DATABASE_NAME);
+
+ // Also test with an empty literal key
+ tableAssertTestFail(
+ "SELECT hmac_sha512('some_data', '') FROM HmacTestTable",
+ emptyLiteralKeyErrorMsg,
+ DATABASE_NAME);
+ }
+}
diff --git a/integration-test/src/test/java/org/apache/iotdb/relational/it/query/recent/scalar/IoTDBLpadFunctionIT.java b/integration-test/src/test/java/org/apache/iotdb/relational/it/query/recent/scalar/IoTDBLpadFunctionIT.java
new file mode 100644
index 000000000000..5cfce4ba8921
--- /dev/null
+++ b/integration-test/src/test/java/org/apache/iotdb/relational/it/query/recent/scalar/IoTDBLpadFunctionIT.java
@@ -0,0 +1,230 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.iotdb.relational.it.query.recent.scalar;
+
+import org.apache.iotdb.it.env.EnvFactory;
+import org.apache.iotdb.it.framework.IoTDBTestRunner;
+import org.apache.iotdb.itbase.category.TableClusterIT;
+import org.apache.iotdb.itbase.category.TableLocalStandaloneIT;
+import org.apache.iotdb.rpc.TSStatusCode;
+
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+import org.junit.runner.RunWith;
+
+import static org.apache.iotdb.db.it.utils.TestUtils.prepareTableData;
+import static org.apache.iotdb.db.it.utils.TestUtils.tableAssertTestFail;
+import static org.apache.iotdb.db.it.utils.TestUtils.tableResultSetEqualTest;
+
+@RunWith(IoTDBTestRunner.class)
+@Category({TableLocalStandaloneIT.class, TableClusterIT.class})
+public class IoTDBLpadFunctionIT {
+
+ private static final String DATABASE_NAME = "test_lpad_function";
+ private static final String[] createSqls =
+ new String[] {
+ "CREATE DATABASE " + DATABASE_NAME,
+ "USE " + DATABASE_NAME,
+ "CREATE TABLE t1("
+ + "id INT32 FIELD, "
+ + "c_blob_data BLOB FIELD, "
+ + "c_blob_pad BLOB FIELD, "
+ + "c_text_data TEXT FIELD, "
+ + "c_int_data INT32 FIELD, "
+ + "c_int_size INT32 FIELD)",
+
+ // 1. Base data for padding, also has non-blob types for error testing
+ "INSERT INTO t1(time, id, c_blob_data, c_blob_pad, c_text_data, c_int_data) VALUES (1, 1, x'AABB', x'00', 'text', 123)",
+ // 2. Data for multi-byte and truncated padding
+ "INSERT INTO t1(time, id, c_blob_data, c_blob_pad) VALUES (2, 2, x'FF', x'123456')",
+ // 3. Data for truncation
+ "INSERT INTO t1(time, id, c_blob_data, c_blob_pad) VALUES (3, 3, x'0102030405060708', x'FF')",
+ // 4. Data for equal length test
+ "INSERT INTO t1(time, id, c_blob_data, c_blob_pad) VALUES (4, 4, x'ABCDEF', x'00')",
+ // 5. Data for empty source blob test
+ "INSERT INTO t1(time, id, c_blob_data, c_blob_pad) VALUES (5, 5, x'', x'AB')",
+ // 6. Row with NULL blob_data
+ "INSERT INTO t1(time, id, c_blob_pad, c_int_size) VALUES (6, 6, x'00', 5)",
+ // 7. Row with NULL blob_pad
+ "INSERT INTO t1(time, id, c_blob_data, c_int_size) VALUES (7, 7, x'AA', 5)",
+ // 8. Row with NULL c_int_size
+ "INSERT INTO t1(time, id, c_blob_data, c_blob_pad) VALUES (8, 8, x'AA', x'00')",
+ // 9. Row with invalid (empty) pad data
+ "INSERT INTO t1(time, id, c_blob_data, c_blob_pad) VALUES (9, 9, x'AA', x'')"
+ };
+
+ @BeforeClass
+ public static void setUp() throws Exception {
+ EnvFactory.getEnv().initClusterEnvironment();
+ prepareTableData(createSqls);
+ }
+
+ @AfterClass
+ public static void tearDown() throws Exception {
+ EnvFactory.getEnv().cleanClusterEnvironment();
+ }
+
+ /** Test cases where the source blob's length is less than the target size, requiring padding. */
+ @Test
+ public void testPaddingCases() {
+ String[] expectedHeader = new String[] {"time", "lpad"};
+
+ // Test simple padding: LPAD(x'AABB', 5, x'00') -> x'000000aabb'
+ tableResultSetEqualTest(
+ "SELECT time, LPAD(c_blob_data, 5, c_blob_pad) as \"lpad\" FROM t1 WHERE id = 1",
+ expectedHeader,
+ new String[] {"1970-01-01T00:00:00.001Z,0x000000aabb,"},
+ DATABASE_NAME);
+
+ // Test full repetition of pad data: LPAD(x'FF', 7, x'123456') -> x'123456123456ff'
+ tableResultSetEqualTest(
+ "SELECT time, LPAD(c_blob_data, 7, c_blob_pad) as \"lpad\" FROM t1 WHERE id = 2",
+ expectedHeader,
+ new String[] {"1970-01-01T00:00:00.002Z,0x123456123456ff,"},
+ DATABASE_NAME);
+
+ // Test truncated repetition of pad data: LPAD(x'FF', 6, x'123456') -> x'1234561234ff'
+ tableResultSetEqualTest(
+ "SELECT time, LPAD(c_blob_data, 6, c_blob_pad) as \"lpad\" FROM t1 WHERE id = 2",
+ expectedHeader,
+ new String[] {"1970-01-01T00:00:00.002Z,0x1234561234ff,"},
+ DATABASE_NAME);
+
+ // Test padding an empty blob: LPAD(x'', 4, x'AB') -> x'abababab'
+ tableResultSetEqualTest(
+ "SELECT time, LPAD(c_blob_data, 4, c_blob_pad) as \"lpad\" FROM t1 WHERE id = 5",
+ expectedHeader,
+ new String[] {"1970-01-01T00:00:00.005Z,0xabababab,"},
+ DATABASE_NAME);
+ }
+
+ /**
+ * Test cases where the source blob's length is greater than the target size, requiring
+ * truncation.
+ */
+ @Test
+ public void testTruncationCases() {
+ String[] expectedHeader = new String[] {"time", "lpad"};
+
+ // Test standard truncation: LPAD(x'0102030405060708', 4, x'FF') -> x'01020304'
+ tableResultSetEqualTest(
+ "SELECT time, LPAD(c_blob_data, 4, c_blob_pad) as \"lpad\" FROM t1 WHERE id = 3",
+ expectedHeader,
+ new String[] {"1970-01-01T00:00:00.003Z,0x01020304,"},
+ DATABASE_NAME);
+
+ // Test truncation to zero length: LPAD(x'AABB', 0, x'00') -> x''
+ tableResultSetEqualTest(
+ "SELECT time, LPAD(c_blob_data, 0, c_blob_pad) as \"lpad\" FROM t1 WHERE id = 1",
+ expectedHeader,
+ new String[] {"1970-01-01T00:00:00.001Z,0x,"},
+ DATABASE_NAME);
+ }
+
+ /** Test the case where the source blob's length is equal to the target size. */
+ @Test
+ public void testEqualLengthCase() {
+ String[] expectedHeader = new String[] {"time", "lpad"};
+ String[] retArray =
+ new String[] {
+ // LPAD(x'ABCDEF', 3, x'00') -> x'abcdef' (no change)
+ "1970-01-01T00:00:00.004Z,0xabcdef,"
+ };
+ tableResultSetEqualTest(
+ "SELECT time, LPAD(c_blob_data, 3, c_blob_pad) as \"lpad\" FROM t1 where id = 4",
+ expectedHeader,
+ retArray,
+ DATABASE_NAME);
+ }
+
+ /** Test that if any argument is NULL from the table, the result is NULL. */
+ @Test
+ public void testNullInputCases() {
+ String[] expectedHeader = new String[] {"time", "lpad_result"};
+
+ // Case 1: 'data' argument is NULL. Read from row where id=6.
+ tableResultSetEqualTest(
+ "SELECT time, LPAD(c_blob_data, c_int_size, c_blob_pad) as \"lpad_result\" FROM t1 WHERE id = 6",
+ expectedHeader,
+ new String[] {"1970-01-01T00:00:00.006Z,null,"},
+ DATABASE_NAME);
+
+ // Case 2: 'paddata' argument is NULL. Read from row where id=7.
+ tableResultSetEqualTest(
+ "SELECT time, LPAD(c_blob_data, c_int_size, c_blob_pad) as \"lpad_result\" FROM t1 WHERE id = 7",
+ expectedHeader,
+ new String[] {"1970-01-01T00:00:00.007Z,null,"},
+ DATABASE_NAME);
+
+ // Case 3: 'size' argument is NULL. Read from row where id=8.
+ tableResultSetEqualTest(
+ "SELECT time, LPAD(c_blob_data, c_int_size, c_blob_pad) as \"lpad_result\" FROM t1 WHERE id = 8",
+ expectedHeader,
+ new String[] {"1970-01-01T00:00:00.008Z,null,"},
+ DATABASE_NAME);
+ }
+
+ /** Test invalid parameter values that should cause the function to fail. */
+ @Test
+ public void testInvalidParameters() {
+ // Test with a negative size
+ tableAssertTestFail(
+ "SELECT LPAD(c_blob_data, -1, c_blob_pad) FROM t1 WHERE id = 1",
+ TSStatusCode.SEMANTIC_ERROR.getStatusCode()
+ + ": Failed to execute function 'Lpad' due to the value 0xaabb corresponding to a invalid target size, the allowed range is [0, 2147483647].",
+ DATABASE_NAME);
+
+ // Test with an empty paddata blob
+ tableAssertTestFail(
+ "SELECT LPAD(c_blob_data, 5, c_blob_pad) FROM t1 WHERE id = 9",
+ TSStatusCode.SEMANTIC_ERROR.getStatusCode()
+ + ": Failed to execute function 'Lpad' due the value 0xaa corresponding to a empty padding string.",
+ DATABASE_NAME);
+ }
+
+ /** Test argument type validation for the LPAD function. */
+ @Test
+ public void testInvalidTypes() {
+ String wrongTypeErrorMessage =
+ TSStatusCode.SEMANTIC_ERROR.getStatusCode()
+ + ": Scalar function lpad only accepts three arguments, first argument must be BlOB type, "
+ + "second argument must be int32 or int64 type, third argument must be BLOB type.";
+
+ // First argument is not a BLOB
+ tableAssertTestFail(
+ "SELECT LPAD(c_text_data, 5, c_blob_pad) FROM t1 WHERE id = 1",
+ wrongTypeErrorMessage,
+ DATABASE_NAME);
+
+ // Second argument is not an INTEGER
+ tableAssertTestFail(
+ "SELECT LPAD(c_blob_data, '5', c_blob_pad) FROM t1 WHERE id = 1",
+ wrongTypeErrorMessage,
+ DATABASE_NAME);
+
+ // Third argument is not a BLOB
+ tableAssertTestFail(
+ "SELECT LPAD(c_blob_data, 5, c_int_data) FROM t1 WHERE id = 1",
+ wrongTypeErrorMessage,
+ DATABASE_NAME);
+ }
+}
diff --git a/integration-test/src/test/java/org/apache/iotdb/relational/it/query/recent/scalar/IoTDBMd5ColumnFunctionIT.java b/integration-test/src/test/java/org/apache/iotdb/relational/it/query/recent/scalar/IoTDBMd5ColumnFunctionIT.java
new file mode 100644
index 000000000000..83c0889825eb
--- /dev/null
+++ b/integration-test/src/test/java/org/apache/iotdb/relational/it/query/recent/scalar/IoTDBMd5ColumnFunctionIT.java
@@ -0,0 +1,132 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.iotdb.relational.it.query.recent.scalar;
+
+import org.apache.iotdb.it.env.EnvFactory;
+import org.apache.iotdb.it.framework.IoTDBTestRunner;
+import org.apache.iotdb.itbase.category.TableClusterIT;
+import org.apache.iotdb.itbase.category.TableLocalStandaloneIT;
+import org.apache.iotdb.rpc.TSStatusCode;
+
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+import org.junit.runner.RunWith;
+
+import static org.apache.iotdb.db.it.utils.TestUtils.prepareTableData;
+import static org.apache.iotdb.db.it.utils.TestUtils.tableAssertTestFail;
+import static org.apache.iotdb.db.it.utils.TestUtils.tableResultSetEqualTest;
+
+@RunWith(IoTDBTestRunner.class)
+@Category({TableLocalStandaloneIT.class, TableClusterIT.class})
+public class IoTDBMd5ColumnFunctionIT {
+
+ private static final String DATABASE_NAME = "test_md5_function";
+ private static final String[] createSqls =
+ new String[] {
+ "CREATE DATABASE " + DATABASE_NAME,
+ "USE " + DATABASE_NAME,
+ "CREATE TABLE table1(c_text TEXT FIELD, c_string STRING FIELD, c_blob BLOB FIELD, c_int INT32 FIELD)",
+
+ // Test with a simple ASCII string
+ "INSERT INTO table1(time, c_text, c_string) VALUES (1, 'hello', 'hello')",
+ // Test with Chinese characters (UTF-8)
+ "INSERT INTO table1(time, c_text, c_string) VALUES (2, '你好', '你好')",
+ // Test with an empty string
+ "INSERT INTO table1(time, c_text, c_string) VALUES (3, '', '')",
+ // Test with null values, which should result in a null hash
+ "INSERT INTO table1(time, c_int) VALUES (4, 404)",
+ // Test with a string containing special characters and spaces
+ "INSERT INTO table1(time, c_text, c_string) VALUES (5, 'Hello, World!', 'Hello, World!')",
+ // Test with blob data (hex representation of 'test')
+ "INSERT INTO table1(time, c_blob) VALUES (6, x'74657374')"
+ };
+
+ @BeforeClass
+ public static void setUp() throws Exception {
+ EnvFactory.getEnv().initClusterEnvironment();
+ prepareTableData(createSqls);
+ }
+
+ @AfterClass
+ public static void tearDown() throws Exception {
+ EnvFactory.getEnv().cleanClusterEnvironment();
+ }
+
+ /** Test the md5() function on TEXT and STRING data types with various inputs. */
+ @Test
+ public void testMd5OnTextAndString() {
+ String[] expectedHeader = new String[] {"time", "md5(c_text)", "md5(c_string)"};
+ String[] retArray =
+ new String[] {
+ // md5('hello')
+ "1970-01-01T00:00:00.001Z,0x5d41402abc4b2a76b9719d911017c592,0x5d41402abc4b2a76b9719d911017c592,",
+ // md5('你好')
+ "1970-01-01T00:00:00.002Z,0x7eca689f0d3389d9dea66ae112e5cfd7,0x7eca689f0d3389d9dea66ae112e5cfd7,",
+ // md5('')
+ "1970-01-01T00:00:00.003Z,0xd41d8cd98f00b204e9800998ecf8427e,0xd41d8cd98f00b204e9800998ecf8427e,",
+ // md5(null)
+ "1970-01-01T00:00:00.004Z,null,null,",
+ // md5('Hello, World!')
+ "1970-01-01T00:00:00.005Z,0x65a8e27d8879283831b664bd8b7f0ad4,0x65a8e27d8879283831b664bd8b7f0ad4,"
+ };
+ tableResultSetEqualTest(
+ "SELECT time, md5(c_text) as \"md5(c_text)\", md5(c_string) as \"md5(c_string)\" FROM table1 where time < 6",
+ expectedHeader,
+ retArray,
+ DATABASE_NAME);
+ }
+
+ /** Test the md5() function on the BLOB data type. */
+ @Test
+ public void testMd5OnBlob() {
+ String[] expectedHeader = new String[] {"time", "md5(c_blob)"};
+ String[] retArray =
+ new String[] {
+ // md5(x'74657374')
+ "1970-01-01T00:00:00.006Z,0x098f6bcd4621d373cade4e832627b4f6,"
+ };
+ tableResultSetEqualTest(
+ "SELECT time, md5(c_blob) as \"md5(c_blob)\" FROM table1 where time = 6",
+ expectedHeader,
+ retArray,
+ DATABASE_NAME);
+ }
+
+ /** Test that the md5() function fails when provided with invalid arguments. */
+ @Test
+ public void testMd5FunctionOnInvalidInputs() {
+ // Construct the expected error message for semantic errors
+ String expectedErrorMessage =
+ TSStatusCode.SEMANTIC_ERROR.getStatusCode()
+ + ": Scalar function md5 only accepts one argument and it must be TEXT, STRING, or BLOB data type.";
+
+ // Test with an invalid data type (INT32)
+ tableAssertTestFail("SELECT md5(c_int) FROM table1", expectedErrorMessage, DATABASE_NAME);
+
+ // Test with too many arguments
+ tableAssertTestFail(
+ "SELECT md5(c_text, 'another_arg') FROM table1", expectedErrorMessage, DATABASE_NAME);
+
+ // Test with no arguments
+ tableAssertTestFail("SELECT md5() FROM table1", expectedErrorMessage, DATABASE_NAME);
+ }
+}
diff --git a/integration-test/src/test/java/org/apache/iotdb/relational/it/query/recent/scalar/IoTDBMurmur3ColumnFunctionIT.java b/integration-test/src/test/java/org/apache/iotdb/relational/it/query/recent/scalar/IoTDBMurmur3ColumnFunctionIT.java
new file mode 100644
index 000000000000..d72b291c7969
--- /dev/null
+++ b/integration-test/src/test/java/org/apache/iotdb/relational/it/query/recent/scalar/IoTDBMurmur3ColumnFunctionIT.java
@@ -0,0 +1,132 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.iotdb.relational.it.query.recent.scalar;
+
+import org.apache.iotdb.it.env.EnvFactory;
+import org.apache.iotdb.it.framework.IoTDBTestRunner;
+import org.apache.iotdb.itbase.category.TableClusterIT;
+import org.apache.iotdb.itbase.category.TableLocalStandaloneIT;
+import org.apache.iotdb.rpc.TSStatusCode;
+
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+import org.junit.runner.RunWith;
+
+import static org.apache.iotdb.db.it.utils.TestUtils.prepareTableData;
+import static org.apache.iotdb.db.it.utils.TestUtils.tableAssertTestFail;
+import static org.apache.iotdb.db.it.utils.TestUtils.tableResultSetEqualTest;
+
+@RunWith(IoTDBTestRunner.class)
+@Category({TableLocalStandaloneIT.class, TableClusterIT.class})
+public class IoTDBMurmur3ColumnFunctionIT {
+
+ private static final String DATABASE_NAME = "test_murmur3_function";
+ private static final String[] createSqls =
+ new String[] {
+ "CREATE DATABASE " + DATABASE_NAME,
+ "USE " + DATABASE_NAME,
+ "CREATE TABLE table1(c_text TEXT FIELD, c_string STRING FIELD, c_blob BLOB FIELD, c_int INT32 FIELD)",
+
+ // Test with a simple ASCII string
+ "INSERT INTO table1(time, c_text, c_string) VALUES (1, 'hello', 'hello')",
+ // Test with Chinese characters (UTF-8)
+ "INSERT INTO table1(time, c_text, c_string) VALUES (2, '你好', '你好')",
+ // Test with an empty string
+ "INSERT INTO table1(time, c_text, c_string) VALUES (3, '', '')",
+ // Test with null values, which should result in a null hash
+ "INSERT INTO table1(time, c_int) VALUES (4, 404)",
+ // Test with a string containing special characters and spaces
+ "INSERT INTO table1(time, c_text, c_string) VALUES (5, 'Hello, World!', 'Hello, World!')",
+ // Test with blob data (hex representation of 'test')
+ "INSERT INTO table1(time, c_blob) VALUES (6, x'74657374')"
+ };
+
+ @BeforeClass
+ public static void setUp() throws Exception {
+ EnvFactory.getEnv().initClusterEnvironment();
+ prepareTableData(createSqls);
+ }
+
+ @AfterClass
+ public static void tearDown() throws Exception {
+ EnvFactory.getEnv().cleanClusterEnvironment();
+ }
+
+ /** Test the murmur3() function on TEXT and STRING data types with various inputs. */
+ @Test
+ public void testMurmur3OnTextAndString() {
+ String[] expectedHeader = new String[] {"time", "murmur3(c_text)", "murmur3(c_string)"};
+ String[] retArray =
+ new String[] {
+          // murmur3('hello') — 128-bit Murmur3 digest
+ "1970-01-01T00:00:00.001Z,0x029bbd41b3a7d8cb191dae486a901e5b,0x029bbd41b3a7d8cb191dae486a901e5b,",
+          // murmur3('你好') — 128-bit Murmur3 digest of the UTF-8 bytes
+ "1970-01-01T00:00:00.002Z,0x2089985d4eb4023c3022351dc70b520c,0x2089985d4eb4023c3022351dc70b520c,",
+          // murmur3('') — empty input hashes to 16 zero bytes
+ "1970-01-01T00:00:00.003Z,0x00000000000000000000000000000000,0x00000000000000000000000000000000,",
+ // murmur3(null) -> null
+ "1970-01-01T00:00:00.004Z,null,null,",
+          // murmur3('Hello, World!') — 128-bit Murmur3 digest
+ "1970-01-01T00:00:00.005Z,0xc0a1b86f7365bc93bfae71678c2843aa,0xc0a1b86f7365bc93bfae71678c2843aa,"
+ };
+ tableResultSetEqualTest(
+ "SELECT time, murmur3(c_text) as \"murmur3(c_text)\", murmur3(c_string) as \"murmur3(c_string)\" FROM table1 where time < 6",
+ expectedHeader,
+ retArray,
+ DATABASE_NAME);
+ }
+
+ /** Test the murmur3() function on the BLOB data type. */
+ @Test
+ public void testMurmur3OnBlob() {
+ String[] expectedHeader = new String[] {"time", "murmur3(c_blob)"};
+ String[] retArray =
+ new String[] {
+ // murmur3(x'74657374')
+ "1970-01-01T00:00:00.006Z,0x9de1bd74cc287dac824dbdf93182129a,"
+ };
+ tableResultSetEqualTest(
+ "SELECT time, murmur3(c_blob) as \"murmur3(c_blob)\" FROM table1 where time = 6",
+ expectedHeader,
+ retArray,
+ DATABASE_NAME);
+ }
+
+ /** Test that the murmur3() function fails when provided with invalid arguments. */
+ @Test
+ public void testMurmur3FunctionOnInvalidInputs() {
+ // Construct the expected error message for semantic errors
+ String expectedErrorMessage =
+ TSStatusCode.SEMANTIC_ERROR.getStatusCode()
+ + ": Scalar function murmur3 only accepts one argument and it must be TEXT, STRING, or BLOB data type.";
+
+ // Test with an invalid data type (INT32)
+ tableAssertTestFail("SELECT murmur3(c_int) FROM table1", expectedErrorMessage, DATABASE_NAME);
+
+ // Test with too many arguments
+ tableAssertTestFail(
+ "SELECT murmur3(c_text, 'another_arg') FROM table1", expectedErrorMessage, DATABASE_NAME);
+
+ // Test with no arguments
+ tableAssertTestFail("SELECT murmur3() FROM table1", expectedErrorMessage, DATABASE_NAME);
+ }
+}
diff --git a/integration-test/src/test/java/org/apache/iotdb/relational/it/query/recent/scalar/IoTDBReverseFunctionIT.java b/integration-test/src/test/java/org/apache/iotdb/relational/it/query/recent/scalar/IoTDBReverseFunctionIT.java
new file mode 100644
index 000000000000..35f590c7c162
--- /dev/null
+++ b/integration-test/src/test/java/org/apache/iotdb/relational/it/query/recent/scalar/IoTDBReverseFunctionIT.java
@@ -0,0 +1,136 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.iotdb.relational.it.query.recent.scalar;
+
+import org.apache.iotdb.it.env.EnvFactory;
+import org.apache.iotdb.it.framework.IoTDBTestRunner;
+import org.apache.iotdb.itbase.category.TableClusterIT;
+import org.apache.iotdb.itbase.category.TableLocalStandaloneIT;
+import org.apache.iotdb.rpc.TSStatusCode;
+
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+import org.junit.runner.RunWith;
+
+import static org.apache.iotdb.db.it.utils.TestUtils.prepareTableData;
+import static org.apache.iotdb.db.it.utils.TestUtils.tableAssertTestFail;
+import static org.apache.iotdb.db.it.utils.TestUtils.tableResultSetEqualTest;
+
+@RunWith(IoTDBTestRunner.class)
+@Category({TableLocalStandaloneIT.class, TableClusterIT.class})
+public class IoTDBReverseFunctionIT {
+
+ private static final String DATABASE_NAME = "test_reverse_function";
+ private static final String[] createSqls =
+ new String[] {
+ "CREATE DATABASE " + DATABASE_NAME,
+ "USE " + DATABASE_NAME,
+ "CREATE TABLE table1(c_text TEXT FIELD, c_string STRING FIELD, c_blob BLOB FIELD, c_int INT32 FIELD)",
+
+ // 1. Test with a simple ASCII string
+ "INSERT INTO table1(time, c_text, c_string) VALUES (1, 'hello', 'hello')",
+ // 2. Test with multi-byte UTF-8 characters and mixed content
+ "INSERT INTO table1(time, c_text, c_string) VALUES (2, '你好, world', '你好, world')",
+ // 3. Test with an empty string
+ "INSERT INTO table1(time, c_text, c_string) VALUES (3, '', '')",
+ // 4. Test with null values, which should result in a null output
+ "INSERT INTO table1(time, c_int) VALUES (4, 404)",
+ // 5. Test with blob data for byte-wise reversal
+ "INSERT INTO table1(time, c_blob) VALUES (5, x'01020304')",
+ // 6. Test with an empty blob
+ "INSERT INTO table1(time, c_blob) VALUES (6, x'')",
+ // 7. Test with a more complex blob
+ "INSERT INTO table1(time, c_blob) VALUES (7, x'AABBCCDD')"
+ };
+
+ @BeforeClass
+ public static void setUp() throws Exception {
+ EnvFactory.getEnv().initClusterEnvironment();
+ prepareTableData(createSqls);
+ }
+
+ @AfterClass
+ public static void tearDown() throws Exception {
+ EnvFactory.getEnv().cleanClusterEnvironment();
+ }
+
+ /** Test the REVERSE() function on TEXT and STRING data types for character-wise reversal. */
+ @Test
+ public void testReverseOnTextAndString() {
+ String[] expectedHeader = new String[] {"time", "reverse(c_text)", "reverse(c_string)"};
+ String[] retArray =
+ new String[] {
+ // REVERSE('hello') -> 'olleh'
+ "1970-01-01T00:00:00.001Z,olleh,olleh,",
+ // REVERSE('你好, world') -> 'dlrow ,好你'
+ "1970-01-01T00:00:00.002Z,dlrow ,好你,dlrow ,好你,",
+ // REVERSE('') -> ''
+ "1970-01-01T00:00:00.003Z,,,",
+ // REVERSE(null) -> null
+ "1970-01-01T00:00:00.004Z,null,null,"
+ };
+ tableResultSetEqualTest(
+ "SELECT time, REVERSE(c_text) as \"reverse(c_text)\", REVERSE(c_string) as \"reverse(c_string)\" FROM table1 where time < 5",
+ expectedHeader,
+ retArray,
+ DATABASE_NAME);
+ }
+
+ /** Test the REVERSE() function on the BLOB data type for byte-wise reversal. */
+ @Test
+ public void testReverseOnBlob() {
+ String[] expectedHeader = new String[] {"time", "reverse(c_blob)"};
+ String[] retArray =
+ new String[] {
+ // REVERSE(x'01020304') -> 0x04030201
+ "1970-01-01T00:00:00.005Z,0x04030201,",
+ // REVERSE(x'') -> 0x
+ "1970-01-01T00:00:00.006Z,0x,",
+ // REVERSE(x'AABBCCDD') -> 0xddccbbaa
+ "1970-01-01T00:00:00.007Z,0xddccbbaa,"
+ };
+ tableResultSetEqualTest(
+ "SELECT time, REVERSE(c_blob) as \"reverse(c_blob)\" FROM table1 where time >= 5",
+ expectedHeader,
+ retArray,
+ DATABASE_NAME);
+ }
+
+ /** Test that the REVERSE() function fails when provided with invalid arguments. */
+ @Test
+ public void testReverseFunctionOnInvalidInputs() {
+ // Construct the expected error message for semantic errors
+ String expectedErrorMessage =
+ TSStatusCode.SEMANTIC_ERROR.getStatusCode()
+ + ": Scalar function reverse only accepts one argument and it must be TEXT, STRING, or BlOB data type.";
+
+ // Test with an invalid data type (INT32)
+ tableAssertTestFail("SELECT REVERSE(c_int) FROM table1", expectedErrorMessage, DATABASE_NAME);
+
+ // Test with too many arguments
+ tableAssertTestFail(
+ "SELECT REVERSE(c_text, 'another_arg') FROM table1", expectedErrorMessage, DATABASE_NAME);
+
+ // Test with no arguments
+ tableAssertTestFail("SELECT REVERSE() FROM table1", expectedErrorMessage, DATABASE_NAME);
+ }
+}
diff --git a/integration-test/src/test/java/org/apache/iotdb/relational/it/query/recent/scalar/IoTDBRpadFunctionIT.java b/integration-test/src/test/java/org/apache/iotdb/relational/it/query/recent/scalar/IoTDBRpadFunctionIT.java
new file mode 100644
index 000000000000..35a536307781
--- /dev/null
+++ b/integration-test/src/test/java/org/apache/iotdb/relational/it/query/recent/scalar/IoTDBRpadFunctionIT.java
@@ -0,0 +1,230 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.iotdb.relational.it.query.recent.scalar;
+
+import org.apache.iotdb.it.env.EnvFactory;
+import org.apache.iotdb.it.framework.IoTDBTestRunner;
+import org.apache.iotdb.itbase.category.TableClusterIT;
+import org.apache.iotdb.itbase.category.TableLocalStandaloneIT;
+import org.apache.iotdb.rpc.TSStatusCode;
+
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+import org.junit.runner.RunWith;
+
+import static org.apache.iotdb.db.it.utils.TestUtils.prepareTableData;
+import static org.apache.iotdb.db.it.utils.TestUtils.tableAssertTestFail;
+import static org.apache.iotdb.db.it.utils.TestUtils.tableResultSetEqualTest;
+
+@RunWith(IoTDBTestRunner.class)
+@Category({TableLocalStandaloneIT.class, TableClusterIT.class})
+public class IoTDBRpadFunctionIT {
+
+ private static final String DATABASE_NAME = "test_rpad_function";
+ private static final String[] createSqls =
+ new String[] {
+ "CREATE DATABASE " + DATABASE_NAME,
+ "USE " + DATABASE_NAME,
+ "CREATE TABLE t1("
+ + "id INT32 FIELD, "
+ + "c_blob_data BLOB FIELD, "
+ + "c_blob_pad BLOB FIELD, "
+ + "c_text_data TEXT FIELD, "
+ + "c_int_data INT32 FIELD, "
+ + "c_int_size INT32 FIELD)",
+
+ // 1. Base data for padding, also has non-blob types for error testing
+ "INSERT INTO t1(time, id, c_blob_data, c_blob_pad, c_text_data, c_int_data) VALUES (1, 1, x'AABB', x'00', 'text', 123)",
+ // 2. Data for multi-byte and truncated padding
+ "INSERT INTO t1(time, id, c_blob_data, c_blob_pad) VALUES (2, 2, x'FF', x'12345678')",
+ // 3. Data for truncation
+ "INSERT INTO t1(time, id, c_blob_data, c_blob_pad) VALUES (3, 3, x'0102030405060708', x'FF')",
+ // 4. Data for equal length test
+ "INSERT INTO t1(time, id, c_blob_data, c_blob_pad) VALUES (4, 4, x'ABCDEF', x'00')",
+ // 5. Data for empty source blob test
+ "INSERT INTO t1(time, id, c_blob_data, c_blob_pad) VALUES (5, 5, x'', x'AB')",
+ // 6. Row with NULL blob_data
+ "INSERT INTO t1(time, id, c_blob_pad, c_int_size) VALUES (6, 6, x'00', 5)",
+ // 7. Row with NULL blob_pad
+ "INSERT INTO t1(time, id, c_blob_data, c_int_size) VALUES (7, 7, x'AA', 5)",
+ // 8. Row with NULL c_int_size
+ "INSERT INTO t1(time, id, c_blob_data, c_blob_pad) VALUES (8, 8, x'AA', x'00')",
+ // 9. Row with invalid (empty) pad data
+ "INSERT INTO t1(time, id, c_blob_data, c_blob_pad) VALUES (9, 9, x'AA', x'')"
+ };
+
+ @BeforeClass
+ public static void setUp() throws Exception {
+ EnvFactory.getEnv().initClusterEnvironment();
+ prepareTableData(createSqls);
+ }
+
+ @AfterClass
+ public static void tearDown() throws Exception {
+ EnvFactory.getEnv().cleanClusterEnvironment();
+ }
+
+ /** Test cases where the source blob's length is less than the target size, requiring padding. */
+ @Test
+ public void testPaddingCases() {
+ String[] expectedHeader = new String[] {"time", "rpad"};
+
+ // Test simple padding: RPAD(x'AABB', 5, x'00') -> x'aabb000000'
+ tableResultSetEqualTest(
+ "SELECT time, RPAD(c_blob_data, 5, c_blob_pad) as \"rpad\" FROM t1 WHERE id = 1",
+ expectedHeader,
+ new String[] {"1970-01-01T00:00:00.001Z,0xaabb000000,"},
+ DATABASE_NAME);
+
+ // Test full repetition of pad data: RPAD(x'FF', 7, x'12345678') -> x'ff123456781234'
+ tableResultSetEqualTest(
+ "SELECT time, RPAD(c_blob_data, 7, c_blob_pad) as \"rpad\" FROM t1 WHERE id = 2",
+ expectedHeader,
+ new String[] {"1970-01-01T00:00:00.002Z,0xff123456781234,"},
+ DATABASE_NAME);
+
+ // Test truncated repetition of pad data: RPAD(x'FF', 6, x'12345678') -> x'ff1234567812'
+ tableResultSetEqualTest(
+ "SELECT time, RPAD(c_blob_data, 6, c_blob_pad) as \"rpad\" FROM t1 WHERE id = 2",
+ expectedHeader,
+ new String[] {"1970-01-01T00:00:00.002Z,0xff1234567812,"},
+ DATABASE_NAME);
+
+ // Test padding an empty blob: RPAD(x'', 4, x'AB') -> x'abababab'
+ tableResultSetEqualTest(
+ "SELECT time, RPAD(c_blob_data, 4, c_blob_pad) as \"rpad\" FROM t1 WHERE id = 5",
+ expectedHeader,
+ new String[] {"1970-01-01T00:00:00.005Z,0xabababab,"},
+ DATABASE_NAME);
+ }
+
+ /**
+ * Test cases where the source blob's length is greater than the target size, requiring
+ * truncation.
+ */
+ @Test
+ public void testTruncationCases() {
+ String[] expectedHeader = new String[] {"time", "rpad"};
+
+ // Test standard truncation: RPAD(x'0102030405060708', 4, x'FF') -> x'01020304'
+ tableResultSetEqualTest(
+ "SELECT time, RPAD(c_blob_data, 4, c_blob_pad) as \"rpad\" FROM t1 WHERE id = 3",
+ expectedHeader,
+ new String[] {"1970-01-01T00:00:00.003Z,0x01020304,"},
+ DATABASE_NAME);
+
+ // Test truncation to zero length: RPAD(x'AABB', 0, x'00') -> x''
+ tableResultSetEqualTest(
+ "SELECT time, RPAD(c_blob_data, 0, c_blob_pad) as \"rpad\" FROM t1 WHERE id = 1",
+ expectedHeader,
+ new String[] {"1970-01-01T00:00:00.001Z,0x,"},
+ DATABASE_NAME);
+ }
+
+ /** Test the case where the source blob's length is equal to the target size. */
+ @Test
+ public void testEqualLengthCase() {
+ String[] expectedHeader = new String[] {"time", "rpad"};
+ String[] retArray =
+ new String[] {
+ // RPAD(x'ABCDEF', 3, x'00') -> x'abcdef' (no change)
+ "1970-01-01T00:00:00.004Z,0xabcdef,"
+ };
+ tableResultSetEqualTest(
+ "SELECT time, RPAD(c_blob_data, 3, c_blob_pad) as \"rpad\" FROM t1 where id = 4",
+ expectedHeader,
+ retArray,
+ DATABASE_NAME);
+ }
+
+ /** Test that if any argument is NULL from the table, the result is NULL. */
+ @Test
+ public void testNullInputCases() {
+ String[] expectedHeader = new String[] {"time", "rpad_result"};
+
+ // Case 1: 'data' argument is NULL. Read from row where id=6.
+ tableResultSetEqualTest(
+ "SELECT time, RPAD(c_blob_data, c_int_size, c_blob_pad) as \"rpad_result\" FROM t1 WHERE id = 6",
+ expectedHeader,
+ new String[] {"1970-01-01T00:00:00.006Z,null,"},
+ DATABASE_NAME);
+
+ // Case 2: 'paddata' argument is NULL. Read from row where id=7.
+ tableResultSetEqualTest(
+ "SELECT time, RPAD(c_blob_data, c_int_size, c_blob_pad) as \"rpad_result\" FROM t1 WHERE id = 7",
+ expectedHeader,
+ new String[] {"1970-01-01T00:00:00.007Z,null,"},
+ DATABASE_NAME);
+
+ // Case 3: 'size' argument is NULL. Read from row where id=8.
+ tableResultSetEqualTest(
+ "SELECT time, RPAD(c_blob_data, c_int_size, c_blob_pad) as \"rpad_result\" FROM t1 WHERE id = 8",
+ expectedHeader,
+ new String[] {"1970-01-01T00:00:00.008Z,null,"},
+ DATABASE_NAME);
+ }
+
+ /** Test invalid parameter values that should cause the function to fail. */
+ @Test
+ public void testInvalidParameters() {
+ // Test with a negative size
+ tableAssertTestFail(
+ "SELECT RPAD(c_blob_data, -1, c_blob_pad) FROM t1 WHERE id = 1",
+ TSStatusCode.SEMANTIC_ERROR.getStatusCode()
+ + ": Failed to execute function 'Rpad' due to the value 0xaabb corresponding to a invalid target size, the allowed range is [0, 2147483647].",
+ DATABASE_NAME);
+
+ // Test with an empty paddata blob
+ tableAssertTestFail(
+ "SELECT RPAD(c_blob_data, 5, c_blob_pad) FROM t1 WHERE id = 9",
+ TSStatusCode.SEMANTIC_ERROR.getStatusCode()
+ + ": Failed to execute function 'Rpad' due the value 0xaa corresponding to a empty padding string.",
+ DATABASE_NAME);
+ }
+
+ /** Test argument type validation for the RPAD function. */
+ @Test
+ public void testInvalidTypes() {
+ String wrongTypeErrorMessage =
+ TSStatusCode.SEMANTIC_ERROR.getStatusCode()
+ + ": Scalar function rpad only accepts three arguments, first argument must be BlOB type, "
+ + "second argument must be int32 or int64 type, third argument must be BLOB type.";
+
+ // First argument is not a BLOB
+ tableAssertTestFail(
+ "SELECT RPAD(c_text_data, 5, c_blob_pad) FROM t1 WHERE id = 1",
+ wrongTypeErrorMessage,
+ DATABASE_NAME);
+
+ // Second argument is not an INTEGER
+ tableAssertTestFail(
+ "SELECT RPAD(c_blob_data, '5', c_blob_pad) FROM t1 WHERE id = 1",
+ wrongTypeErrorMessage,
+ DATABASE_NAME);
+
+ // Third argument is not a BLOB
+ tableAssertTestFail(
+ "SELECT RPAD(c_blob_data, 5, c_int_data) FROM t1 WHERE id = 1",
+ wrongTypeErrorMessage,
+ DATABASE_NAME);
+ }
+}
diff --git a/integration-test/src/test/java/org/apache/iotdb/relational/it/query/recent/scalar/IoTDBSha1ColumnFunctionIT.java b/integration-test/src/test/java/org/apache/iotdb/relational/it/query/recent/scalar/IoTDBSha1ColumnFunctionIT.java
new file mode 100644
index 000000000000..ff7cde3b4cc0
--- /dev/null
+++ b/integration-test/src/test/java/org/apache/iotdb/relational/it/query/recent/scalar/IoTDBSha1ColumnFunctionIT.java
@@ -0,0 +1,132 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.iotdb.relational.it.query.recent.scalar;
+
+import org.apache.iotdb.it.env.EnvFactory;
+import org.apache.iotdb.it.framework.IoTDBTestRunner;
+import org.apache.iotdb.itbase.category.TableClusterIT;
+import org.apache.iotdb.itbase.category.TableLocalStandaloneIT;
+import org.apache.iotdb.rpc.TSStatusCode;
+
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+import org.junit.runner.RunWith;
+
+import static org.apache.iotdb.db.it.utils.TestUtils.prepareTableData;
+import static org.apache.iotdb.db.it.utils.TestUtils.tableAssertTestFail;
+import static org.apache.iotdb.db.it.utils.TestUtils.tableResultSetEqualTest;
+
+@RunWith(IoTDBTestRunner.class)
+@Category({TableLocalStandaloneIT.class, TableClusterIT.class})
+public class IoTDBSha1ColumnFunctionIT {
+
+ private static final String DATABASE_NAME = "test_sha1_function";
+ private static final String[] createSqls =
+ new String[] {
+ "CREATE DATABASE " + DATABASE_NAME,
+ "USE " + DATABASE_NAME,
+ "CREATE TABLE table1(c_text TEXT FIELD, c_string STRING FIELD, c_blob BLOB FIELD, c_int INT32 FIELD)",
+
+ // Test with a simple ASCII string
+ "INSERT INTO table1(time, c_text, c_string) VALUES (1, 'hello', 'hello')",
+ // Test with Chinese characters (UTF-8)
+ "INSERT INTO table1(time, c_text, c_string) VALUES (2, '你好', '你好')",
+ // Test with an empty string
+ "INSERT INTO table1(time, c_text, c_string) VALUES (3, '', '')",
+ // Test with null values, which should result in a null hash
+ "INSERT INTO table1(time, c_int) VALUES (4, 404)",
+ // Test with a string containing special characters and spaces
+ "INSERT INTO table1(time, c_text, c_string) VALUES (5, 'Hello, World!', 'Hello, World!')",
+ // Test with blob data (hex representation of 'test')
+ "INSERT INTO table1(time, c_blob) VALUES (6, x'74657374')"
+ };
+
+ @BeforeClass
+ public static void setUp() throws Exception {
+ EnvFactory.getEnv().initClusterEnvironment();
+ prepareTableData(createSqls);
+ }
+
+ @AfterClass
+ public static void tearDown() throws Exception {
+ EnvFactory.getEnv().cleanClusterEnvironment();
+ }
+
+ /** Test the sha1() function on TEXT and STRING data types with various inputs. */
+ @Test
+ public void testSha1OnTextAndString() {
+ String[] expectedHeader = new String[] {"time", "sha1(c_text)", "sha1(c_string)"};
+ String[] retArray =
+ new String[] {
+ // sha1('hello')
+ "1970-01-01T00:00:00.001Z,0xaaf4c61ddcc5e8a2dabede0f3b482cd9aea9434d,0xaaf4c61ddcc5e8a2dabede0f3b482cd9aea9434d,",
+ // sha1('你好')
+ "1970-01-01T00:00:00.002Z,0x440ee0853ad1e99f962b63e459ef992d7c211722,0x440ee0853ad1e99f962b63e459ef992d7c211722,",
+ // sha1('')
+ "1970-01-01T00:00:00.003Z,0xda39a3ee5e6b4b0d3255bfef95601890afd80709,0xda39a3ee5e6b4b0d3255bfef95601890afd80709,",
+ // sha1(null)
+ "1970-01-01T00:00:00.004Z,null,null,",
+ // sha1('Hello, World!')
+ "1970-01-01T00:00:00.005Z,0x0a0a9f2a6772942557ab5355d76af442f8f65e01,0x0a0a9f2a6772942557ab5355d76af442f8f65e01,"
+ };
+ tableResultSetEqualTest(
+ "SELECT time, sha1(c_text) as \"sha1(c_text)\", sha1(c_string) as \"sha1(c_string)\" FROM table1 where time < 6",
+ expectedHeader,
+ retArray,
+ DATABASE_NAME);
+ }
+
+ /** Test the sha1() function on the BLOB data type. */
+ @Test
+ public void testSha1OnBlob() {
+ String[] expectedHeader = new String[] {"time", "sha1(c_blob)"};
+ String[] retArray =
+ new String[] {
+ // sha1(x'74657374') which is 'test' -> a94a8fe5ccb19ba61c4c0873d391e987982fbbd3
+ "1970-01-01T00:00:00.006Z,0xa94a8fe5ccb19ba61c4c0873d391e987982fbbd3,"
+ };
+ tableResultSetEqualTest(
+ "SELECT time, sha1(c_blob) as \"sha1(c_blob)\" FROM table1 where time = 6",
+ expectedHeader,
+ retArray,
+ DATABASE_NAME);
+ }
+
+ /** Test that the sha1() function fails when provided with invalid arguments. */
+ @Test
+ public void testSha1FunctionOnInvalidInputs() {
+ // Construct the expected error message for semantic errors
+ String expectedErrorMessage =
+ TSStatusCode.SEMANTIC_ERROR.getStatusCode()
+ + ": Scalar function sha1 only accepts one argument and it must be TEXT, STRING, or BLOB data type.";
+
+ // Test with an invalid data type (INT32)
+ tableAssertTestFail("SELECT sha1(c_int) FROM table1", expectedErrorMessage, DATABASE_NAME);
+
+ // Test with too many arguments
+ tableAssertTestFail(
+ "SELECT sha1(c_text, 'another_arg') FROM table1", expectedErrorMessage, DATABASE_NAME);
+
+ // Test with no arguments
+ tableAssertTestFail("SELECT sha1() FROM table1", expectedErrorMessage, DATABASE_NAME);
+ }
+}
diff --git a/integration-test/src/test/java/org/apache/iotdb/relational/it/query/recent/scalar/IoTDBSha256ColumnFunctionIT.java b/integration-test/src/test/java/org/apache/iotdb/relational/it/query/recent/scalar/IoTDBSha256ColumnFunctionIT.java
new file mode 100644
index 000000000000..b6077cfcf088
--- /dev/null
+++ b/integration-test/src/test/java/org/apache/iotdb/relational/it/query/recent/scalar/IoTDBSha256ColumnFunctionIT.java
@@ -0,0 +1,134 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.iotdb.relational.it.query.recent.scalar;
+
+import org.apache.iotdb.it.env.EnvFactory;
+import org.apache.iotdb.it.framework.IoTDBTestRunner;
+import org.apache.iotdb.itbase.category.TableClusterIT;
+import org.apache.iotdb.itbase.category.TableLocalStandaloneIT;
+import org.apache.iotdb.rpc.TSStatusCode;
+
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+import org.junit.runner.RunWith;
+
+import static org.apache.iotdb.db.it.utils.TestUtils.prepareTableData;
+import static org.apache.iotdb.db.it.utils.TestUtils.tableAssertTestFail;
+import static org.apache.iotdb.db.it.utils.TestUtils.tableResultSetEqualTest;
+
+@RunWith(IoTDBTestRunner.class)
+@Category({TableLocalStandaloneIT.class, TableClusterIT.class})
+public class IoTDBSha256ColumnFunctionIT {
+
+ private static final String DATABASE_NAME = "test_sha256_function";
+ private static final String[] createSqls =
+ new String[] {
+ "CREATE DATABASE " + DATABASE_NAME,
+ "USE " + DATABASE_NAME,
+ "CREATE TABLE table1(c_text TEXT FIELD, c_string STRING FIELD, c_blob BLOB FIELD, c_int INT32 FIELD)",
+
+ // Test with a simple ASCII string
+ "INSERT INTO table1(time, c_text, c_string) VALUES (1, 'hello', 'hello')",
+ // Test with Chinese characters (UTF-8)
+ "INSERT INTO table1(time, c_text, c_string) VALUES (2, '你好', '你好')",
+ // Test with an empty string
+ "INSERT INTO table1(time, c_text, c_string) VALUES (3, '', '')",
+ // Test with null values, which should result in a null hash
+ "INSERT INTO table1(time, c_int) VALUES (4, 404)",
+ // Test with a string containing special characters and spaces
+ "INSERT INTO table1(time, c_text, c_string) VALUES (5, 'Hello, World!', 'Hello, World!')",
+ // Test with blob data (hex representation of 'test')
+ "INSERT INTO table1(time, c_blob) VALUES (6, x'74657374')"
+ };
+
+ @BeforeClass
+ public static void setUp() throws Exception {
+ EnvFactory.getEnv().initClusterEnvironment();
+ prepareTableData(createSqls);
+ }
+
+ @AfterClass
+ public static void tearDown() throws Exception {
+ EnvFactory.getEnv().cleanClusterEnvironment();
+ }
+
+ /** Test the sha256() function on TEXT and STRING data types with various inputs. */
+ @Test
+ public void testSha256OnTextAndString() {
+ String[] expectedHeader = new String[] {"time", "sha256(c_text)", "sha256(c_string)"};
+ String[] retArray =
+ new String[] {
+ // sha256('hello') -> 0x2cf24dba5fb0a30e26e83b2ac5b9e29e1b161e5c1fa7425e73043362938b9824
+ "1970-01-01T00:00:00.001Z,0x2cf24dba5fb0a30e26e83b2ac5b9e29e1b161e5c1fa7425e73043362938b9824,0x2cf24dba5fb0a30e26e83b2ac5b9e29e1b161e5c1fa7425e73043362938b9824,",
+ // sha256('你好') -> 0x670d9743542cae3ea7ebe36af56bd53648b0a1126162e78d81a32934a711302e
+ "1970-01-01T00:00:00.002Z,0x670d9743542cae3ea7ebe36af56bd53648b0a1126162e78d81a32934a711302e,0x670d9743542cae3ea7ebe36af56bd53648b0a1126162e78d81a32934a711302e,",
+ // sha256('') -> 0xe3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855
+ "1970-01-01T00:00:00.003Z,0xe3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855,0xe3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855,",
+ // sha256(null) -> null
+ "1970-01-01T00:00:00.004Z,null,null,",
+ // sha256('Hello, World!') ->
+ // 0xdffd6021bb2bd5b0af676290809ec3a53191dd81c7f70a4b28688a362182986f
+ "1970-01-01T00:00:00.005Z,0xdffd6021bb2bd5b0af676290809ec3a53191dd81c7f70a4b28688a362182986f,0xdffd6021bb2bd5b0af676290809ec3a53191dd81c7f70a4b28688a362182986f,"
+ };
+ tableResultSetEqualTest(
+ "SELECT time, sha256(c_text) as \"sha256(c_text)\", sha256(c_string) as \"sha256(c_string)\" FROM table1 where time < 6",
+ expectedHeader,
+ retArray,
+ DATABASE_NAME);
+ }
+
+ /** Test the sha256() function on the BLOB data type. */
+ @Test
+ public void testSha256OnBlob() {
+ String[] expectedHeader = new String[] {"time", "sha256(c_blob)"};
+ String[] retArray =
+ new String[] {
+ // sha256(x'74657374') which is 'test' ->
+ // 0x9f86d081884c7d659a2feaa0c55ad015a3bf4f1b2b0b822cd15d6c15b0f00a08
+ "1970-01-01T00:00:00.006Z,0x9f86d081884c7d659a2feaa0c55ad015a3bf4f1b2b0b822cd15d6c15b0f00a08,"
+ };
+ tableResultSetEqualTest(
+ "SELECT time, sha256(c_blob) as \"sha256(c_blob)\" FROM table1 where time = 6",
+ expectedHeader,
+ retArray,
+ DATABASE_NAME);
+ }
+
+ /** Test that the sha256() function fails when provided with invalid arguments. */
+ @Test
+ public void testSha256FunctionOnInvalidInputs() {
+ // Construct the expected error message for semantic errors
+ String expectedErrorMessage =
+ TSStatusCode.SEMANTIC_ERROR.getStatusCode()
+ + ": Scalar function sha256 only accepts one argument and it must be TEXT, STRING, or BLOB data type.";
+
+ // Test with an invalid data type (INT32)
+ tableAssertTestFail("SELECT sha256(c_int) FROM table1", expectedErrorMessage, DATABASE_NAME);
+
+ // Test with too many arguments
+ tableAssertTestFail(
+ "SELECT sha256(c_text, 'another_arg') FROM table1", expectedErrorMessage, DATABASE_NAME);
+
+ // Test with no arguments
+ tableAssertTestFail("SELECT sha256() FROM table1", expectedErrorMessage, DATABASE_NAME);
+ }
+}
diff --git a/integration-test/src/test/java/org/apache/iotdb/relational/it/query/recent/scalar/IoTDBSha512ColumnFunctionIT.java b/integration-test/src/test/java/org/apache/iotdb/relational/it/query/recent/scalar/IoTDBSha512ColumnFunctionIT.java
new file mode 100644
index 000000000000..a23007b734a5
--- /dev/null
+++ b/integration-test/src/test/java/org/apache/iotdb/relational/it/query/recent/scalar/IoTDBSha512ColumnFunctionIT.java
@@ -0,0 +1,132 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.iotdb.relational.it.query.recent.scalar;
+
+import org.apache.iotdb.it.env.EnvFactory;
+import org.apache.iotdb.it.framework.IoTDBTestRunner;
+import org.apache.iotdb.itbase.category.TableClusterIT;
+import org.apache.iotdb.itbase.category.TableLocalStandaloneIT;
+import org.apache.iotdb.rpc.TSStatusCode;
+
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+import org.junit.runner.RunWith;
+
+import static org.apache.iotdb.db.it.utils.TestUtils.prepareTableData;
+import static org.apache.iotdb.db.it.utils.TestUtils.tableAssertTestFail;
+import static org.apache.iotdb.db.it.utils.TestUtils.tableResultSetEqualTest;
+
+@RunWith(IoTDBTestRunner.class)
+@Category({TableLocalStandaloneIT.class, TableClusterIT.class})
+public class IoTDBSha512ColumnFunctionIT {
+
+ private static final String DATABASE_NAME = "test_sha512_function";
+ private static final String[] createSqls =
+ new String[] {
+ "CREATE DATABASE " + DATABASE_NAME,
+ "USE " + DATABASE_NAME,
+ "CREATE TABLE table1(c_text TEXT FIELD, c_string STRING FIELD, c_blob BLOB FIELD, c_int INT32 FIELD)",
+
+ // Test with a simple ASCII string
+ "INSERT INTO table1(time, c_text, c_string) VALUES (1, 'hello', 'hello')",
+ // Test with Chinese characters (UTF-8)
+ "INSERT INTO table1(time, c_text, c_string) VALUES (2, '你好', '你好')",
+ // Test with an empty string
+ "INSERT INTO table1(time, c_text, c_string) VALUES (3, '', '')",
+ // Test with null values, which should result in a null hash
+ "INSERT INTO table1(time, c_int) VALUES (4, 404)",
+ // Test with a string containing special characters and spaces
+ "INSERT INTO table1(time, c_text, c_string) VALUES (5, 'Hello, World!', 'Hello, World!')",
+ // Test with blob data (hex representation of 'test')
+ "INSERT INTO table1(time, c_blob) VALUES (6, x'74657374')"
+ };
+
+ @BeforeClass
+ public static void setUp() throws Exception {
+ EnvFactory.getEnv().initClusterEnvironment();
+ prepareTableData(createSqls);
+ }
+
+ @AfterClass
+ public static void tearDown() throws Exception {
+ EnvFactory.getEnv().cleanClusterEnvironment();
+ }
+
+ /** Test the sha512() function on TEXT and STRING data types with various inputs. */
+ @Test
+ public void testSha512OnTextAndString() {
+ String[] expectedHeader = new String[] {"time", "sha512(c_text)", "sha512(c_string)"};
+ String[] retArray =
+ new String[] {
+ // sha512('hello') ->
+ "1970-01-01T00:00:00.001Z,0x9b71d224bd62f3785d96d46ad3ea3d73319bfbc2890caadae2dff72519673ca72323c3d99ba5c11d7c7acc6e14b8c5da0c4663475c2e5c3adef46f73bcdec043,0x9b71d224bd62f3785d96d46ad3ea3d73319bfbc2890caadae2dff72519673ca72323c3d99ba5c11d7c7acc6e14b8c5da0c4663475c2e5c3adef46f73bcdec043,",
+ // sha512('你好') ->
+ "1970-01-01T00:00:00.002Z,0x5232181bc0d9888f5c9746e410b4740eb461706ba5dacfbc93587cecfc8d068bac7737e92870d6745b11a25e9cd78b55f4ffc706f73cfcae5345f1b53fb8f6b5,0x5232181bc0d9888f5c9746e410b4740eb461706ba5dacfbc93587cecfc8d068bac7737e92870d6745b11a25e9cd78b55f4ffc706f73cfcae5345f1b53fb8f6b5,",
+ // sha512('') ->
+ "1970-01-01T00:00:00.003Z,0xcf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e,0xcf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e,",
+ // sha512(null) -> null
+ "1970-01-01T00:00:00.004Z,null,null,",
+ // sha512('Hello, World!') ->
+ "1970-01-01T00:00:00.005Z,0x374d794a95cdcfd8b35993185fef9ba368f160d8daf432d08ba9f1ed1e5abe6cc69291e0fa2fe0006a52570ef18c19def4e617c33ce52ef0a6e5fbe318cb0387,0x374d794a95cdcfd8b35993185fef9ba368f160d8daf432d08ba9f1ed1e5abe6cc69291e0fa2fe0006a52570ef18c19def4e617c33ce52ef0a6e5fbe318cb0387,"
+ };
+ tableResultSetEqualTest(
+ "SELECT time, sha512(c_text) as \"sha512(c_text)\", sha512(c_string) as \"sha512(c_string)\" FROM table1 where time < 6",
+ expectedHeader,
+ retArray,
+ DATABASE_NAME);
+ }
+
+ /** Test the sha512() function on the BLOB data type. */
+ @Test
+ public void testSha512OnBlob() {
+ String[] expectedHeader = new String[] {"time", "sha512(c_blob)"};
+ String[] retArray =
+ new String[] {
+ // sha512(x'74657374') which is 'test' ->
+ "1970-01-01T00:00:00.006Z,0xee26b0dd4af7e749aa1a8ee3c10ae9923f618980772e473f8819a5d4940e0db27ac185f8a0e1d5f84f88bc887fd67b143732c304cc5fa9ad8e6f57f50028a8ff,"
+ };
+ tableResultSetEqualTest(
+ "SELECT time, sha512(c_blob) as \"sha512(c_blob)\" FROM table1 where time = 6",
+ expectedHeader,
+ retArray,
+ DATABASE_NAME);
+ }
+
+ /** Test that the sha512() function fails when provided with invalid arguments. */
+ @Test
+ public void testSha512FunctionOnInvalidInputs() {
+ // Construct the expected error message for semantic errors
+ String expectedErrorMessage =
+ TSStatusCode.SEMANTIC_ERROR.getStatusCode()
+ + ": Scalar function sha512 only accepts one argument and it must be TEXT, STRING, or BLOB data type.";
+
+ // Test with an invalid data type (INT32)
+ tableAssertTestFail("SELECT sha512(c_int) FROM table1", expectedErrorMessage, DATABASE_NAME);
+
+ // Test with too many arguments
+ tableAssertTestFail(
+ "SELECT sha512(c_text, 'another_arg') FROM table1", expectedErrorMessage, DATABASE_NAME);
+
+ // Test with no arguments
+ tableAssertTestFail("SELECT sha512() FROM table1", expectedErrorMessage, DATABASE_NAME);
+ }
+}
diff --git a/integration-test/src/test/java/org/apache/iotdb/relational/it/query/recent/scalar/IoTDBSpookyHashV2_32FunctionIT.java b/integration-test/src/test/java/org/apache/iotdb/relational/it/query/recent/scalar/IoTDBSpookyHashV2_32FunctionIT.java
new file mode 100644
index 000000000000..2c8e1e8c1321
--- /dev/null
+++ b/integration-test/src/test/java/org/apache/iotdb/relational/it/query/recent/scalar/IoTDBSpookyHashV2_32FunctionIT.java
@@ -0,0 +1,156 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.iotdb.relational.it.query.recent.scalar;
+
+import org.apache.iotdb.it.env.EnvFactory;
+import org.apache.iotdb.it.framework.IoTDBTestRunner;
+import org.apache.iotdb.itbase.category.TableClusterIT;
+import org.apache.iotdb.itbase.category.TableLocalStandaloneIT;
+import org.apache.iotdb.rpc.TSStatusCode;
+
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+import org.junit.runner.RunWith;
+
+import static org.apache.iotdb.db.it.utils.TestUtils.prepareTableData;
+import static org.apache.iotdb.db.it.utils.TestUtils.tableAssertTestFail;
+import static org.apache.iotdb.db.it.utils.TestUtils.tableResultSetEqualTest;
+
+@RunWith(IoTDBTestRunner.class)
+@Category({TableLocalStandaloneIT.class, TableClusterIT.class})
+public class IoTDBSpookyHashV2_32FunctionIT {
+
+ private static final String DATABASE_NAME = "test_spooky_hash_v2_32_function";
+ private static final String[] createSqls =
+ new String[] {
+ "CREATE DATABASE " + DATABASE_NAME,
+ "USE " + DATABASE_NAME,
+ "CREATE TABLE table1(c_text TEXT FIELD, c_string STRING FIELD, c_blob BLOB FIELD, c_int INT32 FIELD)",
+ // 1. Test standard string 'hello'
+ "INSERT INTO table1(time, c_text, c_string) VALUES (1, 'hello', 'hello')",
+ // 2. Test Chinese characters '你好' (UTF-8)
+ "INSERT INTO table1(time, c_text, c_string) VALUES (2, '你好', '你好')",
+ // 3. Test empty string
+ "INSERT INTO table1(time, c_text, c_string) VALUES (3, '', '')",
+ // 4. Test null values
+ "INSERT INTO table1(time, c_int) VALUES (4, 404)",
+ // 5. Test special characters and spaces
+ "INSERT INTO table1(time, c_text, c_string) VALUES (5, 'Hello, World!', 'Hello, World!')",
+ // 6. Test blob data (hex representation of 'test')
+ "INSERT INTO table1(time, c_blob) VALUES (6, x'74657374')",
+ // 7. Test standard benchmark string '123456789'
+ "INSERT INTO table1(time, c_text) VALUES (7, '123456789')"
+ };
+
+ @BeforeClass
+ public static void setUp() throws Exception {
+ EnvFactory.getEnv().initClusterEnvironment();
+ prepareTableData(createSqls);
+ }
+
+ @AfterClass
+ public static void tearDown() throws Exception {
+ EnvFactory.getEnv().cleanClusterEnvironment();
+ }
+
+ /** Validate the SpookyHashV2 32-bit checksum for TEXT/STRING types */
+ @Test
+ public void testSpookyHashV232OnTextString() {
+
+ String[] expectedHeader =
+ new String[] {
+ "time", "spooky_hash_v2_32(c_text)", "spooky_hash_v2_32(c_string)",
+ };
+ String[] retArray =
+ new String[] {
+ // 'hello' -> 0xd382e6ca
+ "1970-01-01T00:00:00.001Z,0xd382e6ca,0xd382e6ca,",
+ // '你好' (UTF-8) -> 0x38c1de3e
+ "1970-01-01T00:00:00.002Z,0x38c1de3e,0x38c1de3e,",
+ // '' -> 0x6bf50919 (default seed)
+ "1970-01-01T00:00:00.003Z,0x6bf50919,0x6bf50919,",
+ // null -> null
+ "1970-01-01T00:00:00.004Z,null,null,",
+ // 'Hello, World!' -> 0x4a0db65a
+ "1970-01-01T00:00:00.005Z,0x4a0db65a,0x4a0db65a,"
+ };
+ tableResultSetEqualTest(
+ "SELECT time, spooky_hash_v2_32(c_text) as \"spooky_hash_v2_32(c_text)\", spooky_hash_v2_32(c_string) as \"spooky_hash_v2_32(c_string)\" FROM table1 where time < 6",
+ expectedHeader,
+ retArray,
+ DATABASE_NAME);
+ }
+
+ /** Validate the SpookyHashV2 32-bit checksum for BLOB type */
+ @Test
+ public void testSpookyHashV232OnBlob() {
+ String[] expectedHeader = new String[] {"time", "spooky_hash_v2_32(c_blob)"};
+ String[] retArray =
+ new String[] {
+ // blob x'74657374' ('test') -> 0xec0d8b75
+ "1970-01-01T00:00:00.006Z,0xec0d8b75,"
+ };
+ tableResultSetEqualTest(
+ "SELECT time, spooky_hash_v2_32(c_blob) as \"spooky_hash_v2_32(c_blob)\" FROM table1 where time = 6",
+ expectedHeader,
+ retArray,
+ DATABASE_NAME);
+ }
+
+ /** Validate against a known industry-standard value */
+ @Test
+ public void testSpookyHashV232KnownValue() {
+ String[] expectedHeader = new String[] {"time", "spooky_hash_v2_32(c_text)"};
+ String[] retArray =
+ new String[] {
+          // '123456789' -> 0x98b9cd87
+ "1970-01-01T00:00:00.007Z,0x98b9cd87,"
+ };
+ tableResultSetEqualTest(
+ "SELECT time, spooky_hash_v2_32(c_text) as \"spooky_hash_v2_32(c_text)\" FROM table1 where time = 7",
+ expectedHeader,
+ retArray,
+ DATABASE_NAME);
+ }
+
+ /** Test that invalid input types or number of arguments are rejected */
+ @Test
+ public void testSpookyHashV232FunctionOnInvalidInputs() {
+ String expectedErrorMessage =
+ TSStatusCode.SEMANTIC_ERROR.getStatusCode()
+ + ": Scalar function spooky_hash_v2_32 only accepts one argument and it must be TEXT, STRING, or BLOB data type.";
+
+ // Test with invalid data type (INT32)
+ tableAssertTestFail(
+ "SELECT spooky_hash_v2_32(c_int) FROM table1", expectedErrorMessage, DATABASE_NAME);
+
+ // Test with multiple arguments
+ tableAssertTestFail(
+ "SELECT spooky_hash_v2_32(c_text, 'another_arg') FROM table1",
+ expectedErrorMessage,
+ DATABASE_NAME);
+
+ // Test with no arguments
+ tableAssertTestFail(
+ "SELECT spooky_hash_v2_32() FROM table1", expectedErrorMessage, DATABASE_NAME);
+ }
+}
diff --git a/integration-test/src/test/java/org/apache/iotdb/relational/it/query/recent/scalar/IoTDBSpookyHashV2_64FunctionIT.java b/integration-test/src/test/java/org/apache/iotdb/relational/it/query/recent/scalar/IoTDBSpookyHashV2_64FunctionIT.java
new file mode 100644
index 000000000000..5f6f8a561eff
--- /dev/null
+++ b/integration-test/src/test/java/org/apache/iotdb/relational/it/query/recent/scalar/IoTDBSpookyHashV2_64FunctionIT.java
@@ -0,0 +1,156 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.iotdb.relational.it.query.recent.scalar;
+
+import org.apache.iotdb.it.env.EnvFactory;
+import org.apache.iotdb.it.framework.IoTDBTestRunner;
+import org.apache.iotdb.itbase.category.TableClusterIT;
+import org.apache.iotdb.itbase.category.TableLocalStandaloneIT;
+import org.apache.iotdb.rpc.TSStatusCode;
+
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+import org.junit.runner.RunWith;
+
+import static org.apache.iotdb.db.it.utils.TestUtils.prepareTableData;
+import static org.apache.iotdb.db.it.utils.TestUtils.tableAssertTestFail;
+import static org.apache.iotdb.db.it.utils.TestUtils.tableResultSetEqualTest;
+
+@RunWith(IoTDBTestRunner.class)
+@Category({TableLocalStandaloneIT.class, TableClusterIT.class})
+public class IoTDBSpookyHashV2_64FunctionIT {
+
+ private static final String DATABASE_NAME = "test_spooky_hash_v2_64_function";
+ private static final String[] createSqls =
+ new String[] {
+ "CREATE DATABASE " + DATABASE_NAME,
+ "USE " + DATABASE_NAME,
+ "CREATE TABLE table1(c_text TEXT FIELD, c_string STRING FIELD, c_blob BLOB FIELD, c_int INT32 FIELD)",
+ // 1. Test standard string 'hello'
+ "INSERT INTO table1(time, c_text, c_string) VALUES (1, 'hello', 'hello')",
+ // 2. Test Chinese characters '你好' (UTF-8)
+ "INSERT INTO table1(time, c_text, c_string) VALUES (2, '你好', '你好')",
+ // 3. Test empty string
+ "INSERT INTO table1(time, c_text, c_string) VALUES (3, '', '')",
+ // 4. Test null values
+ "INSERT INTO table1(time, c_int) VALUES (4, 404)",
+ // 5. Test special characters and spaces
+ "INSERT INTO table1(time, c_text, c_string) VALUES (5, 'Hello, World!', 'Hello, World!')",
+ // 6. Test blob data (hex representation of 'test')
+ "INSERT INTO table1(time, c_blob) VALUES (6, x'74657374')",
+ // 7. Test standard benchmark string '123456789'
+ "INSERT INTO table1(time, c_text) VALUES (7, '123456789')"
+ };
+
+ @BeforeClass
+ public static void setUp() throws Exception {
+ EnvFactory.getEnv().initClusterEnvironment();
+ prepareTableData(createSqls);
+ }
+
+ @AfterClass
+ public static void tearDown() throws Exception {
+ EnvFactory.getEnv().cleanClusterEnvironment();
+ }
+
+ /** Validate the SpookyHashV2 64-bit checksum for TEXT/STRING types */
+ @Test
+ public void testSpookyHashV264OnTextString() {
+
+ String[] expectedHeader =
+ new String[] {
+ "time", "spooky_hash_v2_64(c_text)", "spooky_hash_v2_64(c_string)",
+ };
+ String[] retArray =
+ new String[] {
+ // 'hello' -> 0x3768826ad382e6ca
+ "1970-01-01T00:00:00.001Z,0x3768826ad382e6ca,0x3768826ad382e6ca,",
+          // '你好' (UTF-8) -> 0x116dbe1a38c1de3e
+ "1970-01-01T00:00:00.002Z,0x116dbe1a38c1de3e,0x116dbe1a38c1de3e,",
+ // '' -> 0x232706fc6bf50919 (default seed)
+ "1970-01-01T00:00:00.003Z,0x232706fc6bf50919,0x232706fc6bf50919,",
+ // null -> null
+ "1970-01-01T00:00:00.004Z,null,null,",
+          // 'Hello, World!' -> 0x9c7ad9cc4a0db65a
+ "1970-01-01T00:00:00.005Z,0x9c7ad9cc4a0db65a,0x9c7ad9cc4a0db65a,"
+ };
+ tableResultSetEqualTest(
+ "SELECT time, spooky_hash_v2_64(c_text) as \"spooky_hash_v2_64(c_text)\", spooky_hash_v2_64(c_string) as \"spooky_hash_v2_64(c_string)\" FROM table1 where time < 6",
+ expectedHeader,
+ retArray,
+ DATABASE_NAME);
+ }
+
+ /** Validate the SpookyHashV2 64-bit checksum for BLOB type */
+ @Test
+ public void testSpookyHashV264OnBlob() {
+ String[] expectedHeader = new String[] {"time", "spooky_hash_v2_64(c_blob)"};
+ String[] retArray =
+ new String[] {
+ // blob x'74657374' ('test') -> 0x7b01e8bcec0d8b75
+ "1970-01-01T00:00:00.006Z,0x7b01e8bcec0d8b75,"
+ };
+ tableResultSetEqualTest(
+ "SELECT time, spooky_hash_v2_64(c_blob) as \"spooky_hash_v2_64(c_blob)\" FROM table1 where time = 6",
+ expectedHeader,
+ retArray,
+ DATABASE_NAME);
+ }
+
+ /** Validate against a known industry-standard value */
+ @Test
+ public void testSpookyHashV264KnownValue() {
+ String[] expectedHeader = new String[] {"time", "spooky_hash_v2_64(c_text)"};
+ String[] retArray =
+ new String[] {
+ // '123456789' -> 0xb52b501c98b9cd87
+ "1970-01-01T00:00:00.007Z,0xb52b501c98b9cd87,"
+ };
+ tableResultSetEqualTest(
+ "SELECT time, spooky_hash_v2_64(c_text) as \"spooky_hash_v2_64(c_text)\" FROM table1 where time = 7",
+ expectedHeader,
+ retArray,
+ DATABASE_NAME);
+ }
+
+ /** Test that invalid input types or number of arguments are rejected */
+ @Test
+ public void testSpookyHashV264FunctionOnInvalidInputs() {
+ String expectedErrorMessage =
+ TSStatusCode.SEMANTIC_ERROR.getStatusCode()
+ + ": Scalar function spooky_hash_v2_64 only accepts one argument and it must be TEXT, STRING, or BLOB data type.";
+
+ // Test with invalid data type (INT32)
+ tableAssertTestFail(
+ "SELECT spooky_hash_v2_64(c_int) FROM table1", expectedErrorMessage, DATABASE_NAME);
+
+ // Test with multiple arguments
+ tableAssertTestFail(
+ "SELECT spooky_hash_v2_64(c_text, 'another_arg') FROM table1",
+ expectedErrorMessage,
+ DATABASE_NAME);
+
+ // Test with no arguments
+ tableAssertTestFail(
+ "SELECT spooky_hash_v2_64() FROM table1", expectedErrorMessage, DATABASE_NAME);
+ }
+}
diff --git a/integration-test/src/test/java/org/apache/iotdb/relational/it/query/recent/scalar/IoTDBToBase32ColumnFunctionIT.java b/integration-test/src/test/java/org/apache/iotdb/relational/it/query/recent/scalar/IoTDBToBase32ColumnFunctionIT.java
new file mode 100644
index 000000000000..567db7abd92b
--- /dev/null
+++ b/integration-test/src/test/java/org/apache/iotdb/relational/it/query/recent/scalar/IoTDBToBase32ColumnFunctionIT.java
@@ -0,0 +1,141 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.iotdb.relational.it.query.recent.scalar;
+
+import org.apache.iotdb.it.env.EnvFactory;
+import org.apache.iotdb.it.framework.IoTDBTestRunner;
+import org.apache.iotdb.itbase.category.TableClusterIT;
+import org.apache.iotdb.itbase.category.TableLocalStandaloneIT;
+import org.apache.iotdb.rpc.TSStatusCode;
+
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+import org.junit.runner.RunWith;
+
+import static org.apache.iotdb.db.it.utils.TestUtils.prepareTableData;
+import static org.apache.iotdb.db.it.utils.TestUtils.tableAssertTestFail;
+import static org.apache.iotdb.db.it.utils.TestUtils.tableResultSetEqualTest;
+
+@RunWith(IoTDBTestRunner.class)
+@Category({TableLocalStandaloneIT.class, TableClusterIT.class})
+public class IoTDBToBase32ColumnFunctionIT {
+
+ private static final String DATABASE_NAME = "test_to_base32_function";
+
+ private static final String[] createSqls =
+ new String[] {
+ "CREATE DATABASE " + DATABASE_NAME,
+ "USE " + DATABASE_NAME,
+ "CREATE TABLE table1(c_text TEXT, c_string STRING, c_blob BLOB, c_int INT32)",
+
+ // Case 1: Basic ASCII string for TEXT, STRING and equivalent BLOB
+ // 'Hello, IoTDB!' -> Hex: 0x48656c6c6f2c20496f54444221
+ "INSERT INTO table1(time, c_text, c_string, c_blob) VALUES (1, 'Hello, IoTDB!', 'Hello, IoTDB!', X'48656c6c6f2c20496f54444221')",
+
+ // Case 2: UTF-8 string for TEXT, STRING and equivalent BLOB
+ // '你好,世界' -> Hex: 0xe4bda0e5a5bd2ce4b896e7958c
+ "INSERT INTO table1(time, c_text, c_string, c_blob) VALUES (2, '你好,世界', '你好,世界', X'e4bda0e5a5bd2ce4b896e7958c')",
+
+ // Case 3: Empty string and empty BLOB
+ "INSERT INTO table1(time, c_text, c_string, c_blob) VALUES (3, '', '', X'')",
+
+ // Case 4: Null values for all supported types
+ "INSERT INTO table1(time, c_int) VALUES (4, 100)",
+
+ // Case 5: BLOB padding scenarios for 1-5 bytes
+ // 'f' (0x66) -> Base32: MY======
+ "INSERT INTO table1(time, c_blob) VALUES (5, X'66')",
+ // 'fo' (0x666f) -> Base32: MZXQ====
+ "INSERT INTO table1(time, c_blob) VALUES (6, X'666f')",
+ // 'foo' (0x666f6f) -> Base32: MZXW6===
+ "INSERT INTO table1(time, c_blob) VALUES (7, X'666f6f')",
+ // 'foob' (0x666f6f62) -> Base32: MZXW6YQ=
+ "INSERT INTO table1(time, c_blob) VALUES (8, X'666f6f62')",
+ // 'fooba' (0x666f6f6261) -> Base32: MZXW6YTB
+ "INSERT INTO table1(time, c_blob) VALUES (9, X'666f6f6261')",
+ };
+
+ @BeforeClass
+ public static void setUp() throws Exception {
+ EnvFactory.getEnv().initClusterEnvironment();
+ prepareTableData(createSqls);
+ }
+
+ @AfterClass
+ public static void tearDown() throws Exception {
+ EnvFactory.getEnv().cleanClusterEnvironment();
+ }
+
+ /** Validates the to_base32() function on various supported and valid inputs. */
+ @Test
+ public void testToBase32OnValidInputs() {
+ String[] expectedHeader =
+ new String[] {
+ "time", "to_base32(c_text)", "to_base32(c_string)", "to_base32(c_blob)",
+ };
+ String[] retArray =
+ new String[] {
+ // 1. Basic ASCII
+ "1970-01-01T00:00:00.001Z,JBSWY3DPFQQES32UIRBCC===,JBSWY3DPFQQES32UIRBCC===,JBSWY3DPFQQES32UIRBCC===,",
+ // 2. UTF-8
+ "1970-01-01T00:00:00.002Z,4S62BZNFXUWOJOEW46KYY===,4S62BZNFXUWOJOEW46KYY===,4S62BZNFXUWOJOEW46KYY===,",
+ // 3. Empty string/blob
+ "1970-01-01T00:00:00.003Z,,,,",
+ // 4. Null input
+ "1970-01-01T00:00:00.004Z,null,null,null,",
+ // 5. 1 byte with padding
+ "1970-01-01T00:00:00.005Z,null,null,MY======,",
+ // 6. 2 bytes with padding
+ "1970-01-01T00:00:00.006Z,null,null,MZXQ====,",
+ // 7. 3 bytes with padding
+ "1970-01-01T00:00:00.007Z,null,null,MZXW6===,",
+ // 8. 4 bytes with padding
+ "1970-01-01T00:00:00.008Z,null,null,MZXW6YQ=,",
+ // 9. 5 bytes with padding
+ "1970-01-01T00:00:00.009Z,null,null,MZXW6YTB,",
+ };
+
+ tableResultSetEqualTest(
+ "SELECT time, to_base32(c_text) as \"to_base32(c_text)\", to_base32(c_string) as \"to_base32(c_string)\", to_base32(c_blob) as \"to_base32(c_blob)\" FROM table1",
+ expectedHeader,
+ retArray,
+ DATABASE_NAME);
+ }
+
+ /** Tests for invalid arguments passed to the to_base32() function. */
+ @Test
+ public void testToBase32FunctionOnInvalidArguments() {
+ String expectedErrorMessage =
+ TSStatusCode.SEMANTIC_ERROR.getStatusCode()
+ + ": Scalar function to_base32 only accepts one argument and it must be TEXT, STRING, or BLOB data type.";
+
+ // Test with invalid parameter type (INT32)
+ tableAssertTestFail("SELECT to_base32(c_int) FROM table1", expectedErrorMessage, DATABASE_NAME);
+
+ // Test with invalid parameter count (0)
+ tableAssertTestFail("SELECT to_base32() FROM table1", expectedErrorMessage, DATABASE_NAME);
+
+ // Test with invalid parameter count (>1)
+ tableAssertTestFail(
+ "SELECT to_base32(c_text, c_blob) FROM table1", expectedErrorMessage, DATABASE_NAME);
+ }
+}
diff --git a/integration-test/src/test/java/org/apache/iotdb/relational/it/query/recent/scalar/IoTDBToBase64ColumnFunctionIT.java b/integration-test/src/test/java/org/apache/iotdb/relational/it/query/recent/scalar/IoTDBToBase64ColumnFunctionIT.java
new file mode 100644
index 000000000000..0a9cbc29c783
--- /dev/null
+++ b/integration-test/src/test/java/org/apache/iotdb/relational/it/query/recent/scalar/IoTDBToBase64ColumnFunctionIT.java
@@ -0,0 +1,149 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.iotdb.relational.it.query.recent.scalar;
+
+import org.apache.iotdb.it.env.EnvFactory;
+import org.apache.iotdb.it.framework.IoTDBTestRunner;
+import org.apache.iotdb.itbase.category.TableClusterIT;
+import org.apache.iotdb.itbase.category.TableLocalStandaloneIT;
+import org.apache.iotdb.rpc.TSStatusCode;
+
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+import org.junit.runner.RunWith;
+
+import static org.apache.iotdb.db.it.utils.TestUtils.prepareTableData;
+import static org.apache.iotdb.db.it.utils.TestUtils.tableAssertTestFail;
+import static org.apache.iotdb.db.it.utils.TestUtils.tableResultSetEqualTest;
+
+@RunWith(IoTDBTestRunner.class)
+@Category({TableLocalStandaloneIT.class, TableClusterIT.class})
+public class IoTDBToBase64ColumnFunctionIT {
+
+ private static final String DATABASE_NAME = "test_tobase64_function";
+ private static final String[] createSqls =
+ new String[] {
+ "CREATE DATABASE " + DATABASE_NAME,
+ "USE " + DATABASE_NAME,
+ "CREATE TABLE table1(c_text TEXT FIELD, c_string STRING FIELD, c_blob BLOB FIELD, c_int INT32 FIELD)",
+
+ // Test string 'hello'
+ "INSERT INTO table1(time, c_text, c_string) VALUES (1, 'hello', 'hello')",
+ // Test Chinese characters '你好'
+ "INSERT INTO table1(time, c_text, c_string) VALUES (2, '你好', '你好')",
+ // Test empty string
+ "INSERT INTO table1(time, c_text, c_string) VALUES (3, '', '')",
+ // Test null values
+ "INSERT INTO table1(time, c_int) VALUES (4, 404)",
+ // Test special characters and spaces
+ "INSERT INTO table1(time, c_text, c_string) VALUES (5, 'Hello, World!', 'Hello, World!')",
+ // Test blob data (hex representation of 'test')
+ "INSERT INTO table1(time, c_blob) VALUES (6, x'74657374')"
+ };
+
+ @BeforeClass
+ public static void setUp() throws Exception {
+ EnvFactory.getEnv().initClusterEnvironment();
+ prepareTableData(createSqls);
+ }
+
+ @AfterClass
+ public static void tearDown() throws Exception {
+ EnvFactory.getEnv().cleanClusterEnvironment();
+ }
+
+ /** Validate the normal encoding of to_base64() for TEXT/STRING type */
+ @Test
+ public void testToBase64OnTextString() {
+ String[] expectedHeader = new String[] {"time", "to_base64(c_text)", "to_base64(c_string)"};
+ String[] retArray =
+ new String[] {
+ // 'hello' -> 'aGVsbG8='
+ "1970-01-01T00:00:00.001Z,aGVsbG8=,aGVsbG8=,",
+ // '你好' -> '5L2g5aW9'
+ "1970-01-01T00:00:00.002Z,5L2g5aW9,5L2g5aW9,",
+ // '' -> ''
+ "1970-01-01T00:00:00.003Z,,,",
+ // null -> null
+ "1970-01-01T00:00:00.004Z,null,null,",
+ // 'Hello, World!' -> 'SGVsbG8sIFdvcmxkIQ=='
+ "1970-01-01T00:00:00.005Z,SGVsbG8sIFdvcmxkIQ==,SGVsbG8sIFdvcmxkIQ==,"
+ };
+ tableResultSetEqualTest(
+ "SELECT time, to_base64(c_text) as \"to_base64(c_text)\", to_base64(c_string) as \"to_base64(c_string)\" FROM table1 where time < 6",
+ expectedHeader,
+ retArray,
+ DATABASE_NAME);
+ }
+
+ /** Validate the normal encoding of to_base64() for BLOB type */
+ @Test
+ public void testToBase64OnBlob() {
+ String[] expectedHeader = new String[] {"time", "to_base64(c_blob)"};
+ String[] retArray =
+ new String[] {
+ // 0x74657374 ('test' in hex) -> 'dGVzdA=='
+ "1970-01-01T00:00:00.006Z,dGVzdA==,"
+ };
+ tableResultSetEqualTest(
+ "SELECT time, to_base64(c_blob) as \"to_base64(c_blob)\" FROM table1 where time = 6",
+ expectedHeader,
+ retArray,
+ DATABASE_NAME);
+ }
+
+ /** Test roundtrip conversion: to_base64(from_base64(x)) should equal x */
+ @Test
+ public void testToBase64RoundTrip() {
+ String[] expectedHeader = new String[] {"time", "roundtrip_result"};
+ String[] retArray =
+ new String[] {
+ "1970-01-01T00:00:00.001Z,hello,",
+ "1970-01-01T00:00:00.002Z,你好,",
+ "1970-01-01T00:00:00.003Z,,",
+ "1970-01-01T00:00:00.005Z,Hello, World!,"
+ };
+ tableResultSetEqualTest(
+ "SELECT time, CAST(from_base64(to_base64(c_text)) AS TEXT) as roundtrip_result FROM table1 where time in (1, 2, 3, 5)",
+ expectedHeader,
+ retArray,
+ DATABASE_NAME);
+ }
+
+ /** Invalid input type or number of arguments should be rejected */
+ @Test
+ public void testToBase64FunctionOnInvalidInputs() {
+ String expectedErrorMessage =
+ TSStatusCode.SEMANTIC_ERROR.getStatusCode()
+ + ": Scalar function to_base64 only accepts one argument and it must be TEXT, STRING, or BLOB data type.";
+
+ // Test with invalid data type (INT32)
+ tableAssertTestFail("SELECT to_base64(c_int) FROM table1", expectedErrorMessage, DATABASE_NAME);
+
+ // Test with multiple arguments
+ tableAssertTestFail(
+ "SELECT to_base64(c_text, 1) FROM table1", expectedErrorMessage, DATABASE_NAME);
+
+ // Test with no arguments
+ tableAssertTestFail("SELECT to_base64() FROM table1", expectedErrorMessage, DATABASE_NAME);
+ }
+}
diff --git a/integration-test/src/test/java/org/apache/iotdb/relational/it/query/recent/scalar/IoTDBToBase64UrlColumnFunctionIT.java b/integration-test/src/test/java/org/apache/iotdb/relational/it/query/recent/scalar/IoTDBToBase64UrlColumnFunctionIT.java
new file mode 100644
index 000000000000..56b8eb1d2fb4
--- /dev/null
+++ b/integration-test/src/test/java/org/apache/iotdb/relational/it/query/recent/scalar/IoTDBToBase64UrlColumnFunctionIT.java
@@ -0,0 +1,126 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.iotdb.relational.it.query.recent.scalar;
+
+import org.apache.iotdb.it.env.EnvFactory;
+import org.apache.iotdb.it.framework.IoTDBTestRunner;
+import org.apache.iotdb.itbase.category.TableClusterIT;
+import org.apache.iotdb.itbase.category.TableLocalStandaloneIT;
+import org.apache.iotdb.rpc.TSStatusCode;
+
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+import org.junit.runner.RunWith;
+
+import static org.apache.iotdb.db.it.utils.TestUtils.prepareTableData;
+import static org.apache.iotdb.db.it.utils.TestUtils.tableAssertTestFail;
+import static org.apache.iotdb.db.it.utils.TestUtils.tableResultSetEqualTest;
+
+@RunWith(IoTDBTestRunner.class)
+@Category({TableLocalStandaloneIT.class, TableClusterIT.class})
+public class IoTDBToBase64UrlColumnFunctionIT {
+
+ private static final String DATABASE_NAME = "test_tobase64url_function";
+
+ private static final String[] createSqls =
+ new String[] {
+ "CREATE DATABASE " + DATABASE_NAME,
+ "USE " + DATABASE_NAME,
+ "CREATE TABLE table1(c_text TEXT, c_string STRING, c_blob BLOB, c_int INT32)",
+
+ // 'hello world' -> Base64URL: aGVsbG8gd29ybGQ
+ "INSERT INTO table1(time, c_text, c_string, c_blob) VALUES (1, 'hello world', 'hello world', X'68656c6c6f20776f726c64')",
+
+      // '你好, IoTDB!' -> Base64URL: 5L2g5aW9LCBJb1REQiE
+ "INSERT INTO table1(time, c_text, c_string, c_blob) VALUES (2, '你好, IoTDB!', '你好, IoTDB!', X'e4bda0e5a5bd2c20496f54444221')",
+
+ // important: test cases with characters that are encoded to '+' and '/' in standard Base64
+      // Byte array [0xfb, 0xff, 0xbf] -> Standard Base64: +/+/ -> Base64URL: -_-_
+ "INSERT INTO table1(time, c_blob) VALUES (3, X'fbffbf')",
+
+ // '' -> Base64URL: ''
+ "INSERT INTO table1(time, c_text, c_string, c_blob) VALUES (4, '', '', X'')",
+
+ // null values
+ "INSERT INTO table1(time, c_int) VALUES (5, 100)",
+ };
+
+ @BeforeClass
+ public static void setUp() throws Exception {
+ EnvFactory.getEnv().initClusterEnvironment();
+ prepareTableData(createSqls);
+ }
+
+ @AfterClass
+ public static void tearDown() throws Exception {
+ EnvFactory.getEnv().cleanClusterEnvironment();
+ }
+
+ /** validate the correctness of the to_base64url() function on all supported data types */
+ @Test
+ public void testToBase64UrlOnAllTypes() {
+ String[] expectedHeader =
+ new String[] {
+ "time", "to_base64url(c_text)", "to_base64url(c_string)", "to_base64url(c_blob)"
+ };
+ String[] retArray =
+ new String[] {
+ // 1. 'hello world'
+ "1970-01-01T00:00:00.001Z,aGVsbG8gd29ybGQ,aGVsbG8gd29ybGQ,aGVsbG8gd29ybGQ,",
+ // 2. '你好, IoTDB!'
+ "1970-01-01T00:00:00.002Z,5L2g5aW9LCBJb1REQiE,5L2g5aW9LCBJb1REQiE,5L2g5aW9LCBJb1REQiE,",
+ // 3. special characters that are encoded to '+' and '/' in standard Base64
+ "1970-01-01T00:00:00.003Z,null,null,-_-_,",
+ // 4. empty string
+ "1970-01-01T00:00:00.004Z,,,,",
+ // null
+ "1970-01-01T00:00:00.005Z,null,null,null,"
+ };
+
+ tableResultSetEqualTest(
+ "SELECT time, to_base64url(c_text) as \"to_base64url(c_text)\", to_base64url(c_string) as \"to_base64url(c_string)\" , to_base64url(c_blob) as \"to_base64url(c_blob)\" FROM table1",
+ expectedHeader,
+ retArray,
+ DATABASE_NAME);
+ }
+
+ /**
+ * validate that when the to_base64url() function receives invalid parameters (type or number),
+ */
+ @Test
+ public void testToBase64UrlFunctionOnInvalidInputs() {
+ String expectedErrorMessage =
+ TSStatusCode.SEMANTIC_ERROR.getStatusCode()
+ + ": Scalar function to_base64url only accepts one argument and it must be TEXT, STRING, or BLOB data type.";
+
+ // test cases for invalid parameter types
+ tableAssertTestFail(
+ "SELECT to_base64url(c_int) FROM table1", expectedErrorMessage, DATABASE_NAME);
+
+ // test cases for invalid parameter numbers (0)
+ tableAssertTestFail("SELECT to_base64url() FROM table1", expectedErrorMessage, DATABASE_NAME);
+
+ // test cases for invalid parameter numbers (>1)
+ tableAssertTestFail(
+ "SELECT to_base64url(c_text, c_string) FROM table1", expectedErrorMessage, DATABASE_NAME);
+ }
+}
diff --git a/integration-test/src/test/java/org/apache/iotdb/relational/it/query/recent/scalar/IoTDBToBigEndian32FunctionIT.java b/integration-test/src/test/java/org/apache/iotdb/relational/it/query/recent/scalar/IoTDBToBigEndian32FunctionIT.java
new file mode 100644
index 000000000000..6b76df4f29a0
--- /dev/null
+++ b/integration-test/src/test/java/org/apache/iotdb/relational/it/query/recent/scalar/IoTDBToBigEndian32FunctionIT.java
@@ -0,0 +1,143 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.iotdb.relational.it.query.recent.scalar;
+
+import org.apache.iotdb.it.env.EnvFactory;
+import org.apache.iotdb.it.framework.IoTDBTestRunner;
+import org.apache.iotdb.itbase.category.TableClusterIT;
+import org.apache.iotdb.itbase.category.TableLocalStandaloneIT;
+import org.apache.iotdb.rpc.TSStatusCode;
+
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+import org.junit.runner.RunWith;
+
+import static org.apache.iotdb.db.it.utils.TestUtils.prepareTableData;
+import static org.apache.iotdb.db.it.utils.TestUtils.tableAssertTestFail;
+import static org.apache.iotdb.db.it.utils.TestUtils.tableResultSetEqualTest;
+
+@RunWith(IoTDBTestRunner.class)
+@Category({TableLocalStandaloneIT.class, TableClusterIT.class})
+public class IoTDBToBigEndian32FunctionIT {
+
+ private static final String DATABASE_NAME = "test_to_big_endian_32";
+
+ // SQL statements to set up the database and table; c_text/c_boolean exist only for invalid-type tests
+ private static final String[] createSqls =
+ new String[] {
+ "CREATE DATABASE " + DATABASE_NAME,
+ "USE " + DATABASE_NAME,
+ "CREATE TABLE table1(c_int32 INT32, c_text TEXT, c_boolean BOOLEAN)",
+
+ // Case 1: A common positive integer (0x01020304)
+ "INSERT INTO table1(time, c_int32) VALUES (1, 16909060)",
+
+ // Case 2: A negative integer (-1, which is 0xFFFFFFFF in two's complement)
+ "INSERT INTO table1(time, c_int32) VALUES (2, -1)",
+
+ // Case 3: Zero (0x00000000)
+ "INSERT INTO table1(time, c_int32) VALUES (3, 0)",
+
+ // Case 4: Maximum INT32 value (0x7FFFFFFF)
+ "INSERT INTO table1(time, c_int32) VALUES (4, 2147483647)",
+
+ // Case 5: Minimum INT32 value (0x80000000)
+ "INSERT INTO table1(time, c_int32) VALUES (5, -2147483648)",
+
+ // Case 6: Null input value, also populate other columns for invalid type testing
+ "INSERT INTO table1(time, c_int32, c_text, c_boolean) VALUES (6, null, 'text_value', true)",
+ };
+
+ @BeforeClass
+ public static void setUp() throws Exception {
+ EnvFactory.getEnv().initClusterEnvironment();
+ prepareTableData(createSqls);
+ }
+
+ @AfterClass
+ public static void tearDown() throws Exception {
+ EnvFactory.getEnv().cleanClusterEnvironment();
+ }
+
+ /**
+ * Validates the to_big_endian_32() function on various valid INT32 inputs. This test covers
+ * positive, negative, zero, min/max, and null values; each result is the value's 4 bytes in big-endian order.
+ */
+ @Test
+ public void testToBigEndian32OnValidInputs() {
+ String[] expectedHeader =
+ new String[] {
+ "time", "to_big_endian_32(c_int32)",
+ };
+ String[] retArray =
+ new String[] {
+ // 1. Positive integer 16909060 -> 0x01020304
+ "1970-01-01T00:00:00.001Z,0x01020304,",
+ // 2. Negative integer -1 -> 0xffffffff
+ "1970-01-01T00:00:00.002Z,0xffffffff,",
+ // 3. Zero -> 0x00000000
+ "1970-01-01T00:00:00.003Z,0x00000000,",
+ // 4. Max INT32 -> 0x7fffffff
+ "1970-01-01T00:00:00.004Z,0x7fffffff,",
+ // 5. Min INT32 -> 0x80000000
+ "1970-01-01T00:00:00.005Z,0x80000000,",
+ // 6. Null input -> null output
+ "1970-01-01T00:00:00.006Z,null,",
+ };
+
+ tableResultSetEqualTest(
+ "SELECT time, to_big_endian_32(c_int32) as \"to_big_endian_32(c_int32)\" FROM table1",
+ expectedHeader,
+ retArray,
+ DATABASE_NAME);
+ }
+
+ /**
+ * Tests for invalid arguments passed to the to_big_endian_32() function. This includes wrong
+ * argument count and wrong data types.
+ */
+ @Test
+ public void testToBigEndian32OnInvalidArguments() {
+ // Define the expected error message for semantic errors
+ String expectedErrorMessage =
+ TSStatusCode.SEMANTIC_ERROR.getStatusCode()
+ + ": Scalar function to_big_endian_32 only accepts one argument and it must be Int32 data type.";
+
+ // Test with an invalid parameter type (TEXT)
+ tableAssertTestFail(
+ "SELECT to_big_endian_32(c_text) FROM table1", expectedErrorMessage, DATABASE_NAME);
+
+ // Test with an invalid parameter type (BOOLEAN)
+ tableAssertTestFail(
+ "SELECT to_big_endian_32(c_boolean) FROM table1", expectedErrorMessage, DATABASE_NAME);
+
+ // Test with an invalid parameter count (0 arguments)
+ tableAssertTestFail(
+ "SELECT to_big_endian_32() FROM table1", expectedErrorMessage, DATABASE_NAME);
+
+ // Test with an invalid parameter count (>1 arguments)
+ tableAssertTestFail(
+ "SELECT to_big_endian_32(c_int32, c_int32) FROM table1",
+ expectedErrorMessage,
+ DATABASE_NAME);
+ }
+}
diff --git a/integration-test/src/test/java/org/apache/iotdb/relational/it/query/recent/scalar/IoTDBToBigEndian64FunctionIT.java b/integration-test/src/test/java/org/apache/iotdb/relational/it/query/recent/scalar/IoTDBToBigEndian64FunctionIT.java
new file mode 100644
index 000000000000..ff53e32c299c
--- /dev/null
+++ b/integration-test/src/test/java/org/apache/iotdb/relational/it/query/recent/scalar/IoTDBToBigEndian64FunctionIT.java
@@ -0,0 +1,139 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.iotdb.relational.it.query.recent.scalar;
+
+import org.apache.iotdb.it.env.EnvFactory;
+import org.apache.iotdb.it.framework.IoTDBTestRunner;
+import org.apache.iotdb.itbase.category.TableClusterIT;
+import org.apache.iotdb.itbase.category.TableLocalStandaloneIT;
+import org.apache.iotdb.rpc.TSStatusCode;
+
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+import org.junit.runner.RunWith;
+
+import static org.apache.iotdb.db.it.utils.TestUtils.prepareTableData;
+import static org.apache.iotdb.db.it.utils.TestUtils.tableAssertTestFail;
+import static org.apache.iotdb.db.it.utils.TestUtils.tableResultSetEqualTest;
+
+@RunWith(IoTDBTestRunner.class)
+@Category({TableLocalStandaloneIT.class, TableClusterIT.class})
+public class IoTDBToBigEndian64FunctionIT {
+
+ private static final String DATABASE_NAME = "test_to_big_endian_64";
+
+ // SQL statements to set up the database and table; c_int32/c_text exist only for invalid-type tests
+ private static final String[] createSqls =
+ new String[] {
+ "CREATE DATABASE " + DATABASE_NAME,
+ "USE " + DATABASE_NAME,
+ "CREATE TABLE table1(c_int64 INT64, c_int32 INT32, c_text TEXT)",
+
+ // Case 1: A common positive integer (0x0102030405060708)
+ "INSERT INTO table1(time, c_int64) VALUES (1, 72623859790382856)",
+
+ // Case 2: A negative integer (-1, which is 0xFFFFFFFFFFFFFFFF in two's complement)
+ "INSERT INTO table1(time, c_int64) VALUES (2, -1)",
+
+ // Case 3: Zero (0x0000000000000000)
+ "INSERT INTO table1(time, c_int64) VALUES (3, 0)",
+
+ // Case 4: Maximum INT64 value (0x7FFFFFFFFFFFFFFF)
+ "INSERT INTO table1(time, c_int64) VALUES (4, 9223372036854775807)",
+
+ // Case 5: Minimum INT64 value (0x8000000000000000)
+ "INSERT INTO table1(time, c_int64) VALUES (5, -9223372036854775808)",
+
+ // Case 6: Null input value, also populate other columns for invalid type testing
+ "INSERT INTO table1(time, c_int64, c_int32, c_text) VALUES (6, null, 123, 'some_text')",
+ };
+
+ @BeforeClass
+ public static void setUp() throws Exception {
+ EnvFactory.getEnv().initClusterEnvironment();
+ prepareTableData(createSqls);
+ }
+
+ @AfterClass
+ public static void tearDown() throws Exception {
+ EnvFactory.getEnv().cleanClusterEnvironment();
+ }
+
+ /**
+ * Validates the to_big_endian_64() function on various valid INT64 inputs. This test covers
+ * positive, negative, zero, min/max, and null values; each result is the value's 8 bytes in big-endian order.
+ */
+ @Test
+ public void testToBigEndian64OnValidInputs() {
+ String[] expectedHeader =
+ new String[] {
+ "time", "to_big_endian_64(c_int64)",
+ };
+ String[] retArray =
+ new String[] {
+ // 1. Positive integer 72623859790382856 -> 0x0102030405060708
+ "1970-01-01T00:00:00.001Z,0x0102030405060708,",
+ // 2. Negative integer -1 -> 0xffffffffffffffff
+ "1970-01-01T00:00:00.002Z,0xffffffffffffffff,",
+ // 3. Zero -> 0x0000000000000000
+ "1970-01-01T00:00:00.003Z,0x0000000000000000,",
+ // 4. Max INT64 -> 0x7fffffffffffffff
+ "1970-01-01T00:00:00.004Z,0x7fffffffffffffff,",
+ // 5. Min INT64 -> 0x8000000000000000
+ "1970-01-01T00:00:00.005Z,0x8000000000000000,",
+ // 6. Null input -> null output
+ "1970-01-01T00:00:00.006Z,null,",
+ };
+
+ tableResultSetEqualTest(
+ "SELECT time, to_big_endian_64(c_int64) as \"to_big_endian_64(c_int64)\" FROM table1",
+ expectedHeader,
+ retArray,
+ DATABASE_NAME);
+ }
+
+ /**
+ * Tests for invalid arguments passed to the to_big_endian_64() function. This includes wrong
+ * argument count and wrong data types.
+ */
+ @Test
+ public void testToBigEndian64OnInvalidInputs() {
+ // Define the expected error message for semantic errors
+ String errorMessage =
+ TSStatusCode.SEMANTIC_ERROR.getStatusCode()
+ + ": Scalar function to_big_endian_64 only accepts one argument and it must be Int64 data type.";
+
+ // Test with an invalid parameter type (INT32)
+ tableAssertTestFail(
+ "SELECT to_big_endian_64(c_int32) FROM table1", errorMessage, DATABASE_NAME);
+
+ // Test with an invalid parameter type (TEXT)
+ tableAssertTestFail("SELECT to_big_endian_64(c_text) FROM table1", errorMessage, DATABASE_NAME);
+
+ // Test with an invalid parameter count (0 arguments)
+ tableAssertTestFail("SELECT to_big_endian_64() FROM table1", errorMessage, DATABASE_NAME);
+
+ // Test with an invalid parameter count (>1 arguments)
+ tableAssertTestFail(
+ "SELECT to_big_endian_64(c_int64, c_int64) FROM table1", errorMessage, DATABASE_NAME);
+ }
+}
diff --git a/integration-test/src/test/java/org/apache/iotdb/relational/it/query/recent/scalar/IoTDBToHexColumnFunctionIT.java b/integration-test/src/test/java/org/apache/iotdb/relational/it/query/recent/scalar/IoTDBToHexColumnFunctionIT.java
new file mode 100644
index 000000000000..7e31095c50f1
--- /dev/null
+++ b/integration-test/src/test/java/org/apache/iotdb/relational/it/query/recent/scalar/IoTDBToHexColumnFunctionIT.java
@@ -0,0 +1,149 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.iotdb.relational.it.query.recent.scalar;
+
+import org.apache.iotdb.it.env.EnvFactory;
+import org.apache.iotdb.it.framework.IoTDBTestRunner;
+import org.apache.iotdb.itbase.category.TableClusterIT;
+import org.apache.iotdb.itbase.category.TableLocalStandaloneIT;
+import org.apache.iotdb.rpc.TSStatusCode;
+
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+import org.junit.runner.RunWith;
+
+import static org.apache.iotdb.db.it.utils.TestUtils.prepareTableData;
+import static org.apache.iotdb.db.it.utils.TestUtils.tableAssertTestFail;
+import static org.apache.iotdb.db.it.utils.TestUtils.tableResultSetEqualTest;
+
+@RunWith(IoTDBTestRunner.class)
+@Category({TableLocalStandaloneIT.class, TableClusterIT.class})
+public class IoTDBToHexColumnFunctionIT {
+
+ private static final String DATABASE_NAME = "test_tohex_function";
+ private static final String[] createSqls =
+ new String[] {
+ "CREATE DATABASE " + DATABASE_NAME,
+ "USE " + DATABASE_NAME,
+ "CREATE TABLE table1(c_text TEXT FIELD, c_string STRING FIELD, c_blob BLOB FIELD, c_int INT32 FIELD)",
+
+ // Test string 'hello'
+ "INSERT INTO table1(time, c_text, c_string) VALUES (1, 'hello', 'hello')",
+ // Test Chinese characters '你好' (UTF-8, multi-byte)
+ "INSERT INTO table1(time, c_text, c_string) VALUES (2, '你好', '你好')",
+ // Test empty string
+ "INSERT INTO table1(time, c_text, c_string) VALUES (3, '', '')",
+ // Test null values (only c_int populated; c_int also serves the invalid-type test)
+ "INSERT INTO table1(time, c_int) VALUES (4, 404)",
+ // Test special characters and spaces
+ "INSERT INTO table1(time, c_text, c_string) VALUES (5, 'Hello, World!', 'Hello, World!')",
+ // Test blob data (hex representation of 'test')
+ "INSERT INTO table1(time, c_blob) VALUES (6, x'74657374')"
+ };
+
+ @BeforeClass
+ public static void setUp() throws Exception {
+ EnvFactory.getEnv().initClusterEnvironment();
+ prepareTableData(createSqls);
+ }
+
+ @AfterClass
+ public static void tearDown() throws Exception {
+ EnvFactory.getEnv().cleanClusterEnvironment();
+ }
+
+ /** Validates to_hex() on TEXT/STRING values: UTF-8 bytes are rendered as lowercase hex digits. */
+ @Test
+ public void testToHexOnTextString() {
+ String[] expectedHeader = new String[] {"time", "to_hex(c_text)", "to_hex(c_string)"};
+ String[] retArray =
+ new String[] {
+ // 'hello' -> '68656c6c6f'
+ "1970-01-01T00:00:00.001Z,68656c6c6f,68656c6c6f,",
+ // '你好' (UTF-8) -> 'e4bda0e5a5bd'
+ "1970-01-01T00:00:00.002Z,e4bda0e5a5bd,e4bda0e5a5bd,",
+ // '' -> ''
+ "1970-01-01T00:00:00.003Z,,,",
+ // null -> null
+ "1970-01-01T00:00:00.004Z,null,null,",
+ // 'Hello, World!' -> '48656c6c6f2c20576f726c6421'
+ "1970-01-01T00:00:00.005Z,48656c6c6f2c20576f726c6421,48656c6c6f2c20576f726c6421,"
+ };
+ tableResultSetEqualTest(
+ "SELECT time, to_hex(c_text) as \"to_hex(c_text)\", to_hex(c_string) as \"to_hex(c_string)\" FROM table1 where time < 6",
+ expectedHeader,
+ retArray,
+ DATABASE_NAME);
+ }
+
+ /** Validates to_hex() on a BLOB value: the blob's raw bytes are rendered as lowercase hex digits. */
+ @Test
+ public void testToHexOnBlob() {
+ String[] expectedHeader = new String[] {"time", "to_hex(c_blob)"};
+ String[] retArray =
+ new String[] {
+ // x'74657374' -> '74657374'
+ "1970-01-01T00:00:00.006Z,74657374,"
+ };
+ tableResultSetEqualTest(
+ "SELECT time, to_hex(c_blob) as \"to_hex(c_blob)\" FROM table1 where time = 6",
+ expectedHeader,
+ retArray,
+ DATABASE_NAME);
+ }
+
+ /** Test roundtrip conversion: from_hex(to_hex(x)) should equal x (row 4 is excluded as null) */
+ @Test
+ public void testToHexRoundTrip() {
+ String[] expectedHeader = new String[] {"time", "roundtrip_result"};
+ String[] retArray =
+ new String[] {
+ "1970-01-01T00:00:00.001Z,hello,",
+ "1970-01-01T00:00:00.002Z,你好,",
+ "1970-01-01T00:00:00.003Z,,",
+ "1970-01-01T00:00:00.005Z,Hello, World!,"
+ };
+ tableResultSetEqualTest(
+ "SELECT time, CAST(from_hex(to_hex(c_text)) AS TEXT) as roundtrip_result FROM table1 where time in (1, 2, 3, 5)",
+ expectedHeader,
+ retArray,
+ DATABASE_NAME);
+ }
+
+ /** Invalid input type or number of arguments should be rejected with a semantic error */
+ @Test
+ public void testToHexFunctionOnInvalidInputs() {
+ String expectedErrorMessage =
+ TSStatusCode.SEMANTIC_ERROR.getStatusCode()
+ + ": Scalar function to_hex only accepts one argument and it must be TEXT, STRING, or BLOB data type.";
+
+ // Test with invalid data type (INT32)
+ tableAssertTestFail("SELECT to_hex(c_int) FROM table1", expectedErrorMessage, DATABASE_NAME);
+
+ // Test with multiple arguments
+ tableAssertTestFail(
+ "SELECT to_hex(c_text, 1) FROM table1", expectedErrorMessage, DATABASE_NAME);
+
+ // Test with no arguments
+ tableAssertTestFail("SELECT to_hex() FROM table1", expectedErrorMessage, DATABASE_NAME);
+ }
+}
diff --git a/integration-test/src/test/java/org/apache/iotdb/relational/it/query/recent/scalar/IoTDBToIEEE754_32FunctionIT.java b/integration-test/src/test/java/org/apache/iotdb/relational/it/query/recent/scalar/IoTDBToIEEE754_32FunctionIT.java
new file mode 100644
index 000000000000..7fdd71164d2c
--- /dev/null
+++ b/integration-test/src/test/java/org/apache/iotdb/relational/it/query/recent/scalar/IoTDBToIEEE754_32FunctionIT.java
@@ -0,0 +1,118 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.iotdb.relational.it.query.recent.scalar;
+
+import org.apache.iotdb.it.env.EnvFactory;
+import org.apache.iotdb.it.framework.IoTDBTestRunner;
+import org.apache.iotdb.itbase.category.TableClusterIT;
+import org.apache.iotdb.itbase.category.TableLocalStandaloneIT;
+import org.apache.iotdb.rpc.TSStatusCode;
+
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+import org.junit.runner.RunWith;
+
+import static org.apache.iotdb.db.it.utils.TestUtils.prepareTableData;
+import static org.apache.iotdb.db.it.utils.TestUtils.tableAssertTestFail;
+import static org.apache.iotdb.db.it.utils.TestUtils.tableResultSetEqualTest;
+
+@RunWith(IoTDBTestRunner.class)
+@Category({TableLocalStandaloneIT.class, TableClusterIT.class})
+public class IoTDBToIEEE754_32FunctionIT {
+
+ private static final String DATABASE_NAME = "test_to_ieee754_32_big_endian";
+
+ // Test data: valid FLOAT inputs, plus wrong-typed columns (row 6) for error-handling tests
+ private static final String[] createSqls =
+ new String[] {
+ "CREATE DATABASE " + DATABASE_NAME,
+ "USE " + DATABASE_NAME,
+ "CREATE TABLE table1(c_float FLOAT, c_int32 INT32, c_text TEXT)",
+
+ // Case 1: 1.25f
+ "INSERT INTO table1(time, c_float) VALUES (1, 1.25)",
+ // Case 2: -2.5f
+ "INSERT INTO table1(time, c_float) VALUES (2, -2.5)",
+ // Case 3: 0.0f
+ "INSERT INTO table1(time, c_float) VALUES (3, 0.0)",
+ // Case 4: 3.1415f
+ "INSERT INTO table1(time, c_float) VALUES (4, 3.1415)",
+ // Case 5: null
+ "INSERT INTO table1(time, c_float) VALUES (5, null)",
+ // invalid type inputs for error handling tests
+ "INSERT INTO table1(time, c_int32, c_text) VALUES (6, 100, 'invalid_type')",
+ };
+
+ @BeforeClass
+ public static void setUp() throws Exception {
+ EnvFactory.getEnv().initClusterEnvironment();
+ prepareTableData(createSqls);
+ }
+
+ @AfterClass
+ public static void tearDown() throws Exception {
+ EnvFactory.getEnv().cleanClusterEnvironment();
+ }
+
+ @Test
+ public void testToIEEE754_32OnValidInputs() {
+ String[] expectedHeader = {"time", "to_ieee754_32(c_float)"};
+ String[] retArray = {
+ "1970-01-01T00:00:00.001Z,0x3fa00000,",
+ "1970-01-01T00:00:00.002Z,0xc0200000,",
+ "1970-01-01T00:00:00.003Z,0x00000000,",
+ "1970-01-01T00:00:00.004Z,0x40490e56,",
+ "1970-01-01T00:00:00.005Z,null,"
+ };
+
+ tableResultSetEqualTest(
+ "SELECT time, to_ieee754_32(c_float) AS \"to_ieee754_32(c_float)\" FROM table1 WHERE time <= 5",
+ expectedHeader,
+ retArray,
+ DATABASE_NAME);
+ }
+
+ /** Verify error handling for invalid argument types and invalid argument counts. */
+ @Test
+ public void testToIEEE754_32OnInvalidInputs() {
+ String errorMsg =
+ TSStatusCode.SEMANTIC_ERROR.getStatusCode()
+ + ": Scalar function to_ieee754_32 only accepts one argument and it must be Float data type.";
+
+ // test with invalid parameter type (INT32)
+ tableAssertTestFail(
+ "SELECT to_ieee754_32(c_int32) FROM table1 WHERE time = 6", errorMsg, DATABASE_NAME);
+
+ // test with invalid parameter type (TEXT)
+ tableAssertTestFail(
+ "SELECT to_ieee754_32(c_text) FROM table1 WHERE time = 6", errorMsg, DATABASE_NAME);
+
+ // test with no parameter
+ tableAssertTestFail("SELECT to_ieee754_32() FROM table1", errorMsg, DATABASE_NAME);
+
+ // test with two parameters
+ tableAssertTestFail(
+ "SELECT to_ieee754_32(c_float, c_float) FROM table1 WHERE time = 1",
+ errorMsg,
+ DATABASE_NAME);
+ }
+}
diff --git a/integration-test/src/test/java/org/apache/iotdb/relational/it/query/recent/scalar/IoTDBToIEEE754_64FunctionIT.java b/integration-test/src/test/java/org/apache/iotdb/relational/it/query/recent/scalar/IoTDBToIEEE754_64FunctionIT.java
new file mode 100644
index 000000000000..f1d04c6162da
--- /dev/null
+++ b/integration-test/src/test/java/org/apache/iotdb/relational/it/query/recent/scalar/IoTDBToIEEE754_64FunctionIT.java
@@ -0,0 +1,119 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.iotdb.relational.it.query.recent.scalar;
+
+import org.apache.iotdb.it.env.EnvFactory;
+import org.apache.iotdb.it.framework.IoTDBTestRunner;
+import org.apache.iotdb.itbase.category.TableClusterIT;
+import org.apache.iotdb.itbase.category.TableLocalStandaloneIT;
+import org.apache.iotdb.rpc.TSStatusCode;
+
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+import org.junit.runner.RunWith;
+
+import static org.apache.iotdb.db.it.utils.TestUtils.prepareTableData;
+import static org.apache.iotdb.db.it.utils.TestUtils.tableAssertTestFail;
+import static org.apache.iotdb.db.it.utils.TestUtils.tableResultSetEqualTest;
+
+@RunWith(IoTDBTestRunner.class)
+@Category({TableLocalStandaloneIT.class, TableClusterIT.class})
+public class IoTDBToIEEE754_64FunctionIT {
+
+ private static final String DATABASE_NAME = "test_to_ieee754_64_big_endian";
+
+ // Test data: Insert valid DOUBLE values to verify conversion to big-endian IEEE 754 64-bit BLOBs
+ private static final String[] createSqls =
+ new String[] {
+ "CREATE DATABASE " + DATABASE_NAME,
+ "USE " + DATABASE_NAME,
+ "CREATE TABLE table1(c_double DOUBLE, c_int64 INT64, c_text TEXT)",
+
+ // Case 1: 1.25d → IEEE 754 64-bit: 0x3FF4000000000000 (big-endian BLOB)
+ "INSERT INTO table1(time, c_double) VALUES (1, 1.25)",
+ // Case 2: -2.5d → IEEE 754 64-bit: 0xC004000000000000 (big-endian BLOB)
+ "INSERT INTO table1(time, c_double) VALUES (2, -2.5)",
+ // Case 3: 0.0d → IEEE 754 64-bit: 0x0000000000000000 (big-endian BLOB)
+ "INSERT INTO table1(time, c_double) VALUES (3, 0.0)",
+ // Case 4: 3.1415926535d → IEEE 754 64-bit: 0x400921FB54411744 (big-endian BLOB; NOT pi's encoding 0x400921FB54442D18)
+ "INSERT INTO table1(time, c_double) VALUES (4, 3.1415926535)",
+ // Case 5: Null input → null output
+ "INSERT INTO table1(time, c_double) VALUES (5, null)",
+ // Invalid type test data
+ "INSERT INTO table1(time, c_int64, c_text) VALUES (6, 1000, 'invalid_type')",
+ };
+
+ @BeforeClass
+ public static void setUp() throws Exception {
+ EnvFactory.getEnv().initClusterEnvironment();
+ prepareTableData(createSqls);
+ }
+
+ @AfterClass
+ public static void tearDown() throws Exception {
+ EnvFactory.getEnv().cleanClusterEnvironment();
+ }
+
+ /** Verify valid conversions: big-endian BLOB matches IEEE 754 64-bit standard encoding */
+ @Test
+ public void testToIEEE754_64OnValidInputs() {
+ String[] expectedHeader = {"time", "to_ieee754_64(c_double)"};
+ String[] retArray = {
+ "1970-01-01T00:00:00.001Z,0x3ff4000000000000,", // 1.25d → 0x3ff4000000000000
+ "1970-01-01T00:00:00.002Z,0xc004000000000000,", // -2.5d → 0xc004000000000000
+ "1970-01-01T00:00:00.003Z,0x0000000000000000,", // 0.0d → 0x0000000000000000
+ "1970-01-01T00:00:00.004Z,0x400921fb54411744,", // 3.1415926535d → 0x400921fb54411744
+ "1970-01-01T00:00:00.005Z,null," // Null input → null
+ };
+
+ tableResultSetEqualTest(
+ "SELECT time, to_ieee754_64(c_double) AS \"to_ieee754_64(c_double)\" FROM table1 WHERE time <= 5",
+ expectedHeader,
+ retArray,
+ DATABASE_NAME);
+ }
+
+ /** Verify error handling for invalid inputs (wrong type/argument count) */
+ @Test
+ public void testToIEEE754_64OnInvalidInputs() {
+ String errorMsg =
+ TSStatusCode.SEMANTIC_ERROR.getStatusCode()
+ + ": Scalar function to_ieee754_64 only accepts one argument and it must be Double data type.";
+
+ // Test non-DOUBLE input (INT64)
+ tableAssertTestFail(
+ "SELECT to_ieee754_64(c_int64) FROM table1 WHERE time = 6", errorMsg, DATABASE_NAME);
+
+ // Test non-DOUBLE input (TEXT)
+ tableAssertTestFail(
+ "SELECT to_ieee754_64(c_text) FROM table1 WHERE time = 6", errorMsg, DATABASE_NAME);
+
+ // Test no arguments
+ tableAssertTestFail("SELECT to_ieee754_64() FROM table1", errorMsg, DATABASE_NAME);
+
+ // Test multiple arguments
+ tableAssertTestFail(
+ "SELECT to_ieee754_64(c_double, c_double) FROM table1 WHERE time = 1",
+ errorMsg,
+ DATABASE_NAME);
+ }
+}
diff --git a/integration-test/src/test/java/org/apache/iotdb/relational/it/query/recent/scalar/IoTDBToLittleEndian32FunctionIT.java b/integration-test/src/test/java/org/apache/iotdb/relational/it/query/recent/scalar/IoTDBToLittleEndian32FunctionIT.java
new file mode 100644
index 000000000000..3b6f2c0c6fb6
--- /dev/null
+++ b/integration-test/src/test/java/org/apache/iotdb/relational/it/query/recent/scalar/IoTDBToLittleEndian32FunctionIT.java
@@ -0,0 +1,140 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.iotdb.relational.it.query.recent.scalar;
+
+import org.apache.iotdb.it.env.EnvFactory;
+import org.apache.iotdb.it.framework.IoTDBTestRunner;
+import org.apache.iotdb.itbase.category.TableClusterIT;
+import org.apache.iotdb.itbase.category.TableLocalStandaloneIT;
+import org.apache.iotdb.rpc.TSStatusCode;
+
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+import org.junit.runner.RunWith;
+
+import static org.apache.iotdb.db.it.utils.TestUtils.prepareTableData;
+import static org.apache.iotdb.db.it.utils.TestUtils.tableAssertTestFail;
+import static org.apache.iotdb.db.it.utils.TestUtils.tableResultSetEqualTest;
+
+@RunWith(IoTDBTestRunner.class)
+@Category({TableLocalStandaloneIT.class, TableClusterIT.class})
+public class IoTDBToLittleEndian32FunctionIT {
+
+ private static final String DATABASE_NAME = "test_to_little_endian_32";
+
+ // SQL statements to set up the database and table; c_int64/c_text exist only for invalid-type tests
+ private static final String[] createSqls =
+ new String[] {
+ "CREATE DATABASE " + DATABASE_NAME,
+ "USE " + DATABASE_NAME,
+ "CREATE TABLE table1(c_int32 INT32, c_int64 INT64, c_text TEXT)",
+
+ // Case 1: A common positive integer (16909060, which is 0x01020304 in hex)
+ "INSERT INTO table1(time, c_int32) VALUES (1, 16909060)",
+
+ // Case 2: A negative integer (-1, which is 0xFFFFFFFF in two's complement)
+ "INSERT INTO table1(time, c_int32) VALUES (2, -1)",
+
+ // Case 3: Zero (0x00000000)
+ "INSERT INTO table1(time, c_int32) VALUES (3, 0)",
+
+ // Case 4: Maximum INT32 value (2147483647, which is 0x7FFFFFFF)
+ "INSERT INTO table1(time, c_int32) VALUES (4, 2147483647)",
+
+ // Case 5: Minimum INT32 value (-2147483648, which is 0x80000000)
+ "INSERT INTO table1(time, c_int32) VALUES (5, -2147483648)",
+
+ // Case 6: Null input value, also populate other columns for invalid type testing
+ "INSERT INTO table1(time, c_int32, c_int64, c_text) VALUES (6, null, 123, 'some_text')",
+ };
+
+ @BeforeClass
+ public static void setUp() throws Exception {
+ EnvFactory.getEnv().initClusterEnvironment();
+ prepareTableData(createSqls);
+ }
+
+ @AfterClass
+ public static void tearDown() throws Exception {
+ EnvFactory.getEnv().cleanClusterEnvironment();
+ }
+
+ /**
+ * Validates the to_little_endian_32() function on various valid INT32 inputs. This test covers
+ * positive, negative, zero, min/max, and null values; each result holds the value's 4 bytes in reversed (LSB-first) order.
+ */
+ @Test
+ public void testToLittleEndian32OnValidInputs() {
+ String[] expectedHeader =
+ new String[] {
+ "time", "to_little_endian_32(c_int32)",
+ };
+ String[] retArray =
+ new String[] {
+ // 1. Positive integer 16909060 (0x01020304) -> 0x04030201
+ "1970-01-01T00:00:00.001Z,0x04030201,",
+ // 2. Negative integer -1 (0xffffffff) -> 0xffffffff
+ "1970-01-01T00:00:00.002Z,0xffffffff,",
+ // 3. Zero (0x00000000) -> 0x00000000
+ "1970-01-01T00:00:00.003Z,0x00000000,",
+ // 4. Max INT32 (0x7fffffff) -> 0xffffff7f
+ "1970-01-01T00:00:00.004Z,0xffffff7f,",
+ // 5. Min INT32 (0x80000000) -> 0x00000080
+ "1970-01-01T00:00:00.005Z,0x00000080,",
+ // 6. Null input -> null output
+ "1970-01-01T00:00:00.006Z,null,",
+ };
+
+ tableResultSetEqualTest(
+ "SELECT time, to_little_endian_32(c_int32) as \"to_little_endian_32(c_int32)\" FROM table1",
+ expectedHeader,
+ retArray,
+ DATABASE_NAME);
+ }
+
+ /**
+ * Tests for invalid arguments passed to the to_little_endian_32() function. This includes wrong
+ * argument count and wrong data types.
+ */
+ @Test
+ public void testToLittleEndian32OnInvalidInputs() {
+ // Define the expected error message for semantic errors
+ String errorMessage =
+ TSStatusCode.SEMANTIC_ERROR.getStatusCode()
+ + ": Scalar function to_little_endian_32 only accepts one argument and it must be Int32 data type.";
+
+ // Test with an invalid parameter type (INT64)
+ tableAssertTestFail(
+ "SELECT to_little_endian_32(c_int64) FROM table1", errorMessage, DATABASE_NAME);
+
+ // Test with an invalid parameter type (TEXT)
+ tableAssertTestFail(
+ "SELECT to_little_endian_32(c_text) FROM table1", errorMessage, DATABASE_NAME);
+
+ // Test with an invalid parameter count (0 arguments)
+ tableAssertTestFail("SELECT to_little_endian_32() FROM table1", errorMessage, DATABASE_NAME);
+
+ // Test with an invalid parameter count (>1 arguments)
+ tableAssertTestFail(
+ "SELECT to_little_endian_32(c_int32, c_int32) FROM table1", errorMessage, DATABASE_NAME);
+ }
+}
diff --git a/integration-test/src/test/java/org/apache/iotdb/relational/it/query/recent/scalar/IoTDBToLittleEndian64FunctionIT.java b/integration-test/src/test/java/org/apache/iotdb/relational/it/query/recent/scalar/IoTDBToLittleEndian64FunctionIT.java
new file mode 100644
index 000000000000..7e610a122e1b
--- /dev/null
+++ b/integration-test/src/test/java/org/apache/iotdb/relational/it/query/recent/scalar/IoTDBToLittleEndian64FunctionIT.java
@@ -0,0 +1,140 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.iotdb.relational.it.query.recent.scalar;
+
+import org.apache.iotdb.it.env.EnvFactory;
+import org.apache.iotdb.it.framework.IoTDBTestRunner;
+import org.apache.iotdb.itbase.category.TableClusterIT;
+import org.apache.iotdb.itbase.category.TableLocalStandaloneIT;
+import org.apache.iotdb.rpc.TSStatusCode;
+
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+import org.junit.runner.RunWith;
+
+import static org.apache.iotdb.db.it.utils.TestUtils.prepareTableData;
+import static org.apache.iotdb.db.it.utils.TestUtils.tableAssertTestFail;
+import static org.apache.iotdb.db.it.utils.TestUtils.tableResultSetEqualTest;
+
+@RunWith(IoTDBTestRunner.class)
+@Category({TableLocalStandaloneIT.class, TableClusterIT.class})
+public class IoTDBToLittleEndian64FunctionIT {
+
+ private static final String DATABASE_NAME = "test_to_little_endian_64";
+
+ // DDL and INSERT statements: create the test table and load boundary-value rows (positive, -1, 0, max, min, null)
+ private static final String[] createSqls =
+ new String[] {
+ "CREATE DATABASE " + DATABASE_NAME,
+ "USE " + DATABASE_NAME,
+ "CREATE TABLE table1(c_int64 INT64, c_int32 INT32, c_text TEXT)",
+
+ // Case 1: A common positive integer (72623859790382856, which is 0x0102030405060708 in hex)
+ "INSERT INTO table1(time, c_int64) VALUES (1, 72623859790382856)",
+
+ // Case 2: A negative integer (-1, which is 0xFFFFFFFFFFFFFFFF in two's complement)
+ "INSERT INTO table1(time, c_int64) VALUES (2, -1)",
+
+ // Case 3: Zero (0x0000000000000000)
+ "INSERT INTO table1(time, c_int64) VALUES (3, 0)",
+
+ // Case 4: Maximum INT64 value (9223372036854775807, which is 0x7FFFFFFFFFFFFFFF)
+ "INSERT INTO table1(time, c_int64) VALUES (4, 9223372036854775807)",
+
+ // Case 5: Minimum INT64 value (-9223372036854775808, which is 0x8000000000000000)
+ "INSERT INTO table1(time, c_int64) VALUES (5, -9223372036854775808)",
+
+ // Case 6: Null c_int64 (expects null output); c_int32/c_text are populated for the invalid-type tests below
+ "INSERT INTO table1(time, c_int64, c_int32, c_text) VALUES (6, null, 123, 'some_text')",
+ };
+
+ @BeforeClass
+ public static void setUp() throws Exception {
+ EnvFactory.getEnv().initClusterEnvironment();
+ prepareTableData(createSqls);
+ }
+
+ @AfterClass
+ public static void tearDown() throws Exception {
+ EnvFactory.getEnv().cleanClusterEnvironment();
+ }
+
+ /**
+ * Validates to_little_endian_64() on valid INT64 inputs: positive, negative, zero, INT64 min/max,
+ * and null. Expected output is the 8-byte two's-complement value with its byte order reversed.
+ */
+ @Test
+ public void testToLittleEndian64OnValidInputs() {
+ String[] expectedHeader =
+ new String[] {
+ "time", "to_little_endian_64(c_int64)",
+ };
+ String[] retArray =
+ new String[] {
+ // 1. Positive integer 72623859790382856 (0x0102030405060708) -> 0x0807060504030201
+ "1970-01-01T00:00:00.001Z,0x0807060504030201,",
+ // 2. Negative integer -1 (0xffffffffffffffff) -> byte-reversal is a no-op for all-0xff
+ "1970-01-01T00:00:00.002Z,0xffffffffffffffff,",
+ // 3. Zero (0x0000000000000000) -> byte-reversal is a no-op for all-zero
+ "1970-01-01T00:00:00.003Z,0x0000000000000000,",
+ // 4. Max INT64 (0x7fffffffffffffff) -> 0xffffffffffffff7f
+ "1970-01-01T00:00:00.004Z,0xffffffffffffff7f,",
+ // 5. Min INT64 (0x8000000000000000) -> 0x0000000000000080
+ "1970-01-01T00:00:00.005Z,0x0000000000000080,",
+ // 6. Null input -> null output (null propagation, no error)
+ "1970-01-01T00:00:00.006Z,null,",
+ };
+
+ tableResultSetEqualTest(
+ "SELECT time, to_little_endian_64(c_int64) as \"to_little_endian_64(c_int64)\" FROM table1",
+ expectedHeader,
+ retArray,
+ DATABASE_NAME);
+ }
+
+ /**
+ * Tests that invalid arguments to to_little_endian_64() are rejected at analysis time: wrong
+ * argument data types (INT32, TEXT) and wrong argument counts (0 and >1).
+ */
+ @Test
+ public void testToLittleEndian64OnInvalidInputs() {
+ // All four invalid calls are expected to fail with the same SEMANTIC_ERROR message
+ String errorMessage =
+ TSStatusCode.SEMANTIC_ERROR.getStatusCode()
+ + ": Scalar function to_little_endian_64 only accepts one argument and it must be Int64 data type.";
+
+ // Test with an invalid parameter type (INT32)
+ tableAssertTestFail(
+ "SELECT to_little_endian_64(c_int32) FROM table1", errorMessage, DATABASE_NAME);
+
+ // Test with an invalid parameter type (TEXT)
+ tableAssertTestFail(
+ "SELECT to_little_endian_64(c_text) FROM table1", errorMessage, DATABASE_NAME);
+
+ // Test with an invalid parameter count (0 arguments)
+ tableAssertTestFail("SELECT to_little_endian_64() FROM table1", errorMessage, DATABASE_NAME);
+
+ // Test with an invalid parameter count (>1 arguments)
+ tableAssertTestFail(
+ "SELECT to_little_endian_64(c_int64, c_int64) FROM table1", errorMessage, DATABASE_NAME);
+ }
+}
diff --git a/integration-test/src/test/java/org/apache/iotdb/relational/it/query/recent/scalar/IoTDBXXHash64ColumnFunctionIT.java b/integration-test/src/test/java/org/apache/iotdb/relational/it/query/recent/scalar/IoTDBXXHash64ColumnFunctionIT.java
new file mode 100644
index 000000000000..7954236f0e1d
--- /dev/null
+++ b/integration-test/src/test/java/org/apache/iotdb/relational/it/query/recent/scalar/IoTDBXXHash64ColumnFunctionIT.java
@@ -0,0 +1,132 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.iotdb.relational.it.query.recent.scalar;
+
+import org.apache.iotdb.it.env.EnvFactory;
+import org.apache.iotdb.it.framework.IoTDBTestRunner;
+import org.apache.iotdb.itbase.category.TableClusterIT;
+import org.apache.iotdb.itbase.category.TableLocalStandaloneIT;
+import org.apache.iotdb.rpc.TSStatusCode;
+
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+import org.junit.runner.RunWith;
+
+import static org.apache.iotdb.db.it.utils.TestUtils.prepareTableData;
+import static org.apache.iotdb.db.it.utils.TestUtils.tableAssertTestFail;
+import static org.apache.iotdb.db.it.utils.TestUtils.tableResultSetEqualTest;
+
+@RunWith(IoTDBTestRunner.class)
+@Category({TableLocalStandaloneIT.class, TableClusterIT.class})
+public class IoTDBXXHash64ColumnFunctionIT {
+
+ private static final String DATABASE_NAME = "test_xxhash64_function";
+ private static final String[] createSqls =
+ new String[] {
+ "CREATE DATABASE " + DATABASE_NAME,
+ "USE " + DATABASE_NAME,
+ "CREATE TABLE table1(c_text TEXT FIELD, c_string STRING FIELD, c_blob BLOB FIELD, c_int INT32 FIELD)",
+
+ // Simple ASCII string, stored in both TEXT and STRING columns to check they hash identically
+ "INSERT INTO table1(time, c_text, c_string) VALUES (1, 'hello', 'hello')",
+ // Chinese characters: exercises multi-byte UTF-8 input
+ "INSERT INTO table1(time, c_text, c_string) VALUES (2, '你好', '你好')",
+ // Empty string (still produces a well-defined hash, unlike null)
+ "INSERT INTO table1(time, c_text, c_string) VALUES (3, '', '')",
+ // Row with null c_text/c_string (expects null hashes); c_int populated for the invalid-type test
+ "INSERT INTO table1(time, c_int) VALUES (4, 404)",
+ // String containing punctuation and spaces
+ "INSERT INTO table1(time, c_text, c_string) VALUES (5, 'Hello, World!', 'Hello, World!')",
+ // Blob literal x'74657374' = the ASCII bytes of 'test'
+ "INSERT INTO table1(time, c_blob) VALUES (6, x'74657374')"
+ };
+
+ @BeforeClass
+ public static void setUp() throws Exception {
+ EnvFactory.getEnv().initClusterEnvironment();
+ prepareTableData(createSqls);
+ }
+
+ @AfterClass
+ public static void tearDown() throws Exception {
+ EnvFactory.getEnv().cleanClusterEnvironment();
+ }
+
+ /** Test xxhash64() on TEXT and STRING: equal string content must yield the same digest in both columns. */
+ @Test
+ public void testXxhash64OnTextAndString() {
+ String[] expectedHeader = new String[] {"time", "xxhash64(c_text)", "xxhash64(c_string)"};
+ String[] retArray =
+ new String[] {
+ // xxhash64('hello') — identical for TEXT and STRING (same underlying bytes)
+ "1970-01-01T00:00:00.001Z,0x26c7827d889f6da3,0x26c7827d889f6da3,",
+ // xxhash64('你好') — multi-byte UTF-8 input
+ "1970-01-01T00:00:00.002Z,0x8b7c90cd33d92633,0x8b7c90cd33d92633,",
+ // xxhash64('') — empty input hashes to a fixed non-null value
+ "1970-01-01T00:00:00.003Z,0xef46db3751d8e999,0xef46db3751d8e999,",
+ // xxhash64(null) — null input propagates to null output in both columns
+ "1970-01-01T00:00:00.004Z,null,null,",
+ // xxhash64('Hello, World!') — punctuation and spaces
+ "1970-01-01T00:00:00.005Z,0xc49aacf8080fe47f,0xc49aacf8080fe47f,"
+ };
+ tableResultSetEqualTest(
+ "SELECT time, xxhash64(c_text) as \"xxhash64(c_text)\", xxhash64(c_string) as \"xxhash64(c_string)\" FROM table1 where time < 6",
+ expectedHeader,
+ retArray,
+ DATABASE_NAME);
+ }
+
+ /** Test the xxhash64() function on the BLOB data type. */
+ @Test
+ public void testXxhash64OnBlob() {
+ String[] expectedHeader = new String[] {"time", "xxhash64(c_blob)"};
+ String[] retArray =
+ new String[] {
+ // xxhash64(x'74657374'), i.e. the bytes of 'test' — hashed directly, no charset decoding
+ "1970-01-01T00:00:00.006Z,0x4fdcca5ddb678139,"
+ };
+ tableResultSetEqualTest(
+ "SELECT time, xxhash64(c_blob) as \"xxhash64(c_blob)\" FROM table1 where time = 6",
+ expectedHeader,
+ retArray,
+ DATABASE_NAME);
+ }
+
+ /** Test that xxhash64() is rejected at analysis time for wrong argument types and counts. */
+ @Test
+ public void testXxhash64FunctionOnInvalidInputs() {
+ // All three invalid calls are expected to fail with the same SEMANTIC_ERROR message
+ String expectedErrorMessage =
+ TSStatusCode.SEMANTIC_ERROR.getStatusCode()
+ + ": Scalar function xxhash64 only accepts one argument and it must be TEXT, STRING, or BLOB data type.";
+
+ // Test with an invalid data type (INT32)
+ tableAssertTestFail("SELECT xxhash64(c_int) FROM table1", expectedErrorMessage, DATABASE_NAME);
+
+ // Test with too many arguments
+ tableAssertTestFail(
+ "SELECT xxhash64(c_text, 'another_arg') FROM table1", expectedErrorMessage, DATABASE_NAME);
+
+ // Test with no arguments
+ tableAssertTestFail("SELECT xxhash64() FROM table1", expectedErrorMessage, DATABASE_NAME);
+ }
+}
diff --git a/iotdb-core/datanode/pom.xml b/iotdb-core/datanode/pom.xml
index ebb1b2807f16..c0a182130f53 100644
--- a/iotdb-core/datanode/pom.xml
+++ b/iotdb-core/datanode/pom.xml
@@ -309,6 +309,10 @@
org.java-websocket
Java-WebSocket
+
+ org.lz4
+ lz4-java
+
junit
junit
diff --git a/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/execution/relational/ColumnTransformerBuilder.java b/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/execution/relational/ColumnTransformerBuilder.java
index 455172993dc4..6361b9edf0bc 100644
--- a/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/execution/relational/ColumnTransformerBuilder.java
+++ b/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/execution/relational/ColumnTransformerBuilder.java
@@ -82,7 +82,9 @@
import org.apache.iotdb.db.queryengine.transformation.dag.column.binary.CompareLessEqualColumnTransformer;
import org.apache.iotdb.db.queryengine.transformation.dag.column.binary.CompareLessThanColumnTransformer;
import org.apache.iotdb.db.queryengine.transformation.dag.column.binary.CompareNonEqualColumnTransformer;
+import org.apache.iotdb.db.queryengine.transformation.dag.column.binary.HmacColumnTransformer;
import org.apache.iotdb.db.queryengine.transformation.dag.column.binary.Like2ColumnTransformer;
+import org.apache.iotdb.db.queryengine.transformation.dag.column.binary.factory.HmacStrategiesFactory;
import org.apache.iotdb.db.queryengine.transformation.dag.column.leaf.ConstantColumnTransformer;
import org.apache.iotdb.db.queryengine.transformation.dag.column.leaf.IdentityColumnTransformer;
import org.apache.iotdb.db.queryengine.transformation.dag.column.leaf.LeafColumnTransformer;
@@ -101,6 +103,8 @@
import org.apache.iotdb.db.queryengine.transformation.dag.column.multi.LogicalOrMultiColumnTransformer;
import org.apache.iotdb.db.queryengine.transformation.dag.column.ternary.BetweenColumnTransformer;
import org.apache.iotdb.db.queryengine.transformation.dag.column.ternary.Like3ColumnTransformer;
+import org.apache.iotdb.db.queryengine.transformation.dag.column.ternary.LpadColumnTransformer;
+import org.apache.iotdb.db.queryengine.transformation.dag.column.ternary.RpadColumnTransformer;
import org.apache.iotdb.db.queryengine.transformation.dag.column.udf.UserDefineScalarFunctionTransformer;
import org.apache.iotdb.db.queryengine.transformation.dag.column.unary.IsNullColumnTransformer;
import org.apache.iotdb.db.queryengine.transformation.dag.column.unary.LikeColumnTransformer;
@@ -124,6 +128,11 @@
import org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar.BitwiseRightShiftColumnTransformer;
import org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar.BitwiseXor2ColumnTransformer;
import org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar.BitwiseXorColumnTransformer;
+import org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar.BytesToDoubleColumnTransformer;
+import org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar.BytesToFloatColumnTransformer;
+import org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar.BytesToIntColumnTransformer;
+import org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar.BytesToLongColumnTransformer;
+import org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar.CRC32Transformer;
import org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar.CastFunctionColumnTransformer;
import org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar.CeilColumnTransformer;
import org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar.Concat2ColumnTransformer;
@@ -135,17 +144,23 @@
import org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar.DegreesColumnTransformer;
import org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar.DiffColumnTransformer;
import org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar.DiffFunctionColumnTransformer;
+import org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar.DoubleToBytesColumnTransformer;
import org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar.EndsWith2ColumnTransformer;
import org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar.EndsWithColumnTransformer;
import org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar.ExpColumnTransformer;
import org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar.ExtractTransformer;
+import org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar.FloatToBytesColumnTransformer;
import org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar.FloorColumnTransformer;
import org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar.FormatColumnTransformer;
+import org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar.GenericCodecColumnTransformer;
+import org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar.HmacConstantKeyColumnTransformer;
+import org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar.IntToBytesColumnTransformer;
import org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar.LTrim2ColumnTransformer;
import org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar.LTrimColumnTransformer;
import org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar.LengthColumnTransformer;
import org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar.LnColumnTransformer;
import org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar.Log10ColumnTransformer;
+import org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar.LongToBytesColumnTransformer;
import org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar.LowerColumnTransformer;
import org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar.RTrim2ColumnTransformer;
import org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar.RTrimColumnTransformer;
@@ -176,6 +191,8 @@
import org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar.TrimColumnTransformer;
import org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar.TryCastFunctionColumnTransformer;
import org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar.UpperColumnTransformer;
+import org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar.factory.CodecStrategiesFactory;
+import org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar.factory.NumericCodecStrategiesFactory;
import org.apache.iotdb.udf.api.customizer.analysis.ScalarFunctionAnalysis;
import org.apache.iotdb.udf.api.customizer.parameter.FunctionArguments;
import org.apache.iotdb.udf.api.relational.ScalarFunction;
@@ -208,12 +225,18 @@
import static com.google.common.base.Preconditions.checkArgument;
import static org.apache.iotdb.db.queryengine.plan.expression.unary.LikeExpression.getEscapeCharacter;
import static org.apache.iotdb.db.queryengine.plan.relational.analyzer.predicate.PredicatePushIntoMetadataChecker.isStringLiteral;
+import static org.apache.iotdb.db.queryengine.plan.relational.metadata.TableMetadataImpl.isCharType;
import static org.apache.iotdb.db.queryengine.plan.relational.type.InternalTypeManager.getTSDataType;
import static org.apache.iotdb.db.queryengine.plan.relational.type.TypeSignatureTranslator.toTypeSignature;
import static org.apache.iotdb.db.queryengine.transformation.dag.column.FailFunctionColumnTransformer.FAIL_FUNCTION_NAME;
+import static org.apache.iotdb.db.queryengine.transformation.dag.column.binary.factory.HmacStrategiesFactory.createConstantKeyHmacMd5Strategy;
+import static org.apache.iotdb.db.queryengine.transformation.dag.column.binary.factory.HmacStrategiesFactory.createConstantKeyHmacSha1Strategy;
+import static org.apache.iotdb.db.queryengine.transformation.dag.column.binary.factory.HmacStrategiesFactory.createConstantKeyHmacSha256Strategy;
+import static org.apache.iotdb.db.queryengine.transformation.dag.column.binary.factory.HmacStrategiesFactory.createConstantKeyHmacSha512Strategy;
import static org.apache.tsfile.read.common.type.BlobType.BLOB;
import static org.apache.tsfile.read.common.type.BooleanType.BOOLEAN;
import static org.apache.tsfile.read.common.type.DoubleType.DOUBLE;
+import static org.apache.tsfile.read.common.type.FloatType.FLOAT;
import static org.apache.tsfile.read.common.type.IntType.INT32;
import static org.apache.tsfile.read.common.type.LongType.INT64;
import static org.apache.tsfile.read.common.type.StringType.STRING;
@@ -1118,6 +1141,311 @@ private ColumnTransformer getFunctionColumnTransformer(
return new BitwiseRightShiftArithmetic2ColumnTransformer(
first.getType(), first, this.process(children.get(1), context));
}
+ } else if (TableBuiltinScalarFunction.TO_BASE64
+ .getFunctionName()
+ .equalsIgnoreCase(functionName)) {
+ ColumnTransformer first = this.process(children.get(0), context);
+ return new GenericCodecColumnTransformer(
+ STRING, first, CodecStrategiesFactory.TO_BASE64, functionName, first.getType());
+
+ } else if (TableBuiltinScalarFunction.FROM_BASE64
+ .getFunctionName()
+ .equalsIgnoreCase(functionName)) {
+ ColumnTransformer first = this.process(children.get(0), context);
+ return new GenericCodecColumnTransformer(
+ BLOB, first, CodecStrategiesFactory.FROM_BASE64, functionName, first.getType());
+
+ } else if (TableBuiltinScalarFunction.TO_BASE64URL
+ .getFunctionName()
+ .equalsIgnoreCase(functionName)) {
+ ColumnTransformer first = this.process(children.get(0), context);
+ return new GenericCodecColumnTransformer(
+ STRING, first, CodecStrategiesFactory.TO_BASE64URL, functionName, first.getType());
+
+ } else if (TableBuiltinScalarFunction.FROM_BASE64URL
+ .getFunctionName()
+ .equalsIgnoreCase(functionName)) {
+ ColumnTransformer first = this.process(children.get(0), context);
+ return new GenericCodecColumnTransformer(
+ BLOB, first, CodecStrategiesFactory.FROM_BASE64URL, functionName, first.getType());
+
+ } else if (TableBuiltinScalarFunction.TO_BASE32
+ .getFunctionName()
+ .equalsIgnoreCase(functionName)) {
+ ColumnTransformer first = this.process(children.get(0), context);
+ return new GenericCodecColumnTransformer(
+ STRING, first, CodecStrategiesFactory.TO_BASE32, functionName, first.getType());
+
+ } else if (TableBuiltinScalarFunction.FROM_BASE32
+ .getFunctionName()
+ .equalsIgnoreCase(functionName)) {
+ ColumnTransformer first = this.process(children.get(0), context);
+ return new GenericCodecColumnTransformer(
+ BLOB, first, CodecStrategiesFactory.FROM_BASE32, functionName, first.getType());
+
+ } else if (TableBuiltinScalarFunction.SHA256.getFunctionName().equalsIgnoreCase(functionName)) {
+ ColumnTransformer first = this.process(children.get(0), context);
+ if (children.size() == 1) {
+ return new GenericCodecColumnTransformer(
+ BLOB, first, CodecStrategiesFactory.SHA256, functionName, first.getType());
+ }
+ } else if (TableBuiltinScalarFunction.SHA512.getFunctionName().equalsIgnoreCase(functionName)) {
+ ColumnTransformer first = this.process(children.get(0), context);
+ return new GenericCodecColumnTransformer(
+ BLOB, first, CodecStrategiesFactory.SHA512, functionName, first.getType());
+
+ } else if (TableBuiltinScalarFunction.SHA1.getFunctionName().equalsIgnoreCase(functionName)) {
+ ColumnTransformer first = this.process(children.get(0), context);
+ return new GenericCodecColumnTransformer(
+ BLOB, first, CodecStrategiesFactory.SHA1, functionName, first.getType());
+
+ } else if (TableBuiltinScalarFunction.MD5.getFunctionName().equalsIgnoreCase(functionName)) {
+ ColumnTransformer first = this.process(children.get(0), context);
+ return new GenericCodecColumnTransformer(
+ BLOB, first, CodecStrategiesFactory.MD5, functionName, first.getType());
+
+ } else if (TableBuiltinScalarFunction.XXHASH64
+ .getFunctionName()
+ .equalsIgnoreCase(functionName)) {
+ ColumnTransformer first = this.process(children.get(0), context);
+ return new GenericCodecColumnTransformer(
+ BLOB, first, CodecStrategiesFactory.XXHASH64, functionName, first.getType());
+
+ } else if (TableBuiltinScalarFunction.MURMUR3
+ .getFunctionName()
+ .equalsIgnoreCase(functionName)) {
+ ColumnTransformer first = this.process(children.get(0), context);
+ return new GenericCodecColumnTransformer(
+ BLOB, first, CodecStrategiesFactory.MURMUR3, functionName, first.getType());
+ } else if (TableBuiltinScalarFunction.TO_HEX.getFunctionName().equalsIgnoreCase(functionName)) {
+ ColumnTransformer first = this.process(children.get(0), context);
+ return new GenericCodecColumnTransformer(
+ STRING, first, CodecStrategiesFactory.TO_HEX, functionName, first.getType());
+ } else if (TableBuiltinScalarFunction.FROM_HEX
+ .getFunctionName()
+ .equalsIgnoreCase(functionName)) {
+ ColumnTransformer first = this.process(children.get(0), context);
+ return new GenericCodecColumnTransformer(
+ BLOB, first, CodecStrategiesFactory.FROM_HEX, functionName, first.getType());
+ } else if (TableBuiltinScalarFunction.REVERSE
+ .getFunctionName()
+ .equalsIgnoreCase(functionName)) {
+ ColumnTransformer first = this.process(children.get(0), context);
+ if (isCharType(first.getType())) {
+ return new GenericCodecColumnTransformer(
+ first.getType(),
+ first,
+ CodecStrategiesFactory.REVERSE_CHARS,
+ functionName,
+ first.getType());
+ } else {
+ return new GenericCodecColumnTransformer(
+ first.getType(),
+ first,
+ CodecStrategiesFactory.REVERSE_BYTES,
+ functionName,
+ first.getType());
+ }
+
+ } else if (TableBuiltinScalarFunction.HMAC_MD5
+ .getFunctionName()
+ .equalsIgnoreCase(functionName)) {
+ ColumnTransformer first = this.process(children.get(0), context);
+ if (isStringLiteral(children.get(1))) {
+ String key = ((StringLiteral) children.get(1)).getValue();
+ return new HmacConstantKeyColumnTransformer(
+ BLOB,
+ first,
+ createConstantKeyHmacMd5Strategy(key.getBytes(TSFileConfig.STRING_CHARSET)));
+ } else {
+ return new HmacColumnTransformer(
+ BLOB,
+ first,
+ this.process(children.get(1), context),
+ HmacStrategiesFactory.HMAC_MD5,
+ functionName,
+ first.getType());
+ }
+ } else if (TableBuiltinScalarFunction.HMAC_SHA1
+ .getFunctionName()
+ .equalsIgnoreCase(functionName)) {
+ ColumnTransformer first = this.process(children.get(0), context);
+ if (isStringLiteral(children.get(1))) {
+ String key = ((StringLiteral) children.get(1)).getValue();
+ return new HmacConstantKeyColumnTransformer(
+ BLOB,
+ first,
+ createConstantKeyHmacSha1Strategy(key.getBytes(TSFileConfig.STRING_CHARSET)));
+ } else {
+ return new HmacColumnTransformer(
+ BLOB,
+ first,
+ this.process(children.get(1), context),
+ HmacStrategiesFactory.HMAC_SHA1,
+ functionName,
+ first.getType());
+ }
+ } else if (TableBuiltinScalarFunction.HMAC_SHA256
+ .getFunctionName()
+ .equalsIgnoreCase(functionName)) {
+ ColumnTransformer first = this.process(children.get(0), context);
+ if (isStringLiteral(children.get(1))) {
+ String key = ((StringLiteral) children.get(1)).getValue();
+ return new HmacConstantKeyColumnTransformer(
+ BLOB,
+ first,
+ createConstantKeyHmacSha256Strategy(key.getBytes(TSFileConfig.STRING_CHARSET)));
+ } else {
+ return new HmacColumnTransformer(
+ BLOB,
+ first,
+ this.process(children.get(1), context),
+ HmacStrategiesFactory.HMAC_SHA256,
+ functionName,
+ first.getType());
+ }
+ } else if (TableBuiltinScalarFunction.HMAC_SHA512
+ .getFunctionName()
+ .equalsIgnoreCase(functionName)) {
+ ColumnTransformer first = this.process(children.get(0), context);
+ if (isStringLiteral(children.get(1))) {
+ String key = ((StringLiteral) children.get(1)).getValue();
+ return new HmacConstantKeyColumnTransformer(
+ BLOB,
+ first,
+ createConstantKeyHmacSha512Strategy(key.getBytes(TSFileConfig.STRING_CHARSET)));
+ } else {
+ return new HmacColumnTransformer(
+ BLOB,
+ first,
+ this.process(children.get(1), context),
+ HmacStrategiesFactory.HMAC_SHA512,
+ functionName,
+ first.getType());
+ }
+ } else if (TableBuiltinScalarFunction.TO_BIG_ENDIAN_32
+ .getFunctionName()
+ .equalsIgnoreCase(functionName)) {
+ ColumnTransformer first = this.process(children.get(0), context);
+ return new IntToBytesColumnTransformer(
+ BLOB, first, NumericCodecStrategiesFactory.TO_BIG_ENDIAN_32);
+ } else if (TableBuiltinScalarFunction.FROM_BIG_ENDIAN_32
+ .getFunctionName()
+ .equalsIgnoreCase(functionName)) {
+ ColumnTransformer first = this.process(children.get(0), context);
+ return new BytesToIntColumnTransformer(
+ INT32,
+ first,
+ NumericCodecStrategiesFactory.FROM_BIG_ENDIAN_32,
+ functionName,
+ first.getType());
+ } else if (TableBuiltinScalarFunction.TO_BIG_ENDIAN_64
+ .getFunctionName()
+ .equalsIgnoreCase(functionName)) {
+ ColumnTransformer first = this.process(children.get(0), context);
+ return new LongToBytesColumnTransformer(
+ BLOB, first, NumericCodecStrategiesFactory.TO_BIG_ENDIAN_64);
+ } else if (TableBuiltinScalarFunction.FROM_BIG_ENDIAN_64
+ .getFunctionName()
+ .equalsIgnoreCase(functionName)) {
+ ColumnTransformer first = this.process(children.get(0), context);
+ return new BytesToLongColumnTransformer(
+ INT64,
+ first,
+ NumericCodecStrategiesFactory.FROM_BIG_ENDIAN_64,
+ functionName,
+ first.getType());
+ } else if (TableBuiltinScalarFunction.TO_LITTLE_ENDIAN_32
+ .getFunctionName()
+ .equalsIgnoreCase(functionName)) {
+ ColumnTransformer first = this.process(children.get(0), context);
+ return new IntToBytesColumnTransformer(
+ BLOB, first, NumericCodecStrategiesFactory.TO_LITTLE_ENDIAN_32);
+ } else if (TableBuiltinScalarFunction.FROM_LITTLE_ENDIAN_32
+ .getFunctionName()
+ .equalsIgnoreCase(functionName)) {
+ ColumnTransformer first = this.process(children.get(0), context);
+ return new BytesToIntColumnTransformer(
+ INT32,
+ first,
+ NumericCodecStrategiesFactory.FROM_LITTLE_ENDIAN_32,
+ functionName,
+ first.getType());
+ } else if (TableBuiltinScalarFunction.TO_LITTLE_ENDIAN_64
+ .getFunctionName()
+ .equalsIgnoreCase(functionName)) {
+ ColumnTransformer first = this.process(children.get(0), context);
+ return new LongToBytesColumnTransformer(
+ BLOB, first, NumericCodecStrategiesFactory.TO_LITTLE_ENDIAN_64);
+ } else if (TableBuiltinScalarFunction.FROM_LITTLE_ENDIAN_64
+ .getFunctionName()
+ .equalsIgnoreCase(functionName)) {
+ ColumnTransformer first = this.process(children.get(0), context);
+ return new BytesToLongColumnTransformer(
+ INT64,
+ first,
+ NumericCodecStrategiesFactory.FROM_LITTLE_ENDIAN_64,
+ functionName,
+ first.getType());
+ } else if (TableBuiltinScalarFunction.TO_IEEE754_32
+ .getFunctionName()
+ .equalsIgnoreCase(functionName)) {
+ ColumnTransformer first = this.process(children.get(0), context);
+ return new FloatToBytesColumnTransformer(
+ BLOB, first, NumericCodecStrategiesFactory.TO_IEEE754_32_BIG_ENDIAN);
+ } else if (TableBuiltinScalarFunction.FROM_IEEE754_32
+ .getFunctionName()
+ .equalsIgnoreCase(functionName)) {
+ ColumnTransformer first = this.process(children.get(0), context);
+ return new BytesToFloatColumnTransformer(
+ FLOAT,
+ first,
+ NumericCodecStrategiesFactory.FROM_IEEE754_32_BIG_ENDIAN,
+ functionName,
+ first.getType());
+ } else if (TableBuiltinScalarFunction.TO_IEEE754_64
+ .getFunctionName()
+ .equalsIgnoreCase(functionName)) {
+ ColumnTransformer first = this.process(children.get(0), context);
+ return new DoubleToBytesColumnTransformer(
+ BLOB, first, NumericCodecStrategiesFactory.TO_IEEE754_64_BIG_ENDIAN);
+ } else if (TableBuiltinScalarFunction.FROM_IEEE754_64
+ .getFunctionName()
+ .equalsIgnoreCase(functionName)) {
+ ColumnTransformer first = this.process(children.get(0), context);
+ return new BytesToDoubleColumnTransformer(
+ DOUBLE,
+ first,
+ NumericCodecStrategiesFactory.FROM_IEEE754_64_BIG_ENDIAN,
+ functionName,
+ first.getType());
+ } else if (TableBuiltinScalarFunction.CRC32.getFunctionName().equalsIgnoreCase(functionName)) {
+ ColumnTransformer first = this.process(children.get(0), context);
+ return new CRC32Transformer(INT64, first);
+ } else if (TableBuiltinScalarFunction.SPOOKY_HASH_V2_32
+ .getFunctionName()
+ .equalsIgnoreCase(functionName)) {
+ ColumnTransformer first = this.process(children.get(0), context);
+ return new GenericCodecColumnTransformer(
+ BLOB, first, CodecStrategiesFactory.spooky_hash_v2_32, functionName, first.getType());
+ } else if (TableBuiltinScalarFunction.SPOOKY_HASH_V2_64
+ .getFunctionName()
+ .equalsIgnoreCase(functionName)) {
+ ColumnTransformer first = this.process(children.get(0), context);
+ return new GenericCodecColumnTransformer(
+ BLOB, first, CodecStrategiesFactory.spooky_hash_v2_64, functionName, first.getType());
+ } else if (TableBuiltinScalarFunction.LPAD.getFunctionName().equalsIgnoreCase(functionName)) {
+ return new LpadColumnTransformer(
+ BLOB,
+ this.process(children.get(0), context),
+ this.process(children.get(1), context),
+ this.process(children.get(2), context));
+ } else if (TableBuiltinScalarFunction.RPAD.getFunctionName().equalsIgnoreCase(functionName)) {
+ return new RpadColumnTransformer(
+ BLOB,
+ this.process(children.get(0), context),
+ this.process(children.get(1), context),
+ this.process(children.get(2), context));
} else {
// user defined function
if (TableUDFUtils.isScalarFunction(functionName)) {
diff --git a/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/plan/relational/metadata/TableMetadataImpl.java b/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/plan/relational/metadata/TableMetadataImpl.java
index a12add71ffc5..f83e52711385 100644
--- a/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/plan/relational/metadata/TableMetadataImpl.java
+++ b/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/plan/relational/metadata/TableMetadataImpl.java
@@ -65,7 +65,6 @@
import org.apache.iotdb.udf.api.relational.TableFunction;
import org.apache.tsfile.file.metadata.IDeviceID;
-import org.apache.tsfile.read.common.type.BlobType;
import org.apache.tsfile.read.common.type.StringType;
import org.apache.tsfile.read.common.type.Type;
import org.apache.tsfile.read.common.type.TypeFactory;
@@ -79,6 +78,7 @@
import static org.apache.iotdb.db.queryengine.transformation.dag.column.FailFunctionColumnTransformer.FAIL_FUNCTION_NAME;
import static org.apache.tsfile.read.common.type.BinaryType.TEXT;
+import static org.apache.tsfile.read.common.type.BlobType.BLOB;
import static org.apache.tsfile.read.common.type.BooleanType.BOOLEAN;
import static org.apache.tsfile.read.common.type.DateType.DATE;
import static org.apache.tsfile.read.common.type.DoubleType.DOUBLE;
@@ -611,6 +611,471 @@ && isIntegerNumber(argumentTypes.get(2)))) {
functionName));
}
return argumentTypes.get(0);
+ } else if (TableBuiltinScalarFunction.TO_BASE64
+ .getFunctionName()
+ .equalsIgnoreCase(functionName)) {
+ if (!(argumentTypes.size() == 1
+ && (isCharType(argumentTypes.get(0)) || isBlobType(argumentTypes.get(0))))) {
+ throw new SemanticException(
+ String.format(
+ "Scalar function %s only accepts one argument and it must be TEXT, STRING, or BLOB data type.",
+ functionName));
+ }
+ return STRING;
+ } else if (TableBuiltinScalarFunction.FROM_BASE64
+ .getFunctionName()
+ .equalsIgnoreCase(functionName)) {
+ if (argumentTypes.size() != 1 || !isCharType(argumentTypes.get(0))) {
+ throw new SemanticException(
+ String.format(
+ "Scalar function %s only accepts one argument and it must be TEXT or STRING data type.",
+ functionName));
+ }
+ return BLOB;
+ } else if (TableBuiltinScalarFunction.TO_BASE64URL
+ .getFunctionName()
+ .equalsIgnoreCase(functionName)) {
+ if (!(argumentTypes.size() == 1
+ && (isCharType(argumentTypes.get(0)) || isBlobType(argumentTypes.get(0))))) {
+ throw new SemanticException(
+ String.format(
+ "Scalar function %s only accepts one argument and it must be TEXT, STRING, or BLOB data type.",
+ functionName));
+ }
+ return STRING;
+ } else if (TableBuiltinScalarFunction.FROM_BASE64URL
+ .getFunctionName()
+ .equalsIgnoreCase(functionName)) {
+ if (argumentTypes.size() != 1 || !isCharType(argumentTypes.get(0))) {
+ throw new SemanticException(
+ String.format(
+ "Scalar function %s only accepts one argument and it must be TEXT or STRING data type.",
+ functionName));
+ }
+ return BLOB;
+ } else if (TableBuiltinScalarFunction.TO_BASE32
+ .getFunctionName()
+ .equalsIgnoreCase(functionName)) {
+ if (!(argumentTypes.size() == 1
+ && (isCharType(argumentTypes.get(0)) || isBlobType(argumentTypes.get(0))))) {
+ throw new SemanticException(
+ String.format(
+ "Scalar function %s only accepts one argument and it must be TEXT, STRING, or BLOB data type.",
+ functionName));
+ }
+ return STRING;
+ } else if (TableBuiltinScalarFunction.FROM_BASE32
+ .getFunctionName()
+ .equalsIgnoreCase(functionName)) {
+ if (argumentTypes.size() != 1 || !isCharType(argumentTypes.get(0))) {
+ throw new SemanticException(
+ String.format(
+ "Scalar function %s only accepts one argument and it must be TEXT or STRING data type.",
+ functionName));
+ }
+ return BLOB;
+ } else if (TableBuiltinScalarFunction.SHA256.getFunctionName().equalsIgnoreCase(functionName)) {
+ if (!(argumentTypes.size() == 1
+ && (isCharType(argumentTypes.get(0)) || isBlobType(argumentTypes.get(0))))) {
+ throw new SemanticException(
+ String.format(
+ "Scalar function %s only accepts one argument and it must be TEXT, STRING, or BLOB data type.",
+ functionName));
+ }
+ return BLOB;
+ } else if (TableBuiltinScalarFunction.SHA512.getFunctionName().equalsIgnoreCase(functionName)) {
+ if (!(argumentTypes.size() == 1
+ && (isCharType(argumentTypes.get(0)) || isBlobType(argumentTypes.get(0))))) {
+ throw new SemanticException(
+ String.format(
+ "Scalar function %s only accepts one argument and it must be TEXT, STRING, or BLOB data type.",
+ functionName));
+ }
+ return BLOB;
+ } else if (TableBuiltinScalarFunction.SHA1.getFunctionName().equalsIgnoreCase(functionName)) {
+ if (!(argumentTypes.size() == 1
+ && (isCharType(argumentTypes.get(0)) || isBlobType(argumentTypes.get(0))))) {
+ throw new SemanticException(
+ String.format(
+ "Scalar function %s only accepts one argument and it must be TEXT, STRING, or BLOB data type.",
+ functionName));
+ }
+ return BLOB;
+ } else if (TableBuiltinScalarFunction.MD5.getFunctionName().equalsIgnoreCase(functionName)) {
+ if (!(argumentTypes.size() == 1
+ && (isCharType(argumentTypes.get(0)) || isBlobType(argumentTypes.get(0))))) {
+ throw new SemanticException(
+ String.format(
+ "Scalar function %s only accepts one argument and it must be TEXT, STRING, or BLOB data type.",
+ functionName));
+ }
+ return BLOB;
+ } else if (TableBuiltinScalarFunction.XXHASH64
+ .getFunctionName()
+ .equalsIgnoreCase(functionName)) {
+ if (!(argumentTypes.size() == 1
+ && (isCharType(argumentTypes.get(0)) || isBlobType(argumentTypes.get(0))))) {
+ throw new SemanticException(
+ String.format(
+ "Scalar function %s only accepts one argument and it must be TEXT, STRING, or BLOB data type.",
+ functionName));
+ }
+ return BLOB;
+ } else if (TableBuiltinScalarFunction.MURMUR3
+ .getFunctionName()
+ .equalsIgnoreCase(functionName)) {
+ if (!(argumentTypes.size() == 1
+ && (isCharType(argumentTypes.get(0)) || isBlobType(argumentTypes.get(0))))) {
+ throw new SemanticException(
+ String.format(
+ "Scalar function %s only accepts one argument and it must be TEXT, STRING, or BLOB data type.",
+ functionName));
+ }
+ return BLOB;
+ } else if (TableBuiltinScalarFunction.TO_HEX.getFunctionName().equalsIgnoreCase(functionName)) {
+ if (!(argumentTypes.size() == 1
+ && (isCharType(argumentTypes.get(0)) || isBlobType(argumentTypes.get(0))))) {
+ throw new SemanticException(
+ String.format(
+ "Scalar function %s only accepts one argument and it must be TEXT, STRING, or BLOB data type.",
+ functionName));
+ }
+ return STRING;
+ } else if (TableBuiltinScalarFunction.FROM_HEX
+ .getFunctionName()
+ .equalsIgnoreCase(functionName)) {
+ if (argumentTypes.size() != 1 || !isCharType(argumentTypes.get(0))) {
+ throw new SemanticException(
+ String.format(
+ "Scalar function %s only accepts one argument and it must be TEXT or STRING data type.",
+ functionName));
+ }
+ return BLOB;
+ } else if (TableBuiltinScalarFunction.REVERSE
+ .getFunctionName()
+ .equalsIgnoreCase(functionName)) {
+ if (argumentTypes.size() != 1
+ || !(isCharType(argumentTypes.get(0)) || isBlobType(argumentTypes.get(0)))) {
+ throw new SemanticException(
+              String.format(
+                  "Scalar function %s only accepts one argument and it must be TEXT, STRING, or BLOB data type.",
+ functionName));
+ }
+ return argumentTypes.get(0);
+ } else if (TableBuiltinScalarFunction.HMAC_MD5.getFunctionName().equalsIgnoreCase(functionName)
+ || TableBuiltinScalarFunction.HMAC_SHA1.getFunctionName().equalsIgnoreCase(functionName)
+ || TableBuiltinScalarFunction.HMAC_SHA256.getFunctionName().equalsIgnoreCase(functionName)
+ || TableBuiltinScalarFunction.HMAC_SHA512
+ .getFunctionName()
+ .equalsIgnoreCase(functionName)) {
+ if (argumentTypes.size() != 2
+ || !(isCharType(argumentTypes.get(0)) || isBlobType(argumentTypes.get(0)))
+ || !isCharType(argumentTypes.get(1))) {
+ throw new SemanticException(
+              String.format(
+                  "Scalar function %s only accepts two arguments, first argument must be TEXT, STRING, or BLOB type, second argument must be TEXT or STRING type.",
+ functionName));
+ }
+ return BLOB;
+ } else if (TableBuiltinScalarFunction.TO_BIG_ENDIAN_32
+ .getFunctionName()
+ .equalsIgnoreCase(functionName)) {
+ if (argumentTypes.size() != 1 || !INT32.equals(argumentTypes.get(0))) {
+ throw new SemanticException(
+ String.format(
+ "Scalar function %s only accepts one argument and it must be Int32 data type.",
+ functionName));
+ }
+ return BLOB;
+ } else if (TableBuiltinScalarFunction.FROM_BIG_ENDIAN_32
+ .getFunctionName()
+ .equalsIgnoreCase(functionName)) {
+ if (argumentTypes.size() != 1 || !isBlobType(argumentTypes.get(0))) {
+ throw new SemanticException(
+ String.format(
+ "Scalar function %s only accepts one argument and it must be BLOB data type.",
+ functionName));
+ }
+ return INT32;
+ } else if (TableBuiltinScalarFunction.TO_BIG_ENDIAN_64
+ .getFunctionName()
+ .equalsIgnoreCase(functionName)) {
+ if (argumentTypes.size() != 1 || !INT64.equals(argumentTypes.get(0))) {
+ throw new SemanticException(
+ String.format(
+ "Scalar function %s only accepts one argument and it must be Int64 data type.",
+ functionName));
+ }
+ return BLOB;
+ } else if (TableBuiltinScalarFunction.FROM_BIG_ENDIAN_64
+ .getFunctionName()
+ .equalsIgnoreCase(functionName)) {
+ if (argumentTypes.size() != 1 || !isBlobType(argumentTypes.get(0))) {
+ throw new SemanticException(
+ String.format(
+ "Scalar function %s only accepts one argument and it must be BLOB data type.",
+ functionName));
+ }
+ return INT64;
+ } else if (TableBuiltinScalarFunction.TO_LITTLE_ENDIAN_32
+ .getFunctionName()
+ .equalsIgnoreCase(functionName)) {
+ if (argumentTypes.size() != 1 || !INT32.equals(argumentTypes.get(0))) {
+ throw new SemanticException(
+ String.format(
+ "Scalar function %s only accepts one argument and it must be Int32 data type.",
+ functionName));
+ }
+ return BLOB;
+ } else if (TableBuiltinScalarFunction.FROM_LITTLE_ENDIAN_32
+ .getFunctionName()
+ .equalsIgnoreCase(functionName)) {
+ if (argumentTypes.size() != 1 || !isBlobType(argumentTypes.get(0))) {
+ throw new SemanticException(
+ String.format(
+ "Scalar function %s only accepts one argument and it must be BLOB data type.",
+ functionName));
+ }
+ return INT32;
+ } else if (TableBuiltinScalarFunction.TO_LITTLE_ENDIAN_64
+ .getFunctionName()
+ .equalsIgnoreCase(functionName)) {
+ if (argumentTypes.size() != 1 || !INT64.equals(argumentTypes.get(0))) {
+ throw new SemanticException(
+ String.format(
+ "Scalar function %s only accepts one argument and it must be Int64 data type.",
+ functionName));
+ }
+ return BLOB;
+ } else if (TableBuiltinScalarFunction.FROM_LITTLE_ENDIAN_64
+ .getFunctionName()
+ .equalsIgnoreCase(functionName)) {
+ if (argumentTypes.size() != 1 || !isBlobType(argumentTypes.get(0))) {
+ throw new SemanticException(
+ String.format(
+ "Scalar function %s only accepts one argument and it must be BLOB data type.",
+ functionName));
+ }
+ return INT64;
+ } else if (TableBuiltinScalarFunction.TO_IEEE754_32
+ .getFunctionName()
+ .equalsIgnoreCase(functionName)) {
+ if (argumentTypes.size() != 1 || !FLOAT.equals(argumentTypes.get(0))) {
+ throw new SemanticException(
+ String.format(
+ "Scalar function %s only accepts one argument and it must be Float data type.",
+ functionName));
+ }
+ return BLOB;
+ } else if (TableBuiltinScalarFunction.FROM_IEEE754_32
+ .getFunctionName()
+ .equalsIgnoreCase(functionName)) {
+ if (argumentTypes.size() != 1 || !isBlobType(argumentTypes.get(0))) {
+ throw new SemanticException(
+ String.format(
+ "Scalar function %s only accepts one argument and it must be BLOB data type.",
+ functionName));
+ }
+ return FLOAT;
+ } else if (TableBuiltinScalarFunction.TO_IEEE754_64
+ .getFunctionName()
+ .equalsIgnoreCase(functionName)) {
+ if (argumentTypes.size() != 1 || !DOUBLE.equals(argumentTypes.get(0))) {
+ throw new SemanticException(
+ String.format(
+ "Scalar function %s only accepts one argument and it must be Double data type.",
+ functionName));
+ }
+ return BLOB;
+ } else if (TableBuiltinScalarFunction.FROM_IEEE754_64
+ .getFunctionName()
+ .equalsIgnoreCase(functionName)) {
+ if (argumentTypes.size() != 1 || !isBlobType(argumentTypes.get(0))) {
+ throw new SemanticException(
+ String.format(
+ "Scalar function %s only accepts one argument and it must be BLOB data type.",
+ functionName));
+ }
+ return DOUBLE;
+ } else if (TableBuiltinScalarFunction.CRC32.getFunctionName().equalsIgnoreCase(functionName)) {
+ if (argumentTypes.size() != 1
+ || !(isBlobType(argumentTypes.get(0)) || isCharType(argumentTypes.get(0)))) {
+ throw new SemanticException(
+ String.format(
+ "Scalar function %s only accepts one argument and it must be TEXT, STRING, or BLOB data type.",
+ functionName));
+ }
+ return INT64;
+ } else if (TableBuiltinScalarFunction.SPOOKY_HASH_V2_32
+ .getFunctionName()
+ .equalsIgnoreCase(functionName)
+ || TableBuiltinScalarFunction.SPOOKY_HASH_V2_64
+ .getFunctionName()
+ .equalsIgnoreCase(functionName)) {
+ if (argumentTypes.size() != 1
+ || !(isBlobType(argumentTypes.get(0)) || isCharType(argumentTypes.get(0)))) {
+ throw new SemanticException(
+ String.format(
+ "Scalar function %s only accepts one argument and it must be TEXT, STRING, or BLOB data type.",
+ functionName));
+ }
+ return BLOB;
+ } else if (TableBuiltinScalarFunction.TO_BIG_ENDIAN_32
+ .getFunctionName()
+ .equalsIgnoreCase(functionName)) {
+ if (argumentTypes.size() != 1 || !INT32.equals(argumentTypes.get(0))) {
+ throw new SemanticException(
+ String.format(
+ "Scalar function %s only accepts one argument and it must be Int32 data type.",
+ functionName));
+ }
+ return BLOB;
+ } else if (TableBuiltinScalarFunction.FROM_BIG_ENDIAN_32
+ .getFunctionName()
+ .equalsIgnoreCase(functionName)) {
+ if (argumentTypes.size() != 1 || !isBlobType(argumentTypes.get(0))) {
+ throw new SemanticException(
+ String.format(
+ "Scalar function %s only accepts one argument and it must be BLOB data type.",
+ functionName));
+ }
+ return INT32;
+ } else if (TableBuiltinScalarFunction.TO_BIG_ENDIAN_64
+ .getFunctionName()
+ .equalsIgnoreCase(functionName)) {
+ if (argumentTypes.size() != 1 || !INT64.equals(argumentTypes.get(0))) {
+ throw new SemanticException(
+ String.format(
+ "Scalar function %s only accepts one argument and it must be Int64 data type.",
+ functionName));
+ }
+ return BLOB;
+ } else if (TableBuiltinScalarFunction.FROM_BIG_ENDIAN_64
+ .getFunctionName()
+ .equalsIgnoreCase(functionName)) {
+ if (argumentTypes.size() != 1 || !isBlobType(argumentTypes.get(0))) {
+ throw new SemanticException(
+ String.format(
+ "Scalar function %s only accepts one argument and it must be BLOB data type.",
+ functionName));
+ }
+ return INT64;
+ } else if (TableBuiltinScalarFunction.TO_LITTLE_ENDIAN_32
+ .getFunctionName()
+ .equalsIgnoreCase(functionName)) {
+ if (argumentTypes.size() != 1 || !INT32.equals(argumentTypes.get(0))) {
+ throw new SemanticException(
+ String.format(
+ "Scalar function %s only accepts one argument and it must be Int32 data type.",
+ functionName));
+ }
+ return BLOB;
+ } else if (TableBuiltinScalarFunction.FROM_LITTLE_ENDIAN_32
+ .getFunctionName()
+ .equalsIgnoreCase(functionName)) {
+ if (argumentTypes.size() != 1 || !isBlobType(argumentTypes.get(0))) {
+ throw new SemanticException(
+ String.format(
+ "Scalar function %s only accepts one argument and it must be BLOB data type.",
+ functionName));
+ }
+ return INT32;
+ } else if (TableBuiltinScalarFunction.TO_LITTLE_ENDIAN_64
+ .getFunctionName()
+ .equalsIgnoreCase(functionName)) {
+ if (argumentTypes.size() != 1 || !INT64.equals(argumentTypes.get(0))) {
+ throw new SemanticException(
+ String.format(
+ "Scalar function %s only accepts one argument and it must be Int64 data type.",
+ functionName));
+ }
+ return BLOB;
+ } else if (TableBuiltinScalarFunction.FROM_LITTLE_ENDIAN_64
+ .getFunctionName()
+ .equalsIgnoreCase(functionName)) {
+ if (argumentTypes.size() != 1 || !isBlobType(argumentTypes.get(0))) {
+ throw new SemanticException(
+ String.format(
+ "Scalar function %s only accepts one argument and it must be BLOB data type.",
+ functionName));
+ }
+ return INT64;
+ } else if (TableBuiltinScalarFunction.TO_IEEE754_32
+ .getFunctionName()
+ .equalsIgnoreCase(functionName)) {
+ if (argumentTypes.size() != 1 || !FLOAT.equals(argumentTypes.get(0))) {
+ throw new SemanticException(
+ String.format(
+ "Scalar function %s only accepts one argument and it must be Float data type.",
+ functionName));
+ }
+ return BLOB;
+ } else if (TableBuiltinScalarFunction.FROM_IEEE754_32
+ .getFunctionName()
+ .equalsIgnoreCase(functionName)) {
+ if (argumentTypes.size() != 1 || !isBlobType(argumentTypes.get(0))) {
+ throw new SemanticException(
+ String.format(
+ "Scalar function %s only accepts one argument and it must be BLOB data type.",
+ functionName));
+ }
+ return FLOAT;
+ } else if (TableBuiltinScalarFunction.TO_IEEE754_64
+ .getFunctionName()
+ .equalsIgnoreCase(functionName)) {
+ if (argumentTypes.size() != 1 || !DOUBLE.equals(argumentTypes.get(0))) {
+ throw new SemanticException(
+ String.format(
+ "Scalar function %s only accepts one argument and it must be Double data type.",
+ functionName));
+ }
+ return BLOB;
+ } else if (TableBuiltinScalarFunction.FROM_IEEE754_64
+ .getFunctionName()
+ .equalsIgnoreCase(functionName)) {
+ if (argumentTypes.size() != 1 || !isBlobType(argumentTypes.get(0))) {
+ throw new SemanticException(
+ String.format(
+ "Scalar function %s only accepts one argument and it must be BLOB data type.",
+ functionName));
+ }
+ return DOUBLE;
+ } else if (TableBuiltinScalarFunction.CRC32.getFunctionName().equalsIgnoreCase(functionName)) {
+ if (argumentTypes.size() != 1
+ || !(isBlobType(argumentTypes.get(0)) || isCharType(argumentTypes.get(0)))) {
+ throw new SemanticException(
+ String.format(
+ "Scalar function %s only accepts one argument and it must be TEXT, STRING, or BLOB data type.",
+ functionName));
+ }
+ return INT64;
+ } else if (TableBuiltinScalarFunction.SPOOKY_HASH_V2_32
+ .getFunctionName()
+ .equalsIgnoreCase(functionName)
+ || TableBuiltinScalarFunction.SPOOKY_HASH_V2_64
+ .getFunctionName()
+ .equalsIgnoreCase(functionName)) {
+ if (argumentTypes.size() != 1
+ || !(isBlobType(argumentTypes.get(0)) || isCharType(argumentTypes.get(0)))) {
+ throw new SemanticException(
+ String.format(
+ "Scalar function %s only accepts one argument and it must be TEXT, STRING, or BLOB data type.",
+ functionName));
+ }
+ return BLOB;
+ } else if (TableBuiltinScalarFunction.LPAD.getFunctionName().equalsIgnoreCase(functionName)
+ || TableBuiltinScalarFunction.RPAD.getFunctionName().equalsIgnoreCase(functionName)) {
+ if (argumentTypes.size() != 3
+ || !isBlobType(argumentTypes.get(0))
+ || !isIntegerNumber(argumentTypes.get(1))
+ || !isBlobType(argumentTypes.get(2))) {
+ throw new SemanticException(
+              String.format(
+                  "Scalar function %s only accepts three arguments, first argument must be BLOB type, "
+                      + "second argument must be int32 or int64 type, third argument must be BLOB type.",
+ functionName));
+ }
+ return BLOB;
}
// builtin aggregation function
@@ -1037,7 +1502,7 @@ public static boolean isCharType(Type type) {
}
public static boolean isBlobType(Type type) {
- return BlobType.BLOB.equals(type);
+ return BLOB.equals(type);
}
public static boolean isBool(Type type) {
diff --git a/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/transformation/dag/column/binary/HmacColumnTransformer.java b/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/transformation/dag/column/binary/HmacColumnTransformer.java
new file mode 100644
index 000000000000..a3872608a326
--- /dev/null
+++ b/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/transformation/dag/column/binary/HmacColumnTransformer.java
@@ -0,0 +1,108 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.iotdb.db.queryengine.transformation.dag.column.binary;
+
+import org.apache.iotdb.db.exception.sql.SemanticException;
+import org.apache.iotdb.db.queryengine.transformation.dag.column.ColumnTransformer;
+import org.apache.iotdb.db.queryengine.transformation.dag.column.binary.strategies.HmacStrategy;
+
+import org.apache.tsfile.block.column.Column;
+import org.apache.tsfile.block.column.ColumnBuilder;
+import org.apache.tsfile.read.common.type.Type;
+import org.apache.tsfile.utils.Binary;
+
+import static org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar.util.TransformerDebugUtils.generateOriginalValue;
+
+public class HmacColumnTransformer extends BinaryColumnTransformer {
+
+ private final HmacStrategy hmacStrategy;
+ private final String functionName;
+ private final Type inputType;
+
+ public HmacColumnTransformer(
+ Type returnType,
+ ColumnTransformer leftTransformer,
+ ColumnTransformer rightTransformer,
+ HmacStrategy hmacStrategy,
+ String functionName,
+ Type inputType) {
+ super(returnType, leftTransformer, rightTransformer);
+ this.hmacStrategy = hmacStrategy;
+ this.functionName = functionName;
+ this.inputType = inputType;
+ }
+
+ @Override
+ protected void doTransform(
+ Column leftColumn, Column rightColumn, ColumnBuilder builder, int positionCount) {
+ for (int i = 0; i < positionCount; i++) {
+ if (leftColumn.isNull(i) || rightColumn.isNull(i)) {
+ builder.appendNull();
+ } else {
+ byte[] data = leftColumn.getBinary(i).getValues();
+ byte[] key = rightColumn.getBinary(i).getValues();
+ byte[] hmacBytes;
+ try {
+ hmacBytes = hmacStrategy.hmacTransform(data, key);
+ } catch (IllegalArgumentException e) {
+ String errorMessage =
+ String.format(
+ "Failed to execute function '%s' due to an invalid input format. the value '%s' corresponding to a empty key, the empty key is not allowed in HMAC operation.",
+ functionName, generateOriginalValue(data, inputType));
+ throw new SemanticException(errorMessage);
+ }
+ builder.writeBinary(new Binary(hmacBytes));
+ }
+ }
+ }
+
+ @Override
+ protected void doTransform(
+ Column leftColumn,
+ Column rightColumn,
+ ColumnBuilder builder,
+ int positionCount,
+ boolean[] selection) {
+
+ for (int i = 0; i < positionCount; i++) {
+ if (selection[i] && !leftColumn.isNull(i) && !rightColumn.isNull(i)) {
+ byte[] data = leftColumn.getBinary(i).getValues();
+ byte[] key = rightColumn.getBinary(i).getValues();
+ byte[] hmacBytes;
+ try {
+ hmacBytes = hmacStrategy.hmacTransform(data, key);
+
+ } catch (IllegalArgumentException e) {
+ String errorMessage =
+ String.format(
+ "Failed to execute function '%s' due to an invalid input format. the value '%s' corresponding to a empty key, the empty key is not allowed in HMAC operation.",
+ functionName, generateOriginalValue(data, inputType));
+ throw new SemanticException(errorMessage);
+ }
+ builder.writeBinary(new Binary(hmacBytes));
+ } else {
+ builder.appendNull();
+ }
+ }
+ }
+
+ @Override
+ protected void checkType() {}
+}
diff --git a/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/transformation/dag/column/binary/factory/HmacStrategiesFactory.java b/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/transformation/dag/column/binary/factory/HmacStrategiesFactory.java
new file mode 100644
index 000000000000..9228c728eb2e
--- /dev/null
+++ b/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/transformation/dag/column/binary/factory/HmacStrategiesFactory.java
@@ -0,0 +1,93 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.iotdb.db.queryengine.transformation.dag.column.binary.factory;
+
+import org.apache.iotdb.db.exception.sql.SemanticException;
+import org.apache.iotdb.db.queryengine.transformation.dag.column.binary.strategies.HmacStrategy;
+
+import com.google.common.hash.HashFunction;
+import com.google.common.hash.Hashing;
+
+public final class HmacStrategiesFactory {
+
+ private HmacStrategiesFactory() {}
+
+ // --- variable keys HmacStrategy instances ---
+ // keypoint: create the HashFunction with the variable key every time
+ public static final HmacStrategy HMAC_MD5 =
+ (data, key) -> Hashing.hmacMd5(key).hashBytes(data).asBytes();
+
+ public static final HmacStrategy HMAC_SHA1 =
+ (data, key) -> Hashing.hmacSha1(key).hashBytes(data).asBytes();
+
+ public static final HmacStrategy HMAC_SHA256 =
+ (data, key) -> Hashing.hmacSha256(key).hashBytes(data).asBytes();
+
+ public static final HmacStrategy HMAC_SHA512 =
+ (data, key) -> Hashing.hmacSha512(key).hashBytes(data).asBytes();
+
+ // ---static factory methods for creating HmacStrategy with a constant key---
+ // keypoint: precompute the HashFunction with the constant key
+ // the ignoredKey parameter is ignored because we use the constantKey
+
+ public static HmacStrategy createConstantKeyHmacMd5Strategy(byte[] constantKey) {
+ final HashFunction hashFunction;
+ try {
+ hashFunction = Hashing.hmacMd5(constantKey);
+ } catch (IllegalArgumentException e) {
+ throw new SemanticException(
+ "Failed to execute function hmac_md5 due to an invalid input format, the empty key is not allowed in HMAC operation.");
+ }
+ return (data, ignoredKey) -> hashFunction.hashBytes(data).asBytes();
+ }
+
+ public static HmacStrategy createConstantKeyHmacSha1Strategy(byte[] constantKey) {
+ final HashFunction hashFunction;
+ try {
+ hashFunction = Hashing.hmacSha1(constantKey);
+ } catch (IllegalArgumentException e) {
+ throw new SemanticException(
+ "Failed to execute function hmac_sha1 due to an invalid input format, the empty key is not allowed in HMAC operation.");
+ }
+ return (data, ignoredKey) -> hashFunction.hashBytes(data).asBytes();
+ }
+
+ public static HmacStrategy createConstantKeyHmacSha256Strategy(byte[] constantKey) {
+ final HashFunction hashFunction;
+ try {
+ hashFunction = Hashing.hmacSha256(constantKey);
+ } catch (IllegalArgumentException e) {
+ throw new SemanticException(
+ "Failed to execute function hmac_sha256 due to an invalid input format, the empty key is not allowed in HMAC operation.");
+ }
+ return (data, ignoredKey) -> hashFunction.hashBytes(data).asBytes();
+ }
+
+ public static HmacStrategy createConstantKeyHmacSha512Strategy(byte[] constantKey) {
+ final HashFunction hashFunction;
+ try {
+ hashFunction = Hashing.hmacSha512(constantKey);
+ } catch (IllegalArgumentException e) {
+ throw new SemanticException(
+ "Failed to execute function hmac_sha512 due to an invalid input format, the empty key is not allowed in HMAC operation.");
+ }
+ return (data, ignoredKey) -> hashFunction.hashBytes(data).asBytes();
+ }
+}
diff --git a/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/transformation/dag/column/binary/strategies/HmacStrategy.java b/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/transformation/dag/column/binary/strategies/HmacStrategy.java
new file mode 100644
index 000000000000..01f83cce52b7
--- /dev/null
+++ b/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/transformation/dag/column/binary/strategies/HmacStrategy.java
@@ -0,0 +1,25 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.iotdb.db.queryengine.transformation.dag.column.binary.strategies;
+
+/** Strategy for computing an HMAC (keyed-hash message authentication code) digest. */
+@FunctionalInterface
+public interface HmacStrategy {
+  /**
+   * Computes the HMAC of {@code data} keyed by {@code key}.
+   *
+   * @param data the message bytes to authenticate
+   * @param key the secret key bytes
+   * @return the raw HMAC digest bytes
+   */
+  byte[] hmacTransform(byte[] data, byte[] key);
+}
diff --git a/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/transformation/dag/column/ternary/LpadColumnTransformer.java b/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/transformation/dag/column/ternary/LpadColumnTransformer.java
new file mode 100644
index 000000000000..db4d17d75f19
--- /dev/null
+++ b/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/transformation/dag/column/ternary/LpadColumnTransformer.java
@@ -0,0 +1,92 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.iotdb.db.queryengine.transformation.dag.column.ternary;
+
+import org.apache.iotdb.db.queryengine.transformation.dag.column.ColumnTransformer;
+import org.apache.iotdb.db.queryengine.transformation.dag.column.ternary.utils.BytePaddingUtils;
+
+import org.apache.tsfile.block.column.Column;
+import org.apache.tsfile.block.column.ColumnBuilder;
+import org.apache.tsfile.read.common.type.Type;
+import org.apache.tsfile.utils.Binary;
+
+/**
+ * Ternary column transformer implementing {@code lpad(input, targetLength, padding)} over BLOB
+ * values. Per-row semantics are delegated to {@code BytePaddingUtils.padBytes}; a row yields NULL
+ * when any of the three arguments is NULL.
+ *
+ * <p>NOTE(review): the constant {@code 0} passed to padBytes presumably selects the left-pad
+ * variant (RpadColumnTransformer is the mirror class) — confirm against BytePaddingUtils.
+ * NOTE(review): the length column is read with {@code getLong(i)}, but the metadata check admits
+ * an INT32 length argument — confirm INT32 columns support {@code getLong}.
+ */
+public class LpadColumnTransformer extends TernaryColumnTransformer {
+
+  public LpadColumnTransformer(
+      Type returnType,
+      ColumnTransformer firstColumnTransformer,
+      ColumnTransformer secondColumnTransformer,
+      ColumnTransformer thirdColumnTransformer) {
+    super(returnType, firstColumnTransformer, secondColumnTransformer, thirdColumnTransformer);
+  }
+
+  @Override
+  protected void doTransform(
+      Column inputData,
+      Column targetLength,
+      Column paddingData,
+      ColumnBuilder builder,
+      int positionCount) {
+    for (int i = 0; i < positionCount; i++) {
+      // NULL in any argument produces a NULL result for the row.
+      if (!inputData.isNull(i) && !targetLength.isNull(i) && !paddingData.isNull(i)) {
+        byte[] bytes =
+            BytePaddingUtils.padBytes(
+                inputData.getBinary(i).getValues(),
+                targetLength.getLong(i),
+                paddingData.getBinary(i).getValues(),
+                0,
+                "Lpad");
+        builder.writeBinary(new Binary(bytes));
+      } else {
+        builder.appendNull();
+      }
+    }
+  }
+
+  @Override
+  protected void doTransform(
+      Column inputData,
+      Column targetLength,
+      Column paddingData,
+      ColumnBuilder builder,
+      int positionCount,
+      boolean[] selection) {
+    for (int i = 0; i < positionCount; i++) {
+      // Same as above, but rows deselected by the filter mask also yield NULL.
+      if (selection[i]
+          && !inputData.isNull(i)
+          && !targetLength.isNull(i)
+          && !paddingData.isNull(i)) {
+        byte[] bytes =
+            BytePaddingUtils.padBytes(
+                inputData.getBinary(i).getValues(),
+                targetLength.getLong(i),
+                paddingData.getBinary(i).getValues(),
+                0,
+                "Lpad");
+        builder.writeBinary(new Binary(bytes));
+      } else {
+        builder.appendNull();
+      }
+    }
+  }
+
+  @Override
+  protected void checkType() {}
+}
diff --git a/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/transformation/dag/column/ternary/RpadColumnTransformer.java b/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/transformation/dag/column/ternary/RpadColumnTransformer.java
new file mode 100644
index 000000000000..ea05be1319a6
--- /dev/null
+++ b/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/transformation/dag/column/ternary/RpadColumnTransformer.java
@@ -0,0 +1,92 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.iotdb.db.queryengine.transformation.dag.column.ternary;
+
+import org.apache.iotdb.db.queryengine.transformation.dag.column.ColumnTransformer;
+import org.apache.iotdb.db.queryengine.transformation.dag.column.ternary.utils.BytePaddingUtils;
+
+import org.apache.tsfile.block.column.Column;
+import org.apache.tsfile.block.column.ColumnBuilder;
+import org.apache.tsfile.read.common.type.Type;
+import org.apache.tsfile.utils.Binary;
+
+public class RpadColumnTransformer extends TernaryColumnTransformer {
+
+ public RpadColumnTransformer(
+ Type returnType,
+ ColumnTransformer firstColumnTransformer,
+ ColumnTransformer secondColumnTransformer,
+ ColumnTransformer thirdColumnTransformer) {
+ super(returnType, firstColumnTransformer, secondColumnTransformer, thirdColumnTransformer);
+ }
+
+ @Override
+ protected void doTransform(
+ Column inputData,
+ Column targetLength,
+ Column paddingData,
+ ColumnBuilder builder,
+ int positionCount) {
+ for (int i = 0; i < positionCount; i++) {
+ if (!inputData.isNull(i) && !targetLength.isNull(i) && !paddingData.isNull(i)) {
+ byte[] bytes =
+ BytePaddingUtils.padBytes(
+ inputData.getBinary(i).getValues(),
+ targetLength.getLong(i),
+ paddingData.getBinary(i).getValues(),
+ inputData.getBinary(i).getValues().length,
+ "Rpad");
+ builder.writeBinary(new Binary(bytes));
+ } else {
+ builder.appendNull();
+ }
+ }
+ }
+
+ @Override
+ protected void doTransform(
+ Column inputData,
+ Column targetLength,
+ Column paddingData,
+ ColumnBuilder builder,
+ int positionCount,
+ boolean[] selection) {
+ for (int i = 0; i < positionCount; i++) {
+ if (selection[i]
+ && !inputData.isNull(i)
+ && !targetLength.isNull(i)
+ && !paddingData.isNull(i)) {
+ byte[] bytes =
+ BytePaddingUtils.padBytes(
+ inputData.getBinary(i).getValues(),
+ targetLength.getLong(i),
+ paddingData.getBinary(i).getValues(),
+ inputData.getBinary(i).getValues().length,
+ "Rpad");
+ builder.writeBinary(new Binary(bytes));
+ } else {
+ builder.appendNull();
+ }
+ }
+ }
+
+ @Override
+ protected void checkType() {}
+}
diff --git a/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/transformation/dag/column/ternary/TernaryColumnTransformer.java b/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/transformation/dag/column/ternary/TernaryColumnTransformer.java
index 1c4dbe4985bc..3a9dccd8e8fd 100644
--- a/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/transformation/dag/column/ternary/TernaryColumnTransformer.java
+++ b/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/transformation/dag/column/ternary/TernaryColumnTransformer.java
@@ -46,7 +46,7 @@ protected TernaryColumnTransformer(
}
@Override
- protected void evaluate() {
+ public void evaluate() {
firstColumnTransformer.tryEvaluate();
secondColumnTransformer.tryEvaluate();
thirdColumnTransformer.tryEvaluate();
diff --git a/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/transformation/dag/column/ternary/utils/BytePaddingUtils.java b/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/transformation/dag/column/ternary/utils/BytePaddingUtils.java
new file mode 100644
index 000000000000..1e5f3ed20a87
--- /dev/null
+++ b/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/transformation/dag/column/ternary/utils/BytePaddingUtils.java
@@ -0,0 +1,87 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.iotdb.db.queryengine.transformation.dag.column.ternary.utils;
+
+import org.apache.iotdb.db.exception.sql.SemanticException;
+
+public class BytePaddingUtils {
+
+ private static String HexToString(byte[] inputBytes) {
+ StringBuilder hexString = new StringBuilder("0x");
+ for (byte inputByte : inputBytes) {
+ hexString.append(String.format("%02x", inputByte));
+ }
+ return hexString.toString();
+ }
+
+ // support for the lpad and rpad function
+ public static byte[] padBytes(
+ byte[] originBytes,
+ long targetLength,
+ byte[] paddingByte,
+ int paddingOffset,
+ String functionName) {
+
+ if (targetLength < 0 || targetLength > Integer.MAX_VALUE) {
+ throw new SemanticException(
+ String.format(
+ "Failed to execute function '%s' due to the value %s corresponding to a invalid target size, the allowed range is [0, %d].",
+ functionName, HexToString(originBytes), Integer.MAX_VALUE));
+ }
+
+ if (paddingByte.length == 0) {
+ throw new SemanticException(
+ String.format(
+ "Failed to execute function '%s' due the value %s corresponding to a empty padding string.",
+ functionName, HexToString(originBytes)));
+ }
+
+ int inputLength = originBytes.length;
+ int resultLength = (int) targetLength;
+
+ if (inputLength == resultLength) {
+ return originBytes;
+ }
+ if (inputLength > resultLength) {
+ byte[] resultBytes = new byte[resultLength];
+ System.arraycopy(originBytes, 0, resultBytes, 0, resultLength);
+ return resultBytes;
+ }
+
+ // copy the existing bytes to the result bytes
+ byte[] resultBytes = new byte[resultLength];
+ int fillLength = resultLength - inputLength;
+ int startIndex = (paddingOffset + fillLength) % resultLength;
+ System.arraycopy(originBytes, 0, resultBytes, startIndex, inputLength);
+
+ // fill the remaining bytes with the padding bytes
+ int byteIndex = paddingOffset;
+ for (int i = 0; i < fillLength / paddingByte.length; i++) {
+ System.arraycopy(paddingByte, 0, resultBytes, byteIndex, paddingByte.length);
+ byteIndex += paddingByte.length;
+ }
+
+ // fill the last few bytes
+ System.arraycopy(
+ paddingByte, 0, resultBytes, byteIndex, paddingOffset + fillLength - byteIndex);
+
+ return resultBytes;
+ }
+}
diff --git a/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/transformation/dag/column/unary/scalar/BytesToDoubleColumnTransformer.java b/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/transformation/dag/column/unary/scalar/BytesToDoubleColumnTransformer.java
new file mode 100644
index 000000000000..e62fdfcbe237
--- /dev/null
+++ b/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/transformation/dag/column/unary/scalar/BytesToDoubleColumnTransformer.java
@@ -0,0 +1,78 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar;
+
+import org.apache.iotdb.db.exception.sql.SemanticException;
+import org.apache.iotdb.db.queryengine.transformation.dag.column.ColumnTransformer;
+import org.apache.iotdb.db.queryengine.transformation.dag.column.unary.UnaryColumnTransformer;
+import org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar.factory.NumericCodecStrategiesFactory;
+
+import org.apache.tsfile.block.column.Column;
+import org.apache.tsfile.block.column.ColumnBuilder;
+import org.apache.tsfile.read.common.type.Type;
+
+import static org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar.util.TransformerDebugUtils.generateOriginalValue;
+
+public class BytesToDoubleColumnTransformer extends UnaryColumnTransformer {
+
+ private final NumericCodecStrategiesFactory.BytesToDoubleStrategy bytesToDoubleStrategy;
+ private final String functionName;
+ private final Type inputType;
+
+ public BytesToDoubleColumnTransformer(
+ Type returnType,
+ ColumnTransformer childColumnTransformer,
+ NumericCodecStrategiesFactory.BytesToDoubleStrategy bytesToDoubleStrategy,
+ String functionName,
+ Type inputType) {
+ super(returnType, childColumnTransformer);
+ this.bytesToDoubleStrategy = bytesToDoubleStrategy;
+ this.functionName = functionName;
+ this.inputType = inputType;
+ }
+
+ @Override
+ protected void doTransform(Column column, ColumnBuilder columnBuilder) {
+ doTransform(column, columnBuilder, null);
+ }
+
+ @Override
+ protected void doTransform(Column column, ColumnBuilder columnBuilder, boolean[] selection) {
+ for (int i = 0, n = column.getPositionCount(); i < n; i++) {
+ if (column.isNull(i) || (selection != null && !selection[i])) {
+ columnBuilder.appendNull();
+ continue;
+ }
+
+ byte[] inputBytes = column.getBinary(i).getValues();
+ try {
+ double outputValue = bytesToDoubleStrategy.numericCodeCTransform(inputBytes);
+ columnBuilder.writeDouble(outputValue);
+ } catch (SemanticException e) {
+ String problematicValue = generateOriginalValue(inputBytes, inputType);
+ String errorMessage =
+ String.format(
+ "Failed to execute function '%s' due to an invalid input format. Problematic value: %s",
+ functionName, problematicValue);
+ throw new SemanticException(errorMessage);
+ }
+ }
+ }
+}
diff --git a/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/transformation/dag/column/unary/scalar/BytesToFloatColumnTransformer.java b/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/transformation/dag/column/unary/scalar/BytesToFloatColumnTransformer.java
new file mode 100644
index 000000000000..15af6deed2df
--- /dev/null
+++ b/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/transformation/dag/column/unary/scalar/BytesToFloatColumnTransformer.java
@@ -0,0 +1,79 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar;
+
+import org.apache.iotdb.db.exception.sql.SemanticException;
+import org.apache.iotdb.db.queryengine.transformation.dag.column.ColumnTransformer;
+import org.apache.iotdb.db.queryengine.transformation.dag.column.unary.UnaryColumnTransformer;
+import org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar.factory.NumericCodecStrategiesFactory;
+
+import org.apache.tsfile.block.column.Column;
+import org.apache.tsfile.block.column.ColumnBuilder;
+import org.apache.tsfile.read.common.type.Type;
+
+import static org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar.util.TransformerDebugUtils.generateOriginalValue;
+
+public class BytesToFloatColumnTransformer extends UnaryColumnTransformer {
+
+ private final NumericCodecStrategiesFactory.BytesToFloatStrategy bytesToFloatStrategy;
+ private final String functionName;
+ private final Type inputType;
+
+ public BytesToFloatColumnTransformer(
+ Type returnType,
+ ColumnTransformer childColumnTransformer,
+ NumericCodecStrategiesFactory.BytesToFloatStrategy bytesToFloatStrategy,
+ String functionName,
+ Type inputType) {
+ super(returnType, childColumnTransformer);
+ this.bytesToFloatStrategy = bytesToFloatStrategy;
+ this.functionName = functionName;
+ this.inputType = inputType;
+ }
+
+ @Override
+ protected void doTransform(Column column, ColumnBuilder columnBuilder) {
+ doTransform(column, columnBuilder, null);
+ }
+
+ @Override
+ protected void doTransform(Column column, ColumnBuilder columnBuilder, boolean[] selection) {
+ for (int i = 0, n = column.getPositionCount(); i < n; i++) {
+ if (column.isNull(i) || (selection != null && !selection[i])) {
+ columnBuilder.appendNull();
+ continue;
+ }
+
+ byte[] inputBytes = column.getBinary(i).getValues();
+ try {
+ float outputValue = bytesToFloatStrategy.numericCodeCTransform(inputBytes);
+ columnBuilder.writeFloat(outputValue);
+
+ } catch (SemanticException e) {
+ String problematicValue = generateOriginalValue(inputBytes, inputType);
+ String errorMessage =
+ String.format(
+ "Failed to execute function '%s' due to an invalid input format. Problematic value: %s",
+ functionName, problematicValue);
+ throw new SemanticException(errorMessage);
+ }
+ }
+ }
+}
diff --git a/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/transformation/dag/column/unary/scalar/BytesToIntColumnTransformer.java b/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/transformation/dag/column/unary/scalar/BytesToIntColumnTransformer.java
new file mode 100644
index 000000000000..0e42a3a713af
--- /dev/null
+++ b/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/transformation/dag/column/unary/scalar/BytesToIntColumnTransformer.java
@@ -0,0 +1,81 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar;
+
+import org.apache.iotdb.db.exception.sql.SemanticException;
+import org.apache.iotdb.db.queryengine.transformation.dag.column.ColumnTransformer;
+import org.apache.iotdb.db.queryengine.transformation.dag.column.unary.UnaryColumnTransformer;
+import org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar.factory.NumericCodecStrategiesFactory;
+
+import org.apache.tsfile.block.column.Column;
+import org.apache.tsfile.block.column.ColumnBuilder;
+import org.apache.tsfile.read.common.type.Type;
+
+import static org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar.util.TransformerDebugUtils.generateOriginalValue;
+
+public class BytesToIntColumnTransformer extends UnaryColumnTransformer {
+
+ private final NumericCodecStrategiesFactory.BytesToIntStrategy bytesToIntStrategy;
+ private final String functionName;
+ private final Type inputType;
+
+ public BytesToIntColumnTransformer(
+ Type returnType,
+ ColumnTransformer childColumnTransformer,
+ NumericCodecStrategiesFactory.BytesToIntStrategy bytesToIntStrategy,
+ String functionName,
+ Type inputType) {
+ super(returnType, childColumnTransformer);
+ this.bytesToIntStrategy = bytesToIntStrategy;
+ this.functionName = functionName;
+ this.inputType = inputType;
+ }
+
+ @Override
+ protected void doTransform(Column column, ColumnBuilder columnBuilder) {
+ doTransform(column, columnBuilder, null);
+ }
+
+ @Override
+ protected void doTransform(Column column, ColumnBuilder columnBuilder, boolean[] selection) {
+ for (int i = 0, n = column.getPositionCount(); i < n; i++) {
+ if (column.isNull(i) || (selection != null && !selection[i])) {
+ columnBuilder.appendNull();
+ continue;
+ }
+
+ byte[] inputBytes = column.getBinary(i).getValues();
+ try {
+ // use the composed Codec strategy to perform the core
+ int outputValue = bytesToIntStrategy.numericCodeCTransform(inputBytes);
+ columnBuilder.writeInt(outputValue);
+
+ } catch (SemanticException e) {
+ // show the original value in the error message to help users debug.
+ String problematicValue = generateOriginalValue(inputBytes, inputType);
+ String errorMessage =
+ String.format(
+ "Failed to execute function '%s' due to an invalid input format. Problematic value: %s",
+ functionName, problematicValue);
+ throw new SemanticException(errorMessage);
+ }
+ }
+ }
+}
diff --git a/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/transformation/dag/column/unary/scalar/BytesToLongColumnTransformer.java b/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/transformation/dag/column/unary/scalar/BytesToLongColumnTransformer.java
new file mode 100644
index 000000000000..dcf0e4a08653
--- /dev/null
+++ b/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/transformation/dag/column/unary/scalar/BytesToLongColumnTransformer.java
@@ -0,0 +1,80 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar;
+
+import org.apache.iotdb.db.exception.sql.SemanticException;
+import org.apache.iotdb.db.queryengine.transformation.dag.column.ColumnTransformer;
+import org.apache.iotdb.db.queryengine.transformation.dag.column.unary.UnaryColumnTransformer;
+import org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar.factory.NumericCodecStrategiesFactory;
+
+import org.apache.tsfile.block.column.Column;
+import org.apache.tsfile.block.column.ColumnBuilder;
+import org.apache.tsfile.read.common.type.Type;
+
+import static org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar.util.TransformerDebugUtils.generateOriginalValue;
+
+/** A transformer that converts byte array representations to long values */
+public class BytesToLongColumnTransformer extends UnaryColumnTransformer {
+
+ private final NumericCodecStrategiesFactory.BytesToLongStrategy bytesToLongStrategy;
+ private final String functionName;
+ private final Type inputType;
+
+ public BytesToLongColumnTransformer(
+ Type returnType,
+ ColumnTransformer childColumnTransformer,
+ NumericCodecStrategiesFactory.BytesToLongStrategy bytesToLongStrategy,
+ String functionName,
+ Type inputType) {
+ super(returnType, childColumnTransformer);
+ this.bytesToLongStrategy = bytesToLongStrategy;
+ this.functionName = functionName;
+ this.inputType = inputType;
+ }
+
+ @Override
+ protected void doTransform(Column column, ColumnBuilder columnBuilder) {
+ doTransform(column, columnBuilder, null);
+ }
+
+ @Override
+ protected void doTransform(Column column, ColumnBuilder columnBuilder, boolean[] selection) {
+ for (int i = 0, n = column.getPositionCount(); i < n; i++) {
+ if (column.isNull(i) || (selection != null && !selection[i])) {
+ columnBuilder.appendNull();
+ continue;
+ }
+
+ byte[] inputBytes = column.getBinary(i).getValues();
+ try {
+ long outputValue = bytesToLongStrategy.numericCodeCTransform(inputBytes);
+ columnBuilder.writeLong(outputValue);
+
+ } catch (SemanticException e) {
+ String problematicValue = generateOriginalValue(inputBytes, inputType);
+ String errorMessage =
+ String.format(
+ "Failed to execute function '%s' due to an invalid input format. Problematic value: %s",
+ functionName, problematicValue);
+ throw new SemanticException(errorMessage);
+ }
+ }
+ }
+}
diff --git a/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/transformation/dag/column/unary/scalar/CRC32Transformer.java b/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/transformation/dag/column/unary/scalar/CRC32Transformer.java
new file mode 100644
index 000000000000..d7e9dccb62a9
--- /dev/null
+++ b/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/transformation/dag/column/unary/scalar/CRC32Transformer.java
@@ -0,0 +1,61 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar;
+
+import org.apache.iotdb.db.queryengine.transformation.dag.column.ColumnTransformer;
+import org.apache.iotdb.db.queryengine.transformation.dag.column.unary.UnaryColumnTransformer;
+
+import org.apache.tsfile.block.column.Column;
+import org.apache.tsfile.block.column.ColumnBuilder;
+import org.apache.tsfile.read.common.type.Type;
+
+import java.util.zip.CRC32;
+
+public class CRC32Transformer extends UnaryColumnTransformer {
+
+ private final CRC32 crc32 = new CRC32();
+
+ public CRC32Transformer(Type returnType, ColumnTransformer childColumnTransformer) {
+ super(returnType, childColumnTransformer);
+ }
+
+ @Override
+ protected void doTransform(Column column, ColumnBuilder columnBuilder) {
+ doTransform(column, columnBuilder, null);
+ }
+
+ @Override
+ protected void doTransform(Column column, ColumnBuilder columnBuilder, boolean[] selection) {
+ for (int i = 0, n = column.getPositionCount(); i < n; i++) {
+
+ if (column.isNull(i) || (selection != null && !selection[i])) {
+ columnBuilder.appendNull();
+ continue;
+ }
+
+ byte[] inputBytes = column.getBinary(i).getValues();
+
+ // use reset to recovery the internal state
+ this.crc32.reset();
+ this.crc32.update(inputBytes, 0, inputBytes.length);
+ columnBuilder.writeLong(this.crc32.getValue());
+ }
+ }
+}
diff --git a/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/transformation/dag/column/unary/scalar/DoubleToBytesColumnTransformer.java b/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/transformation/dag/column/unary/scalar/DoubleToBytesColumnTransformer.java
new file mode 100644
index 000000000000..7b9c06147a53
--- /dev/null
+++ b/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/transformation/dag/column/unary/scalar/DoubleToBytesColumnTransformer.java
@@ -0,0 +1,61 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar;
+
+import org.apache.iotdb.db.queryengine.transformation.dag.column.ColumnTransformer;
+import org.apache.iotdb.db.queryengine.transformation.dag.column.unary.UnaryColumnTransformer;
+import org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar.factory.NumericCodecStrategiesFactory;
+
+import org.apache.tsfile.block.column.Column;
+import org.apache.tsfile.block.column.ColumnBuilder;
+import org.apache.tsfile.read.common.type.Type;
+import org.apache.tsfile.utils.Binary;
+
+/** Transformer for double to bytes conversion using a specified numeric codec strategy. */
+public class DoubleToBytesColumnTransformer extends UnaryColumnTransformer {
+
+ private final NumericCodecStrategiesFactory.DoubleToBytesStrategy doubleToBytesStrategy;
+
+ public DoubleToBytesColumnTransformer(
+ Type returnType,
+ ColumnTransformer childColumnTransformer,
+ NumericCodecStrategiesFactory.DoubleToBytesStrategy doubleToBytesStrategy) {
+ super(returnType, childColumnTransformer);
+ this.doubleToBytesStrategy = doubleToBytesStrategy;
+ }
+
+ @Override
+ protected void doTransform(Column column, ColumnBuilder columnBuilder) {
+ doTransform(column, columnBuilder, null);
+ }
+
+ @Override
+ protected void doTransform(Column column, ColumnBuilder columnBuilder, boolean[] selection) {
+ for (int i = 0, n = column.getPositionCount(); i < n; i++) {
+ if (column.isNull(i) || (selection != null && !selection[i])) {
+ columnBuilder.appendNull();
+ continue;
+ }
+ double inputValue = column.getDouble(i);
+ byte[] outputBytes = doubleToBytesStrategy.numericCodeCTransform(inputValue);
+ columnBuilder.writeBinary(new Binary(outputBytes));
+ }
+ }
+}
diff --git a/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/transformation/dag/column/unary/scalar/FloatToBytesColumnTransformer.java b/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/transformation/dag/column/unary/scalar/FloatToBytesColumnTransformer.java
new file mode 100644
index 000000000000..7d4d4795c721
--- /dev/null
+++ b/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/transformation/dag/column/unary/scalar/FloatToBytesColumnTransformer.java
@@ -0,0 +1,61 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar;
+
+import org.apache.iotdb.db.queryengine.transformation.dag.column.ColumnTransformer;
+import org.apache.iotdb.db.queryengine.transformation.dag.column.unary.UnaryColumnTransformer;
+import org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar.factory.NumericCodecStrategiesFactory;
+
+import org.apache.tsfile.block.column.Column;
+import org.apache.tsfile.block.column.ColumnBuilder;
+import org.apache.tsfile.read.common.type.Type;
+import org.apache.tsfile.utils.Binary;
+
+/** Transformer for float to bytes conversion using a specified numeric codec strategy. */
+public class FloatToBytesColumnTransformer extends UnaryColumnTransformer {
+
+ private final NumericCodecStrategiesFactory.FloatToBytesStrategy floatToBytesStrategy;
+
+ public FloatToBytesColumnTransformer(
+ Type returnType,
+ ColumnTransformer childColumnTransformer,
+ NumericCodecStrategiesFactory.FloatToBytesStrategy floatToBytesStrategy) {
+ super(returnType, childColumnTransformer);
+ this.floatToBytesStrategy = floatToBytesStrategy;
+ }
+
+ @Override
+ protected void doTransform(Column column, ColumnBuilder columnBuilder) {
+ doTransform(column, columnBuilder, null);
+ }
+
+ @Override
+ protected void doTransform(Column column, ColumnBuilder columnBuilder, boolean[] selection) {
+ for (int i = 0, n = column.getPositionCount(); i < n; i++) {
+ if (column.isNull(i) || (selection != null && !selection[i])) {
+ columnBuilder.appendNull();
+ continue;
+ }
+ float inputValue = column.getFloat(i);
+ byte[] outputBytes = floatToBytesStrategy.numericCodeCTransform(inputValue);
+ columnBuilder.writeBinary(new Binary(outputBytes));
+ }
+ }
+}
diff --git a/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/transformation/dag/column/unary/scalar/GenericCodecColumnTransformer.java b/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/transformation/dag/column/unary/scalar/GenericCodecColumnTransformer.java
new file mode 100644
index 000000000000..f387b8850e4f
--- /dev/null
+++ b/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/transformation/dag/column/unary/scalar/GenericCodecColumnTransformer.java
@@ -0,0 +1,92 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar;
+
+import org.apache.iotdb.db.exception.sql.SemanticException;
+import org.apache.iotdb.db.queryengine.transformation.dag.column.ColumnTransformer;
+import org.apache.iotdb.db.queryengine.transformation.dag.column.unary.UnaryColumnTransformer;
+import org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar.inteface.CodecStrategy;
+
+import org.apache.tsfile.block.column.Column;
+import org.apache.tsfile.block.column.ColumnBuilder;
+import org.apache.tsfile.read.common.type.Type;
+import org.apache.tsfile.utils.Binary;
+
+import static org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar.util.TransformerDebugUtils.generateOriginalValue;
+
+/**
+ * A generic, reusable column converter for handling all codec-based encoding/decoding functions. It
+ * implements concrete conversion logic by composing a {@link CodecStrategy} strategy object,
+ */
+public class GenericCodecColumnTransformer extends UnaryColumnTransformer {
+
+ private final CodecStrategy strategy;
+ private final String functionName;
+ private final Type inputType;
+
+ /**
+ * @param strategy specific codec strategy for transformation
+ * @param functionName name of the function, used for error message
+ */
+ public GenericCodecColumnTransformer(
+ Type returnType,
+ ColumnTransformer childColumnTransformer,
+ CodecStrategy strategy,
+ String functionName,
+ Type inputType) {
+ super(returnType, childColumnTransformer);
+ this.strategy = strategy;
+ this.functionName = functionName;
+ this.inputType = inputType;
+ }
+
+ @Override
+ protected void doTransform(Column column, ColumnBuilder columnBuilder) {
+ doTransform(column, columnBuilder, null);
+ }
+
+ @Override
+ protected void doTransform(Column column, ColumnBuilder columnBuilder, boolean[] selection) {
+ for (int i = 0, n = column.getPositionCount(); i < n; i++) {
+ if (column.isNull(i) || (selection != null && !selection[i])) {
+ columnBuilder.appendNull();
+ continue;
+ }
+
+ byte[] inputBytes = column.getBinary(i).getValues();
+ try {
+ // use the composed Codec strategy to perform the core
+ byte[] outputBytes = strategy.codeCTransform(inputBytes);
+ columnBuilder.writeBinary(new Binary(outputBytes));
+
+ } catch (SemanticException e) {
+
+ // The decoding functions may throw IllegalArgumentException when the input is invalid.
+ // show the original value in the error message to help users debug.
+ String problematicValue = generateOriginalValue(inputBytes, inputType);
+ String errorMessage =
+ String.format(
+ "Failed to execute function '%s' due to an invalid input format. Problematic value: %s",
+ functionName, problematicValue);
+ throw new SemanticException(errorMessage);
+ }
+ }
+ }
+}
diff --git a/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/transformation/dag/column/unary/scalar/HmacConstantKeyColumnTransformer.java b/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/transformation/dag/column/unary/scalar/HmacConstantKeyColumnTransformer.java
new file mode 100644
index 000000000000..daa5868b938d
--- /dev/null
+++ b/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/transformation/dag/column/unary/scalar/HmacConstantKeyColumnTransformer.java
@@ -0,0 +1,68 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar;
+
+import org.apache.iotdb.db.queryengine.transformation.dag.column.ColumnTransformer;
+import org.apache.iotdb.db.queryengine.transformation.dag.column.binary.strategies.HmacStrategy;
+import org.apache.iotdb.db.queryengine.transformation.dag.column.unary.UnaryColumnTransformer;
+
+import org.apache.tsfile.block.column.Column;
+import org.apache.tsfile.block.column.ColumnBuilder;
+import org.apache.tsfile.read.common.type.Type;
+import org.apache.tsfile.utils.Binary;
+
+public class HmacConstantKeyColumnTransformer extends UnaryColumnTransformer {
+
+ private final HmacStrategy optimizedHmacStrategy;
+
+ public HmacConstantKeyColumnTransformer(
+ Type returnType,
+ ColumnTransformer childColumnTransformer,
+ HmacStrategy optimizedHmacStrategy) {
+ super(returnType, childColumnTransformer);
+ this.optimizedHmacStrategy = optimizedHmacStrategy;
+ }
+
+ @Override
+ protected void doTransform(Column column, ColumnBuilder columnBuilder) {
+ for (int i = 0, n = column.getPositionCount(); i < n; i++) {
+ if (!column.isNull(i)) {
+ byte[] values = column.getBinary(i).getValues();
+ byte[] hmacResult = optimizedHmacStrategy.hmacTransform(values, null);
+ columnBuilder.writeBinary(new Binary(hmacResult));
+ } else {
+ columnBuilder.appendNull();
+ }
+ }
+ }
+
+ @Override
+ protected void doTransform(Column column, ColumnBuilder columnBuilder, boolean[] selection) {
+ for (int i = 0, n = column.getPositionCount(); i < n; i++) {
+ if (selection[i] && !column.isNull(i)) {
+ byte[] values = column.getBinary(i).getValues();
+ byte[] hmacResult = optimizedHmacStrategy.hmacTransform(values, null);
+ columnBuilder.writeBinary(new Binary(hmacResult));
+ } else {
+ columnBuilder.appendNull();
+ }
+ }
+ }
+}
diff --git a/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/transformation/dag/column/unary/scalar/IntToBytesColumnTransformer.java b/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/transformation/dag/column/unary/scalar/IntToBytesColumnTransformer.java
new file mode 100644
index 000000000000..5c25a949c8f6
--- /dev/null
+++ b/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/transformation/dag/column/unary/scalar/IntToBytesColumnTransformer.java
@@ -0,0 +1,62 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar;
+
+import org.apache.iotdb.db.queryengine.transformation.dag.column.ColumnTransformer;
+import org.apache.iotdb.db.queryengine.transformation.dag.column.unary.UnaryColumnTransformer;
+import org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar.factory.NumericCodecStrategiesFactory;
+
+import org.apache.tsfile.block.column.Column;
+import org.apache.tsfile.block.column.ColumnBuilder;
+import org.apache.tsfile.read.common.type.Type;
+import org.apache.tsfile.utils.Binary;
+
+/** Transformer for int to bytes conversion using a specified numeric codec strategy. */
+public class IntToBytesColumnTransformer extends UnaryColumnTransformer {
+
+ private final NumericCodecStrategiesFactory.IntToBytesStrategy intToBytesStrategy;
+
+ public IntToBytesColumnTransformer(
+ Type returnType,
+ ColumnTransformer childColumnTransformer,
+ NumericCodecStrategiesFactory.IntToBytesStrategy intToBytesStrategy) {
+ super(returnType, childColumnTransformer);
+ this.intToBytesStrategy = intToBytesStrategy;
+ }
+
+ @Override
+ protected void doTransform(Column column, ColumnBuilder columnBuilder) {
+ doTransform(column, columnBuilder, null);
+ }
+
+ @Override
+ protected void doTransform(Column column, ColumnBuilder columnBuilder, boolean[] selection) {
+ for (int i = 0, n = column.getPositionCount(); i < n; i++) {
+ if (column.isNull(i) || (selection != null && !selection[i])) {
+ columnBuilder.appendNull();
+ continue;
+ }
+
+ int inputValue = column.getInt(i);
+ byte[] outputBytes = intToBytesStrategy.numericCodeCTransform(inputValue);
+ columnBuilder.writeBinary(new Binary(outputBytes));
+ }
+ }
+}
diff --git a/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/transformation/dag/column/unary/scalar/LongToBytesColumnTransformer.java b/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/transformation/dag/column/unary/scalar/LongToBytesColumnTransformer.java
new file mode 100644
index 000000000000..72c3a5b2404a
--- /dev/null
+++ b/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/transformation/dag/column/unary/scalar/LongToBytesColumnTransformer.java
@@ -0,0 +1,60 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar;
+
+import org.apache.iotdb.db.queryengine.transformation.dag.column.ColumnTransformer;
+import org.apache.iotdb.db.queryengine.transformation.dag.column.unary.UnaryColumnTransformer;
+import org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar.factory.NumericCodecStrategiesFactory;
+
+import org.apache.tsfile.block.column.Column;
+import org.apache.tsfile.block.column.ColumnBuilder;
+import org.apache.tsfile.read.common.type.Type;
+import org.apache.tsfile.utils.Binary;
+
+public class LongToBytesColumnTransformer extends UnaryColumnTransformer {
+
+ private final NumericCodecStrategiesFactory.LongToBytesStrategy longToBytesStrategy;
+
+ public LongToBytesColumnTransformer(
+ Type returnType,
+ ColumnTransformer childColumnTransformer,
+ NumericCodecStrategiesFactory.LongToBytesStrategy longToBytesStrategy) {
+ super(returnType, childColumnTransformer);
+ this.longToBytesStrategy = longToBytesStrategy;
+ }
+
+ @Override
+ protected void doTransform(Column column, ColumnBuilder columnBuilder) {
+ doTransform(column, columnBuilder, null);
+ }
+
+ @Override
+ protected void doTransform(Column column, ColumnBuilder columnBuilder, boolean[] selection) {
+ for (int i = 0, n = column.getPositionCount(); i < n; i++) {
+ if (column.isNull(i) || (selection != null && !selection[i])) {
+ columnBuilder.appendNull();
+ continue;
+ }
+ long inputValue = column.getLong(i);
+ byte[] outputBytes = longToBytesStrategy.numericCodeCTransform(inputValue);
+ columnBuilder.writeBinary(new Binary(outputBytes));
+ }
+ }
+}
diff --git a/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/transformation/dag/column/unary/scalar/factory/CodecStrategiesFactory.java b/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/transformation/dag/column/unary/scalar/factory/CodecStrategiesFactory.java
new file mode 100644
index 000000000000..546a8286d041
--- /dev/null
+++ b/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/transformation/dag/column/unary/scalar/factory/CodecStrategiesFactory.java
@@ -0,0 +1,133 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar.factory;
+
+import org.apache.iotdb.db.exception.sql.SemanticException;
+import org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar.inteface.CodecStrategy;
+import org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar.util.HexUtils;
+import org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar.util.SpookyHashV2Utils;
+
+import com.google.common.hash.Hashing;
+import com.google.common.io.BaseEncoding;
+import net.jpountz.xxhash.XXHashFactory;
+import org.apache.tsfile.common.conf.TSFileConfig;
+
+import java.nio.ByteBuffer;
+import java.util.Base64;
+
+/**
+ * for byte[] to byte[] codec transformations, including encoding/decoding and hashing functions for
+ * decoding errors, it has been wrapped into SemanticException and will be handled in the upper
+ * layer
+ */
+public final class CodecStrategiesFactory {
+
+ private static final BaseEncoding GUAVA_BASE32_ENCODING = BaseEncoding.base32();
+
+ // --- Base64 ---
+ public static final CodecStrategy TO_BASE64 = Base64.getEncoder()::encode;
+ public static final CodecStrategy FROM_BASE64 =
+ (input) -> {
+ try {
+ return Base64.getDecoder().decode(input);
+ } catch (IllegalArgumentException e) {
+ // wrap the specific exception in dependency into a general one for uniform handling in
+ // upper layer
+ throw new SemanticException("decode base64 error");
+ }
+ };
+
+ // --- Base64URL ---
+ public static final CodecStrategy TO_BASE64URL = Base64.getUrlEncoder().withoutPadding()::encode;
+ public static final CodecStrategy FROM_BASE64URL =
+ (input) -> {
+ try {
+ return Base64.getUrlDecoder().decode(input);
+ } catch (IllegalArgumentException e) {
+ throw new SemanticException("decode base64url error");
+ }
+ };
+
+ // --- Base32 ---
+ public static final CodecStrategy TO_BASE32 =
+ (data) -> GUAVA_BASE32_ENCODING.encode(data).getBytes(TSFileConfig.STRING_CHARSET);
+ public static final CodecStrategy FROM_BASE32 =
+ (input) -> {
+ try {
+ return GUAVA_BASE32_ENCODING.decode(new String(input, TSFileConfig.STRING_CHARSET));
+ } catch (IllegalArgumentException e) {
+ throw new SemanticException("decode base32 error");
+ }
+ };
+
+ // --- Hashing Function Strategies ---
+ public static final CodecStrategy SHA256 = input -> Hashing.sha256().hashBytes(input).asBytes();
+ public static final CodecStrategy SHA512 = input -> Hashing.sha512().hashBytes(input).asBytes();
+ public static final CodecStrategy SHA1 = input -> Hashing.sha1().hashBytes(input).asBytes();
+ public static final CodecStrategy MD5 = input -> Hashing.md5().hashBytes(input).asBytes();
+ public static final CodecStrategy XXHASH64 =
+ (input) ->
+ ByteBuffer.allocate(8)
+ .putLong(XXHashFactory.fastestInstance().hash64().hash(input, 0, input.length, 0L))
+ .array();
+
+ public static final CodecStrategy MURMUR3 =
+ (input) -> Hashing.murmur3_128().hashBytes(input).asBytes();
+
+ // --- Hex ---
+ public static final CodecStrategy TO_HEX = HexUtils::toHex;
+ public static final CodecStrategy FROM_HEX =
+ input -> {
+ try {
+ return HexUtils.fromHex(input);
+ } catch (IllegalArgumentException e) {
+ throw new SemanticException("decode hex error");
+ }
+ };
+
+ // --- Reverse ---
+ /** Reverses the order of bytes in the input array. Suitable for BLOB type. */
+ public static final CodecStrategy REVERSE_BYTES =
+ (input) -> {
+ int length = input.length;
+ byte[] reversed = new byte[length];
+ for (int i = 0; i < length; i++) {
+ reversed[i] = input[length - 1 - i];
+ }
+ return reversed;
+ };
+
+ /**
+ * Reverses the order of characters in the input string. Suitable for STRING and TEXT types. This
+ * involves converting bytes to a String, reversing it, and converting back to bytes.
+ */
+ public static final CodecStrategy REVERSE_CHARS =
+ (input) -> {
+ String originalString = new String(input, TSFileConfig.STRING_CHARSET);
+ String reversedString = new StringBuilder(originalString).reverse().toString();
+ return reversedString.getBytes(TSFileConfig.STRING_CHARSET);
+ };
+
+ public static final CodecStrategy spooky_hash_v2_32 = SpookyHashV2Utils::hash32;
+
+ public static final CodecStrategy spooky_hash_v2_64 = SpookyHashV2Utils::hash64;
+
+ private CodecStrategiesFactory() {}
+}
diff --git a/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/transformation/dag/column/unary/scalar/factory/NumericCodecStrategiesFactory.java b/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/transformation/dag/column/unary/scalar/factory/NumericCodecStrategiesFactory.java
new file mode 100644
index 000000000000..60d1e77f78b2
--- /dev/null
+++ b/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/transformation/dag/column/unary/scalar/factory/NumericCodecStrategiesFactory.java
@@ -0,0 +1,169 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar.factory;
+
+import org.apache.iotdb.db.exception.sql.SemanticException;
+
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+import java.util.zip.CRC32;
+
+/**
+ * A factory for strategies that convert numeric types to/from byte arrays using various encoding
+ * schemes. the decoding exceptions are wrapped into SemanticException for uniform handling in the
+ * upper layer.
+ */
+public final class NumericCodecStrategiesFactory {
+
+ private NumericCodecStrategiesFactory() {}
+
+ @FunctionalInterface
+ public interface IntToBytesStrategy {
+ byte[] numericCodeCTransform(int input);
+ }
+
+ @FunctionalInterface
+ public interface LongToBytesStrategy {
+ byte[] numericCodeCTransform(long input);
+ }
+
+ @FunctionalInterface
+ public interface FloatToBytesStrategy {
+ byte[] numericCodeCTransform(float input);
+ }
+
+ @FunctionalInterface
+ public interface DoubleToBytesStrategy {
+ byte[] numericCodeCTransform(double input);
+ }
+
+ @FunctionalInterface
+ public interface BytesToIntStrategy {
+ int numericCodeCTransform(byte[] input);
+ }
+
+ @FunctionalInterface
+ public interface BytesToLongStrategy {
+ long numericCodeCTransform(byte[] input);
+ }
+
+ @FunctionalInterface
+ public interface BytesToFloatStrategy {
+ float numericCodeCTransform(byte[] input);
+ }
+
+ @FunctionalInterface
+ public interface BytesToDoubleStrategy {
+ double numericCodeCTransform(byte[] input);
+ }
+
+ // --- Strategy Implementations ---
+
+ // for Big Endian writes, leverage ByteBuffer's default order for maximum performance
+
+ public static final IntToBytesStrategy TO_BIG_ENDIAN_32 =
+ (input) -> ByteBuffer.allocate(4).putInt(input).array();
+
+ public static final LongToBytesStrategy TO_BIG_ENDIAN_64 =
+ (input) -> ByteBuffer.allocate(8).putLong(input).array();
+
+ public static final FloatToBytesStrategy TO_IEEE754_32_BIG_ENDIAN =
+ (input) -> ByteBuffer.allocate(4).putInt(Float.floatToIntBits(input)).array();
+
+ public static final DoubleToBytesStrategy TO_IEEE754_64_BIG_ENDIAN =
+ (input) -> ByteBuffer.allocate(8).putLong(Double.doubleToLongBits(input)).array();
+
+ // For Little Endian writes, reverse the bytes of the number first, then use the default
+ // (BigEndian) writer
+ public static final IntToBytesStrategy TO_LITTLE_ENDIAN_32 =
+ (input) -> ByteBuffer.allocate(4).putInt(Integer.reverseBytes(input)).array();
+
+ public static final LongToBytesStrategy TO_LITTLE_ENDIAN_64 =
+ (input) -> ByteBuffer.allocate(8).putLong(Long.reverseBytes(input)).array();
+
+ // Decoding Conversions (Bytes -> Numeric) ---
+ // For reads, ByteBuffer.wrap().order() is already highly efficient as it avoids data copies.
+
+ public static final BytesToIntStrategy FROM_BIG_ENDIAN_32 =
+ (input) -> {
+ // validate input length, if its length is not 4, throw exception
+ if (input.length != 4) {
+ throw new SemanticException(
+ "The length of the input BLOB of function from_big_endian_32 must be 4.");
+ }
+ return ByteBuffer.wrap(input).order(ByteOrder.BIG_ENDIAN).getInt();
+ };
+
+ public static final BytesToLongStrategy FROM_BIG_ENDIAN_64 =
+ (input) -> {
+ if (input.length != 8) {
+ throw new SemanticException(
+ "The length of the input BLOB of function from_big_endian_64 must be 8.");
+ }
+ return ByteBuffer.wrap(input).order(ByteOrder.BIG_ENDIAN).getLong();
+ };
+
+ public static final BytesToIntStrategy FROM_LITTLE_ENDIAN_32 =
+ (input) -> {
+ if (input.length != 4) {
+ throw new SemanticException(
+ "The length of the input BLOB of function from_little_endian_32 must be 4.");
+ }
+ return ByteBuffer.wrap(input).order(ByteOrder.LITTLE_ENDIAN).getInt();
+ };
+
+ public static final BytesToLongStrategy FROM_LITTLE_ENDIAN_64 =
+ (input) -> {
+ if (input.length != 8) {
+ throw new SemanticException(
+ "The length of the input BLOB of function from_little_endian_64 must be 8.");
+ }
+ return ByteBuffer.wrap(input).order(ByteOrder.LITTLE_ENDIAN).getLong();
+ };
+
+ public static final BytesToFloatStrategy FROM_IEEE754_32_BIG_ENDIAN =
+ (input) -> {
+ if (input.length != 4) {
+ throw new SemanticException(
+ "The length of the input BLOB of function from_ieee754_32_big_endian must be 4.");
+ }
+ return Float.intBitsToFloat(ByteBuffer.wrap(input).order(ByteOrder.BIG_ENDIAN).getInt());
+ };
+
+ public static final BytesToDoubleStrategy FROM_IEEE754_64_BIG_ENDIAN =
+ (input) -> {
+ if (input.length != 8) {
+ throw new SemanticException(
+ "The length of the input BLOB of function from_ieee754_64_big_endian must be 8.");
+ }
+ return Double.longBitsToDouble(
+ ByteBuffer.wrap(input).order(ByteOrder.BIG_ENDIAN).getLong());
+ };
+
+ // for CRC32
+ private static final ThreadLocal crc32ThreadLocal = ThreadLocal.withInitial(CRC32::new);
+ public static final BytesToLongStrategy CRC32 =
+ (input) -> {
+ CRC32 crc32 = crc32ThreadLocal.get();
+ crc32.reset();
+ crc32.update(input);
+ return crc32.getValue();
+ };
+}
diff --git a/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/transformation/dag/column/unary/scalar/inteface/CodecStrategy.java b/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/transformation/dag/column/unary/scalar/inteface/CodecStrategy.java
new file mode 100644
index 000000000000..7c2fd4833a13
--- /dev/null
+++ b/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/transformation/dag/column/unary/scalar/inteface/CodecStrategy.java
@@ -0,0 +1,27 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar.inteface;
+
+import org.apache.iotdb.db.exception.sql.SemanticException;
+
/**
 * Strategy for a byte[] -> byte[] codec transformation (encode, decode, or hash step).
 *
 * <p>Implementations that decode untrusted input signal malformed data via {@link
 * SemanticException}; pure encoders/hashers do not throw.
 *
 * <p>NOTE(review): the package segment "inteface" appears to be a typo workaround for the reserved
 * word "interface" — confirm whether a conventional name (e.g. "intf") was intended; renaming
 * would touch every importer, so it is only flagged here.
 */
@FunctionalInterface
public interface CodecStrategy {
  // Transforms the input bytes; the throws clause is documentary — SemanticException is a
  // RuntimeException raised when the input cannot be decoded.
  byte[] codeCTransform(byte[] input) throws SemanticException;
}
diff --git a/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/transformation/dag/column/unary/scalar/util/HexUtils.java b/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/transformation/dag/column/unary/scalar/util/HexUtils.java
new file mode 100644
index 000000000000..15ed2b9d342d
--- /dev/null
+++ b/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/transformation/dag/column/unary/scalar/util/HexUtils.java
@@ -0,0 +1,86 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar.util;
+
+import org.apache.tsfile.common.conf.TSFileConfig;
+
/** Allocation-conscious helpers converting raw bytes to/from their lowercase hex representation. */
public final class HexUtils {

  // Lowercase hex alphabet. The characters are pure ASCII, so this literal table is byte-identical
  // to encoding "0123456789abcdef" in any ASCII-compatible charset, without the charset dependency.
  private static final byte[] HEX_CHAR_TABLE = {
    '0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'a', 'b', 'c', 'd', 'e', 'f'
  };

  private HexUtils() {
    throw new UnsupportedOperationException("This is a utility class and cannot be instantiated");
  }

  /**
   * Converts a byte array to its hexadecimal representation as a new byte array. No intermediate
   * String objects are created.
   *
   * @param input raw bytes to encode
   * @return a new array of length {@code 2 * input.length} holding lowercase hex characters
   */
  public static byte[] toHex(byte[] input) {
    byte[] hexBytes = new byte[input.length * 2];
    for (int i = 0; i < input.length; i++) {
      int v = input[i] & 0xFF;
      hexBytes[i * 2] = HEX_CHAR_TABLE[v >>> 4];
      hexBytes[i * 2 + 1] = HEX_CHAR_TABLE[v & 0x0F];
    }
    return hexBytes;
  }

  /**
   * Converts a byte array representing a hexadecimal string back to its raw byte array. No
   * intermediate String objects are created.
   *
   * @param input The byte array containing hexadecimal characters (either case accepted).
   * @return The decoded raw byte array.
   * @throws IllegalArgumentException if the length is odd or a non-hex character is found
   */
  public static byte[] fromHex(byte[] input) {
    if ((input.length & 1) != 0) {
      // Each decoded byte needs exactly two hex characters.
      throw new IllegalArgumentException(
          "Hex input must contain an even number of characters, but was " + input.length);
    }

    byte[] rawBytes = new byte[input.length / 2];
    for (int i = 0; i < rawBytes.length; i++) {
      int high = hexCharToDigit(input[i * 2]);
      int low = hexCharToDigit(input[i * 2 + 1]);
      rawBytes[i] = (byte) ((high << 4) | low);
    }
    return rawBytes;
  }

  /**
   * Converts a single byte representing a hex character to its integer value (0-15).
   *
   * @param c The byte representing the hex character (e.g. 'a', 'F', '9').
   * @return The integer value from 0 to 15.
   * @throws IllegalArgumentException if {@code c} is not a hex character
   */
  private static int hexCharToDigit(byte c) {
    if (c >= '0' && c <= '9') {
      return c - '0';
    }
    if (c >= 'a' && c <= 'f') {
      return c - 'a' + 10;
    }
    if (c >= 'A' && c <= 'F') {
      return c - 'A' + 10;
    }
    throw new IllegalArgumentException(
        "Invalid hex character: 0x" + Integer.toHexString(c & 0xFF));
  }
}
diff --git a/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/transformation/dag/column/unary/scalar/util/SpookyHashV2Utils.java b/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/transformation/dag/column/unary/scalar/util/SpookyHashV2Utils.java
new file mode 100644
index 000000000000..e6ef386827e4
--- /dev/null
+++ b/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/transformation/dag/column/unary/scalar/util/SpookyHashV2Utils.java
@@ -0,0 +1,603 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar.util;
+
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+
+import static java.lang.Long.rotateLeft;
+
+/**
+ * Java implementation of SpookyHash V2 (Bob Jenkins' non-cryptographic hash), exposing 64-bit and
+ * truncated 32-bit variants. Inputs shorter than {@code SHORT_THRESHOLD} bytes take a dedicated
+ * 4-lane "short" path; longer inputs use the 12-lane block path.
+ */
+public class SpookyHashV2Utils {
+ private static final long MAGIC_CONSTANT = 0xDEAD_BEEF_DEAD_BEEFL;
+ private static final int SHORT_THRESHOLD = 192;
+ public static final byte SIZE_OF_LONG = 8;
+
+ /** Reads 8 bytes starting at {@code index} as a little-endian long. */
+ public static long getLongFromBytesWithLittleEndian(byte[] data, int index) {
+
+ // little endian
+ return (data[index] & 0xFFL)
+ | ((data[index + 1] & 0xFFL) << 8)
+ | ((data[index + 2] & 0xFFL) << 16)
+ | ((data[index + 3] & 0xFFL) << 24)
+ | ((data[index + 4] & 0xFFL) << 32)
+ | ((data[index + 5] & 0xFFL) << 40)
+ | ((data[index + 6] & 0xFFL) << 48)
+ | ((data[index + 7] & 0xFFL) << 56);
+ }
+
+ /** Reads 4 bytes starting at {@code index} as a little-endian unsigned int (range 0..2^32-1). */
+ public static long getUnsignedIntFromBytesWithLittleEndian(byte[] data, int index) {
+ return (data[index] & 0xFFL)
+ | ((data[index + 1] & 0xFFL) << 8)
+ | ((data[index + 2] & 0xFFL) << 16)
+ | ((data[index + 3] & 0xFFL) << 24);
+ }
+
+ // NOTE(review): serialization below is BIG-endian while input parsing above is little-endian;
+ // presumably intentional for the externally visible hash bytes — confirm against callers.
+ public static byte[] intToBytes(int value) {
+ ByteBuffer buffer = ByteBuffer.allocate(Integer.BYTES); // Integer.BYTES == 4
+ buffer.order(ByteOrder.BIG_ENDIAN);
+ buffer.putInt(value);
+ return buffer.array();
+ }
+
+ /** Serializes {@code value} as 8 big-endian bytes. */
+ public static byte[] longToBytes(long value) {
+
+ ByteBuffer buffer = ByteBuffer.allocate(Long.BYTES); // Long.BYTES == 8
+ buffer.order(ByteOrder.BIG_ENDIAN);
+ buffer.putLong(value);
+ return buffer.array();
+ }
+
+ /** Utility class; not instantiable. */
+ private SpookyHashV2Utils() {}
+
+ /** 32-bit hash: the low 32 bits of the 64-bit hash (seed 0), serialized big-endian. */
+ public static byte[] hash32(byte[] data) {
+ return intToBytes((int) hash64(data, 0, data.length, 0));
+ }
+
+ /** 64-bit hash of the whole array with seed 0, serialized big-endian. */
+ public static byte[] hash64(byte[] data) {
+ return longToBytes(hash64(data, 0, data.length, 0));
+ }
+
+ /** Dispatches to the short- or long-input variant based on {@code SHORT_THRESHOLD}. */
+ public static long hash64(byte[] data, int offset, int length, long seed) {
+ if (length < SHORT_THRESHOLD) {
+ return shortHash64(data, offset, length, seed);
+ }
+
+ return longHash64(data, offset, length, seed);
+ }
+
+ /** 32-bit hash: truncation of the 64-bit hash to its low 32 bits. */
+ public static int hash32(byte[] data, int offset, int length, long seed) {
+ return (int) hash64(data, offset, length, seed);
+ }
+
+ /**
+ * SpookyHash V2 "short" path: 4-lane state mixed over 32-byte blocks. The rotation constants and
+ * statement order define the hash value — do not reorder.
+ */
+ private static long shortHash64(byte[] data, int offset, int length, long seed) {
+ int limit = offset + length;
+
+ long h0 = seed;
+ long h1 = seed;
+ long h2 = MAGIC_CONSTANT;
+ long h3 = MAGIC_CONSTANT;
+
+ int current = offset;
+ while (current <= limit - 32) {
+ h2 += getLongFromBytesWithLittleEndian(data, current);
+ current += SIZE_OF_LONG;
+ h3 += getLongFromBytesWithLittleEndian(data, current);
+ current += SIZE_OF_LONG;
+
+ // mix
+ h2 = rotateLeft(h2, 50);
+ h2 += h3;
+ h0 ^= h2;
+ h3 = rotateLeft(h3, 52);
+ h3 += h0;
+ h1 ^= h3;
+ h0 = rotateLeft(h0, 30);
+ h0 += h1;
+ h2 ^= h0;
+ h1 = rotateLeft(h1, 41);
+ h1 += h2;
+ h3 ^= h1;
+ h2 = rotateLeft(h2, 54);
+ h2 += h3;
+ h0 ^= h2;
+ h3 = rotateLeft(h3, 48);
+ h3 += h0;
+ h1 ^= h3;
+ h0 = rotateLeft(h0, 38);
+ h0 += h1;
+ h2 ^= h0;
+ h1 = rotateLeft(h1, 37);
+ h1 += h2;
+ h3 ^= h1;
+ h2 = rotateLeft(h2, 62);
+ h2 += h3;
+ h0 ^= h2;
+ h3 = rotateLeft(h3, 34);
+ h3 += h0;
+ h1 ^= h3;
+ h0 = rotateLeft(h0, 5);
+ h0 += h1;
+ h2 ^= h0;
+ h1 = rotateLeft(h1, 36);
+ h1 += h2;
+ h3 ^= h1;
+
+ h0 += getLongFromBytesWithLittleEndian(data, current);
+ current += SIZE_OF_LONG;
+
+ h1 += getLongFromBytesWithLittleEndian(data, current);
+ current += SIZE_OF_LONG;
+ }
+
+ int remainder = limit - current;
+ if (remainder >= 16) {
+ h2 += getLongFromBytesWithLittleEndian(data, current);
+ current += SIZE_OF_LONG;
+ remainder -= SIZE_OF_LONG;
+
+ h3 += getLongFromBytesWithLittleEndian(data, current);
+ current += SIZE_OF_LONG;
+ remainder -= SIZE_OF_LONG;
+
+ // mix
+ h2 = rotateLeft(h2, 50);
+ h2 += h3;
+ h0 ^= h2;
+ h3 = rotateLeft(h3, 52);
+ h3 += h0;
+ h1 ^= h3;
+ h0 = rotateLeft(h0, 30);
+ h0 += h1;
+ h2 ^= h0;
+ h1 = rotateLeft(h1, 41);
+ h1 += h2;
+ h3 ^= h1;
+ h2 = rotateLeft(h2, 54);
+ h2 += h3;
+ h0 ^= h2;
+ h3 = rotateLeft(h3, 48);
+ h3 += h0;
+ h1 ^= h3;
+ h0 = rotateLeft(h0, 38);
+ h0 += h1;
+ h2 ^= h0;
+ h1 = rotateLeft(h1, 37);
+ h1 += h2;
+ h3 ^= h1;
+ h2 = rotateLeft(h2, 62);
+ h2 += h3;
+ h0 ^= h2;
+ h3 = rotateLeft(h3, 34);
+ h3 += h0;
+ h1 ^= h3;
+ h0 = rotateLeft(h0, 5);
+ h0 += h1;
+ h2 ^= h0;
+ h1 = rotateLeft(h1, 36);
+ h1 += h2;
+ h3 ^= h1;
+ }
+
+ // last 15 bytes
+ h3 += ((long) length) << 56;
+ // Intentional fall-through: each case accumulates one more trailing byte.
+ switch (remainder) {
+ case 15:
+ h3 += (data[current + 14] & 0xFFL) << 48;
+ case 14:
+ h3 += (data[current + 13] & 0xFFL) << 40;
+ case 13:
+ h3 += (data[current + 12] & 0xFFL) << 32;
+ case 12:
+ h3 += getUnsignedIntFromBytesWithLittleEndian(data, current + 8);
+ h2 += getLongFromBytesWithLittleEndian(data, current);
+ break;
+ case 11:
+ h3 += (data[current + 10] & 0xFFL) << 16;
+ case 10:
+ h3 += (data[current + 9] & 0xFFL) << 8;
+ case 9:
+ h3 += (data[current + 8] & 0xFFL);
+ case 8:
+ h2 += getLongFromBytesWithLittleEndian(data, current);
+ break;
+ case 7:
+ h2 += (data[current + 6] & 0xFFL) << 48;
+ case 6:
+ h2 += (data[current + 5] & 0xFFL) << 40;
+ case 5:
+ h2 += (data[current + 4] & 0xFFL) << 32;
+ case 4:
+ h2 += getUnsignedIntFromBytesWithLittleEndian(data, current);
+ break;
+ case 3:
+ h2 += (data[current + 2] & 0xFFL) << 16;
+ case 2:
+ h2 += (data[current + 1] & 0xFFL) << 8;
+ case 1:
+ h2 += (data[current] & 0xFFL);
+ break;
+ case 0:
+ h2 += MAGIC_CONSTANT;
+ h3 += MAGIC_CONSTANT;
+ break;
+ default:
+ throw new AssertionError("Unexpected value for remainder: " + remainder);
+ }
+
+ // end
+ h3 ^= h2;
+ h2 = rotateLeft(h2, 15);
+ h3 += h2;
+ h0 ^= h3;
+ h3 = rotateLeft(h3, 52);
+ h0 += h3;
+ h1 ^= h0;
+ h0 = rotateLeft(h0, 26);
+ h1 += h0;
+ h2 ^= h1;
+ h1 = rotateLeft(h1, 51);
+ h2 += h1;
+ h3 ^= h2;
+ h2 = rotateLeft(h2, 28);
+ h3 += h2;
+ h0 ^= h3;
+ h3 = rotateLeft(h3, 9);
+ h0 += h3;
+ h1 ^= h0;
+ h0 = rotateLeft(h0, 47);
+ h1 += h0;
+ h2 ^= h1;
+ h1 = rotateLeft(h1, 54);
+ h2 += h1;
+ h3 ^= h2;
+ h2 = rotateLeft(h2, 32);
+ h3 += h2;
+ h0 ^= h3;
+ h3 = rotateLeft(h3, 25);
+ h0 += h3;
+ h1 ^= h0;
+ h0 = rotateLeft(h0, 63);
+ h1 += h0;
+
+ return h0;
+ }
+
+ /**
+ * SpookyHash V2 "long" path: 12-lane state mixed over 96-byte blocks, followed by three
+ * finalization rounds. The rotation constants and statement order define the hash value.
+ */
+ private static long longHash64(byte[] data, int offset, int length, long seed) {
+ int limit = offset + length;
+
+ long h0 = seed;
+ long h1 = seed;
+ long h2 = MAGIC_CONSTANT;
+ long h3 = seed;
+ long h4 = seed;
+ long h5 = MAGIC_CONSTANT;
+ long h6 = seed;
+ long h7 = seed;
+ long h8 = MAGIC_CONSTANT;
+ long h9 = seed;
+ long h10 = seed;
+ long h11 = MAGIC_CONSTANT;
+
+ int current = offset;
+ while (current <= limit - 12 * SIZE_OF_LONG) {
+ h0 += getLongFromBytesWithLittleEndian(data, current);
+ current += SIZE_OF_LONG;
+ h2 ^= h10;
+ h11 ^= h0;
+ h0 = rotateLeft(h0, 11);
+ h11 += h1;
+
+ h1 += getLongFromBytesWithLittleEndian(data, current);
+ current += SIZE_OF_LONG;
+ h3 ^= h11;
+ h0 ^= h1;
+ h1 = rotateLeft(h1, 32);
+ h0 += h2;
+
+ h2 += getLongFromBytesWithLittleEndian(data, current);
+ current += SIZE_OF_LONG;
+ h4 ^= h0;
+ h1 ^= h2;
+ h2 = rotateLeft(h2, 43);
+ h1 += h3;
+
+ h3 += getLongFromBytesWithLittleEndian(data, current);
+ current += SIZE_OF_LONG;
+ h5 ^= h1;
+ h2 ^= h3;
+ h3 = rotateLeft(h3, 31);
+ h2 += h4;
+
+ h4 += getLongFromBytesWithLittleEndian(data, current);
+ current += SIZE_OF_LONG;
+ h6 ^= h2;
+ h3 ^= h4;
+ h4 = rotateLeft(h4, 17);
+ h3 += h5;
+
+ h5 += getLongFromBytesWithLittleEndian(data, current);
+ current += SIZE_OF_LONG;
+ h7 ^= h3;
+ h4 ^= h5;
+ h5 = rotateLeft(h5, 28);
+ h4 += h6;
+
+ h6 += getLongFromBytesWithLittleEndian(data, current);
+ current += SIZE_OF_LONG;
+ h8 ^= h4;
+ h5 ^= h6;
+ h6 = rotateLeft(h6, 39);
+ h5 += h7;
+
+ h7 += getLongFromBytesWithLittleEndian(data, current);
+ current += SIZE_OF_LONG;
+ h9 ^= h5;
+ h6 ^= h7;
+ h7 = rotateLeft(h7, 57);
+ h6 += h8;
+
+ h8 += getLongFromBytesWithLittleEndian(data, current);
+ current += SIZE_OF_LONG;
+ h10 ^= h6;
+ h7 ^= h8;
+ h8 = rotateLeft(h8, 55);
+ h7 += h9;
+
+ h9 += getLongFromBytesWithLittleEndian(data, current);
+ current += SIZE_OF_LONG;
+ h11 ^= h7;
+ h8 ^= h9;
+ h9 = rotateLeft(h9, 54);
+ h8 += h10;
+
+ h10 += getLongFromBytesWithLittleEndian(data, current);
+ current += SIZE_OF_LONG;
+ h0 ^= h8;
+ h9 ^= h10;
+ h10 = rotateLeft(h10, 22);
+ h9 += h11;
+
+ h11 += getLongFromBytesWithLittleEndian(data, current);
+ current += SIZE_OF_LONG;
+ h1 ^= h9;
+ h10 ^= h11;
+ h11 = rotateLeft(h11, 46);
+ h10 += h0;
+ }
+
+ int remaining = limit - current;
+ int sequences = remaining / SIZE_OF_LONG;
+
+ // handle remaining whole 8-byte sequences
+ // Intentional fall-through from the highest case down to 0.
+ switch (sequences) {
+ case 11:
+ h10 += getLongFromBytesWithLittleEndian(data, current + 10 * SIZE_OF_LONG);
+ case 10:
+ h9 += getLongFromBytesWithLittleEndian(data, current + 9 * SIZE_OF_LONG);
+ case 9:
+ h8 += getLongFromBytesWithLittleEndian(data, current + 8 * SIZE_OF_LONG);
+ case 8:
+ h7 += getLongFromBytesWithLittleEndian(data, current + 7 * SIZE_OF_LONG);
+ case 7:
+ h6 += getLongFromBytesWithLittleEndian(data, current + 6 * SIZE_OF_LONG);
+ case 6:
+ h5 += getLongFromBytesWithLittleEndian(data, current + 5 * SIZE_OF_LONG);
+ case 5:
+ h4 += getLongFromBytesWithLittleEndian(data, current + 4 * SIZE_OF_LONG);
+ case 4:
+ h3 += getLongFromBytesWithLittleEndian(data, current + 3 * SIZE_OF_LONG);
+ case 3:
+ h2 += getLongFromBytesWithLittleEndian(data, current + 2 * SIZE_OF_LONG);
+ case 2:
+ h1 += getLongFromBytesWithLittleEndian(data, current + SIZE_OF_LONG);
+ case 1:
+ h0 += getLongFromBytesWithLittleEndian(data, current);
+ case 0:
+ break;
+ default:
+ throw new AssertionError("Unexpected value for sequences: " + sequences);
+ }
+
+ current += SIZE_OF_LONG * sequences;
+
+ // read the last sequence of 0-7 bytes
+ // Intentional fall-through: assembles the trailing partial long, little-endian.
+ long last = 0;
+ switch (limit - current) {
+ case 7:
+ last |= (data[current + 6] & 0xFFL) << 48;
+ case 6:
+ last |= (data[current + 5] & 0xFFL) << 40;
+ case 5:
+ last |= (data[current + 4] & 0xFFL) << 32;
+ case 4:
+ last |= (data[current + 3] & 0xFFL) << 24;
+ case 3:
+ last |= (data[current + 2] & 0xFFL) << 16;
+ case 2:
+ last |= (data[current + 1] & 0xFFL) << 8;
+ case 1:
+ last |= (data[current] & 0xFFL);
+ case 0:
+ break;
+ default:
+ throw new AssertionError("Unexpected size for last sequence: " + (limit - current));
+ }
+
+ // The partial long is added to the lane just past the fully consumed ones.
+ switch (sequences) {
+ case 11:
+ h11 += last;
+ break;
+ case 10:
+ h10 += last;
+ break;
+ case 9:
+ h9 += last;
+ break;
+ case 8:
+ h8 += last;
+ break;
+ case 7:
+ h7 += last;
+ break;
+ case 6:
+ h6 += last;
+ break;
+ case 5:
+ h5 += last;
+ break;
+ case 4:
+ h4 += last;
+ break;
+ case 3:
+ h3 += last;
+ break;
+ case 2:
+ h2 += last;
+ break;
+ case 1:
+ h1 += last;
+ break;
+ case 0:
+ h0 += last;
+ break;
+ default:
+ throw new AssertionError("Unexpected value for sequences: " + sequences);
+ }
+
+ // Place "remaining" as the value of the last byte of the block
+ h11 += ((long) remaining) << 56;
+
+ // end 1
+ h11 += h1;
+ h2 ^= h11;
+ h1 = rotateLeft(h1, 44);
+ h0 += h2;
+ h3 ^= h0;
+ h2 = rotateLeft(h2, 15);
+ h1 += h3;
+ h4 ^= h1;
+ h3 = rotateLeft(h3, 34);
+ h2 += h4;
+ h5 ^= h2;
+ h4 = rotateLeft(h4, 21);
+ h3 += h5;
+ h6 ^= h3;
+ h5 = rotateLeft(h5, 38);
+ h4 += h6;
+ h7 ^= h4;
+ h6 = rotateLeft(h6, 33);
+ h5 += h7;
+ h8 ^= h5;
+ h7 = rotateLeft(h7, 10);
+ h6 += h8;
+ h9 ^= h6;
+ h8 = rotateLeft(h8, 13);
+ h7 += h9;
+ h10 ^= h7;
+ h9 = rotateLeft(h9, 38);
+ h8 += h10;
+ h11 ^= h8;
+ h10 = rotateLeft(h10, 53);
+ h9 += h11;
+ h0 ^= h9;
+ h11 = rotateLeft(h11, 42);
+ h10 += h0;
+ h1 ^= h10;
+ h0 = rotateLeft(h0, 54);
+
+ // end 2
+ h11 += h1;
+ h2 ^= h11;
+ h1 = rotateLeft(h1, 44);
+ h0 += h2;
+ h3 ^= h0;
+ h2 = rotateLeft(h2, 15);
+ h1 += h3;
+ h4 ^= h1;
+ h3 = rotateLeft(h3, 34);
+ h2 += h4;
+ h5 ^= h2;
+ h4 = rotateLeft(h4, 21);
+ h3 += h5;
+ h6 ^= h3;
+ h5 = rotateLeft(h5, 38);
+ h4 += h6;
+ h7 ^= h4;
+ h6 = rotateLeft(h6, 33);
+ h5 += h7;
+ h8 ^= h5;
+ h7 = rotateLeft(h7, 10);
+ h6 += h8;
+ h9 ^= h6;
+ h8 = rotateLeft(h8, 13);
+ h7 += h9;
+ h10 ^= h7;
+ h9 = rotateLeft(h9, 38);
+ h8 += h10;
+ h11 ^= h8;
+ h10 = rotateLeft(h10, 53);
+ h9 += h11;
+ h0 ^= h9;
+ h11 = rotateLeft(h11, 42);
+ h10 += h0;
+ h1 ^= h10;
+ h0 = rotateLeft(h0, 54);
+
+ // end 3
+ h11 += h1;
+ h2 ^= h11;
+ h1 = rotateLeft(h1, 44);
+ h0 += h2;
+ h3 ^= h0;
+ h2 = rotateLeft(h2, 15);
+ h1 += h3;
+ h4 ^= h1;
+ h3 = rotateLeft(h3, 34);
+ h2 += h4;
+ h5 ^= h2;
+ h4 = rotateLeft(h4, 21);
+ h3 += h5;
+ h6 ^= h3;
+ h5 = rotateLeft(h5, 38);
+ h4 += h6;
+ h7 ^= h4;
+ h6 = rotateLeft(h6, 33);
+ h5 += h7;
+ h8 ^= h5;
+ h7 = rotateLeft(h7, 10);
+ h6 += h8;
+ h9 ^= h6;
+ h8 = rotateLeft(h8, 13);
+ h7 += h9;
+ h10 ^= h7;
+ h9 = rotateLeft(h9, 38);
+ h8 += h10;
+ h11 ^= h8;
+ h10 = rotateLeft(h10, 53);
+ h9 += h11;
+ h0 ^= h9;
+ h11 = rotateLeft(h11, 42);
+ h10 += h0;
+ h1 ^= h10;
+ h0 = rotateLeft(h0, 54);
+
+ return h0;
+ }
+}
diff --git a/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/transformation/dag/column/unary/scalar/util/TransformerDebugUtils.java b/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/transformation/dag/column/unary/scalar/util/TransformerDebugUtils.java
new file mode 100644
index 000000000000..2ad8e6a29a88
--- /dev/null
+++ b/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/transformation/dag/column/unary/scalar/util/TransformerDebugUtils.java
@@ -0,0 +1,62 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar.util;
+
+import org.apache.iotdb.db.exception.sql.SemanticException;
+
+import org.apache.tsfile.common.conf.TSFileConfig;
+import org.apache.tsfile.read.common.type.Type;
+
+import static org.apache.iotdb.db.queryengine.plan.relational.metadata.TableMetadataImpl.isBlobType;
+import static org.apache.iotdb.db.queryengine.plan.relational.metadata.TableMetadataImpl.isCharType;
+
+/**
+ * A utility class for generating user-friendly debug information during the execution of
+ * ColumnTransformers, especially for error reporting.
+ */
+public final class TransformerDebugUtils {
+
+  private TransformerDebugUtils() {}
+
+  /**
+   * Generates a user-friendly string representation of a raw byte array based on its original data
+   * type. This is primarily used for creating informative error messages when a decoding operation
+   * fails.
+   *
+   * @param inputBytes the raw bytes to render
+   * @param originalType the type the bytes were produced from (character-based or BLOB)
+   * @return the bytes decoded as a string for character types, or a "0x"-prefixed hex string
+   * @throws SemanticException if the type is neither character-based nor BLOB
+   */
+  public static String generateOriginalValue(byte[] inputBytes, Type originalType) {
+
+    // If the original type was character-based, interpret it as a string.
+    if (isCharType(originalType)) {
+      return new String(inputBytes, TSFileConfig.STRING_CHARSET);
+    }
+
+    // If the original type was BLOB, represent it in hexadecimal format.
+    if (isBlobType(originalType)) {
+      // Presize: "0x" plus two hex characters per input byte.
+      StringBuilder hexString = new StringBuilder(2 + inputBytes.length * 2).append("0x");
+      for (byte inputByte : inputBytes) {
+        hexString.append(String.format("%02x", inputByte));
+      }
+      return hexString.toString();
+    }
+
+    // Fixed: the message previously referenced a nonexistent method name
+    // ("generateProblematicValueString()"); keep it in sync with the actual method.
+    throw new SemanticException(
+        "type " + originalType + " is not supported in generateOriginalValue()");
+  }
+}
diff --git a/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/transformation/dag/column/binary/hmac/HmacMd5ColumnTransformerTest.java b/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/transformation/dag/column/binary/hmac/HmacMd5ColumnTransformerTest.java
new file mode 100644
index 000000000000..ba6afb33c5a7
--- /dev/null
+++ b/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/transformation/dag/column/binary/hmac/HmacMd5ColumnTransformerTest.java
@@ -0,0 +1,420 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.iotdb.db.queryengine.transformation.dag.column.binary.hmac;
+
+import org.apache.iotdb.db.exception.sql.SemanticException;
+import org.apache.iotdb.db.queryengine.transformation.dag.column.ColumnTransformer;
+import org.apache.iotdb.db.queryengine.transformation.dag.column.binary.HmacColumnTransformer;
+import org.apache.iotdb.db.queryengine.transformation.dag.column.binary.factory.HmacStrategiesFactory;
+import org.apache.iotdb.db.queryengine.transformation.dag.column.binary.strategies.HmacStrategy;
+import org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar.HmacConstantKeyColumnTransformer;
+
+import org.apache.tsfile.block.column.Column;
+import org.apache.tsfile.block.column.ColumnBuilder;
+import org.apache.tsfile.common.conf.TSFileConfig;
+import org.apache.tsfile.read.common.block.column.BinaryColumnBuilder;
+import org.apache.tsfile.read.common.type.BlobType;
+import org.apache.tsfile.read.common.type.StringType;
+import org.apache.tsfile.read.common.type.Type;
+import org.apache.tsfile.utils.Binary;
+import org.junit.Assert;
+import org.junit.Test;
+import org.mockito.Mockito;
+
+import javax.crypto.Mac;
+import javax.crypto.spec.SecretKeySpec;
+
+import java.security.InvalidKeyException;
+import java.security.NoSuchAlgorithmException;
+
+import static org.junit.Assert.assertThrows;
+
+/**
+ * Unit tests for the HMAC-MD5 column transformers (variable-key and constant-key paths). Expected
+ * hashes are computed independently with {@link javax.crypto.Mac} so the transformer output is
+ * cross-checked against the JDK's reference implementation.
+ */
+public class HmacMd5ColumnTransformerTest {
+
+  private static final Type returnType = BlobType.BLOB;
+
+  /**
+   * Helper method to calculate the expected HMAC-MD5 hash using standard Java crypto libraries.
+   *
+   * @param data The message bytes.
+   * @param key The key bytes.
+   * @return The resulting HMAC-MD5 hash.
+   */
+  private byte[] calculateHmacMd5(byte[] data, byte[] key)
+      throws NoSuchAlgorithmException, InvalidKeyException {
+    if (key == null || key.length == 0) {
+      throw new InvalidKeyException("Key cannot be null or empty for HMAC-MD5 calculation.");
+    }
+    Mac mac = Mac.getInstance("HmacMD5");
+    SecretKeySpec secretKeySpec = new SecretKeySpec(key, "HmacMD5");
+    mac.init(secretKeySpec);
+    return mac.doFinal(data);
+  }
+
+  /** Helper method to create a mocked ColumnTransformer that returns a predefined Column. */
+  private ColumnTransformer mockColumnTransformer(Column column) {
+    ColumnTransformer mockTransformer = Mockito.mock(ColumnTransformer.class);
+    Mockito.when(mockTransformer.getColumn()).thenReturn(column);
+    // Ensure that tryEvaluate (or other evaluation methods) don't throw exceptions
+    Mockito.doNothing().when(mockTransformer).tryEvaluate();
+    Mockito.doNothing().when(mockTransformer).clearCache();
+    Mockito.when(mockTransformer.getColumnCachePositionCount())
+        .thenReturn(column.getPositionCount());
+    return mockTransformer;
+  }
+
+  /** Test case with standard STRING inputs for both data and key. */
+  @Test
+  public void testHmacMd5WithStringInputs() throws Exception {
+    String dataStr = "Hello IoTDB";
+    String keyStr = "secret_key";
+    byte[] dataBytes = dataStr.getBytes(TSFileConfig.STRING_CHARSET);
+    byte[] keyBytes = keyStr.getBytes(TSFileConfig.STRING_CHARSET);
+
+    Column dataColumn = new BinaryColumnBuilder(null, 1).writeBinary(new Binary(dataBytes)).build();
+    Column keyColumn = new BinaryColumnBuilder(null, 1).writeBinary(new Binary(keyBytes)).build();
+
+    ColumnTransformer dataTransformer = mockColumnTransformer(dataColumn);
+    ColumnTransformer keyTransformer = mockColumnTransformer(keyColumn);
+
+    HmacColumnTransformer hmacTransformer =
+        new HmacColumnTransformer(
+            returnType,
+            dataTransformer,
+            keyTransformer,
+            HmacStrategiesFactory.HMAC_MD5,
+            "hmac_md5",
+            StringType.STRING);
+
+    hmacTransformer.addReferenceCount();
+    hmacTransformer.evaluate();
+    Column resultColumn = hmacTransformer.getColumn();
+
+    byte[] expectedHash = calculateHmacMd5(dataBytes, keyBytes);
+
+    Assert.assertEquals(1, resultColumn.getPositionCount());
+    Assert.assertFalse(resultColumn.isNull(0));
+    Assert.assertArrayEquals(expectedHash, resultColumn.getBinary(0).getValues());
+  }
+
+  /** Test case with multi-row inputs, including NULL values for data and key. */
+  @Test
+  public void testHmacMd5WithMultiRowsAndNulls() throws Exception {
+    String[] dataStrings = {"data1", null, "data3", "data4"};
+    String[] keyStrings = {"key1", "key2", null, "key4"};
+
+    ColumnBuilder dataBuilder = new BinaryColumnBuilder(null, dataStrings.length);
+    ColumnBuilder keyBuilder = new BinaryColumnBuilder(null, keyStrings.length);
+
+    for (String s : dataStrings) {
+      if (s != null) {
+        dataBuilder.writeBinary(new Binary(s.getBytes(TSFileConfig.STRING_CHARSET)));
+      } else {
+        dataBuilder.appendNull();
+      }
+    }
+
+    for (String s : keyStrings) {
+      if (s != null) {
+        keyBuilder.writeBinary(new Binary(s.getBytes(TSFileConfig.STRING_CHARSET)));
+      } else {
+        keyBuilder.appendNull();
+      }
+    }
+
+    ColumnTransformer dataTransformer = mockColumnTransformer(dataBuilder.build());
+    ColumnTransformer keyTransformer = mockColumnTransformer(keyBuilder.build());
+
+    HmacColumnTransformer hmacTransformer =
+        new HmacColumnTransformer(
+            returnType,
+            dataTransformer,
+            keyTransformer,
+            HmacStrategiesFactory.HMAC_MD5,
+            "hmac_md5",
+            StringType.STRING);
+
+    hmacTransformer.addReferenceCount();
+    hmacTransformer.evaluate();
+    Column resultColumn = hmacTransformer.getColumn();
+
+    Assert.assertEquals(4, resultColumn.getPositionCount());
+
+    // Row 0: Valid data and key -> should have a valid hash
+    // Fixed: use the explicit charset so expectations match the column bytes on every platform
+    // (the bare String.getBytes() overload uses the platform-default charset).
+    byte[] expected0 =
+        calculateHmacMd5(
+            dataStrings[0].getBytes(TSFileConfig.STRING_CHARSET),
+            keyStrings[0].getBytes(TSFileConfig.STRING_CHARSET));
+    Assert.assertFalse(resultColumn.isNull(0));
+    Assert.assertArrayEquals(expected0, resultColumn.getBinary(0).getValues());
+
+    // Row 1: Null data -> result should be null
+    Assert.assertTrue(resultColumn.isNull(1));
+
+    // Row 2: Null key -> result should be null
+    Assert.assertTrue(resultColumn.isNull(2));
+
+    // Row 3: Valid data and key -> should have a valid hash
+    byte[] expected3 =
+        calculateHmacMd5(
+            dataStrings[3].getBytes(TSFileConfig.STRING_CHARSET),
+            keyStrings[3].getBytes(TSFileConfig.STRING_CHARSET));
+    Assert.assertFalse(resultColumn.isNull(3));
+    Assert.assertArrayEquals(expected3, resultColumn.getBinary(3).getValues());
+  }
+
+  /** Test case for an empty string as data, which is a valid input. */
+  @Test
+  public void testHmacMd5WithEmptyData() throws Exception {
+    byte[] dataBytes = "".getBytes(TSFileConfig.STRING_CHARSET);
+    byte[] keyBytes = "some_key".getBytes(TSFileConfig.STRING_CHARSET);
+
+    Column dataColumn = new BinaryColumnBuilder(null, 1).writeBinary(new Binary(dataBytes)).build();
+    Column keyColumn = new BinaryColumnBuilder(null, 1).writeBinary(new Binary(keyBytes)).build();
+
+    ColumnTransformer dataTransformer = mockColumnTransformer(dataColumn);
+    ColumnTransformer keyTransformer = mockColumnTransformer(keyColumn);
+
+    HmacColumnTransformer hmacTransformer =
+        new HmacColumnTransformer(
+            returnType,
+            dataTransformer,
+            keyTransformer,
+            HmacStrategiesFactory.HMAC_MD5,
+            "hmac_md5",
+            StringType.STRING);
+
+    hmacTransformer.addReferenceCount();
+    hmacTransformer.evaluate();
+    Column resultColumn = hmacTransformer.getColumn();
+
+    byte[] expectedHash = calculateHmacMd5(dataBytes, keyBytes);
+
+    Assert.assertEquals(1, resultColumn.getPositionCount());
+    Assert.assertFalse(resultColumn.isNull(0));
+    Assert.assertArrayEquals(expectedHash, resultColumn.getBinary(0).getValues());
+  }
+
+  /** An empty key is invalid for HMAC operations and should throw a SemanticException. */
+  @Test
+  public void testHmacMd5WithEmptyKeyThrowsException() {
+    byte[] dataBytes = "some_data".getBytes(TSFileConfig.STRING_CHARSET);
+    byte[] keyBytes = "".getBytes(TSFileConfig.STRING_CHARSET); // Empty key
+
+    Column dataColumn = new BinaryColumnBuilder(null, 1).writeBinary(new Binary(dataBytes)).build();
+    Column keyColumn = new BinaryColumnBuilder(null, 1).writeBinary(new Binary(keyBytes)).build();
+
+    ColumnTransformer dataTransformer = mockColumnTransformer(dataColumn);
+    ColumnTransformer keyTransformer = mockColumnTransformer(keyColumn);
+
+    HmacColumnTransformer hmacTransformer =
+        new HmacColumnTransformer(
+            returnType,
+            dataTransformer,
+            keyTransformer,
+            HmacStrategiesFactory.HMAC_MD5,
+            "hmac_md5",
+            StringType.STRING);
+
+    hmacTransformer.addReferenceCount();
+
+    // Assert that calling evaluate throws the expected exception
+    SemanticException thrown = assertThrows(SemanticException.class, hmacTransformer::evaluate);
+
+    Assert.assertTrue(
+        "The exception message should indicate that an empty key is not allowed.",
+        thrown.getMessage().contains("the empty key is not allowed in HMAC operation"));
+  }
+
+  /** Test case with Unicode characters in both data and key. */
+  @Test
+  public void testHmacMd5WithUnicode() throws Exception {
+    String dataStr = "你好世界";
+    String keyStr = "这是一个密钥";
+    byte[] dataBytes = dataStr.getBytes(TSFileConfig.STRING_CHARSET);
+    byte[] keyBytes = keyStr.getBytes(TSFileConfig.STRING_CHARSET);
+
+    Column dataColumn = new BinaryColumnBuilder(null, 1).writeBinary(new Binary(dataBytes)).build();
+    Column keyColumn = new BinaryColumnBuilder(null, 1).writeBinary(new Binary(keyBytes)).build();
+
+    ColumnTransformer dataTransformer = mockColumnTransformer(dataColumn);
+    ColumnTransformer keyTransformer = mockColumnTransformer(keyColumn);
+
+    HmacColumnTransformer hmacTransformer =
+        new HmacColumnTransformer(
+            returnType,
+            dataTransformer,
+            keyTransformer,
+            HmacStrategiesFactory.HMAC_MD5,
+            "hmac_md5",
+            StringType.STRING);
+
+    hmacTransformer.addReferenceCount();
+    hmacTransformer.evaluate();
+    Column resultColumn = hmacTransformer.getColumn();
+
+    byte[] expectedHash = calculateHmacMd5(dataBytes, keyBytes);
+
+    Assert.assertEquals(1, resultColumn.getPositionCount());
+    Assert.assertFalse(resultColumn.isNull(0));
+    Assert.assertArrayEquals(expectedHash, resultColumn.getBinary(0).getValues());
+  }
+
+  /** Test HMAC-MD5 with a selection array to process only a subset of rows. */
+  @Test
+  public void testHmacMd5WithSelection() throws Exception {
+    String[] dataStrings = {"Apache", "IoTDB", "rocks"};
+    String[] keyStrings = {"key1", "key2", "key3"};
+
+    ColumnBuilder dataBuilder = new BinaryColumnBuilder(null, dataStrings.length);
+    ColumnBuilder keyBuilder = new BinaryColumnBuilder(null, keyStrings.length);
+
+    for (String s : dataStrings) {
+      dataBuilder.writeBinary(new Binary(s.getBytes(TSFileConfig.STRING_CHARSET)));
+    }
+    for (String s : keyStrings) {
+      keyBuilder.writeBinary(new Binary(s.getBytes(TSFileConfig.STRING_CHARSET)));
+    }
+
+    ColumnTransformer dataTransformer = mockColumnTransformer(dataBuilder.build());
+    ColumnTransformer keyTransformer = mockColumnTransformer(keyBuilder.build());
+
+    HmacColumnTransformer hmacTransformer =
+        new HmacColumnTransformer(
+            returnType,
+            dataTransformer,
+            keyTransformer,
+            HmacStrategiesFactory.HMAC_MD5,
+            "hmac_md5",
+            StringType.STRING);
+
+    hmacTransformer.addReferenceCount();
+
+    // Select only the first and third rows for processing.
+    boolean[] selection = {true, false, true};
+    hmacTransformer.evaluateWithSelection(selection);
+    Column resultColumn = hmacTransformer.getColumn();
+
+    Assert.assertEquals(3, resultColumn.getPositionCount());
+
+    // Row 0: Selected -> should have a valid hash
+    // Fixed: explicit charset to stay consistent with the column construction above.
+    byte[] expected0 =
+        calculateHmacMd5(
+            dataStrings[0].getBytes(TSFileConfig.STRING_CHARSET),
+            keyStrings[0].getBytes(TSFileConfig.STRING_CHARSET));
+    Assert.assertFalse(resultColumn.isNull(0));
+    Assert.assertArrayEquals(expected0, resultColumn.getBinary(0).getValues());
+
+    // Row 1: Not selected -> result should be null
+    Assert.assertTrue(resultColumn.isNull(1));
+
+    // Row 2: Selected -> should have a valid hash
+    byte[] expected2 =
+        calculateHmacMd5(
+            dataStrings[2].getBytes(TSFileConfig.STRING_CHARSET),
+            keyStrings[2].getBytes(TSFileConfig.STRING_CHARSET));
+    Assert.assertFalse(resultColumn.isNull(2));
+    Assert.assertArrayEquals(expected2, resultColumn.getBinary(2).getValues());
+  }
+
+  /**
+   * Test case with multi-row inputs (including nulls) and a constant key. This tests the
+   * HmacConstantKeyColumnTransformer path.
+   */
+  @Test
+  public void testHmacMd5WithConstantKey_MultiRowAndNulls() throws Exception {
+    // 1. Arrange
+    String[] dataStrings = {"data1", null, "data3"};
+    String keyStr = "constant_secret";
+    byte[] keyBytes = keyStr.getBytes(TSFileConfig.STRING_CHARSET);
+
+    ColumnBuilder dataBuilder = new BinaryColumnBuilder(null, dataStrings.length);
+    for (String s : dataStrings) {
+      if (s != null) {
+        dataBuilder.writeBinary(new Binary(s.getBytes(TSFileConfig.STRING_CHARSET)));
+      } else {
+        dataBuilder.appendNull();
+      }
+    }
+    ColumnTransformer dataTransformer = mockColumnTransformer(dataBuilder.build());
+
+    // Get the optimized strategy for a constant key (reuse the precomputed key bytes)
+    HmacStrategy strategy = HmacStrategiesFactory.createConstantKeyHmacMd5Strategy(keyBytes);
+    HmacConstantKeyColumnTransformer hmacTransformer =
+        new HmacConstantKeyColumnTransformer(returnType, dataTransformer, strategy);
+
+    // 2. Act
+    hmacTransformer.addReferenceCount();
+    hmacTransformer.evaluate();
+    Column resultColumn = hmacTransformer.getColumn();
+
+    // 3. Assert
+    Assert.assertEquals(3, resultColumn.getPositionCount());
+
+    // Row 0: Valid data -> should have a valid hash
+    // Fixed: explicit charset (was platform-default getBytes()).
+    byte[] expected0 =
+        calculateHmacMd5(dataStrings[0].getBytes(TSFileConfig.STRING_CHARSET), keyBytes);
+    Assert.assertFalse(resultColumn.isNull(0));
+    Assert.assertArrayEquals(expected0, resultColumn.getBinary(0).getValues());
+
+    // Row 1: Null data -> result should be null
+    Assert.assertTrue(resultColumn.isNull(1));
+
+    // Row 2: Valid data -> should have a valid hash
+    byte[] expected2 =
+        calculateHmacMd5(dataStrings[2].getBytes(TSFileConfig.STRING_CHARSET), keyBytes);
+    Assert.assertFalse(resultColumn.isNull(2));
+    Assert.assertArrayEquals(expected2, resultColumn.getBinary(2).getValues());
+  }
+
+  /**
+   * Test HMAC-MD5 with a constant key and a selection array. This tests the
+   * HmacConstantKeyColumnTransformer path with selection.
+   */
+  @Test
+  public void testHmacMd5WithConstantKey_WithSelection() throws Exception {
+    // 1. Arrange
+    String[] dataStrings = {"Apache", "IoTDB", "rocks"};
+    String keyStr = "super_secret_key";
+    byte[] keyBytes = keyStr.getBytes(TSFileConfig.STRING_CHARSET);
+
+    ColumnBuilder dataBuilder = new BinaryColumnBuilder(null, dataStrings.length);
+    for (String s : dataStrings) {
+      dataBuilder.writeBinary(new Binary(s.getBytes(TSFileConfig.STRING_CHARSET)));
+    }
+    ColumnTransformer dataTransformer = mockColumnTransformer(dataBuilder.build());
+
+    HmacStrategy strategy = HmacStrategiesFactory.createConstantKeyHmacMd5Strategy(keyBytes);
+    HmacConstantKeyColumnTransformer hmacTransformer =
+        new HmacConstantKeyColumnTransformer(returnType, dataTransformer, strategy);
+
+    // 2. Act
+    hmacTransformer.addReferenceCount();
+    boolean[] selection = {true, false, true};
+    hmacTransformer.evaluateWithSelection(selection);
+    Column resultColumn = hmacTransformer.getColumn();
+
+    // 3. Assert
+    Assert.assertEquals(3, resultColumn.getPositionCount());
+
+    // Row 0: Selected -> should have a valid hash
+    // Fixed: explicit charset (was platform-default getBytes()).
+    byte[] expected0 =
+        calculateHmacMd5(dataStrings[0].getBytes(TSFileConfig.STRING_CHARSET), keyBytes);
+    Assert.assertFalse(resultColumn.isNull(0));
+    Assert.assertArrayEquals(expected0, resultColumn.getBinary(0).getValues());
+
+    // Row 1: Not selected -> result should be null
+    Assert.assertTrue(resultColumn.isNull(1));
+
+    // Row 2: Selected -> should have a valid hash
+    byte[] expected2 =
+        calculateHmacMd5(dataStrings[2].getBytes(TSFileConfig.STRING_CHARSET), keyBytes);
+    Assert.assertFalse(resultColumn.isNull(2));
+    Assert.assertArrayEquals(expected2, resultColumn.getBinary(2).getValues());
+  }
+}
diff --git a/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/transformation/dag/column/binary/hmac/HmacSha1ColumnTransformerTest.java b/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/transformation/dag/column/binary/hmac/HmacSha1ColumnTransformerTest.java
new file mode 100644
index 000000000000..e80367f52b9e
--- /dev/null
+++ b/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/transformation/dag/column/binary/hmac/HmacSha1ColumnTransformerTest.java
@@ -0,0 +1,420 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.iotdb.db.queryengine.transformation.dag.column.binary.hmac;
+
+import org.apache.iotdb.db.exception.sql.SemanticException;
+import org.apache.iotdb.db.queryengine.transformation.dag.column.ColumnTransformer;
+import org.apache.iotdb.db.queryengine.transformation.dag.column.binary.HmacColumnTransformer;
+import org.apache.iotdb.db.queryengine.transformation.dag.column.binary.factory.HmacStrategiesFactory;
+import org.apache.iotdb.db.queryengine.transformation.dag.column.binary.strategies.HmacStrategy;
+import org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar.HmacConstantKeyColumnTransformer;
+
+import org.apache.tsfile.block.column.Column;
+import org.apache.tsfile.block.column.ColumnBuilder;
+import org.apache.tsfile.common.conf.TSFileConfig;
+import org.apache.tsfile.read.common.block.column.BinaryColumnBuilder;
+import org.apache.tsfile.read.common.type.BlobType;
+import org.apache.tsfile.read.common.type.StringType;
+import org.apache.tsfile.read.common.type.Type;
+import org.apache.tsfile.utils.Binary;
+import org.junit.Assert;
+import org.junit.Test;
+import org.mockito.Mockito;
+
+import javax.crypto.Mac;
+import javax.crypto.spec.SecretKeySpec;
+
+import java.security.InvalidKeyException;
+import java.security.NoSuchAlgorithmException;
+
+import static org.junit.Assert.assertThrows;
+
+public class HmacSha1ColumnTransformerTest {
+
+  /** All HMAC transformers under test produce BLOB output. */
+  private static final Type RETURN_TYPE = BlobType.BLOB;
+
+  /**
+   * Calculates the expected HMAC-SHA1 hash using the standard JCA implementation, so the
+   * transformer under test is checked against an independent reference.
+   *
+   * @param data the message bytes
+   * @param key the key bytes; must be non-null and non-empty
+   * @return the resulting HMAC-SHA1 hash
+   * @throws InvalidKeyException if the key is null or empty
+   */
+  private static byte[] calculateHmacSha1(byte[] data, byte[] key)
+      throws NoSuchAlgorithmException, InvalidKeyException {
+    if (key == null || key.length == 0) {
+      throw new InvalidKeyException("Key cannot be null or empty for HMAC-SHA1 calculation.");
+    }
+    Mac mac = Mac.getInstance("HmacSHA1");
+    mac.init(new SecretKeySpec(key, "HmacSHA1"));
+    return mac.doFinal(data);
+  }
+
+  /**
+   * Encodes a string with the charset IoTDB uses for STRING values. Always used for both the
+   * column contents and the expected hashes, so the tests never depend on the platform default
+   * charset (plain {@code String.getBytes()} would).
+   */
+  private static byte[] bytes(String s) {
+    return s.getBytes(TSFileConfig.STRING_CHARSET);
+  }
+
+  /** Builds a single-row binary column holding {@code value}. */
+  private static Column singleRowColumn(byte[] value) {
+    return new BinaryColumnBuilder(null, 1).writeBinary(new Binary(value)).build();
+  }
+
+  /** Builds a binary column from the given strings; {@code null} entries become null rows. */
+  private static Column buildColumn(String[] values) {
+    ColumnBuilder builder = new BinaryColumnBuilder(null, values.length);
+    for (String value : values) {
+      if (value == null) {
+        builder.appendNull();
+      } else {
+        builder.writeBinary(new Binary(bytes(value)));
+      }
+    }
+    return builder.build();
+  }
+
+  /** Creates a mocked child ColumnTransformer that always yields the given column. */
+  private static ColumnTransformer mockColumnTransformer(Column column) {
+    ColumnTransformer mockTransformer = Mockito.mock(ColumnTransformer.class);
+    Mockito.when(mockTransformer.getColumn()).thenReturn(column);
+    // Evaluation hooks must be no-ops so the mock never interferes with the parent transformer.
+    Mockito.doNothing().when(mockTransformer).tryEvaluate();
+    Mockito.doNothing().when(mockTransformer).clearCache();
+    Mockito.when(mockTransformer.getColumnCachePositionCount())
+        .thenReturn(column.getPositionCount());
+    return mockTransformer;
+  }
+
+  /** Wires up an HMAC-SHA1 transformer over the given data and key children. */
+  private static HmacColumnTransformer newHmacSha1Transformer(
+      ColumnTransformer dataTransformer, ColumnTransformer keyTransformer) {
+    return new HmacColumnTransformer(
+        RETURN_TYPE,
+        dataTransformer,
+        keyTransformer,
+        HmacStrategiesFactory.HMAC_SHA1,
+        "hmac_sha1",
+        StringType.STRING);
+  }
+
+  /** Wires up the constant-key (optimized) HMAC-SHA1 transformer over the given data child. */
+  private static HmacConstantKeyColumnTransformer newConstantKeySha1Transformer(
+      ColumnTransformer dataTransformer, byte[] keyBytes) {
+    HmacStrategy strategy = HmacStrategiesFactory.createConstantKeyHmacSha1Strategy(keyBytes);
+    return new HmacConstantKeyColumnTransformer(RETURN_TYPE, dataTransformer, strategy);
+  }
+
+  /** Test case with standard STRING inputs for both data and key. */
+  @Test
+  public void testHmacSha1WithStringInputs() throws Exception {
+    byte[] dataBytes = bytes("Hello IoTDB");
+    byte[] keyBytes = bytes("secret_key");
+
+    HmacColumnTransformer hmacTransformer =
+        newHmacSha1Transformer(
+            mockColumnTransformer(singleRowColumn(dataBytes)),
+            mockColumnTransformer(singleRowColumn(keyBytes)));
+
+    hmacTransformer.addReferenceCount();
+    hmacTransformer.evaluate();
+    Column resultColumn = hmacTransformer.getColumn();
+
+    byte[] expectedHash = calculateHmacSha1(dataBytes, keyBytes);
+    Assert.assertEquals(1, resultColumn.getPositionCount());
+    Assert.assertFalse(resultColumn.isNull(0));
+    Assert.assertArrayEquals(expectedHash, resultColumn.getBinary(0).getValues());
+  }
+
+  /** Test case with multi-row inputs, including NULL values for data and key. */
+  @Test
+  public void testHmacSha1WithMultiRowsAndNulls() throws Exception {
+    String[] dataStrings = {"data1", null, "data3", "data4"};
+    String[] keyStrings = {"key1", "key2", null, "key4"};
+
+    HmacColumnTransformer hmacTransformer =
+        newHmacSha1Transformer(
+            mockColumnTransformer(buildColumn(dataStrings)),
+            mockColumnTransformer(buildColumn(keyStrings)));
+
+    hmacTransformer.addReferenceCount();
+    hmacTransformer.evaluate();
+    Column resultColumn = hmacTransformer.getColumn();
+
+    Assert.assertEquals(4, resultColumn.getPositionCount());
+
+    // Row 0: valid data and key -> valid hash.
+    byte[] expected0 = calculateHmacSha1(bytes(dataStrings[0]), bytes(keyStrings[0]));
+    Assert.assertFalse(resultColumn.isNull(0));
+    Assert.assertArrayEquals(expected0, resultColumn.getBinary(0).getValues());
+
+    // Row 1: null data -> result should be null.
+    Assert.assertTrue(resultColumn.isNull(1));
+
+    // Row 2: null key -> result should be null.
+    Assert.assertTrue(resultColumn.isNull(2));
+
+    // Row 3: valid data and key -> valid hash.
+    byte[] expected3 = calculateHmacSha1(bytes(dataStrings[3]), bytes(keyStrings[3]));
+    Assert.assertFalse(resultColumn.isNull(3));
+    Assert.assertArrayEquals(expected3, resultColumn.getBinary(3).getValues());
+  }
+
+  /** Test case for an empty string as data, which is a valid input. */
+  @Test
+  public void testHmacSha1WithEmptyData() throws Exception {
+    byte[] dataBytes = bytes("");
+    byte[] keyBytes = bytes("some_key");
+
+    HmacColumnTransformer hmacTransformer =
+        newHmacSha1Transformer(
+            mockColumnTransformer(singleRowColumn(dataBytes)),
+            mockColumnTransformer(singleRowColumn(keyBytes)));
+
+    hmacTransformer.addReferenceCount();
+    hmacTransformer.evaluate();
+    Column resultColumn = hmacTransformer.getColumn();
+
+    byte[] expectedHash = calculateHmacSha1(dataBytes, keyBytes);
+    Assert.assertEquals(1, resultColumn.getPositionCount());
+    Assert.assertFalse(resultColumn.isNull(0));
+    Assert.assertArrayEquals(expectedHash, resultColumn.getBinary(0).getValues());
+  }
+
+  /** An empty key is invalid for HMAC operations and should throw a SemanticException. */
+  @Test
+  public void testHmacSha1WithEmptyKeyThrowsException() {
+    byte[] dataBytes = bytes("some_data");
+    byte[] keyBytes = bytes(""); // Empty key
+
+    HmacColumnTransformer hmacTransformer =
+        newHmacSha1Transformer(
+            mockColumnTransformer(singleRowColumn(dataBytes)),
+            mockColumnTransformer(singleRowColumn(keyBytes)));
+
+    hmacTransformer.addReferenceCount();
+
+    // Assert that calling evaluate throws the expected exception.
+    SemanticException thrown = assertThrows(SemanticException.class, hmacTransformer::evaluate);
+
+    Assert.assertTrue(
+        "The exception message should indicate that an empty key is not allowed.",
+        thrown.getMessage().contains("the empty key is not allowed in HMAC operation"));
+  }
+
+  /** Test case with Unicode characters in both data and key. */
+  @Test
+  public void testHmacSha1WithUnicode() throws Exception {
+    byte[] dataBytes = bytes("你好世界");
+    byte[] keyBytes = bytes("这是一个密钥");
+
+    HmacColumnTransformer hmacTransformer =
+        newHmacSha1Transformer(
+            mockColumnTransformer(singleRowColumn(dataBytes)),
+            mockColumnTransformer(singleRowColumn(keyBytes)));
+
+    hmacTransformer.addReferenceCount();
+    hmacTransformer.evaluate();
+    Column resultColumn = hmacTransformer.getColumn();
+
+    byte[] expectedHash = calculateHmacSha1(dataBytes, keyBytes);
+    Assert.assertEquals(1, resultColumn.getPositionCount());
+    Assert.assertFalse(resultColumn.isNull(0));
+    Assert.assertArrayEquals(expectedHash, resultColumn.getBinary(0).getValues());
+  }
+
+  /** Test HMAC-SHA1 with a selection array to process only a subset of rows. */
+  @Test
+  public void testHmacSha1WithSelection() throws Exception {
+    String[] dataStrings = {"Apache", "IoTDB", "rocks"};
+    String[] keyStrings = {"key1", "key2", "key3"};
+
+    HmacColumnTransformer hmacTransformer =
+        newHmacSha1Transformer(
+            mockColumnTransformer(buildColumn(dataStrings)),
+            mockColumnTransformer(buildColumn(keyStrings)));
+
+    hmacTransformer.addReferenceCount();
+
+    // Select only the first and third rows for processing.
+    boolean[] selection = {true, false, true};
+    hmacTransformer.evaluateWithSelection(selection);
+    Column resultColumn = hmacTransformer.getColumn();
+
+    Assert.assertEquals(3, resultColumn.getPositionCount());
+
+    // Row 0: selected -> valid hash.
+    byte[] expected0 = calculateHmacSha1(bytes(dataStrings[0]), bytes(keyStrings[0]));
+    Assert.assertFalse(resultColumn.isNull(0));
+    Assert.assertArrayEquals(expected0, resultColumn.getBinary(0).getValues());
+
+    // Row 1: not selected -> result should be null.
+    Assert.assertTrue(resultColumn.isNull(1));
+
+    // Row 2: selected -> valid hash.
+    byte[] expected2 = calculateHmacSha1(bytes(dataStrings[2]), bytes(keyStrings[2]));
+    Assert.assertFalse(resultColumn.isNull(2));
+    Assert.assertArrayEquals(expected2, resultColumn.getBinary(2).getValues());
+  }
+
+  /**
+   * Test case with multi-row inputs (including nulls) and a constant key. This tests the
+   * HmacConstantKeyColumnTransformer path.
+   */
+  @Test
+  public void testHmacSha1WithConstantKey_MultiRowAndNulls() throws Exception {
+    // 1. Arrange
+    String[] dataStrings = {"data1", null, "data3"};
+    byte[] keyBytes = bytes("constant_secret");
+
+    HmacConstantKeyColumnTransformer hmacTransformer =
+        newConstantKeySha1Transformer(mockColumnTransformer(buildColumn(dataStrings)), keyBytes);
+
+    // 2. Act
+    hmacTransformer.addReferenceCount();
+    hmacTransformer.evaluate();
+    Column resultColumn = hmacTransformer.getColumn();
+
+    // 3. Assert
+    Assert.assertEquals(3, resultColumn.getPositionCount());
+
+    // Row 0: valid data -> valid hash.
+    byte[] expected0 = calculateHmacSha1(bytes(dataStrings[0]), keyBytes);
+    Assert.assertFalse(resultColumn.isNull(0));
+    Assert.assertArrayEquals(expected0, resultColumn.getBinary(0).getValues());
+
+    // Row 1: null data -> result should be null.
+    Assert.assertTrue(resultColumn.isNull(1));
+
+    // Row 2: valid data -> valid hash.
+    byte[] expected2 = calculateHmacSha1(bytes(dataStrings[2]), keyBytes);
+    Assert.assertFalse(resultColumn.isNull(2));
+    Assert.assertArrayEquals(expected2, resultColumn.getBinary(2).getValues());
+  }
+
+  /**
+   * Test HMAC-SHA1 with a constant key and a selection array. This tests the
+   * HmacConstantKeyColumnTransformer path with selection.
+   */
+  @Test
+  public void testHmacSha1WithConstantKey_WithSelection() throws Exception {
+    // 1. Arrange
+    String[] dataStrings = {"Apache", "IoTDB", "rocks"};
+    byte[] keyBytes = bytes("super_secret_key");
+
+    HmacConstantKeyColumnTransformer hmacTransformer =
+        newConstantKeySha1Transformer(mockColumnTransformer(buildColumn(dataStrings)), keyBytes);
+
+    // 2. Act
+    hmacTransformer.addReferenceCount();
+    boolean[] selection = {true, false, true};
+    hmacTransformer.evaluateWithSelection(selection);
+    Column resultColumn = hmacTransformer.getColumn();
+
+    // 3. Assert
+    Assert.assertEquals(3, resultColumn.getPositionCount());
+
+    // Row 0: selected -> valid hash.
+    byte[] expected0 = calculateHmacSha1(bytes(dataStrings[0]), keyBytes);
+    Assert.assertFalse(resultColumn.isNull(0));
+    Assert.assertArrayEquals(expected0, resultColumn.getBinary(0).getValues());
+
+    // Row 1: not selected -> result should be null.
+    Assert.assertTrue(resultColumn.isNull(1));
+
+    // Row 2: selected -> valid hash.
+    byte[] expected2 = calculateHmacSha1(bytes(dataStrings[2]), keyBytes);
+    Assert.assertFalse(resultColumn.isNull(2));
+    Assert.assertArrayEquals(expected2, resultColumn.getBinary(2).getValues());
+  }
+}
diff --git a/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/transformation/dag/column/binary/hmac/HmacSha256ColumnTransformerTest.java b/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/transformation/dag/column/binary/hmac/HmacSha256ColumnTransformerTest.java
new file mode 100644
index 000000000000..73792cabdb90
--- /dev/null
+++ b/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/transformation/dag/column/binary/hmac/HmacSha256ColumnTransformerTest.java
@@ -0,0 +1,420 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.iotdb.db.queryengine.transformation.dag.column.binary.hmac;
+
+import org.apache.iotdb.db.exception.sql.SemanticException;
+import org.apache.iotdb.db.queryengine.transformation.dag.column.ColumnTransformer;
+import org.apache.iotdb.db.queryengine.transformation.dag.column.binary.HmacColumnTransformer;
+import org.apache.iotdb.db.queryengine.transformation.dag.column.binary.factory.HmacStrategiesFactory;
+import org.apache.iotdb.db.queryengine.transformation.dag.column.binary.strategies.HmacStrategy;
+import org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar.HmacConstantKeyColumnTransformer;
+
+import org.apache.tsfile.block.column.Column;
+import org.apache.tsfile.block.column.ColumnBuilder;
+import org.apache.tsfile.common.conf.TSFileConfig;
+import org.apache.tsfile.read.common.block.column.BinaryColumnBuilder;
+import org.apache.tsfile.read.common.type.BlobType;
+import org.apache.tsfile.read.common.type.StringType;
+import org.apache.tsfile.read.common.type.Type;
+import org.apache.tsfile.utils.Binary;
+import org.junit.Assert;
+import org.junit.Test;
+import org.mockito.Mockito;
+
+import javax.crypto.Mac;
+import javax.crypto.spec.SecretKeySpec;
+
+import java.security.InvalidKeyException;
+import java.security.NoSuchAlgorithmException;
+
+import static org.junit.Assert.assertThrows;
+
+public class HmacSha256ColumnTransformerTest {
+
+  /** All HMAC transformers under test produce BLOB output. */
+  private static final Type RETURN_TYPE = BlobType.BLOB;
+
+  /**
+   * Calculates the expected HMAC-SHA256 hash using the standard JCA implementation, so the
+   * transformer under test is checked against an independent reference.
+   *
+   * @param data the message bytes
+   * @param key the key bytes; must be non-null and non-empty
+   * @return the resulting HMAC-SHA256 hash
+   * @throws InvalidKeyException if the key is null or empty
+   */
+  private static byte[] calculateHmacSha256(byte[] data, byte[] key)
+      throws NoSuchAlgorithmException, InvalidKeyException {
+    if (key == null || key.length == 0) {
+      throw new InvalidKeyException("Key cannot be null or empty for HMAC-SHA256 calculation.");
+    }
+    Mac mac = Mac.getInstance("HmacSHA256");
+    mac.init(new SecretKeySpec(key, "HmacSHA256"));
+    return mac.doFinal(data);
+  }
+
+  /**
+   * Encodes a string with the charset IoTDB uses for STRING values. Always used for both the
+   * column contents and the expected hashes, so the tests never depend on the platform default
+   * charset (plain {@code String.getBytes()} would).
+   */
+  private static byte[] bytes(String s) {
+    return s.getBytes(TSFileConfig.STRING_CHARSET);
+  }
+
+  /** Builds a single-row binary column holding {@code value}. */
+  private static Column singleRowColumn(byte[] value) {
+    return new BinaryColumnBuilder(null, 1).writeBinary(new Binary(value)).build();
+  }
+
+  /** Builds a binary column from the given strings; {@code null} entries become null rows. */
+  private static Column buildColumn(String[] values) {
+    ColumnBuilder builder = new BinaryColumnBuilder(null, values.length);
+    for (String value : values) {
+      if (value == null) {
+        builder.appendNull();
+      } else {
+        builder.writeBinary(new Binary(bytes(value)));
+      }
+    }
+    return builder.build();
+  }
+
+  /** Creates a mocked child ColumnTransformer that always yields the given column. */
+  private static ColumnTransformer mockColumnTransformer(Column column) {
+    ColumnTransformer mockTransformer = Mockito.mock(ColumnTransformer.class);
+    Mockito.when(mockTransformer.getColumn()).thenReturn(column);
+    // Evaluation hooks must be no-ops so the mock never interferes with the parent transformer.
+    Mockito.doNothing().when(mockTransformer).tryEvaluate();
+    Mockito.doNothing().when(mockTransformer).clearCache();
+    Mockito.when(mockTransformer.getColumnCachePositionCount())
+        .thenReturn(column.getPositionCount());
+    return mockTransformer;
+  }
+
+  /** Wires up an HMAC-SHA256 transformer over the given data and key children. */
+  private static HmacColumnTransformer newHmacSha256Transformer(
+      ColumnTransformer dataTransformer, ColumnTransformer keyTransformer) {
+    return new HmacColumnTransformer(
+        RETURN_TYPE,
+        dataTransformer,
+        keyTransformer,
+        HmacStrategiesFactory.HMAC_SHA256,
+        "hmac_sha256",
+        StringType.STRING);
+  }
+
+  /** Wires up the constant-key (optimized) HMAC-SHA256 transformer over the given data child. */
+  private static HmacConstantKeyColumnTransformer newConstantKeySha256Transformer(
+      ColumnTransformer dataTransformer, byte[] keyBytes) {
+    HmacStrategy strategy = HmacStrategiesFactory.createConstantKeyHmacSha256Strategy(keyBytes);
+    return new HmacConstantKeyColumnTransformer(RETURN_TYPE, dataTransformer, strategy);
+  }
+
+  /** Test case with standard STRING inputs for both data and key. */
+  @Test
+  public void testHmacSha256WithStringInputs() throws Exception {
+    byte[] dataBytes = bytes("Hello IoTDB");
+    byte[] keyBytes = bytes("secret_key");
+
+    HmacColumnTransformer hmacTransformer =
+        newHmacSha256Transformer(
+            mockColumnTransformer(singleRowColumn(dataBytes)),
+            mockColumnTransformer(singleRowColumn(keyBytes)));
+
+    hmacTransformer.addReferenceCount();
+    hmacTransformer.evaluate();
+    Column resultColumn = hmacTransformer.getColumn();
+
+    byte[] expectedHash = calculateHmacSha256(dataBytes, keyBytes);
+    Assert.assertEquals(1, resultColumn.getPositionCount());
+    Assert.assertFalse(resultColumn.isNull(0));
+    Assert.assertArrayEquals(expectedHash, resultColumn.getBinary(0).getValues());
+  }
+
+  /** Test case with multi-row inputs, including NULL values for data and key. */
+  @Test
+  public void testHmacSha256WithMultiRowsAndNulls() throws Exception {
+    String[] dataStrings = {"data1", null, "data3", "data4"};
+    String[] keyStrings = {"key1", "key2", null, "key4"};
+
+    HmacColumnTransformer hmacTransformer =
+        newHmacSha256Transformer(
+            mockColumnTransformer(buildColumn(dataStrings)),
+            mockColumnTransformer(buildColumn(keyStrings)));
+
+    hmacTransformer.addReferenceCount();
+    hmacTransformer.evaluate();
+    Column resultColumn = hmacTransformer.getColumn();
+
+    Assert.assertEquals(4, resultColumn.getPositionCount());
+
+    // Row 0: valid data and key -> valid hash.
+    byte[] expected0 = calculateHmacSha256(bytes(dataStrings[0]), bytes(keyStrings[0]));
+    Assert.assertFalse(resultColumn.isNull(0));
+    Assert.assertArrayEquals(expected0, resultColumn.getBinary(0).getValues());
+
+    // Row 1: null data -> result should be null.
+    Assert.assertTrue(resultColumn.isNull(1));
+
+    // Row 2: null key -> result should be null.
+    Assert.assertTrue(resultColumn.isNull(2));
+
+    // Row 3: valid data and key -> valid hash.
+    byte[] expected3 = calculateHmacSha256(bytes(dataStrings[3]), bytes(keyStrings[3]));
+    Assert.assertFalse(resultColumn.isNull(3));
+    Assert.assertArrayEquals(expected3, resultColumn.getBinary(3).getValues());
+  }
+
+  /** Test case for an empty string as data, which is a valid input. */
+  @Test
+  public void testHmacSha256WithEmptyData() throws Exception {
+    byte[] dataBytes = bytes("");
+    byte[] keyBytes = bytes("some_key");
+
+    HmacColumnTransformer hmacTransformer =
+        newHmacSha256Transformer(
+            mockColumnTransformer(singleRowColumn(dataBytes)),
+            mockColumnTransformer(singleRowColumn(keyBytes)));
+
+    hmacTransformer.addReferenceCount();
+    hmacTransformer.evaluate();
+    Column resultColumn = hmacTransformer.getColumn();
+
+    byte[] expectedHash = calculateHmacSha256(dataBytes, keyBytes);
+    Assert.assertEquals(1, resultColumn.getPositionCount());
+    Assert.assertFalse(resultColumn.isNull(0));
+    Assert.assertArrayEquals(expectedHash, resultColumn.getBinary(0).getValues());
+  }
+
+  /** An empty key is invalid for HMAC operations and should throw a SemanticException. */
+  @Test
+  public void testHmacSha256WithEmptyKeyThrowsException() {
+    byte[] dataBytes = bytes("some_data");
+    byte[] keyBytes = bytes(""); // Empty key
+
+    HmacColumnTransformer hmacTransformer =
+        newHmacSha256Transformer(
+            mockColumnTransformer(singleRowColumn(dataBytes)),
+            mockColumnTransformer(singleRowColumn(keyBytes)));
+
+    hmacTransformer.addReferenceCount();
+
+    // Assert that calling evaluate throws the expected exception.
+    SemanticException thrown = assertThrows(SemanticException.class, hmacTransformer::evaluate);
+
+    Assert.assertTrue(
+        "The exception message should indicate that an empty key is not allowed.",
+        thrown.getMessage().contains("the empty key is not allowed in HMAC operation"));
+  }
+
+  /** Test case with Unicode characters in both data and key. */
+  @Test
+  public void testHmacSha256WithUnicode() throws Exception {
+    byte[] dataBytes = bytes("你好世界");
+    byte[] keyBytes = bytes("这是一个密钥");
+
+    HmacColumnTransformer hmacTransformer =
+        newHmacSha256Transformer(
+            mockColumnTransformer(singleRowColumn(dataBytes)),
+            mockColumnTransformer(singleRowColumn(keyBytes)));
+
+    hmacTransformer.addReferenceCount();
+    hmacTransformer.evaluate();
+    Column resultColumn = hmacTransformer.getColumn();
+
+    byte[] expectedHash = calculateHmacSha256(dataBytes, keyBytes);
+    Assert.assertEquals(1, resultColumn.getPositionCount());
+    Assert.assertFalse(resultColumn.isNull(0));
+    Assert.assertArrayEquals(expectedHash, resultColumn.getBinary(0).getValues());
+  }
+
+  /** Test HMAC-SHA256 with a selection array to process only a subset of rows. */
+  @Test
+  public void testHmacSha256WithSelection() throws Exception {
+    String[] dataStrings = {"Apache", "IoTDB", "rocks"};
+    String[] keyStrings = {"key1", "key2", "key3"};
+
+    HmacColumnTransformer hmacTransformer =
+        newHmacSha256Transformer(
+            mockColumnTransformer(buildColumn(dataStrings)),
+            mockColumnTransformer(buildColumn(keyStrings)));
+
+    hmacTransformer.addReferenceCount();
+
+    // Select only the first and third rows for processing.
+    boolean[] selection = {true, false, true};
+    hmacTransformer.evaluateWithSelection(selection);
+    Column resultColumn = hmacTransformer.getColumn();
+
+    Assert.assertEquals(3, resultColumn.getPositionCount());
+
+    // Row 0: selected -> valid hash.
+    byte[] expected0 = calculateHmacSha256(bytes(dataStrings[0]), bytes(keyStrings[0]));
+    Assert.assertFalse(resultColumn.isNull(0));
+    Assert.assertArrayEquals(expected0, resultColumn.getBinary(0).getValues());
+
+    // Row 1: not selected -> result should be null.
+    Assert.assertTrue(resultColumn.isNull(1));
+
+    // Row 2: selected -> valid hash.
+    byte[] expected2 = calculateHmacSha256(bytes(dataStrings[2]), bytes(keyStrings[2]));
+    Assert.assertFalse(resultColumn.isNull(2));
+    Assert.assertArrayEquals(expected2, resultColumn.getBinary(2).getValues());
+  }
+
+  /**
+   * Test case with multi-row inputs (including nulls) and a constant key. This tests the
+   * HmacConstantKeyColumnTransformer path.
+   */
+  @Test
+  public void testHmacSha256WithConstantKey_MultiRowAndNulls() throws Exception {
+    // 1. Arrange
+    String[] dataStrings = {"data1", null, "data3"};
+    byte[] keyBytes = bytes("constant_secret");
+
+    HmacConstantKeyColumnTransformer hmacTransformer =
+        newConstantKeySha256Transformer(mockColumnTransformer(buildColumn(dataStrings)), keyBytes);
+
+    // 2. Act
+    hmacTransformer.addReferenceCount();
+    hmacTransformer.evaluate();
+    Column resultColumn = hmacTransformer.getColumn();
+
+    // 3. Assert
+    Assert.assertEquals(3, resultColumn.getPositionCount());
+
+    // Row 0: valid data -> valid hash.
+    byte[] expected0 = calculateHmacSha256(bytes(dataStrings[0]), keyBytes);
+    Assert.assertFalse(resultColumn.isNull(0));
+    Assert.assertArrayEquals(expected0, resultColumn.getBinary(0).getValues());
+
+    // Row 1: null data -> result should be null.
+    Assert.assertTrue(resultColumn.isNull(1));
+
+    // Row 2: valid data -> valid hash.
+    byte[] expected2 = calculateHmacSha256(bytes(dataStrings[2]), keyBytes);
+    Assert.assertFalse(resultColumn.isNull(2));
+    Assert.assertArrayEquals(expected2, resultColumn.getBinary(2).getValues());
+  }
+
+  /**
+   * Test HMAC-SHA256 with a constant key and a selection array. This tests the
+   * HmacConstantKeyColumnTransformer path with selection.
+   */
+  @Test
+  public void testHmacSha256WithConstantKey_WithSelection() throws Exception {
+    // 1. Arrange
+    String[] dataStrings = {"Apache", "IoTDB", "rocks"};
+    byte[] keyBytes = bytes("super_secret_key");
+
+    HmacConstantKeyColumnTransformer hmacTransformer =
+        newConstantKeySha256Transformer(mockColumnTransformer(buildColumn(dataStrings)), keyBytes);
+
+    // 2. Act
+    hmacTransformer.addReferenceCount();
+    boolean[] selection = {true, false, true};
+    hmacTransformer.evaluateWithSelection(selection);
+    Column resultColumn = hmacTransformer.getColumn();
+
+    // 3. Assert
+    Assert.assertEquals(3, resultColumn.getPositionCount());
+
+    // Row 0: selected -> valid hash.
+    byte[] expected0 = calculateHmacSha256(bytes(dataStrings[0]), keyBytes);
+    Assert.assertFalse(resultColumn.isNull(0));
+    Assert.assertArrayEquals(expected0, resultColumn.getBinary(0).getValues());
+
+    // Row 1: not selected -> result should be null.
+    Assert.assertTrue(resultColumn.isNull(1));
+
+    // Row 2: selected -> valid hash.
+    byte[] expected2 = calculateHmacSha256(bytes(dataStrings[2]), keyBytes);
+    Assert.assertFalse(resultColumn.isNull(2));
+    Assert.assertArrayEquals(expected2, resultColumn.getBinary(2).getValues());
+  }
+}
diff --git a/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/transformation/dag/column/binary/hmac/HmacSha512ColumnTransformerTest.java b/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/transformation/dag/column/binary/hmac/HmacSha512ColumnTransformerTest.java
new file mode 100644
index 000000000000..f88604ecaeae
--- /dev/null
+++ b/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/transformation/dag/column/binary/hmac/HmacSha512ColumnTransformerTest.java
@@ -0,0 +1,356 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.iotdb.db.queryengine.transformation.dag.column.binary.hmac;
+
+import org.apache.iotdb.db.exception.sql.SemanticException;
+import org.apache.iotdb.db.queryengine.transformation.dag.column.ColumnTransformer;
+import org.apache.iotdb.db.queryengine.transformation.dag.column.binary.HmacColumnTransformer;
+import org.apache.iotdb.db.queryengine.transformation.dag.column.binary.factory.HmacStrategiesFactory;
+import org.apache.iotdb.db.queryengine.transformation.dag.column.binary.strategies.HmacStrategy;
+import org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar.HmacConstantKeyColumnTransformer;
+
+import org.apache.tsfile.block.column.Column;
+import org.apache.tsfile.block.column.ColumnBuilder;
+import org.apache.tsfile.common.conf.TSFileConfig;
+import org.apache.tsfile.read.common.block.column.BinaryColumnBuilder;
+import org.apache.tsfile.read.common.type.BlobType;
+import org.apache.tsfile.read.common.type.StringType;
+import org.apache.tsfile.read.common.type.Type;
+import org.apache.tsfile.utils.Binary;
+import org.junit.Assert;
+import org.junit.Test;
+import org.mockito.Mockito;
+
+import javax.crypto.Mac;
+import javax.crypto.spec.SecretKeySpec;
+
+import java.security.InvalidKeyException;
+import java.security.NoSuchAlgorithmException;
+
+import static org.junit.Assert.assertThrows;
+
+public class HmacSha512ColumnTransformerTest {
+
+ private static final Type returnType = BlobType.BLOB;
+
+ /**
+ * Helper method to calculate the expected HMAC-SHA512 hash using standard Java crypto libraries.
+ *
+ * @param data The message bytes to authenticate.
+ * @param key The key bytes; must be non-null and non-empty, since HMAC requires key material.
+ * @return The 64-byte HMAC-SHA512 digest of {@code data} keyed with {@code key}.
+ */
+ private byte[] calculateHmacSha512(byte[] data, byte[] key)
+ throws NoSuchAlgorithmException, InvalidKeyException {
+ if (key == null || key.length == 0) {
+ throw new InvalidKeyException("Key cannot be null or empty for HMAC-SHA512 calculation.");
+ }
+ Mac mac = Mac.getInstance("HmacSHA512");
+ SecretKeySpec secretKeySpec = new SecretKeySpec(key, "HmacSHA512");
+ mac.init(secretKeySpec);
+ return mac.doFinal(data);
+ }
+
+ /** Builds a Mockito stub ColumnTransformer whose getColumn() always yields the given Column. */
+ private ColumnTransformer mockColumnTransformer(Column column) {
+ ColumnTransformer mockTransformer = Mockito.mock(ColumnTransformer.class);
+ Mockito.when(mockTransformer.getColumn()).thenReturn(column);
+ // Stub the lifecycle hooks as no-ops so evaluating the transformer under test cannot fail here
+ Mockito.doNothing().when(mockTransformer).tryEvaluate();
+ Mockito.doNothing().when(mockTransformer).clearCache();
+ Mockito.when(mockTransformer.getColumnCachePositionCount())
+ .thenReturn(column.getPositionCount());
+ return mockTransformer;
+ }
+
+ // region Tests for (Column, Column) scenario using HmacColumnTransformer
+
+ /** Single-row STRING data and key: the result must equal the JDK-computed HMAC-SHA512 digest. */
+ @Test
+ public void testHmacSha512WithStringInputs() throws Exception {
+ String dataStr = "Hello IoTDB";
+ String keyStr = "secret_key";
+ byte[] dataBytes = dataStr.getBytes(TSFileConfig.STRING_CHARSET);
+ byte[] keyBytes = keyStr.getBytes(TSFileConfig.STRING_CHARSET);
+
+ Column dataColumn = new BinaryColumnBuilder(null, 1).writeBinary(new Binary(dataBytes)).build();
+ Column keyColumn = new BinaryColumnBuilder(null, 1).writeBinary(new Binary(keyBytes)).build();
+
+ ColumnTransformer dataTransformer = mockColumnTransformer(dataColumn);
+ ColumnTransformer keyTransformer = mockColumnTransformer(keyColumn);
+
+ HmacColumnTransformer hmacTransformer =
+ new HmacColumnTransformer(
+ returnType,
+ dataTransformer,
+ keyTransformer,
+ HmacStrategiesFactory.HMAC_SHA512,
+ "hmac_sha512",
+ StringType.STRING);
+
+ hmacTransformer.addReferenceCount();
+ hmacTransformer.evaluate();
+ Column resultColumn = hmacTransformer.getColumn();
+
+ byte[] expectedHash = calculateHmacSha512(dataBytes, keyBytes);
+
+ Assert.assertEquals(1, resultColumn.getPositionCount());
+ Assert.assertFalse(resultColumn.isNull(0));
+ Assert.assertArrayEquals(expectedHash, resultColumn.getBinary(0).getValues());
+ }
+
+ /** Multi-row data and key inputs, including NULLs: a NULL in either input yields a NULL row. */
+ @Test
+ public void testHmacSha512WithMultiRowsAndNulls() throws Exception {
+ String[] dataStrings = {"data1", null, "data3", "data4"};
+ String[] keyStrings = {"key1", "key2", null, "key4"};
+
+ ColumnBuilder dataBuilder = new BinaryColumnBuilder(null, dataStrings.length);
+ ColumnBuilder keyBuilder = new BinaryColumnBuilder(null, keyStrings.length);
+
+ for (String s : dataStrings) {
+ if (s != null) {
+ dataBuilder.writeBinary(new Binary(s.getBytes(TSFileConfig.STRING_CHARSET)));
+ } else {
+ dataBuilder.appendNull();
+ }
+ }
+
+ for (String s : keyStrings) {
+ if (s != null) {
+ keyBuilder.writeBinary(new Binary(s.getBytes(TSFileConfig.STRING_CHARSET)));
+ } else {
+ keyBuilder.appendNull();
+ }
+ }
+
+ ColumnTransformer dataTransformer = mockColumnTransformer(dataBuilder.build());
+ ColumnTransformer keyTransformer = mockColumnTransformer(keyBuilder.build());
+
+ HmacColumnTransformer hmacTransformer =
+ new HmacColumnTransformer(
+ returnType,
+ dataTransformer,
+ keyTransformer,
+ HmacStrategiesFactory.HMAC_SHA512,
+ "hmac_sha512",
+ StringType.STRING);
+
+ hmacTransformer.addReferenceCount();
+ hmacTransformer.evaluate();
+ Column resultColumn = hmacTransformer.getColumn();
+
+ Assert.assertEquals(4, resultColumn.getPositionCount());
+
+ // Row 0: Valid data and key -> should have a valid hash (encode with the same charset as the columns)
+ byte[] expected0 = calculateHmacSha512(dataStrings[0].getBytes(TSFileConfig.STRING_CHARSET), keyStrings[0].getBytes(TSFileConfig.STRING_CHARSET));
+ Assert.assertFalse(resultColumn.isNull(0));
+ Assert.assertArrayEquals(expected0, resultColumn.getBinary(0).getValues());
+
+ // Row 1: Null data -> result should be null
+ Assert.assertTrue(resultColumn.isNull(1));
+
+ // Row 2: Null key -> result should be null
+ Assert.assertTrue(resultColumn.isNull(2));
+
+ // Row 3: Valid data and key -> should have a valid hash
+ byte[] expected3 = calculateHmacSha512(dataStrings[3].getBytes(TSFileConfig.STRING_CHARSET), keyStrings[3].getBytes(TSFileConfig.STRING_CHARSET));
+ Assert.assertFalse(resultColumn.isNull(3));
+ Assert.assertArrayEquals(expected3, resultColumn.getBinary(3).getValues());
+ }
+
+ /** An empty key is invalid for HMAC operations; evaluate() must fail with a SemanticException. */
+ @Test
+ public void testHmacSha512WithEmptyKeyThrowsException() {
+ byte[] dataBytes = "some_data".getBytes(TSFileConfig.STRING_CHARSET);
+ byte[] keyBytes = "".getBytes(TSFileConfig.STRING_CHARSET); // Empty key
+
+ Column dataColumn = new BinaryColumnBuilder(null, 1).writeBinary(new Binary(dataBytes)).build();
+ Column keyColumn = new BinaryColumnBuilder(null, 1).writeBinary(new Binary(keyBytes)).build();
+
+ ColumnTransformer dataTransformer = mockColumnTransformer(dataColumn);
+ ColumnTransformer keyTransformer = mockColumnTransformer(keyColumn);
+
+ HmacColumnTransformer hmacTransformer =
+ new HmacColumnTransformer(
+ returnType,
+ dataTransformer,
+ keyTransformer,
+ HmacStrategiesFactory.HMAC_SHA512,
+ "hmac_sha512",
+ StringType.STRING);
+
+ hmacTransformer.addReferenceCount();
+
+ // Assert that calling evaluate throws the expected exception
+ SemanticException thrown = assertThrows(SemanticException.class, hmacTransformer::evaluate);
+
+ Assert.assertTrue(
+ "The exception message should indicate that an empty key is not allowed.",
+ thrown.getMessage().contains("the empty key is not allowed in HMAC operation"));
+ }
+
+ /** Test HMAC-SHA512 with a selection array: unselected rows must come back as NULL. */
+ @Test
+ public void testHmacSha512WithSelection() throws Exception {
+ String[] dataStrings = {"Apache", "IoTDB", "rocks"};
+ String[] keyStrings = {"key1", "key2", "key3"};
+
+ ColumnBuilder dataBuilder = new BinaryColumnBuilder(null, dataStrings.length);
+ ColumnBuilder keyBuilder = new BinaryColumnBuilder(null, keyStrings.length);
+
+ for (String s : dataStrings) {
+ dataBuilder.writeBinary(new Binary(s.getBytes(TSFileConfig.STRING_CHARSET)));
+ }
+ for (String s : keyStrings) {
+ keyBuilder.writeBinary(new Binary(s.getBytes(TSFileConfig.STRING_CHARSET)));
+ }
+
+ ColumnTransformer dataTransformer = mockColumnTransformer(dataBuilder.build());
+ ColumnTransformer keyTransformer = mockColumnTransformer(keyBuilder.build());
+
+ HmacColumnTransformer hmacTransformer =
+ new HmacColumnTransformer(
+ returnType,
+ dataTransformer,
+ keyTransformer,
+ HmacStrategiesFactory.HMAC_SHA512,
+ "hmac_sha512",
+ StringType.STRING);
+
+ hmacTransformer.addReferenceCount();
+
+ // Select only the first and third rows for processing.
+ boolean[] selection = {true, false, true};
+ hmacTransformer.evaluateWithSelection(selection);
+ Column resultColumn = hmacTransformer.getColumn();
+
+ Assert.assertEquals(3, resultColumn.getPositionCount());
+
+ // Row 0: Selected -> should have a valid hash (encode with the same charset as the columns)
+ byte[] expected0 = calculateHmacSha512(dataStrings[0].getBytes(TSFileConfig.STRING_CHARSET), keyStrings[0].getBytes(TSFileConfig.STRING_CHARSET));
+ Assert.assertFalse(resultColumn.isNull(0));
+ Assert.assertArrayEquals(expected0, resultColumn.getBinary(0).getValues());
+
+ // Row 1: Not selected -> result should be null
+ Assert.assertTrue(resultColumn.isNull(1));
+
+ // Row 2: Selected -> should have a valid hash
+ byte[] expected2 = calculateHmacSha512(dataStrings[2].getBytes(TSFileConfig.STRING_CHARSET), keyStrings[2].getBytes(TSFileConfig.STRING_CHARSET));
+ Assert.assertFalse(resultColumn.isNull(2));
+ Assert.assertArrayEquals(expected2, resultColumn.getBinary(2).getValues());
+ }
+
+ /**
+ * Test case with multi-row inputs (including nulls) and a constant key. This tests the
+ * HmacConstantKeyColumnTransformer path.
+ */
+ @Test
+ public void testHmacSha512WithConstantKey_MultiRowAndNulls() throws Exception {
+ // 1. Arrange
+ String[] dataStrings = {"data1", null, "data3"};
+ String keyStr = "constant_secret";
+ byte[] keyBytes = keyStr.getBytes(TSFileConfig.STRING_CHARSET);
+
+ ColumnBuilder dataBuilder = new BinaryColumnBuilder(null, dataStrings.length);
+ for (String s : dataStrings) {
+ if (s != null) {
+ dataBuilder.writeBinary(new Binary(s.getBytes(TSFileConfig.STRING_CHARSET)));
+ } else {
+ dataBuilder.appendNull();
+ }
+ }
+ ColumnTransformer dataTransformer = mockColumnTransformer(dataBuilder.build());
+
+ // Get the optimized strategy for a constant key (reuse the precomputed key bytes)
+ HmacStrategy strategy =
+ HmacStrategiesFactory.createConstantKeyHmacSha512Strategy(
+ keyBytes);
+ HmacConstantKeyColumnTransformer hmacTransformer =
+ new HmacConstantKeyColumnTransformer(returnType, dataTransformer, strategy);
+
+ // 2. Act
+ hmacTransformer.addReferenceCount();
+ hmacTransformer.evaluate();
+ Column resultColumn = hmacTransformer.getColumn();
+
+ // 3. Assert
+ Assert.assertEquals(3, resultColumn.getPositionCount());
+
+ // Row 0: Valid data -> should have a valid hash (encode with the same charset as the column)
+ byte[] expected0 = calculateHmacSha512(dataStrings[0].getBytes(TSFileConfig.STRING_CHARSET), keyBytes);
+ Assert.assertFalse(resultColumn.isNull(0));
+ Assert.assertArrayEquals(expected0, resultColumn.getBinary(0).getValues());
+
+ // Row 1: Null data -> result should be null
+ Assert.assertTrue(resultColumn.isNull(1));
+
+ // Row 2: Valid data -> should have a valid hash
+ byte[] expected2 = calculateHmacSha512(dataStrings[2].getBytes(TSFileConfig.STRING_CHARSET), keyBytes);
+ Assert.assertFalse(resultColumn.isNull(2));
+ Assert.assertArrayEquals(expected2, resultColumn.getBinary(2).getValues());
+ }
+
+ /**
+ * Test HMAC-SHA512 with a constant key and a selection array. This tests the
+ * HmacConstantKeyColumnTransformer path with selection.
+ */
+ @Test
+ public void testHmacSha512WithConstantKey_WithSelection() throws Exception {
+ // 1. Arrange
+ String[] dataStrings = {"Apache", "IoTDB", "rocks"};
+ String keyStr = "super_secret_key";
+ byte[] keyBytes = keyStr.getBytes(TSFileConfig.STRING_CHARSET);
+
+ ColumnBuilder dataBuilder = new BinaryColumnBuilder(null, dataStrings.length);
+ for (String s : dataStrings) {
+ dataBuilder.writeBinary(new Binary(s.getBytes(TSFileConfig.STRING_CHARSET)));
+ }
+ ColumnTransformer dataTransformer = mockColumnTransformer(dataBuilder.build());
+
+ HmacStrategy strategy =
+ HmacStrategiesFactory.createConstantKeyHmacSha512Strategy(
+ keyBytes);
+ HmacConstantKeyColumnTransformer hmacTransformer =
+ new HmacConstantKeyColumnTransformer(returnType, dataTransformer, strategy);
+
+ // 2. Act
+ hmacTransformer.addReferenceCount();
+ boolean[] selection = {true, false, true};
+ hmacTransformer.evaluateWithSelection(selection);
+ Column resultColumn = hmacTransformer.getColumn();
+
+ // 3. Assert
+ Assert.assertEquals(3, resultColumn.getPositionCount());
+
+ // Row 0: Selected -> should have a valid hash (encode with the same charset as the column)
+ byte[] expected0 = calculateHmacSha512(dataStrings[0].getBytes(TSFileConfig.STRING_CHARSET), keyBytes);
+ Assert.assertFalse(resultColumn.isNull(0));
+ Assert.assertArrayEquals(expected0, resultColumn.getBinary(0).getValues());
+
+ // Row 1: Not selected -> result should be null
+ Assert.assertTrue(resultColumn.isNull(1));
+
+ // Row 2: Selected -> should have a valid hash
+ byte[] expected2 = calculateHmacSha512(dataStrings[2].getBytes(TSFileConfig.STRING_CHARSET), keyBytes);
+ Assert.assertFalse(resultColumn.isNull(2));
+ Assert.assertArrayEquals(expected2, resultColumn.getBinary(2).getValues());
+ }
+}
diff --git a/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/transformation/dag/column/ternary/LpadColumnTransformerTest.java b/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/transformation/dag/column/ternary/LpadColumnTransformerTest.java
new file mode 100644
index 000000000000..6640ef79ed18
--- /dev/null
+++ b/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/transformation/dag/column/ternary/LpadColumnTransformerTest.java
@@ -0,0 +1,303 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.iotdb.db.queryengine.transformation.dag.column.ternary;
+
+import org.apache.iotdb.db.exception.sql.SemanticException;
+import org.apache.iotdb.db.queryengine.transformation.dag.column.ColumnTransformer;
+
+import org.apache.tsfile.block.column.Column;
+import org.apache.tsfile.read.common.block.column.BinaryColumn;
+import org.apache.tsfile.read.common.block.column.IntColumn;
+import org.apache.tsfile.read.common.type.BlobType;
+import org.apache.tsfile.utils.Binary;
+import org.junit.Assert;
+import org.junit.Test;
+import org.mockito.Mockito;
+
+import java.util.Optional;
+
+// Unit tests for LpadColumnTransformer covering padding, truncation, nulls, selection, and errors.
+// Upstream column transformers are mocked so only the LPAD logic itself is exercised.
+public class LpadColumnTransformerTest {
+
+ // Builds a Mockito stub ColumnTransformer whose getColumn() always yields the given Column.
+ private ColumnTransformer mockColumnTransformer(Column column) {
+ ColumnTransformer mockTransformer = Mockito.mock(ColumnTransformer.class);
+ Mockito.when(mockTransformer.getColumn()).thenReturn(column);
+ // Stub lifecycle hooks as no-ops so evaluating the transformer under test cannot fail here
+ Mockito.doNothing().when(mockTransformer).tryEvaluate();
+ Mockito.doNothing().when(mockTransformer).clearCache();
+ Mockito.doNothing().when(mockTransformer).evaluateWithSelection(Mockito.any());
+ Mockito.when(mockTransformer.getColumnCachePositionCount())
+ .thenReturn(column.getPositionCount());
+ return mockTransformer;
+ }
+
+ // Helper method to convert a hex string to a byte array; fails fast on malformed hex input.
+ private static byte[] hexToBytes(String s) {
+ if (s == null || s.isEmpty()) {
+ return new byte[0];
+ }
+ int len = s.length();
+ if (len % 2 != 0) {
+ throw new IllegalArgumentException("Hex string must have an even number of characters");
+ }
+ byte[] data = new byte[len / 2];
+ for (int i = 0; i < len; i += 2) {
+ int hi = Character.digit(s.charAt(i), 16), lo = Character.digit(s.charAt(i + 1), 16); // -1 if not hex
+ if (hi < 0 || lo < 0) throw new IllegalArgumentException("Invalid hex character in: " + s); else data[i / 2] = (byte) ((hi << 4) + lo);
+ }
+ return data;
+ }
+
+ /** Padding logic (data length < size): pad bytes are prepended, repeating and truncating as needed. */
+ @Test
+ public void testPaddingCases() {
+ // Setup input columns
+ Binary[] dataValues = {
+ new Binary(hexToBytes("AABB")), // Simple padding
+ new Binary(hexToBytes("FF")), // Full repetition of pad
+ new Binary(hexToBytes("FF")), // Truncated repetition of pad
+ new Binary(hexToBytes("")) // Padding an empty blob
+ };
+ int[] sizeValues = {5, 7, 6, 4};
+ Binary[] padValues = {
+ new Binary(hexToBytes("00")),
+ new Binary(hexToBytes("123456")),
+ new Binary(hexToBytes("123456")),
+ new Binary(hexToBytes("AB"))
+ };
+
+ Column dataColumn = new BinaryColumn(dataValues.length, Optional.empty(), dataValues);
+ Column sizeColumn = new IntColumn(sizeValues.length, Optional.empty(), sizeValues);
+ Column padColumn = new BinaryColumn(padValues.length, Optional.empty(), padValues);
+
+ // Mock transformers
+ ColumnTransformer dataTransformer = mockColumnTransformer(dataColumn);
+ ColumnTransformer sizeTransformer = mockColumnTransformer(sizeColumn);
+ ColumnTransformer padTransformer = mockColumnTransformer(padColumn);
+
+ // Assuming LpadColumnTransformer exists and has this constructor
+ LpadColumnTransformer lpadTransformer =
+ new LpadColumnTransformer(BlobType.BLOB, dataTransformer, sizeTransformer, padTransformer);
+ lpadTransformer.addReferenceCount();
+ lpadTransformer.evaluate();
+ Column resultColumn = lpadTransformer.getColumn();
+
+ // Expected results: LPAD prepends pad bytes so the total length equals the requested size
+ byte[][] expectedValues = {
+ hexToBytes("000000AABB"),
+ hexToBytes("123456123456FF"),
+ hexToBytes("1234561234FF"),
+ hexToBytes("ABABABAB")
+ };
+
+ // Assertions
+ Assert.assertEquals(4, resultColumn.getPositionCount());
+ for (int i = 0; i < 4; i++) {
+ Assert.assertFalse(resultColumn.isNull(i));
+ Assert.assertArrayEquals(expectedValues[i], resultColumn.getBinary(i).getValues());
+ }
+ }
+
+ /** Truncation logic (data length > size): only the leftmost {@code size} bytes are kept; size 0 yields empty. */
+ @Test
+ public void testTruncationCases() {
+ Binary[] dataValues = {
+ new Binary(hexToBytes("0102030405060708")), new Binary(hexToBytes("AABB"))
+ };
+ int[] sizeValues = {4, 0};
+ Binary[] padValues = {new Binary(hexToBytes("FF")), new Binary(hexToBytes("00"))};
+
+ Column dataColumn = new BinaryColumn(2, Optional.empty(), dataValues);
+ Column sizeColumn = new IntColumn(2, Optional.empty(), sizeValues);
+ Column padColumn = new BinaryColumn(2, Optional.empty(), padValues);
+
+ LpadColumnTransformer transformer =
+ new LpadColumnTransformer(
+ BlobType.BLOB,
+ mockColumnTransformer(dataColumn),
+ mockColumnTransformer(sizeColumn),
+ mockColumnTransformer(padColumn));
+ transformer.addReferenceCount();
+ transformer.evaluate();
+ Column resultColumn = transformer.getColumn();
+
+ byte[][] expectedValues = {hexToBytes("01020304"), hexToBytes("")};
+
+ Assert.assertEquals(2, resultColumn.getPositionCount());
+ Assert.assertFalse(resultColumn.isNull(0));
+ Assert.assertArrayEquals(expectedValues[0], resultColumn.getBinary(0).getValues());
+ Assert.assertFalse(resultColumn.isNull(1));
+ Assert.assertArrayEquals(expectedValues[1], resultColumn.getBinary(1).getValues());
+ }
+
+ /** When data length already equals the target size, the data must be returned unchanged. */
+ @Test
+ public void testEqualLengthCase() {
+ Binary[] dataValues = {new Binary(hexToBytes("ABCDEF"))};
+ int[] sizeValues = {3};
+ Binary[] padValues = {new Binary(hexToBytes("00"))};
+
+ Column dataColumn = new BinaryColumn(1, Optional.empty(), dataValues);
+ Column sizeColumn = new IntColumn(1, Optional.empty(), sizeValues);
+ Column padColumn = new BinaryColumn(1, Optional.empty(), padValues);
+
+ LpadColumnTransformer transformer =
+ new LpadColumnTransformer(
+ BlobType.BLOB,
+ mockColumnTransformer(dataColumn),
+ mockColumnTransformer(sizeColumn),
+ mockColumnTransformer(padColumn));
+ transformer.addReferenceCount();
+ transformer.evaluate();
+ Column resultColumn = transformer.getColumn();
+
+ Assert.assertEquals(1, resultColumn.getPositionCount());
+ Assert.assertFalse(resultColumn.isNull(0));
+ Assert.assertArrayEquals(dataValues[0].getValues(), resultColumn.getBinary(0).getValues());
+ }
+
+ /** A NULL in any argument (data, size, or pad) must make the corresponding output row NULL. */
+ @Test
+ public void testNullInputCases() {
+ // One case for each argument being null
+ Binary[] dataValues = {null, new Binary(hexToBytes("AA")), new Binary(hexToBytes("BB"))};
+ boolean[] dataIsNull = {true, false, false};
+
+ int[] sizeValues = {5, 0, 5}; // Using 0 for the second case to avoid NPE on null size
+ boolean[] sizeIsNull = {false, true, false};
+
+ Binary[] padValues = {new Binary(hexToBytes("00")), new Binary(hexToBytes("00")), null};
+ boolean[] padIsNull = {false, false, true};
+
+ Column dataColumn = new BinaryColumn(3, Optional.of(dataIsNull), dataValues);
+ Column sizeColumn = new IntColumn(3, Optional.of(sizeIsNull), sizeValues);
+ Column padColumn = new BinaryColumn(3, Optional.of(padIsNull), padValues);
+
+ LpadColumnTransformer transformer =
+ new LpadColumnTransformer(
+ BlobType.BLOB,
+ mockColumnTransformer(dataColumn),
+ mockColumnTransformer(sizeColumn),
+ mockColumnTransformer(padColumn));
+ transformer.addReferenceCount();
+ transformer.evaluate();
+ Column resultColumn = transformer.getColumn();
+
+ Assert.assertEquals(3, resultColumn.getPositionCount());
+ Assert.assertTrue(resultColumn.isNull(0)); // data is null
+ Assert.assertTrue(resultColumn.isNull(1)); // size is null
+ Assert.assertTrue(resultColumn.isNull(2)); // pad is null
+ }
+
+ /** Selection array handling: only selected rows are computed; unselected rows come back NULL. */
+ @Test
+ public void testEvaluateWithSelection() {
+ Binary[] dataValues = {
+ new Binary(hexToBytes("AA")), // Should be processed
+ new Binary(hexToBytes("BB")), // Should be skipped
+ new Binary(hexToBytes("CC")) // Should be processed
+ };
+ int[] sizeValues = {4, 5, 2};
+ Binary[] padValues = {
+ new Binary(hexToBytes("00")), new Binary(hexToBytes("01")), new Binary(hexToBytes("02"))
+ };
+
+ Column dataColumn = new BinaryColumn(3, Optional.empty(), dataValues);
+ Column sizeColumn = new IntColumn(3, Optional.empty(), sizeValues);
+ Column padColumn = new BinaryColumn(3, Optional.empty(), padValues);
+
+ LpadColumnTransformer transformer =
+ new LpadColumnTransformer(
+ BlobType.BLOB,
+ mockColumnTransformer(dataColumn),
+ mockColumnTransformer(sizeColumn),
+ mockColumnTransformer(padColumn));
+ transformer.addReferenceCount();
+
+ boolean[] selection = {true, false, true};
+ transformer.evaluateWithSelection(selection);
+ Column resultColumn = transformer.getColumn();
+
+ byte[] expectedRow1 = hexToBytes("000000AA");
+ byte[] expectedRow3 = hexToBytes("02CC"); // "CC" (1 byte) padded to size 2 with "02" on the left
+
+ Assert.assertEquals(3, resultColumn.getPositionCount());
+ Assert.assertFalse(resultColumn.isNull(0));
+ Assert.assertArrayEquals(expectedRow1, resultColumn.getBinary(0).getValues());
+ Assert.assertTrue(resultColumn.isNull(1));
+ Assert.assertFalse(resultColumn.isNull(2));
+ Assert.assertArrayEquals(expectedRow3, resultColumn.getBinary(2).getValues());
+ }
+
+ /** A negative target size must raise a SemanticException carrying the documented message. */
+ @Test
+ public void testNegativeSize() {
+ Column dataColumn =
+ new BinaryColumn(1, Optional.empty(), new Binary[] {new Binary(hexToBytes("AABB"))});
+ Column sizeColumn = new IntColumn(1, Optional.empty(), new int[] {-1});
+ Column padColumn =
+ new BinaryColumn(1, Optional.empty(), new Binary[] {new Binary(hexToBytes("00"))});
+
+ LpadColumnTransformer transformer =
+ new LpadColumnTransformer(
+ BlobType.BLOB,
+ mockColumnTransformer(dataColumn),
+ mockColumnTransformer(sizeColumn),
+ mockColumnTransformer(padColumn));
+ transformer.addReferenceCount();
+
+ try {
+ transformer.evaluate();
+ Assert.fail("Expected SemanticException was not thrown for negative size.");
+ } catch (SemanticException e) {
+ String expectedMessage =
+ "Failed to execute function 'Lpad' due to the value 0xaabb corresponding to a invalid target size, the allowed range is [0, 2147483647].";
+ Assert.assertEquals(expectedMessage, e.getMessage());
+ }
+ }
+
+ /** An empty pad sequence cannot supply any fill bytes and must raise a SemanticException. */
+ @Test
+ public void testEmptyPadData() {
+ Column dataColumn =
+ new BinaryColumn(1, Optional.empty(), new Binary[] {new Binary(hexToBytes("AA"))});
+ Column sizeColumn = new IntColumn(1, Optional.empty(), new int[] {5});
+ Column padColumn =
+ new BinaryColumn(1, Optional.empty(), new Binary[] {new Binary(hexToBytes(""))});
+
+ LpadColumnTransformer transformer =
+ new LpadColumnTransformer(
+ BlobType.BLOB,
+ mockColumnTransformer(dataColumn),
+ mockColumnTransformer(sizeColumn),
+ mockColumnTransformer(padColumn));
+ transformer.addReferenceCount();
+
+ try {
+ transformer.evaluate();
+ Assert.fail("Expected SemanticException was not thrown for empty pad data.");
+ } catch (SemanticException e) {
+ String expectedMessage =
+ "Failed to execute function 'Lpad' due the value 0xaa corresponding to a empty padding string.";
+ Assert.assertEquals(expectedMessage, e.getMessage());
+ }
+ }
+}
diff --git a/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/transformation/dag/column/ternary/RpadColumnTransformerTest.java b/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/transformation/dag/column/ternary/RpadColumnTransformerTest.java
new file mode 100644
index 000000000000..955aa4905b9c
--- /dev/null
+++ b/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/transformation/dag/column/ternary/RpadColumnTransformerTest.java
@@ -0,0 +1,303 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.iotdb.db.queryengine.transformation.dag.column.ternary;
+
+import org.apache.iotdb.db.exception.sql.SemanticException;
+import org.apache.iotdb.db.queryengine.transformation.dag.column.ColumnTransformer;
+
+import org.apache.tsfile.block.column.Column;
+import org.apache.tsfile.read.common.block.column.BinaryColumn;
+import org.apache.tsfile.read.common.block.column.IntColumn;
+import org.apache.tsfile.read.common.type.BlobType;
+import org.apache.tsfile.utils.Binary;
+import org.junit.Assert;
+import org.junit.Test;
+import org.mockito.Mockito;
+
+import java.util.Optional;
+
+// Unit tests for RpadColumnTransformer covering padding, truncation, equal-length input,
+// null handling, selection-based evaluation, and error cases (negative size, empty pad).
+public class RpadColumnTransformerTest {
+
+ // Helper method to mock a ColumnTransformer to return a predefined Column.
+ private ColumnTransformer mockColumnTransformer(Column column) {
+ ColumnTransformer mockTransformer = Mockito.mock(ColumnTransformer.class);
+ Mockito.when(mockTransformer.getColumn()).thenReturn(column);
+ // Common mocking for evaluation methods
+ Mockito.doNothing().when(mockTransformer).tryEvaluate();
+ Mockito.doNothing().when(mockTransformer).clearCache();
+ Mockito.doNothing().when(mockTransformer).evaluateWithSelection(Mockito.any());
+ Mockito.when(mockTransformer.getColumnCachePositionCount())
+ .thenReturn(column.getPositionCount());
+ return mockTransformer;
+ }
+
+ // Helper method to convert a hex string to a byte array for creating Binary objects.
+ private static byte[] hexToBytes(String s) {
+ if (s == null || s.isEmpty()) {
+ return new byte[0];
+ }
+ int len = s.length();
+ if (len % 2 != 0) {
+ throw new IllegalArgumentException("Hex string must have an even number of characters");
+ }
+ byte[] data = new byte[len / 2];
+ for (int i = 0; i < len; i += 2) {
+ data[i / 2] =
+ (byte) ((Character.digit(s.charAt(i), 16) << 4) + Character.digit(s.charAt(i + 1), 16));
+ }
+ return data;
+ }
+
+ /** Test cases for padding logic (data length < size). */
+ @Test
+ public void testPaddingCases() {
+ // Setup input columns
+ Binary[] dataValues = {
+ new Binary(hexToBytes("AABB")), // Simple padding
+ new Binary(hexToBytes("FF")), // Full repetition of pad
+ new Binary(hexToBytes("FF")), // Truncated repetition of pad
+ new Binary(hexToBytes("")) // Padding an empty blob
+ };
+ int[] sizeValues = {5, 7, 6, 4};
+ Binary[] padValues = {
+ new Binary(hexToBytes("00")),
+ new Binary(hexToBytes("123456")),
+ new Binary(hexToBytes("123456")),
+ new Binary(hexToBytes("AB"))
+ };
+
+ Column dataColumn = new BinaryColumn(dataValues.length, Optional.empty(), dataValues);
+ Column sizeColumn = new IntColumn(sizeValues.length, Optional.empty(), sizeValues);
+ Column padColumn = new BinaryColumn(padValues.length, Optional.empty(), padValues);
+
+ // Mock transformers
+ ColumnTransformer dataTransformer = mockColumnTransformer(dataColumn);
+ ColumnTransformer sizeTransformer = mockColumnTransformer(sizeColumn);
+ ColumnTransformer padTransformer = mockColumnTransformer(padColumn);
+
+    // Construct the transformer under test with a BLOB result type.
+ RpadColumnTransformer rpadTransformer =
+ new RpadColumnTransformer(BlobType.BLOB, dataTransformer, sizeTransformer, padTransformer);
+ rpadTransformer.addReferenceCount();
+ rpadTransformer.evaluate();
+ Column resultColumn = rpadTransformer.getColumn();
+
+ // Expected results
+ byte[][] expectedValues = {
+ hexToBytes("AABB000000"),
+ hexToBytes("FF123456123456"),
+ hexToBytes("FF1234561234"),
+ hexToBytes("ABABABAB")
+ };
+
+ // Assertions
+ Assert.assertEquals(4, resultColumn.getPositionCount());
+ for (int i = 0; i < 4; i++) {
+ Assert.assertFalse(resultColumn.isNull(i));
+ Assert.assertArrayEquals(expectedValues[i], resultColumn.getBinary(i).getValues());
+ }
+ }
+
+ /** Test cases for truncation logic (data length > size). */
+ @Test
+ public void testTruncationCases() {
+ Binary[] dataValues = {
+ new Binary(hexToBytes("0102030405060708")), new Binary(hexToBytes("AABB"))
+ };
+ int[] sizeValues = {4, 0};
+ Binary[] padValues = {new Binary(hexToBytes("FF")), new Binary(hexToBytes("00"))};
+
+ Column dataColumn = new BinaryColumn(2, Optional.empty(), dataValues);
+ Column sizeColumn = new IntColumn(2, Optional.empty(), sizeValues);
+ Column padColumn = new BinaryColumn(2, Optional.empty(), padValues);
+
+ RpadColumnTransformer transformer =
+ new RpadColumnTransformer(
+ BlobType.BLOB,
+ mockColumnTransformer(dataColumn),
+ mockColumnTransformer(sizeColumn),
+ mockColumnTransformer(padColumn));
+ transformer.addReferenceCount();
+ transformer.evaluate();
+ Column resultColumn = transformer.getColumn();
+
+ byte[][] expectedValues = {hexToBytes("01020304"), hexToBytes("")};
+
+ Assert.assertEquals(2, resultColumn.getPositionCount());
+ Assert.assertFalse(resultColumn.isNull(0));
+ Assert.assertArrayEquals(expectedValues[0], resultColumn.getBinary(0).getValues());
+ Assert.assertFalse(resultColumn.isNull(1));
+ Assert.assertArrayEquals(expectedValues[1], resultColumn.getBinary(1).getValues());
+ }
+
+ /** Test case where data length equals size. */
+ @Test
+ public void testEqualLengthCase() {
+ Binary[] dataValues = {new Binary(hexToBytes("ABCDEF"))};
+ int[] sizeValues = {3};
+ Binary[] padValues = {new Binary(hexToBytes("00"))};
+
+ Column dataColumn = new BinaryColumn(1, Optional.empty(), dataValues);
+ Column sizeColumn = new IntColumn(1, Optional.empty(), sizeValues);
+ Column padColumn = new BinaryColumn(1, Optional.empty(), padValues);
+
+ RpadColumnTransformer transformer =
+ new RpadColumnTransformer(
+ BlobType.BLOB,
+ mockColumnTransformer(dataColumn),
+ mockColumnTransformer(sizeColumn),
+ mockColumnTransformer(padColumn));
+ transformer.addReferenceCount();
+ transformer.evaluate();
+ Column resultColumn = transformer.getColumn();
+
+ Assert.assertEquals(1, resultColumn.getPositionCount());
+ Assert.assertFalse(resultColumn.isNull(0));
+ Assert.assertArrayEquals(dataValues[0].getValues(), resultColumn.getBinary(0).getValues());
+ }
+
+ /** Test cases where one of the inputs is NULL. */
+ @Test
+ public void testNullInputCases() {
+ // One case for each argument being null
+ Binary[] dataValues = {null, new Binary(hexToBytes("AA")), new Binary(hexToBytes("BB"))};
+ boolean[] dataIsNull = {true, false, false};
+
+    int[] sizeValues = {5, 0, 5}; // 0 is a placeholder; the second row's size is flagged null below
+ boolean[] sizeIsNull = {false, true, false};
+
+ Binary[] padValues = {new Binary(hexToBytes("00")), new Binary(hexToBytes("00")), null};
+ boolean[] padIsNull = {false, false, true};
+
+ Column dataColumn = new BinaryColumn(3, Optional.of(dataIsNull), dataValues);
+ Column sizeColumn = new IntColumn(3, Optional.of(sizeIsNull), sizeValues);
+ Column padColumn = new BinaryColumn(3, Optional.of(padIsNull), padValues);
+
+ RpadColumnTransformer transformer =
+ new RpadColumnTransformer(
+ BlobType.BLOB,
+ mockColumnTransformer(dataColumn),
+ mockColumnTransformer(sizeColumn),
+ mockColumnTransformer(padColumn));
+ transformer.addReferenceCount();
+ transformer.evaluate();
+ Column resultColumn = transformer.getColumn();
+
+ Assert.assertEquals(3, resultColumn.getPositionCount());
+ Assert.assertTrue(resultColumn.isNull(0)); // data is null
+ Assert.assertTrue(resultColumn.isNull(1)); // size is null
+ Assert.assertTrue(resultColumn.isNull(2)); // pad is null
+ }
+
+ /** Test with a selection array to process only a subset of rows. */
+ @Test
+ public void testEvaluateWithSelection() {
+ Binary[] dataValues = {
+ new Binary(hexToBytes("AA")), // Should be processed
+ new Binary(hexToBytes("BB")), // Should be skipped
+ new Binary(hexToBytes("CC")) // Should be processed
+ };
+ int[] sizeValues = {4, 5, 2};
+ Binary[] padValues = {
+ new Binary(hexToBytes("00")), new Binary(hexToBytes("01")), new Binary(hexToBytes("02"))
+ };
+
+ Column dataColumn = new BinaryColumn(3, Optional.empty(), dataValues);
+ Column sizeColumn = new IntColumn(3, Optional.empty(), sizeValues);
+ Column padColumn = new BinaryColumn(3, Optional.empty(), padValues);
+
+ RpadColumnTransformer transformer =
+ new RpadColumnTransformer(
+ BlobType.BLOB,
+ mockColumnTransformer(dataColumn),
+ mockColumnTransformer(sizeColumn),
+ mockColumnTransformer(padColumn));
+ transformer.addReferenceCount();
+
+ boolean[] selection = {true, false, true};
+ transformer.evaluateWithSelection(selection);
+ Column resultColumn = transformer.getColumn();
+
+ byte[] expectedRow1 = hexToBytes("AA000000");
+ byte[] expectedRow3 = hexToBytes("CC02"); // Padding case
+
+ Assert.assertEquals(3, resultColumn.getPositionCount());
+ Assert.assertFalse(resultColumn.isNull(0));
+ Assert.assertArrayEquals(expectedRow1, resultColumn.getBinary(0).getValues());
+ Assert.assertTrue(resultColumn.isNull(1));
+ Assert.assertFalse(resultColumn.isNull(2));
+ Assert.assertArrayEquals(expectedRow3, resultColumn.getBinary(2).getValues());
+ }
+
+ /** Test failure when size is negative. */
+ @Test
+ public void testNegativeSize() {
+ Column dataColumn =
+ new BinaryColumn(1, Optional.empty(), new Binary[] {new Binary(hexToBytes("AABB"))});
+ Column sizeColumn = new IntColumn(1, Optional.empty(), new int[] {-1});
+ Column padColumn =
+ new BinaryColumn(1, Optional.empty(), new Binary[] {new Binary(hexToBytes("00"))});
+
+ RpadColumnTransformer transformer =
+ new RpadColumnTransformer(
+ BlobType.BLOB,
+ mockColumnTransformer(dataColumn),
+ mockColumnTransformer(sizeColumn),
+ mockColumnTransformer(padColumn));
+ transformer.addReferenceCount();
+
+ try {
+ transformer.evaluate();
+ Assert.fail("Expected SemanticException was not thrown for negative size.");
+ } catch (SemanticException e) {
+ String expectedMessage =
+ "Failed to execute function 'Rpad' due to the value 0xaabb corresponding to a invalid target size, the allowed range is [0, 2147483647].";
+ Assert.assertEquals(expectedMessage, e.getMessage());
+ }
+ }
+
+  /** Test failure when the pad data is empty. */
+ @Test
+ public void testEmptyPadData() {
+ Column dataColumn =
+ new BinaryColumn(1, Optional.empty(), new Binary[] {new Binary(hexToBytes("AA"))});
+ Column sizeColumn = new IntColumn(1, Optional.empty(), new int[] {5});
+ Column padColumn =
+ new BinaryColumn(1, Optional.empty(), new Binary[] {new Binary(hexToBytes(""))});
+
+ RpadColumnTransformer transformer =
+ new RpadColumnTransformer(
+ BlobType.BLOB,
+ mockColumnTransformer(dataColumn),
+ mockColumnTransformer(sizeColumn),
+ mockColumnTransformer(padColumn));
+ transformer.addReferenceCount();
+
+ try {
+ transformer.evaluate();
+ Assert.fail("Expected SemanticException was not thrown for empty pad data.");
+ } catch (SemanticException e) {
+ String expectedMessage =
+ "Failed to execute function 'Rpad' due the value 0xaa corresponding to a empty padding string.";
+ Assert.assertEquals(expectedMessage, e.getMessage());
+ }
+ }
+}
diff --git a/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/transformation/dag/column/unary/scalar/IntegerEncoding/FromBigEndian32ColumnTransformerTest.java b/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/transformation/dag/column/unary/scalar/IntegerEncoding/FromBigEndian32ColumnTransformerTest.java
new file mode 100644
index 000000000000..c4c625e6a5f8
--- /dev/null
+++ b/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/transformation/dag/column/unary/scalar/IntegerEncoding/FromBigEndian32ColumnTransformerTest.java
@@ -0,0 +1,220 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar.IntegerEncoding;
+
+import org.apache.iotdb.db.exception.sql.SemanticException;
+import org.apache.iotdb.db.queryengine.transformation.dag.column.ColumnTransformer;
+import org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar.BytesToIntColumnTransformer;
+import org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar.factory.NumericCodecStrategiesFactory;
+
+import org.apache.tsfile.block.column.Column;
+import org.apache.tsfile.read.common.block.column.BinaryColumn;
+import org.apache.tsfile.utils.Binary;
+import org.junit.Assert;
+import org.junit.Test;
+import org.mockito.Mockito;
+
+import java.nio.ByteBuffer;
+import java.util.Optional;
+
+import static org.apache.tsfile.read.common.type.BlobType.BLOB;
+import static org.apache.tsfile.read.common.type.IntType.INT32;
+
+public class FromBigEndian32ColumnTransformerTest {
+
+ // Helper method to mock a child ColumnTransformer that returns a predefined Column.
+ private ColumnTransformer mockChildColumnTransformer(Column column) {
+ ColumnTransformer mockColumnTransformer = Mockito.mock(ColumnTransformer.class);
+ Mockito.when(mockColumnTransformer.getColumn()).thenReturn(column);
+ Mockito.doNothing().when(mockColumnTransformer).tryEvaluate();
+ Mockito.doNothing().when(mockColumnTransformer).clearCache();
+ Mockito.doNothing().when(mockColumnTransformer).evaluateWithSelection(Mockito.any());
+ return mockColumnTransformer;
+ }
+
+ // Helper method to convert an integer to a big-endian 4-byte array.
+ private byte[] intToBigEndianBytes(int value) {
+ return ByteBuffer.allocate(4).putInt(value).array();
+ }
+
+ /** Test a positive integer decoding using FROM_BIG_ENDIAN_32 strategy. */
+ @Test
+ public void testFromBigEndian32Positive() {
+ int expected = 16909060; // 0x01020304
+ Binary[] values = new Binary[] {new Binary(intToBigEndianBytes(expected))};
+ Column binaryColumn = new BinaryColumn(values.length, Optional.empty(), values);
+
+ ColumnTransformer childColumnTransformer = mockChildColumnTransformer(binaryColumn);
+ BytesToIntColumnTransformer transformer =
+ new BytesToIntColumnTransformer(
+ INT32,
+ childColumnTransformer,
+ NumericCodecStrategiesFactory.FROM_BIG_ENDIAN_32,
+ "from_big_endian_32",
+ BLOB);
+ transformer.addReferenceCount();
+ transformer.evaluate();
+ Column result = transformer.getColumn();
+
+ Assert.assertEquals(1, result.getPositionCount());
+ Assert.assertEquals(expected, result.getInt(0));
+ }
+
+ /** Test a negative integer decoding using FROM_BIG_ENDIAN_32 strategy. */
+ @Test
+ public void testFromBigEndian32Negative() {
+ int expected = -1; // 0xFFFFFFFF
+ Binary[] values = new Binary[] {new Binary(intToBigEndianBytes(expected))};
+ Column binaryColumn = new BinaryColumn(values.length, Optional.empty(), values);
+
+ ColumnTransformer childColumnTransformer = mockChildColumnTransformer(binaryColumn);
+ BytesToIntColumnTransformer transformer =
+ new BytesToIntColumnTransformer(
+ INT32,
+ childColumnTransformer,
+ NumericCodecStrategiesFactory.FROM_BIG_ENDIAN_32,
+ "from_big_endian_32",
+ BLOB);
+ transformer.addReferenceCount();
+ transformer.evaluate();
+ Column result = transformer.getColumn();
+
+ Assert.assertEquals(1, result.getPositionCount());
+ Assert.assertEquals(expected, result.getInt(0));
+ }
+
+ /** Test multi-row decoding with a null value using FROM_BIG_ENDIAN_32 strategy. */
+ @Test
+ public void testFromBigEndian32MultiRowsWithNull() {
+ Binary[] values =
+ new Binary[] {
+ new Binary(intToBigEndianBytes(100)), null, new Binary(intToBigEndianBytes(-200))
+ };
+ boolean[] valueIsNull = new boolean[] {false, true, false};
+ Column binaryColumn = new BinaryColumn(values.length, Optional.of(valueIsNull), values);
+ ColumnTransformer childColumnTransformer = mockChildColumnTransformer(binaryColumn);
+
+ BytesToIntColumnTransformer transformer =
+ new BytesToIntColumnTransformer(
+ INT32,
+ childColumnTransformer,
+ NumericCodecStrategiesFactory.FROM_BIG_ENDIAN_32,
+ "from_big_endian_32",
+ BLOB);
+ transformer.addReferenceCount();
+ transformer.evaluate();
+ Column result = transformer.getColumn();
+
+ Assert.assertEquals(3, result.getPositionCount());
+ Assert.assertFalse(result.isNull(0));
+ Assert.assertEquals(100, result.getInt(0));
+ Assert.assertTrue(result.isNull(1));
+ Assert.assertFalse(result.isNull(2));
+ Assert.assertEquals(-200, result.getInt(2));
+ }
+
+ /** Test decoding with a selection array using FROM_BIG_ENDIAN_32 strategy. */
+ @Test
+ public void testFromBigEndian32WithSelection() {
+ Binary[] values = {
+ new Binary(intToBigEndianBytes(5)),
+ new Binary(intToBigEndianBytes(10)),
+ new Binary(intToBigEndianBytes(15))
+ };
+ Column binaryColumn = new BinaryColumn(values.length, Optional.empty(), values);
+ ColumnTransformer child = mockChildColumnTransformer(binaryColumn);
+
+ BytesToIntColumnTransformer transformer =
+ new BytesToIntColumnTransformer(
+ INT32,
+ child,
+ NumericCodecStrategiesFactory.FROM_BIG_ENDIAN_32,
+ "from_big_endian_32",
+ BLOB);
+ transformer.addReferenceCount();
+
+ // Select and process only the first and third rows.
+ boolean[] selection = {true, false, true};
+ transformer.evaluateWithSelection(selection);
+ Column result = transformer.getColumn();
+
+ Assert.assertEquals(3, result.getPositionCount());
+ Assert.assertFalse(result.isNull(0));
+ Assert.assertEquals(5, result.getInt(0));
+ // The second row was not selected, so the result should be null.
+ Assert.assertTrue(result.isNull(1));
+ Assert.assertFalse(result.isNull(2));
+ Assert.assertEquals(15, result.getInt(2));
+ }
+
+ /** Test decoding with a short binary input (< 4 bytes), expecting an exception. */
+ @Test
+ public void testFromBigEndian32WithShortLength() {
+ Binary[] values = {new Binary(new byte[] {1, 2, 3})}; // Too short
+ Column binaryColumn = new BinaryColumn(values.length, Optional.empty(), values);
+ ColumnTransformer child = mockChildColumnTransformer(binaryColumn);
+
+ BytesToIntColumnTransformer transformer =
+ new BytesToIntColumnTransformer(
+ INT32,
+ child,
+ NumericCodecStrategiesFactory.FROM_BIG_ENDIAN_32,
+ "from_big_endian_32",
+ BLOB);
+ transformer.addReferenceCount();
+
+ try {
+ transformer.evaluate();
+ Assert.fail("Expected SemanticException was not thrown for short input.");
+ } catch (SemanticException e) {
+ Assert.assertTrue(
+ e.getMessage()
+ .contains(
+ "Failed to execute function 'from_big_endian_32' due to an invalid input format."));
+ }
+ }
+
+ /** Test decoding with a long binary input (> 4 bytes), expecting an exception. */
+ @Test
+ public void testFromBigEndian32WithLongLength() {
+ Binary[] values = {new Binary(new byte[] {1, 2, 3, 4, 5})}; // Too long
+ Column binaryColumn = new BinaryColumn(values.length, Optional.empty(), values);
+ ColumnTransformer child = mockChildColumnTransformer(binaryColumn);
+
+ BytesToIntColumnTransformer transformer =
+ new BytesToIntColumnTransformer(
+ INT32,
+ child,
+ NumericCodecStrategiesFactory.FROM_BIG_ENDIAN_32,
+ "from_big_endian_32",
+ BLOB);
+ transformer.addReferenceCount();
+
+ try {
+ transformer.evaluate();
+ Assert.fail("Expected SemanticException was not thrown for long input.");
+ } catch (SemanticException e) {
+ Assert.assertTrue(
+ e.getMessage()
+ .contains(
+ "Failed to execute function 'from_big_endian_32' due to an invalid input format."));
+ }
+ }
+}
diff --git a/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/transformation/dag/column/unary/scalar/IntegerEncoding/FromBigEndian64ColumnTransformerTest.java b/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/transformation/dag/column/unary/scalar/IntegerEncoding/FromBigEndian64ColumnTransformerTest.java
new file mode 100644
index 000000000000..bb3b0165ff03
--- /dev/null
+++ b/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/transformation/dag/column/unary/scalar/IntegerEncoding/FromBigEndian64ColumnTransformerTest.java
@@ -0,0 +1,242 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar.IntegerEncoding;
+
+import org.apache.iotdb.db.exception.sql.SemanticException;
+import org.apache.iotdb.db.queryengine.transformation.dag.column.ColumnTransformer;
+import org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar.BytesToLongColumnTransformer;
+import org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar.factory.NumericCodecStrategiesFactory;
+
+import org.apache.tsfile.block.column.Column;
+import org.apache.tsfile.read.common.block.column.BinaryColumn;
+import org.apache.tsfile.utils.Binary;
+import org.junit.Assert;
+import org.junit.Test;
+import org.mockito.Mockito;
+
+import java.nio.ByteBuffer;
+import java.util.Optional;
+
+import static org.apache.tsfile.read.common.type.BlobType.BLOB;
+import static org.apache.tsfile.read.common.type.LongType.INT64;
+
+public class FromBigEndian64ColumnTransformerTest {
+
+ // Helper method to mock a child ColumnTransformer that returns a predefined Column.
+ private ColumnTransformer mockChildColumnTransformer(Column column) {
+ ColumnTransformer mockColumnTransformer = Mockito.mock(ColumnTransformer.class);
+ Mockito.when(mockColumnTransformer.getColumn()).thenReturn(column);
+ Mockito.doNothing().when(mockColumnTransformer).tryEvaluate();
+ Mockito.doNothing().when(mockColumnTransformer).clearCache();
+ Mockito.doNothing().when(mockColumnTransformer).evaluateWithSelection(Mockito.any());
+ return mockColumnTransformer;
+ }
+
+ // Helper method to convert a long to a big-endian 8-byte array.
+ private byte[] longToBigEndianBytes(long value) {
+ return ByteBuffer.allocate(8).putLong(value).array();
+ }
+
+ /** Test a positive long conversion using FROM_BIG_ENDIAN_64 strategy. */
+ @Test
+ public void testFromBigEndian64Positive() {
+ long expected = 72623859790382856L; // 0x0102030405060708L
+ Binary[] values = new Binary[] {new Binary(longToBigEndianBytes(expected))};
+ Column blobColumn = new BinaryColumn(values.length, Optional.empty(), values);
+
+ ColumnTransformer child = mockChildColumnTransformer(blobColumn);
+ BytesToLongColumnTransformer transformer =
+ new BytesToLongColumnTransformer(
+ INT64,
+ child,
+ NumericCodecStrategiesFactory.FROM_BIG_ENDIAN_64,
+ "from_big_endian_64",
+ BLOB);
+ transformer.addReferenceCount();
+ transformer.evaluate();
+ Column result = transformer.getColumn();
+
+ Assert.assertEquals(1, result.getPositionCount());
+ Assert.assertEquals(expected, result.getLong(0));
+ }
+
+ /** Test a negative long conversion using FROM_BIG_ENDIAN_64 strategy. */
+ @Test
+ public void testFromBigEndian64Negative() {
+ long expected = -1L; // 0xFFFFFFFFFFFFFFFFL
+ Binary[] values = new Binary[] {new Binary(longToBigEndianBytes(expected))};
+ Column blobColumn = new BinaryColumn(values.length, Optional.empty(), values);
+
+ ColumnTransformer child = mockChildColumnTransformer(blobColumn);
+ BytesToLongColumnTransformer transformer =
+ new BytesToLongColumnTransformer(
+ INT64,
+ child,
+ NumericCodecStrategiesFactory.FROM_BIG_ENDIAN_64,
+ "from_big_endian_64",
+ BLOB);
+ transformer.addReferenceCount();
+ transformer.evaluate();
+ Column result = transformer.getColumn();
+
+ Assert.assertEquals(1, result.getPositionCount());
+ Assert.assertEquals(expected, result.getLong(0));
+ }
+
+ /** Test long zero conversion using FROM_BIG_ENDIAN_64 strategy. */
+ @Test
+ public void testFromBigEndian64Zero() {
+ long expected = 0L;
+ Binary[] values = new Binary[] {new Binary(longToBigEndianBytes(expected))};
+ Column blobColumn = new BinaryColumn(values.length, Optional.empty(), values);
+
+ ColumnTransformer child = mockChildColumnTransformer(blobColumn);
+ BytesToLongColumnTransformer transformer =
+ new BytesToLongColumnTransformer(
+ INT64,
+ child,
+ NumericCodecStrategiesFactory.FROM_BIG_ENDIAN_64,
+ "from_big_endian_64",
+ BLOB);
+ transformer.addReferenceCount();
+ transformer.evaluate();
+ Column result = transformer.getColumn();
+
+ Assert.assertEquals(1, result.getPositionCount());
+ Assert.assertEquals(expected, result.getLong(0));
+ }
+
+ /** Test Long.MAX_VALUE conversion using FROM_BIG_ENDIAN_64 strategy. */
+ @Test
+ public void testFromBigEndian64MaxValue() {
+ long expected = Long.MAX_VALUE;
+ Binary[] values = new Binary[] {new Binary(longToBigEndianBytes(expected))};
+ Column blobColumn = new BinaryColumn(values.length, Optional.empty(), values);
+
+ ColumnTransformer child = mockChildColumnTransformer(blobColumn);
+ BytesToLongColumnTransformer transformer =
+ new BytesToLongColumnTransformer(
+ INT64,
+ child,
+ NumericCodecStrategiesFactory.FROM_BIG_ENDIAN_64,
+ "from_big_endian_64",
+ BLOB);
+ transformer.addReferenceCount();
+ transformer.evaluate();
+ Column result = transformer.getColumn();
+
+ Assert.assertEquals(1, result.getPositionCount());
+ Assert.assertEquals(expected, result.getLong(0));
+ }
+
+ /** Test multi-row conversion with a null value using FROM_BIG_ENDIAN_64 strategy. */
+ @Test
+ public void testFromBigEndian64MultiRowsWithNull() {
+ long val1 = 1000L;
+ long val3 = -2000L;
+ Binary[] values =
+ new Binary[] {
+ new Binary(longToBigEndianBytes(val1)), null, new Binary(longToBigEndianBytes(val3))
+ };
+ boolean[] isNull = {false, true, false};
+ Column blobColumn = new BinaryColumn(values.length, Optional.of(isNull), values);
+
+ ColumnTransformer child = mockChildColumnTransformer(blobColumn);
+ BytesToLongColumnTransformer transformer =
+ new BytesToLongColumnTransformer(
+ INT64,
+ child,
+ NumericCodecStrategiesFactory.FROM_BIG_ENDIAN_64,
+ "from_big_endian_64",
+ BLOB);
+ transformer.addReferenceCount();
+ transformer.evaluate();
+ Column result = transformer.getColumn();
+
+ Assert.assertEquals(3, result.getPositionCount());
+ Assert.assertFalse(result.isNull(0));
+ Assert.assertEquals(val1, result.getLong(0));
+ Assert.assertTrue(result.isNull(1));
+ Assert.assertFalse(result.isNull(2));
+ Assert.assertEquals(val3, result.getLong(2));
+ }
+
+ /** Test conversion with a selection array using FROM_BIG_ENDIAN_64 strategy. */
+ @Test
+ public void testFromBigEndian64WithSelection() {
+ long val1 = 50L;
+ long val2 = 100L;
+ long val3 = 150L;
+ Binary[] values = {
+ new Binary(longToBigEndianBytes(val1)),
+ new Binary(longToBigEndianBytes(val2)),
+ new Binary(longToBigEndianBytes(val3))
+ };
+ Column blobColumn = new BinaryColumn(values.length, Optional.empty(), values);
+ ColumnTransformer child = mockChildColumnTransformer(blobColumn);
+ BytesToLongColumnTransformer transformer =
+ new BytesToLongColumnTransformer(
+ INT64,
+ child,
+ NumericCodecStrategiesFactory.FROM_BIG_ENDIAN_64,
+ "from_big_endian_64",
+ BLOB);
+ transformer.addReferenceCount();
+
+ boolean[] selection = {true, false, true};
+ transformer.evaluateWithSelection(selection);
+ Column result = transformer.getColumn();
+
+ Assert.assertEquals(3, result.getPositionCount());
+ Assert.assertFalse(result.isNull(0));
+ Assert.assertEquals(val1, result.getLong(0));
+ Assert.assertTrue(result.isNull(1));
+ Assert.assertFalse(result.isNull(2));
+ Assert.assertEquals(val3, result.getLong(2));
+ }
+
+ /** Test exception for invalid input length using FROM_BIG_ENDIAN_64 strategy. */
+ @Test
+ public void testFromBigEndian64InvalidLength() {
+ // Input is a 4-byte array, but 8 is expected.
+ Binary[] values = new Binary[] {new Binary(new byte[] {1, 2, 3, 4})};
+ Column blobColumn = new BinaryColumn(values.length, Optional.empty(), values);
+
+ ColumnTransformer child = mockChildColumnTransformer(blobColumn);
+ BytesToLongColumnTransformer transformer =
+ new BytesToLongColumnTransformer(
+ INT64,
+ child,
+ NumericCodecStrategiesFactory.FROM_BIG_ENDIAN_64,
+ "from_big_endian_64",
+ BLOB);
+ transformer.addReferenceCount();
+
+ try {
+ transformer.evaluate();
+ Assert.fail("Expected SemanticException to be thrown");
+ } catch (SemanticException e) {
+ Assert.assertTrue(
+ e.getMessage()
+ .contains(
+ "Failed to execute function 'from_big_endian_64' due to an invalid input format."));
+ }
+ }
+}
diff --git a/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/transformation/dag/column/unary/scalar/IntegerEncoding/FromIEEE754_32BigEndianColumnTransformerTest.java b/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/transformation/dag/column/unary/scalar/IntegerEncoding/FromIEEE754_32BigEndianColumnTransformerTest.java
new file mode 100644
index 000000000000..e493bc2deda6
--- /dev/null
+++ b/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/transformation/dag/column/unary/scalar/IntegerEncoding/FromIEEE754_32BigEndianColumnTransformerTest.java
@@ -0,0 +1,161 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar.IntegerEncoding;
+
+import org.apache.iotdb.db.exception.sql.SemanticException;
+import org.apache.iotdb.db.queryengine.transformation.dag.column.ColumnTransformer;
+import org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar.BytesToFloatColumnTransformer;
+import org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar.factory.NumericCodecStrategiesFactory;
+
+import org.apache.tsfile.block.column.Column;
+import org.apache.tsfile.read.common.block.column.BinaryColumn;
+import org.apache.tsfile.utils.Binary;
+import org.junit.Assert;
+import org.junit.Test;
+import org.mockito.Mockito;
+
+import java.nio.ByteBuffer;
+import java.util.Optional;
+
+import static org.apache.tsfile.read.common.type.BlobType.BLOB;
+import static org.apache.tsfile.read.common.type.FloatType.FLOAT;
+
+public class FromIEEE754_32BigEndianColumnTransformerTest {
+
+  // Mocks a child ColumnTransformer that always returns the given Column.
+  private ColumnTransformer mockChildColumnTransformer(Column column) {
+    ColumnTransformer mockColumnTransformer = Mockito.mock(ColumnTransformer.class);
+    Mockito.when(mockColumnTransformer.getColumn()).thenReturn(column);
+    Mockito.doNothing().when(mockColumnTransformer).tryEvaluate();
+    Mockito.doNothing().when(mockColumnTransformer).clearCache();
+    Mockito.doNothing().when(mockColumnTransformer).evaluateWithSelection(Mockito.any());
+    return mockColumnTransformer;
+  }
+
+  // Encodes a float as a big-endian 4-byte array (ByteBuffer defaults to big-endian order).
+  private byte[] floatToBigEndianBytes(float value) {
+    return ByteBuffer.allocate(4).putFloat(value).array();
+  }
+
+  // Builds a transformer (reference count already added) over the given input column.
+  private BytesToFloatColumnTransformer newTransformer(Column input) {
+    BytesToFloatColumnTransformer transformer =
+        new BytesToFloatColumnTransformer(
+            FLOAT,
+            mockChildColumnTransformer(input),
+            NumericCodecStrategiesFactory.FROM_IEEE754_32_BIG_ENDIAN,
+            "from_ieee754_32",
+            BLOB);
+    transformer.addReferenceCount();
+    return transformer;
+  }
+
+  // Decodes a single-row column holding the big-endian encoding of the given value.
+  private Column decodeSingle(float value) {
+    Binary[] values = new Binary[] {new Binary(floatToBigEndianBytes(value))};
+    Column binaryColumn = new BinaryColumn(values.length, Optional.empty(), values);
+    BytesToFloatColumnTransformer transformer = newTransformer(binaryColumn);
+    transformer.evaluate();
+    return transformer.getColumn();
+  }
+
+  // Asserts that evaluating the given raw bytes fails with the expected SemanticException.
+  private void assertInvalidInput(byte[] raw) {
+    Binary[] values = {new Binary(raw)};
+    Column binaryColumn = new BinaryColumn(values.length, Optional.empty(), values);
+    BytesToFloatColumnTransformer transformer = newTransformer(binaryColumn);
+    try {
+      transformer.evaluate();
+      Assert.fail("Expected SemanticException was not thrown.");
+    } catch (SemanticException e) {
+      Assert.assertTrue(
+          e.getMessage()
+              .contains(
+                  "Failed to execute function 'from_ieee754_32' due to an invalid input format."));
+    }
+  }
+
+  /** Test a positive float decoding using FROM_IEEE754_32_BIG_ENDIAN strategy. */
+  @Test
+  public void testFromIeee75432BigEndianPositive() {
+    float expected = 123.456f;
+    Column result = decodeSingle(expected);
+    Assert.assertEquals(1, result.getPositionCount());
+    Assert.assertEquals(expected, result.getFloat(0), 0.0f);
+  }
+
+  /** Test a negative float decoding. */
+  @Test
+  public void testFromIeee75432BigEndianNegative() {
+    float expected = -987.654f;
+    Column result = decodeSingle(expected);
+    Assert.assertEquals(1, result.getPositionCount());
+    Assert.assertEquals(expected, result.getFloat(0), 0.0f);
+  }
+
+  /** Test special float value decoding: Positive Infinity. */
+  @Test
+  public void testFromIeee75432BigEndianPositiveInfinity() {
+    Column result = decodeSingle(Float.POSITIVE_INFINITY);
+    Assert.assertEquals(1, result.getPositionCount());
+    Assert.assertEquals(Float.POSITIVE_INFINITY, result.getFloat(0), 0.0f);
+  }
+
+  /** Test special float value decoding: NaN. */
+  @Test
+  public void testFromIeee75432BigEndianNaN() {
+    Column result = decodeSingle(Float.NaN);
+    Assert.assertEquals(1, result.getPositionCount());
+    Assert.assertTrue(Float.isNaN(result.getFloat(0)));
+  }
+
+  /** Test multi-row decoding with a null value. */
+  @Test
+  public void testFromIeee75432BigEndianMultiRowsWithNull() {
+    Binary[] values = {
+      new Binary(floatToBigEndianBytes(1.1f)), null, new Binary(floatToBigEndianBytes(-2.2f))
+    };
+    boolean[] isNull = {false, true, false};
+    Column binaryColumn = new BinaryColumn(values.length, Optional.of(isNull), values);
+
+    BytesToFloatColumnTransformer transformer = newTransformer(binaryColumn);
+    transformer.evaluate();
+    Column result = transformer.getColumn();
+
+    Assert.assertEquals(3, result.getPositionCount());
+    Assert.assertFalse(result.isNull(0));
+    Assert.assertEquals(1.1f, result.getFloat(0), 0.0f);
+    Assert.assertTrue(result.isNull(1));
+    Assert.assertFalse(result.isNull(2));
+    Assert.assertEquals(-2.2f, result.getFloat(2), 0.0f);
+  }
+
+  /** Test decoding with a short binary input (< 4 bytes), expecting an exception. */
+  @Test
+  public void testFromIeee75432BigEndianInvalidLengthShort() {
+    assertInvalidInput(new byte[] {1, 2, 3}); // Too short
+  }
+
+  /** Test decoding with a long binary input (> 4 bytes), expecting an exception. */
+  @Test
+  public void testFromIeee75432BigEndianInvalidLengthLong() {
+    assertInvalidInput(new byte[] {1, 2, 3, 4, 5}); // Too long
+  }
+}
diff --git a/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/transformation/dag/column/unary/scalar/IntegerEncoding/FromIEEE754_64BigEndianColumnTransformerTest.java b/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/transformation/dag/column/unary/scalar/IntegerEncoding/FromIEEE754_64BigEndianColumnTransformerTest.java
new file mode 100644
index 000000000000..b764d1d696aa
--- /dev/null
+++ b/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/transformation/dag/column/unary/scalar/IntegerEncoding/FromIEEE754_64BigEndianColumnTransformerTest.java
@@ -0,0 +1,161 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar.IntegerEncoding;
+
+import org.apache.iotdb.db.exception.sql.SemanticException;
+import org.apache.iotdb.db.queryengine.transformation.dag.column.ColumnTransformer;
+import org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar.BytesToDoubleColumnTransformer;
+import org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar.factory.NumericCodecStrategiesFactory;
+
+import org.apache.tsfile.block.column.Column;
+import org.apache.tsfile.read.common.block.column.BinaryColumn;
+import org.apache.tsfile.utils.Binary;
+import org.junit.Assert;
+import org.junit.Test;
+import org.mockito.Mockito;
+
+import java.nio.ByteBuffer;
+import java.util.Optional;
+
+import static org.apache.tsfile.read.common.type.BlobType.BLOB;
+import static org.apache.tsfile.read.common.type.DoubleType.DOUBLE;
+
+public class FromIEEE754_64BigEndianColumnTransformerTest {
+
+  // Mocks a child ColumnTransformer that always returns the given Column.
+  private ColumnTransformer mockChildColumnTransformer(Column column) {
+    ColumnTransformer mockColumnTransformer = Mockito.mock(ColumnTransformer.class);
+    Mockito.when(mockColumnTransformer.getColumn()).thenReturn(column);
+    Mockito.doNothing().when(mockColumnTransformer).tryEvaluate();
+    Mockito.doNothing().when(mockColumnTransformer).clearCache();
+    Mockito.doNothing().when(mockColumnTransformer).evaluateWithSelection(Mockito.any());
+    return mockColumnTransformer;
+  }
+
+  // Encodes a double as a big-endian 8-byte array (ByteBuffer defaults to big-endian order).
+  private byte[] doubleToBigEndianBytes(double value) {
+    return ByteBuffer.allocate(8).putDouble(value).array();
+  }
+
+  // Builds a transformer (reference count already added) over the given input column.
+  private BytesToDoubleColumnTransformer newTransformer(Column input) {
+    BytesToDoubleColumnTransformer transformer =
+        new BytesToDoubleColumnTransformer(
+            DOUBLE,
+            mockChildColumnTransformer(input),
+            NumericCodecStrategiesFactory.FROM_IEEE754_64_BIG_ENDIAN,
+            "from_ieee754_64",
+            BLOB);
+    transformer.addReferenceCount();
+    return transformer;
+  }
+
+  // Decodes a single-row column holding the big-endian encoding of the given value.
+  private Column decodeSingle(double value) {
+    Binary[] values = new Binary[] {new Binary(doubleToBigEndianBytes(value))};
+    Column binaryColumn = new BinaryColumn(values.length, Optional.empty(), values);
+    BytesToDoubleColumnTransformer transformer = newTransformer(binaryColumn);
+    transformer.evaluate();
+    return transformer.getColumn();
+  }
+
+  // Asserts that evaluating the given raw bytes fails with the expected SemanticException.
+  private void assertInvalidInput(byte[] raw) {
+    Binary[] values = {new Binary(raw)};
+    Column binaryColumn = new BinaryColumn(values.length, Optional.empty(), values);
+    BytesToDoubleColumnTransformer transformer = newTransformer(binaryColumn);
+    try {
+      transformer.evaluate();
+      Assert.fail("Expected SemanticException was not thrown.");
+    } catch (SemanticException e) {
+      Assert.assertTrue(
+          e.getMessage()
+              .contains(
+                  "Failed to execute function 'from_ieee754_64' due to an invalid input format."));
+    }
+  }
+
+  /** Test a positive double decoding using FROM_IEEE754_64_BIG_ENDIAN strategy. */
+  @Test
+  public void testFromIeee75464BigEndianPositive() {
+    double expected = 123456.7890123;
+    Column result = decodeSingle(expected);
+    Assert.assertEquals(1, result.getPositionCount());
+    Assert.assertEquals(expected, result.getDouble(0), 0.0);
+  }
+
+  /** Test a negative double decoding. */
+  @Test
+  public void testFromIeee75464BigEndianNegative() {
+    double expected = -987654.3210987;
+    Column result = decodeSingle(expected);
+    Assert.assertEquals(1, result.getPositionCount());
+    Assert.assertEquals(expected, result.getDouble(0), 0.0);
+  }
+
+  /** Test special double value decoding: Positive Infinity. */
+  @Test
+  public void testFromIeee75464BigEndianPositiveInfinity() {
+    Column result = decodeSingle(Double.POSITIVE_INFINITY);
+    Assert.assertEquals(1, result.getPositionCount());
+    Assert.assertEquals(Double.POSITIVE_INFINITY, result.getDouble(0), 0.0);
+  }
+
+  /** Test special double value decoding: NaN. */
+  @Test
+  public void testFromIeee75464BigEndianNaN() {
+    Column result = decodeSingle(Double.NaN);
+    Assert.assertEquals(1, result.getPositionCount());
+    Assert.assertTrue(Double.isNaN(result.getDouble(0)));
+  }
+
+  /** Test multi-row decoding with a null value. */
+  @Test
+  public void testFromIeee75464BigEndianMultiRowsWithNull() {
+    Binary[] values = {
+      new Binary(doubleToBigEndianBytes(1.12)), null, new Binary(doubleToBigEndianBytes(-2.23))
+    };
+    boolean[] isNull = {false, true, false};
+    Column binaryColumn = new BinaryColumn(values.length, Optional.of(isNull), values);
+
+    BytesToDoubleColumnTransformer transformer = newTransformer(binaryColumn);
+    transformer.evaluate();
+    Column result = transformer.getColumn();
+
+    Assert.assertEquals(3, result.getPositionCount());
+    Assert.assertFalse(result.isNull(0));
+    Assert.assertEquals(1.12, result.getDouble(0), 0.0);
+    Assert.assertTrue(result.isNull(1));
+    Assert.assertFalse(result.isNull(2));
+    Assert.assertEquals(-2.23, result.getDouble(2), 0.0);
+  }
+
+  /** Test decoding with a short binary input (< 8 bytes), expecting an exception. */
+  @Test
+  public void testFromIeee75464BigEndianInvalidLengthShort() {
+    assertInvalidInput(new byte[] {1, 2, 3, 4, 5, 6, 7}); // Too short
+  }
+
+  /** Test decoding with a long binary input (> 8 bytes), expecting an exception. */
+  @Test
+  public void testFromIeee75464BigEndianInvalidLengthLong() {
+    assertInvalidInput(new byte[] {1, 2, 3, 4, 5, 6, 7, 8, 9}); // Too long
+  }
+}
diff --git a/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/transformation/dag/column/unary/scalar/IntegerEncoding/FromLittleEndian32ColumnTransformerTest.java b/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/transformation/dag/column/unary/scalar/IntegerEncoding/FromLittleEndian32ColumnTransformerTest.java
new file mode 100644
index 000000000000..f6240e3e1d48
--- /dev/null
+++ b/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/transformation/dag/column/unary/scalar/IntegerEncoding/FromLittleEndian32ColumnTransformerTest.java
@@ -0,0 +1,173 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar.IntegerEncoding;
+
+import org.apache.iotdb.db.exception.sql.SemanticException;
+import org.apache.iotdb.db.queryengine.transformation.dag.column.ColumnTransformer;
+import org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar.BytesToIntColumnTransformer;
+import org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar.factory.NumericCodecStrategiesFactory;
+
+import org.apache.tsfile.block.column.Column;
+import org.apache.tsfile.read.common.block.column.BinaryColumn;
+import org.apache.tsfile.utils.Binary;
+import org.junit.Assert;
+import org.junit.Test;
+import org.mockito.Mockito;
+
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+import java.util.Optional;
+
+import static org.apache.tsfile.read.common.type.BlobType.BLOB;
+import static org.apache.tsfile.read.common.type.IntType.INT32;
+
+public class FromLittleEndian32ColumnTransformerTest {
+
+  // Mocks a child ColumnTransformer that always returns the given Column.
+  private ColumnTransformer mockChildColumnTransformer(Column column) {
+    ColumnTransformer mockColumnTransformer = Mockito.mock(ColumnTransformer.class);
+    Mockito.when(mockColumnTransformer.getColumn()).thenReturn(column);
+    Mockito.doNothing().when(mockColumnTransformer).tryEvaluate();
+    Mockito.doNothing().when(mockColumnTransformer).clearCache();
+    Mockito.doNothing().when(mockColumnTransformer).evaluateWithSelection(Mockito.any());
+    return mockColumnTransformer;
+  }
+
+  // Encodes an int as a little-endian 4-byte array.
+  private byte[] intToLittleEndianBytes(int value) {
+    return ByteBuffer.allocate(4).order(ByteOrder.LITTLE_ENDIAN).putInt(value).array();
+  }
+
+  // Builds a transformer (reference count already added) over the given input column.
+  private BytesToIntColumnTransformer newTransformer(Column input) {
+    BytesToIntColumnTransformer transformer =
+        new BytesToIntColumnTransformer(
+            INT32,
+            mockChildColumnTransformer(input),
+            NumericCodecStrategiesFactory.FROM_LITTLE_ENDIAN_32,
+            "from_little_endian_32",
+            BLOB);
+    transformer.addReferenceCount();
+    return transformer;
+  }
+
+  // Decodes a single-row column holding the little-endian encoding of the given value.
+  private Column decodeSingle(int value) {
+    Binary[] values = new Binary[] {new Binary(intToLittleEndianBytes(value))};
+    Column binaryColumn = new BinaryColumn(values.length, Optional.empty(), values);
+    BytesToIntColumnTransformer transformer = newTransformer(binaryColumn);
+    transformer.evaluate();
+    return transformer.getColumn();
+  }
+
+  // Asserts that evaluating the given raw bytes fails with the expected SemanticException.
+  private void assertInvalidInput(byte[] raw) {
+    Binary[] values = {new Binary(raw)};
+    Column binaryColumn = new BinaryColumn(values.length, Optional.empty(), values);
+    BytesToIntColumnTransformer transformer = newTransformer(binaryColumn);
+    try {
+      transformer.evaluate();
+      Assert.fail("Expected SemanticException was not thrown.");
+    } catch (SemanticException e) {
+      Assert.assertTrue(
+          e.getMessage()
+              .contains(
+                  "Failed to execute function 'from_little_endian_32' due to an invalid input format."));
+    }
+  }
+
+  /** Test a positive integer decoding using FROM_LITTLE_ENDIAN_32 strategy. */
+  @Test
+  public void testFromLittleEndian32Positive() {
+    int expected = 16909060; // 0x01020304
+    Column result = decodeSingle(expected);
+    Assert.assertEquals(1, result.getPositionCount());
+    Assert.assertEquals(expected, result.getInt(0));
+  }
+
+  /** Test a negative integer decoding using FROM_LITTLE_ENDIAN_32 strategy. */
+  @Test
+  public void testFromLittleEndian32Negative() {
+    int expected = -1; // 0xFFFFFFFF
+    Column result = decodeSingle(expected);
+    Assert.assertEquals(1, result.getPositionCount());
+    Assert.assertEquals(expected, result.getInt(0));
+  }
+
+  /** Test multi-row decoding with a null value using FROM_LITTLE_ENDIAN_32 strategy. */
+  @Test
+  public void testFromLittleEndian32MultiRowsWithNull() {
+    Binary[] values =
+        new Binary[] {
+          new Binary(intToLittleEndianBytes(100)), null, new Binary(intToLittleEndianBytes(-200))
+        };
+    boolean[] valueIsNull = new boolean[] {false, true, false};
+    Column binaryColumn = new BinaryColumn(values.length, Optional.of(valueIsNull), values);
+
+    BytesToIntColumnTransformer transformer = newTransformer(binaryColumn);
+    transformer.evaluate();
+    Column result = transformer.getColumn();
+
+    Assert.assertEquals(3, result.getPositionCount());
+    Assert.assertFalse(result.isNull(0));
+    Assert.assertEquals(100, result.getInt(0));
+    Assert.assertTrue(result.isNull(1));
+    Assert.assertFalse(result.isNull(2));
+    Assert.assertEquals(-200, result.getInt(2));
+  }
+
+  /** Test decoding with a selection array using FROM_LITTLE_ENDIAN_32 strategy. */
+  @Test
+  public void testFromLittleEndian32WithSelection() {
+    Binary[] values = {
+      new Binary(intToLittleEndianBytes(5)),
+      new Binary(intToLittleEndianBytes(10)),
+      new Binary(intToLittleEndianBytes(15))
+    };
+    Column binaryColumn = new BinaryColumn(values.length, Optional.empty(), values);
+
+    BytesToIntColumnTransformer transformer = newTransformer(binaryColumn);
+
+    // Select and process only the first and third rows.
+    boolean[] selection = {true, false, true};
+    transformer.evaluateWithSelection(selection);
+    Column result = transformer.getColumn();
+
+    Assert.assertEquals(3, result.getPositionCount());
+    Assert.assertFalse(result.isNull(0));
+    Assert.assertEquals(5, result.getInt(0));
+    // The second row was not selected, so the result should be null.
+    Assert.assertTrue(result.isNull(1));
+    Assert.assertFalse(result.isNull(2));
+    Assert.assertEquals(15, result.getInt(2));
+  }
+
+  /** Test decoding with a short binary input (< 4 bytes), expecting an exception. */
+  @Test
+  public void testFromLittleEndian32WithShortLength() {
+    assertInvalidInput(new byte[] {1, 2, 3}); // Too short
+  }
+
+  /** Test decoding with a long binary input (> 4 bytes), expecting an exception. */
+  @Test
+  public void testFromLittleEndian32WithLongLength() {
+    assertInvalidInput(new byte[] {1, 2, 3, 4, 5}); // Too long
+  }
+}
diff --git a/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/transformation/dag/column/unary/scalar/IntegerEncoding/FromLittleEndian64ColumnTransformerTest.java b/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/transformation/dag/column/unary/scalar/IntegerEncoding/FromLittleEndian64ColumnTransformerTest.java
new file mode 100644
index 000000000000..6e3cefa5483f
--- /dev/null
+++ b/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/transformation/dag/column/unary/scalar/IntegerEncoding/FromLittleEndian64ColumnTransformerTest.java
@@ -0,0 +1,243 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar.IntegerEncoding;
+
+import org.apache.iotdb.db.exception.sql.SemanticException;
+import org.apache.iotdb.db.queryengine.transformation.dag.column.ColumnTransformer;
+import org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar.BytesToLongColumnTransformer;
+import org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar.factory.NumericCodecStrategiesFactory;
+
+import org.apache.tsfile.block.column.Column;
+import org.apache.tsfile.read.common.block.column.BinaryColumn;
+import org.apache.tsfile.utils.Binary;
+import org.junit.Assert;
+import org.junit.Test;
+import org.mockito.Mockito;
+
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+import java.util.Optional;
+
+import static org.apache.tsfile.read.common.type.BlobType.BLOB;
+import static org.apache.tsfile.read.common.type.LongType.INT64;
+
+public class FromLittleEndian64ColumnTransformerTest {
+
+ // Helper method to mock a child ColumnTransformer that returns a predefined Column.
+ private ColumnTransformer mockChildColumnTransformer(Column column) {
+ ColumnTransformer mockColumnTransformer = Mockito.mock(ColumnTransformer.class);
+ Mockito.when(mockColumnTransformer.getColumn()).thenReturn(column);
+ Mockito.doNothing().when(mockColumnTransformer).tryEvaluate();
+ Mockito.doNothing().when(mockColumnTransformer).clearCache();
+ Mockito.doNothing().when(mockColumnTransformer).evaluateWithSelection(Mockito.any());
+ return mockColumnTransformer;
+ }
+
+ // Helper method to convert a long to a little-endian 8-byte array.
+ private byte[] longToLittleEndianBytes(long value) {
+ return ByteBuffer.allocate(8).order(ByteOrder.LITTLE_ENDIAN).putLong(value).array();
+ }
+
+ /** Test a positive long conversion using FROM_LITTLE_ENDIAN_64 strategy. */
+ @Test
+ public void testFromLittleEndian64Positive() {
+ long expected = 72623859790382856L; // 0x0102030405060708L
+ Binary[] values = new Binary[] {new Binary(longToLittleEndianBytes(expected))};
+ Column blobColumn = new BinaryColumn(values.length, Optional.empty(), values);
+
+ ColumnTransformer child = mockChildColumnTransformer(blobColumn);
+ BytesToLongColumnTransformer transformer =
+ new BytesToLongColumnTransformer(
+ INT64,
+ child,
+ NumericCodecStrategiesFactory.FROM_LITTLE_ENDIAN_64,
+ "from_little_endian_64",
+ BLOB);
+ transformer.addReferenceCount();
+ transformer.evaluate();
+ Column result = transformer.getColumn();
+
+ Assert.assertEquals(1, result.getPositionCount());
+ Assert.assertEquals(expected, result.getLong(0));
+ }
+
+ /** Test a negative long conversion using FROM_LITTLE_ENDIAN_64 strategy. */
+ @Test
+ public void testFromLittleEndian64Negative() {
+ long expected = -1L; // 0xFFFFFFFFFFFFFFFFL
+ Binary[] values = new Binary[] {new Binary(longToLittleEndianBytes(expected))};
+ Column blobColumn = new BinaryColumn(values.length, Optional.empty(), values);
+
+ ColumnTransformer child = mockChildColumnTransformer(blobColumn);
+ BytesToLongColumnTransformer transformer =
+ new BytesToLongColumnTransformer(
+ INT64,
+ child,
+ NumericCodecStrategiesFactory.FROM_LITTLE_ENDIAN_64,
+ "from_little_endian_64",
+ BLOB);
+ transformer.addReferenceCount();
+ transformer.evaluate();
+ Column result = transformer.getColumn();
+
+ Assert.assertEquals(1, result.getPositionCount());
+ Assert.assertEquals(expected, result.getLong(0));
+ }
+
+ /** Test long zero conversion using FROM_LITTLE_ENDIAN_64 strategy. */
+ @Test
+ public void testFromLittleEndian64Zero() {
+ long expected = 0L;
+ Binary[] values = new Binary[] {new Binary(longToLittleEndianBytes(expected))};
+ Column blobColumn = new BinaryColumn(values.length, Optional.empty(), values);
+
+ ColumnTransformer child = mockChildColumnTransformer(blobColumn);
+ BytesToLongColumnTransformer transformer =
+ new BytesToLongColumnTransformer(
+ INT64,
+ child,
+ NumericCodecStrategiesFactory.FROM_LITTLE_ENDIAN_64,
+ "from_little_endian_64",
+ BLOB);
+ transformer.addReferenceCount();
+ transformer.evaluate();
+ Column result = transformer.getColumn();
+
+ Assert.assertEquals(1, result.getPositionCount());
+ Assert.assertEquals(expected, result.getLong(0));
+ }
+
+ /** Test Long.MAX_VALUE conversion using FROM_LITTLE_ENDIAN_64 strategy. */
+ @Test
+ public void testFromLittleEndian64MaxValue() {
+ long expected = Long.MAX_VALUE;
+ Binary[] values = new Binary[] {new Binary(longToLittleEndianBytes(expected))};
+ Column blobColumn = new BinaryColumn(values.length, Optional.empty(), values);
+
+ ColumnTransformer child = mockChildColumnTransformer(blobColumn);
+ BytesToLongColumnTransformer transformer =
+ new BytesToLongColumnTransformer(
+ INT64,
+ child,
+ NumericCodecStrategiesFactory.FROM_LITTLE_ENDIAN_64,
+ "from_little_endian_64",
+ BLOB);
+ transformer.addReferenceCount();
+ transformer.evaluate();
+ Column result = transformer.getColumn();
+
+ Assert.assertEquals(1, result.getPositionCount());
+ Assert.assertEquals(expected, result.getLong(0));
+ }
+
+ /** Test multi-row conversion with a null value using FROM_LITTLE_ENDIAN_64 strategy. */
+ @Test
+ public void testFromLittleEndian64MultiRowsWithNull() {
+ long val1 = 1000L;
+ long val3 = -2000L;
+ Binary[] values =
+ new Binary[] {
+ new Binary(longToLittleEndianBytes(val1)), null, new Binary(longToLittleEndianBytes(val3))
+ };
+ boolean[] isNull = {false, true, false};
+ Column blobColumn = new BinaryColumn(values.length, Optional.of(isNull), values);
+
+ ColumnTransformer child = mockChildColumnTransformer(blobColumn);
+ BytesToLongColumnTransformer transformer =
+ new BytesToLongColumnTransformer(
+ INT64,
+ child,
+ NumericCodecStrategiesFactory.FROM_LITTLE_ENDIAN_64,
+ "from_little_endian_64",
+ BLOB);
+ transformer.addReferenceCount();
+ transformer.evaluate();
+ Column result = transformer.getColumn();
+
+ Assert.assertEquals(3, result.getPositionCount());
+ Assert.assertFalse(result.isNull(0));
+ Assert.assertEquals(val1, result.getLong(0));
+ Assert.assertTrue(result.isNull(1));
+ Assert.assertFalse(result.isNull(2));
+ Assert.assertEquals(val3, result.getLong(2));
+ }
+
+ /** Test conversion with a selection array using FROM_LITTLE_ENDIAN_64 strategy. */
+ @Test
+ public void testFromLittleEndian64WithSelection() {
+ long val1 = 50L;
+ long val2 = 100L;
+ long val3 = 150L;
+ Binary[] values = {
+ new Binary(longToLittleEndianBytes(val1)),
+ new Binary(longToLittleEndianBytes(val2)),
+ new Binary(longToLittleEndianBytes(val3))
+ };
+ Column blobColumn = new BinaryColumn(values.length, Optional.empty(), values);
+ ColumnTransformer child = mockChildColumnTransformer(blobColumn);
+ BytesToLongColumnTransformer transformer =
+ new BytesToLongColumnTransformer(
+ INT64,
+ child,
+ NumericCodecStrategiesFactory.FROM_LITTLE_ENDIAN_64,
+ "from_little_endian_64",
+ BLOB);
+ transformer.addReferenceCount();
+
+ boolean[] selection = {true, false, true};
+ transformer.evaluateWithSelection(selection);
+ Column result = transformer.getColumn();
+
+ Assert.assertEquals(3, result.getPositionCount());
+ Assert.assertFalse(result.isNull(0));
+ Assert.assertEquals(val1, result.getLong(0));
+ Assert.assertTrue(result.isNull(1));
+ Assert.assertFalse(result.isNull(2));
+ Assert.assertEquals(val3, result.getLong(2));
+ }
+
+ /** Test exception for invalid input length using FROM_LITTLE_ENDIAN_64 strategy. */
+ @Test
+ public void testFromLittleEndian64InvalidLength() {
+ // Input is a 4-byte array, but 8 is expected.
+ Binary[] values = new Binary[] {new Binary(new byte[] {1, 2, 3, 4})};
+ Column blobColumn = new BinaryColumn(values.length, Optional.empty(), values);
+
+ ColumnTransformer child = mockChildColumnTransformer(blobColumn);
+ BytesToLongColumnTransformer transformer =
+ new BytesToLongColumnTransformer(
+ INT64,
+ child,
+ NumericCodecStrategiesFactory.FROM_LITTLE_ENDIAN_64,
+ "from_little_endian_64",
+ BLOB);
+ transformer.addReferenceCount();
+
+ try {
+ transformer.evaluate();
+ Assert.fail("Expected SemanticException to be thrown");
+ } catch (SemanticException e) {
+ Assert.assertTrue(
+ e.getMessage()
+ .contains(
+ "Failed to execute function 'from_little_endian_64' due to an invalid input format."));
+ }
+ }
+}
diff --git a/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/transformation/dag/column/unary/scalar/IntegerEncoding/ToBigEndian32ColumnTransformerTest.java b/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/transformation/dag/column/unary/scalar/IntegerEncoding/ToBigEndian32ColumnTransformerTest.java
new file mode 100644
index 000000000000..e84fd6d2ff86
--- /dev/null
+++ b/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/transformation/dag/column/unary/scalar/IntegerEncoding/ToBigEndian32ColumnTransformerTest.java
@@ -0,0 +1,188 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar.IntegerEncoding;
+
+import org.apache.iotdb.db.queryengine.transformation.dag.column.ColumnTransformer;
+import org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar.IntToBytesColumnTransformer;
+import org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar.factory.NumericCodecStrategiesFactory;
+
+import org.apache.tsfile.block.column.Column;
+import org.apache.tsfile.read.common.block.column.IntColumn;
+import org.junit.Assert;
+import org.junit.Test;
+import org.mockito.Mockito;
+
+import java.nio.ByteBuffer;
+import java.util.Optional;
+
+import static org.apache.tsfile.read.common.type.BlobType.BLOB;
+
+public class ToBigEndian32ColumnTransformerTest {
+
+ // Helper method to mock a child ColumnTransformer that returns a predefined Column.
+ private ColumnTransformer mockChildColumnTransformer(Column column) {
+ ColumnTransformer mockColumnTransformer = Mockito.mock(ColumnTransformer.class);
+ Mockito.when(mockColumnTransformer.getColumn()).thenReturn(column);
+ Mockito.doNothing().when(mockColumnTransformer).tryEvaluate();
+ Mockito.doNothing().when(mockColumnTransformer).clearCache();
+ Mockito.doNothing().when(mockColumnTransformer).evaluateWithSelection(Mockito.any());
+ return mockColumnTransformer;
+ }
+
+ // Helper method to convert an integer to a big-endian 4-byte array.
+ private byte[] intToBigEndianBytes(int value) {
+ return ByteBuffer.allocate(4).putInt(value).array();
+ }
+
+ /** Test a positive integer conversion using TO_BIG_ENDIAN_32 strategy. */
+ @Test
+ public void testToBigEndian32Positive() {
+ int input = 16909060; // 0x01020304
+ int[] values = new int[] {input};
+ Column intColumn = new IntColumn(values.length, Optional.empty(), values);
+
+ ColumnTransformer childColumnTransformer = mockChildColumnTransformer(intColumn);
+ IntToBytesColumnTransformer transformer =
+ new IntToBytesColumnTransformer(
+ BLOB, childColumnTransformer, NumericCodecStrategiesFactory.TO_BIG_ENDIAN_32);
+ transformer.addReferenceCount();
+ transformer.evaluate();
+ Column result = transformer.getColumn();
+
+ byte[] expectedOutput = intToBigEndianBytes(input); // {1, 2, 3, 4}
+ Assert.assertEquals(1, result.getPositionCount());
+ Assert.assertArrayEquals(expectedOutput, result.getBinary(0).getValues());
+ }
+
+ /** Test a negative integer conversion using TO_BIG_ENDIAN_32 strategy. */
+ @Test
+ public void testToBigEndian32Negative() {
+ int input = -1; // 0xFFFFFFFF
+ int[] values = new int[] {input};
+ Column intColumn = new IntColumn(values.length, Optional.empty(), values);
+
+ ColumnTransformer childColumnTransformer = mockChildColumnTransformer(intColumn);
+ IntToBytesColumnTransformer transformer =
+ new IntToBytesColumnTransformer(
+ BLOB, childColumnTransformer, NumericCodecStrategiesFactory.TO_BIG_ENDIAN_32);
+ transformer.addReferenceCount();
+ transformer.evaluate();
+ Column result = transformer.getColumn();
+
+ byte[] expectedOutput = intToBigEndianBytes(input); // {-1, -1, -1, -1}
+ Assert.assertEquals(1, result.getPositionCount());
+ Assert.assertArrayEquals(expectedOutput, result.getBinary(0).getValues());
+ }
+
+ /** Test integer zero conversion using TO_BIG_ENDIAN_32 strategy. */
+ @Test
+ public void testToBigEndian32Zero() {
+ int input = 0; // 0x00000000
+ int[] values = new int[] {input};
+ Column intColumn = new IntColumn(values.length, Optional.empty(), values);
+
+ ColumnTransformer childColumnTransformer = mockChildColumnTransformer(intColumn);
+ IntToBytesColumnTransformer transformer =
+ new IntToBytesColumnTransformer(
+ BLOB, childColumnTransformer, NumericCodecStrategiesFactory.TO_BIG_ENDIAN_32);
+ transformer.addReferenceCount();
+ transformer.evaluate();
+ Column result = transformer.getColumn();
+
+ byte[] expectedOutput = intToBigEndianBytes(input); // {0, 0, 0, 0}
+ Assert.assertEquals(1, result.getPositionCount());
+ Assert.assertArrayEquals(expectedOutput, result.getBinary(0).getValues());
+ }
+
+ /** Test Integer.MAX_VALUE conversion using TO_BIG_ENDIAN_32 strategy. */
+ @Test
+ public void testToBigEndian32MaxValue() {
+ int input = Integer.MAX_VALUE; // 0x7FFFFFFF
+ int[] values = new int[] {input};
+ Column intColumn = new IntColumn(values.length, Optional.empty(), values);
+
+ ColumnTransformer childColumnTransformer = mockChildColumnTransformer(intColumn);
+ IntToBytesColumnTransformer transformer =
+ new IntToBytesColumnTransformer(
+ BLOB, childColumnTransformer, NumericCodecStrategiesFactory.TO_BIG_ENDIAN_32);
+ transformer.addReferenceCount();
+ transformer.evaluate();
+ Column result = transformer.getColumn();
+
+ byte[] expectedOutput = intToBigEndianBytes(input); // {127, -1, -1, -1}
+ Assert.assertEquals(1, result.getPositionCount());
+ Assert.assertArrayEquals(expectedOutput, result.getBinary(0).getValues());
+ }
+
+ /** Test multi-row conversion with a null value using TO_BIG_ENDIAN_32 strategy. */
+ @Test
+ public void testToBigEndian32MultiRowsWithNull() {
+ int[] values = new int[] {100, 0, -200}; // Use 0 as a placeholder for null
+ boolean[] valueIsNull = new boolean[] {false, true, false};
+ Column intColumn = new IntColumn(values.length, Optional.of(valueIsNull), values);
+ ColumnTransformer childColumnTransformer = mockChildColumnTransformer(intColumn);
+
+ IntToBytesColumnTransformer transformer =
+ new IntToBytesColumnTransformer(
+ BLOB, childColumnTransformer, NumericCodecStrategiesFactory.TO_BIG_ENDIAN_32);
+ transformer.addReferenceCount();
+ transformer.evaluate();
+ Column result = transformer.getColumn();
+
+ byte[] expected1 = intToBigEndianBytes(100);
+ byte[] expected3 = intToBigEndianBytes(-200);
+
+ Assert.assertEquals(3, result.getPositionCount());
+ Assert.assertFalse(result.isNull(0));
+ Assert.assertArrayEquals(expected1, result.getBinary(0).getValues());
+ Assert.assertTrue(result.isNull(1));
+ Assert.assertFalse(result.isNull(2));
+ Assert.assertArrayEquals(expected3, result.getBinary(2).getValues());
+ }
+
+ /** Test conversion with a selection array using TO_BIG_ENDIAN_32 strategy. */
+ @Test
+ public void testToBigEndian32WithSelection() {
+ int[] values = {5, 10, 15};
+ Column intColumn = new IntColumn(values.length, Optional.empty(), values);
+ ColumnTransformer child = mockChildColumnTransformer(intColumn);
+
+ IntToBytesColumnTransformer transformer =
+ new IntToBytesColumnTransformer(
+ BLOB, child, NumericCodecStrategiesFactory.TO_BIG_ENDIAN_32);
+ transformer.addReferenceCount();
+
+ // Select and process only the first and third rows.
+ boolean[] selection = {true, false, true};
+ transformer.evaluateWithSelection(selection);
+ Column result = transformer.getColumn();
+
+ byte[] expected1 = intToBigEndianBytes(5);
+ byte[] expected3 = intToBigEndianBytes(15);
+
+ Assert.assertEquals(3, result.getPositionCount());
+ Assert.assertFalse(result.isNull(0));
+ Assert.assertArrayEquals(expected1, result.getBinary(0).getValues());
+ // The second row was not selected, so the result should be null.
+ Assert.assertTrue(result.isNull(1));
+ Assert.assertFalse(result.isNull(2));
+ Assert.assertArrayEquals(expected3, result.getBinary(2).getValues());
+ }
+}
diff --git a/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/transformation/dag/column/unary/scalar/IntegerEncoding/ToBigEndian64ColumnTransformerTest.java b/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/transformation/dag/column/unary/scalar/IntegerEncoding/ToBigEndian64ColumnTransformerTest.java
new file mode 100644
index 000000000000..27c4839aaf07
--- /dev/null
+++ b/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/transformation/dag/column/unary/scalar/IntegerEncoding/ToBigEndian64ColumnTransformerTest.java
@@ -0,0 +1,188 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar.IntegerEncoding;
+
+import org.apache.iotdb.db.queryengine.transformation.dag.column.ColumnTransformer;
+import org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar.LongToBytesColumnTransformer;
+import org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar.factory.NumericCodecStrategiesFactory;
+
+import org.apache.tsfile.block.column.Column;
+import org.apache.tsfile.read.common.block.column.LongColumn;
+import org.junit.Assert;
+import org.junit.Test;
+import org.mockito.Mockito;
+
+import java.nio.ByteBuffer;
+import java.util.Optional;
+
+import static org.apache.tsfile.read.common.type.BlobType.BLOB;
+
+public class ToBigEndian64ColumnTransformerTest {
+
+ // Helper method to mock a child ColumnTransformer that returns a predefined Column.
+ private ColumnTransformer mockChildColumnTransformer(Column column) {
+ ColumnTransformer mockColumnTransformer = Mockito.mock(ColumnTransformer.class);
+ Mockito.when(mockColumnTransformer.getColumn()).thenReturn(column);
+ Mockito.doNothing().when(mockColumnTransformer).tryEvaluate();
+ Mockito.doNothing().when(mockColumnTransformer).clearCache();
+ Mockito.doNothing().when(mockColumnTransformer).evaluateWithSelection(Mockito.any());
+ return mockColumnTransformer;
+ }
+
+ // Helper method to convert a long to a big-endian 8-byte array.
+ private byte[] longToBigEndianBytes(long value) {
+ return ByteBuffer.allocate(8).putLong(value).array();
+ }
+
+ /** Test a positive long conversion using TO_BIG_ENDIAN_64 strategy. */
+ @Test
+ public void testToBigEndian64Positive() {
+ long input = 72623859790382856L; // 0x0102030405060708L
+ long[] values = new long[] {input};
+ Column longColumn = new LongColumn(values.length, Optional.empty(), values);
+
+ ColumnTransformer childColumnTransformer = mockChildColumnTransformer(longColumn);
+ LongToBytesColumnTransformer transformer =
+ new LongToBytesColumnTransformer(
+ BLOB, childColumnTransformer, NumericCodecStrategiesFactory.TO_BIG_ENDIAN_64);
+ transformer.addReferenceCount();
+ transformer.evaluate();
+ Column result = transformer.getColumn();
+
+ byte[] expectedOutput = longToBigEndianBytes(input); // {1, 2, 3, 4, 5, 6, 7, 8}
+ Assert.assertEquals(1, result.getPositionCount());
+ Assert.assertArrayEquals(expectedOutput, result.getBinary(0).getValues());
+ }
+
+ /** Test a negative long conversion using TO_BIG_ENDIAN_64 strategy. */
+ @Test
+ public void testToBigEndian64Negative() {
+ long input = -1L; // 0xFFFFFFFFFFFFFFFFL
+ long[] values = new long[] {input};
+ Column longColumn = new LongColumn(values.length, Optional.empty(), values);
+
+ ColumnTransformer childColumnTransformer = mockChildColumnTransformer(longColumn);
+ LongToBytesColumnTransformer transformer =
+ new LongToBytesColumnTransformer(
+ BLOB, childColumnTransformer, NumericCodecStrategiesFactory.TO_BIG_ENDIAN_64);
+ transformer.addReferenceCount();
+ transformer.evaluate();
+ Column result = transformer.getColumn();
+
+ byte[] expectedOutput = longToBigEndianBytes(input); // {-1, -1, -1, -1, -1, -1, -1, -1}
+ Assert.assertEquals(1, result.getPositionCount());
+ Assert.assertArrayEquals(expectedOutput, result.getBinary(0).getValues());
+ }
+
+ /** Test long zero conversion using TO_BIG_ENDIAN_64 strategy. */
+ @Test
+ public void testToBigEndian64Zero() {
+ long input = 0L; // 0x0000000000000000L
+ long[] values = new long[] {input};
+ Column longColumn = new LongColumn(values.length, Optional.empty(), values);
+
+ ColumnTransformer childColumnTransformer = mockChildColumnTransformer(longColumn);
+ LongToBytesColumnTransformer transformer =
+ new LongToBytesColumnTransformer(
+ BLOB, childColumnTransformer, NumericCodecStrategiesFactory.TO_BIG_ENDIAN_64);
+ transformer.addReferenceCount();
+ transformer.evaluate();
+ Column result = transformer.getColumn();
+
+ byte[] expectedOutput = longToBigEndianBytes(input); // {0, 0, 0, 0, 0, 0, 0, 0}
+ Assert.assertEquals(1, result.getPositionCount());
+ Assert.assertArrayEquals(expectedOutput, result.getBinary(0).getValues());
+ }
+
+ /** Test Long.MAX_VALUE conversion using TO_BIG_ENDIAN_64 strategy. */
+ @Test
+ public void testToBigEndian64MaxValue() {
+ long input = Long.MAX_VALUE; // 0x7FFFFFFFFFFFFFFFL
+ long[] values = new long[] {input};
+ Column longColumn = new LongColumn(values.length, Optional.empty(), values);
+
+ ColumnTransformer childColumnTransformer = mockChildColumnTransformer(longColumn);
+ LongToBytesColumnTransformer transformer =
+ new LongToBytesColumnTransformer(
+ BLOB, childColumnTransformer, NumericCodecStrategiesFactory.TO_BIG_ENDIAN_64);
+ transformer.addReferenceCount();
+ transformer.evaluate();
+ Column result = transformer.getColumn();
+
+ byte[] expectedOutput = longToBigEndianBytes(input); // {127, -1, -1, -1, -1, -1, -1, -1}
+ Assert.assertEquals(1, result.getPositionCount());
+ Assert.assertArrayEquals(expectedOutput, result.getBinary(0).getValues());
+ }
+
+ /** Test multi-row conversion with a null value using TO_BIG_ENDIAN_64 strategy. */
+ @Test
+ public void testToBigEndian64MultiRowsWithNull() {
+ long[] values = new long[] {1000L, 0L, -2000L}; // Use 0L as a placeholder for null
+ boolean[] valueIsNull = new boolean[] {false, true, false};
+ Column longColumn = new LongColumn(values.length, Optional.of(valueIsNull), values);
+ ColumnTransformer childColumnTransformer = mockChildColumnTransformer(longColumn);
+
+ LongToBytesColumnTransformer transformer =
+ new LongToBytesColumnTransformer(
+ BLOB, childColumnTransformer, NumericCodecStrategiesFactory.TO_BIG_ENDIAN_64);
+ transformer.addReferenceCount();
+ transformer.evaluate();
+ Column result = transformer.getColumn();
+
+ byte[] expected1 = longToBigEndianBytes(1000L);
+ byte[] expected3 = longToBigEndianBytes(-2000L);
+
+ Assert.assertEquals(3, result.getPositionCount());
+ Assert.assertFalse(result.isNull(0));
+ Assert.assertArrayEquals(expected1, result.getBinary(0).getValues());
+ Assert.assertTrue(result.isNull(1));
+ Assert.assertFalse(result.isNull(2));
+ Assert.assertArrayEquals(expected3, result.getBinary(2).getValues());
+ }
+
+ /** Test conversion with a selection array using TO_BIG_ENDIAN_64 strategy. */
+ @Test
+ public void testToBigEndian64WithSelection() {
+ long[] values = {50L, 100L, 150L};
+ Column longColumn = new LongColumn(values.length, Optional.empty(), values);
+ ColumnTransformer child = mockChildColumnTransformer(longColumn);
+
+ LongToBytesColumnTransformer transformer =
+ new LongToBytesColumnTransformer(
+ BLOB, child, NumericCodecStrategiesFactory.TO_BIG_ENDIAN_64);
+ transformer.addReferenceCount();
+
+ // Select and process only the first and third rows.
+ boolean[] selection = {true, false, true};
+ transformer.evaluateWithSelection(selection);
+ Column result = transformer.getColumn();
+
+ byte[] expected1 = longToBigEndianBytes(50L);
+ byte[] expected3 = longToBigEndianBytes(150L);
+
+ Assert.assertEquals(3, result.getPositionCount());
+ Assert.assertFalse(result.isNull(0));
+ Assert.assertArrayEquals(expected1, result.getBinary(0).getValues());
+ // The second row was not selected, so the result should be null.
+ Assert.assertTrue(result.isNull(1));
+ Assert.assertFalse(result.isNull(2));
+ Assert.assertArrayEquals(expected3, result.getBinary(2).getValues());
+ }
+}
diff --git a/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/transformation/dag/column/unary/scalar/IntegerEncoding/ToIEEE754_32BigEndianColumnTransformerTest.java b/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/transformation/dag/column/unary/scalar/IntegerEncoding/ToIEEE754_32BigEndianColumnTransformerTest.java
new file mode 100644
index 000000000000..835013fde4f2
--- /dev/null
+++ b/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/transformation/dag/column/unary/scalar/IntegerEncoding/ToIEEE754_32BigEndianColumnTransformerTest.java
@@ -0,0 +1,179 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar.IntegerEncoding;
+
+import org.apache.iotdb.db.queryengine.transformation.dag.column.ColumnTransformer;
+import org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar.FloatToBytesColumnTransformer;
+import org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar.factory.NumericCodecStrategiesFactory;
+
+import org.apache.tsfile.block.column.Column;
+import org.apache.tsfile.read.common.block.column.FloatColumn;
+import org.junit.Assert;
+import org.junit.Test;
+import org.mockito.Mockito;
+
+import java.nio.ByteBuffer;
+import java.util.Optional;
+
+import static org.apache.tsfile.read.common.type.BlobType.BLOB;
+
+public class ToIEEE754_32BigEndianColumnTransformerTest {
+
+ // Helper method to mock a child ColumnTransformer that returns a predefined Column.
+ private ColumnTransformer mockChildColumnTransformer(Column column) {
+ ColumnTransformer mockColumnTransformer = Mockito.mock(ColumnTransformer.class);
+ Mockito.when(mockColumnTransformer.getColumn()).thenReturn(column);
+ Mockito.doNothing().when(mockColumnTransformer).tryEvaluate();
+ Mockito.doNothing().when(mockColumnTransformer).clearCache();
+ Mockito.doNothing().when(mockColumnTransformer).evaluateWithSelection(Mockito.any());
+ return mockColumnTransformer;
+ }
+
+ // Helper method to convert a float to a big-endian 4-byte array.
+ private byte[] floatToBigEndianBytes(float value) {
+ return ByteBuffer.allocate(4).putFloat(value).array();
+ }
+
+ /** Test a positive float conversion using TO_IEEE754_32_BIG_ENDIAN strategy. */
+ @Test
+ public void testToIeee75432BigEndianPositive() {
+ float input = 123.456f;
+ float[] values = new float[] {input};
+ Column floatColumn = new FloatColumn(values.length, Optional.empty(), values);
+
+ ColumnTransformer child = mockChildColumnTransformer(floatColumn);
+ FloatToBytesColumnTransformer transformer =
+ new FloatToBytesColumnTransformer(
+ BLOB, child, NumericCodecStrategiesFactory.TO_IEEE754_32_BIG_ENDIAN);
+ transformer.addReferenceCount();
+ transformer.evaluate();
+ Column result = transformer.getColumn();
+
+ byte[] expectedOutput = floatToBigEndianBytes(input);
+ Assert.assertEquals(1, result.getPositionCount());
+ Assert.assertArrayEquals(expectedOutput, result.getBinary(0).getValues());
+ }
+
+ /** Test a negative float conversion. */
+ @Test
+ public void testToIeee75432BigEndianNegative() {
+ float input = -987.654f;
+ float[] values = new float[] {input};
+ Column floatColumn = new FloatColumn(values.length, Optional.empty(), values);
+
+ ColumnTransformer child = mockChildColumnTransformer(floatColumn);
+ FloatToBytesColumnTransformer transformer =
+ new FloatToBytesColumnTransformer(
+ BLOB, child, NumericCodecStrategiesFactory.TO_IEEE754_32_BIG_ENDIAN);
+ transformer.addReferenceCount();
+ transformer.evaluate();
+ Column result = transformer.getColumn();
+
+ byte[] expectedOutput = floatToBigEndianBytes(input);
+ Assert.assertEquals(1, result.getPositionCount());
+ Assert.assertArrayEquals(expectedOutput, result.getBinary(0).getValues());
+ }
+
+ /** Test float zero conversion. */
+ @Test
+ public void testToIeee75432BigEndianZero() {
+ float input = 0.0f;
+ float[] values = new float[] {input};
+ Column floatColumn = new FloatColumn(values.length, Optional.empty(), values);
+
+ ColumnTransformer child = mockChildColumnTransformer(floatColumn);
+ FloatToBytesColumnTransformer transformer =
+ new FloatToBytesColumnTransformer(
+ BLOB, child, NumericCodecStrategiesFactory.TO_IEEE754_32_BIG_ENDIAN);
+ transformer.addReferenceCount();
+ transformer.evaluate();
+ Column result = transformer.getColumn();
+
+ byte[] expectedOutput = floatToBigEndianBytes(input);
+ Assert.assertEquals(1, result.getPositionCount());
+ Assert.assertArrayEquals(expectedOutput, result.getBinary(0).getValues());
+ }
+
+ /** Test special float value: Positive Infinity. */
+ @Test
+ public void testToIeee75432BigEndianPositiveInfinity() {
+ float input = Float.POSITIVE_INFINITY;
+ float[] values = new float[] {input};
+ Column floatColumn = new FloatColumn(values.length, Optional.empty(), values);
+
+ ColumnTransformer child = mockChildColumnTransformer(floatColumn);
+ FloatToBytesColumnTransformer transformer =
+ new FloatToBytesColumnTransformer(
+ BLOB, child, NumericCodecStrategiesFactory.TO_IEEE754_32_BIG_ENDIAN);
+ transformer.addReferenceCount();
+ transformer.evaluate();
+ Column result = transformer.getColumn();
+
+ byte[] expectedOutput = floatToBigEndianBytes(input);
+ Assert.assertEquals(1, result.getPositionCount());
+ Assert.assertArrayEquals(expectedOutput, result.getBinary(0).getValues());
+ }
+
+ /** Test special float value: NaN. */
+ @Test
+ public void testToIeee75432BigEndianNaN() {
+ float input = Float.NaN;
+ float[] values = new float[] {input};
+ Column floatColumn = new FloatColumn(values.length, Optional.empty(), values);
+
+ ColumnTransformer child = mockChildColumnTransformer(floatColumn);
+ FloatToBytesColumnTransformer transformer =
+ new FloatToBytesColumnTransformer(
+ BLOB, child, NumericCodecStrategiesFactory.TO_IEEE754_32_BIG_ENDIAN);
+ transformer.addReferenceCount();
+ transformer.evaluate();
+ Column result = transformer.getColumn();
+
+ byte[] expectedOutput = floatToBigEndianBytes(input);
+ Assert.assertEquals(1, result.getPositionCount());
+ Assert.assertArrayEquals(expectedOutput, result.getBinary(0).getValues());
+ }
+
+ /** Test multi-row conversion with a null value. */
+ @Test
+ public void testToIeee75432BigEndianMultiRowsWithNull() {
+ float[] values = new float[] {1.1f, 0f, -2.2f}; // 0f is placeholder for null
+ boolean[] isNull = {false, true, false};
+ Column floatColumn = new FloatColumn(values.length, Optional.of(isNull), values);
+
+ ColumnTransformer child = mockChildColumnTransformer(floatColumn);
+ FloatToBytesColumnTransformer transformer =
+ new FloatToBytesColumnTransformer(
+ BLOB, child, NumericCodecStrategiesFactory.TO_IEEE754_32_BIG_ENDIAN);
+ transformer.addReferenceCount();
+ transformer.evaluate();
+ Column result = transformer.getColumn();
+
+ byte[] expected1 = floatToBigEndianBytes(1.1f);
+ byte[] expected3 = floatToBigEndianBytes(-2.2f);
+
+ Assert.assertEquals(3, result.getPositionCount());
+ Assert.assertFalse(result.isNull(0));
+ Assert.assertArrayEquals(expected1, result.getBinary(0).getValues());
+ Assert.assertTrue(result.isNull(1));
+ Assert.assertFalse(result.isNull(2));
+ Assert.assertArrayEquals(expected3, result.getBinary(2).getValues());
+ }
+}
diff --git a/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/transformation/dag/column/unary/scalar/IntegerEncoding/ToIEEE754_64BigEndianColumnTransformerTest.java b/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/transformation/dag/column/unary/scalar/IntegerEncoding/ToIEEE754_64BigEndianColumnTransformerTest.java
new file mode 100644
index 000000000000..aa70494031a6
--- /dev/null
+++ b/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/transformation/dag/column/unary/scalar/IntegerEncoding/ToIEEE754_64BigEndianColumnTransformerTest.java
@@ -0,0 +1,159 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar.IntegerEncoding;
+
+import org.apache.iotdb.db.queryengine.transformation.dag.column.ColumnTransformer;
+import org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar.DoubleToBytesColumnTransformer;
+import org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar.factory.NumericCodecStrategiesFactory;
+
+import org.apache.tsfile.block.column.Column;
+import org.apache.tsfile.read.common.block.column.DoubleColumn;
+import org.junit.Assert;
+import org.junit.Test;
+import org.mockito.Mockito;
+
+import java.nio.ByteBuffer;
+import java.util.Optional;
+
+import static org.apache.tsfile.read.common.type.BlobType.BLOB;
+
+public class ToIEEE754_64BigEndianColumnTransformerTest {
+
+ // Mocks a child ColumnTransformer: getColumn() returns the given Column; lifecycle methods are no-ops.
+ private ColumnTransformer mockChildColumnTransformer(Column column) {
+ ColumnTransformer mockColumnTransformer = Mockito.mock(ColumnTransformer.class);
+ Mockito.when(mockColumnTransformer.getColumn()).thenReturn(column);
+ Mockito.doNothing().when(mockColumnTransformer).tryEvaluate();
+ Mockito.doNothing().when(mockColumnTransformer).clearCache();
+ Mockito.doNothing().when(mockColumnTransformer).evaluateWithSelection(Mockito.any());
+ return mockColumnTransformer;
+ }
+
+ // Encodes a double to its 8-byte IEEE-754 representation in big-endian order (ByteBuffer's default).
+ private byte[] doubleToBigEndianBytes(double value) {
+ return ByteBuffer.allocate(8).putDouble(value).array();
+ }
+
+ /** A positive double must encode to the same bytes ByteBuffer.putDouble produces (big-endian). */
+ @Test
+ public void testToIeee75464BigEndianPositive() {
+ double input = 123456.7890123;
+ double[] values = new double[] {input};
+ Column doubleColumn = new DoubleColumn(values.length, Optional.empty(), values);
+
+ ColumnTransformer child = mockChildColumnTransformer(doubleColumn);
+ DoubleToBytesColumnTransformer transformer =
+ new DoubleToBytesColumnTransformer(
+ BLOB, child, NumericCodecStrategiesFactory.TO_IEEE754_64_BIG_ENDIAN);
+ transformer.addReferenceCount();
+ transformer.evaluate();
+ Column result = transformer.getColumn();
+
+ byte[] expectedOutput = doubleToBigEndianBytes(input);
+ Assert.assertEquals(1, result.getPositionCount());
+ Assert.assertArrayEquals(expectedOutput, result.getBinary(0).getValues());
+ }
+
+ /** A negative double (sign bit set) must encode correctly. */
+ @Test
+ public void testToIeee75464BigEndianNegative() {
+ double input = -987654.3210987;
+ double[] values = new double[] {input};
+ Column doubleColumn = new DoubleColumn(values.length, Optional.empty(), values);
+
+ ColumnTransformer child = mockChildColumnTransformer(doubleColumn);
+ DoubleToBytesColumnTransformer transformer =
+ new DoubleToBytesColumnTransformer(
+ BLOB, child, NumericCodecStrategiesFactory.TO_IEEE754_64_BIG_ENDIAN);
+ transformer.addReferenceCount();
+ transformer.evaluate();
+ Column result = transformer.getColumn();
+
+ byte[] expectedOutput = doubleToBigEndianBytes(input);
+ Assert.assertEquals(1, result.getPositionCount());
+ Assert.assertArrayEquals(expectedOutput, result.getBinary(0).getValues());
+ }
+
+ /** Special value: Positive Infinity must be encoded via its IEEE-754 bit pattern. */
+ @Test
+ public void testToIeee75464BigEndianPositiveInfinity() {
+ double input = Double.POSITIVE_INFINITY;
+ double[] values = new double[] {input};
+ Column doubleColumn = new DoubleColumn(values.length, Optional.empty(), values);
+
+ ColumnTransformer child = mockChildColumnTransformer(doubleColumn);
+ DoubleToBytesColumnTransformer transformer =
+ new DoubleToBytesColumnTransformer(
+ BLOB, child, NumericCodecStrategiesFactory.TO_IEEE754_64_BIG_ENDIAN);
+ transformer.addReferenceCount();
+ transformer.evaluate();
+ Column result = transformer.getColumn();
+
+ byte[] expectedOutput = doubleToBigEndianBytes(input);
+ Assert.assertEquals(1, result.getPositionCount());
+ Assert.assertArrayEquals(expectedOutput, result.getBinary(0).getValues());
+ }
+
+ /** Special value: NaN must round-trip via the Double.NaN bit pattern produced by putDouble. */
+ @Test
+ public void testToIeee75464BigEndianNaN() {
+ double input = Double.NaN;
+ double[] values = new double[] {input};
+ Column doubleColumn = new DoubleColumn(values.length, Optional.empty(), values);
+
+ ColumnTransformer child = mockChildColumnTransformer(doubleColumn);
+ DoubleToBytesColumnTransformer transformer =
+ new DoubleToBytesColumnTransformer(
+ BLOB, child, NumericCodecStrategiesFactory.TO_IEEE754_64_BIG_ENDIAN);
+ transformer.addReferenceCount();
+ transformer.evaluate();
+ Column result = transformer.getColumn();
+
+ byte[] expectedOutput = doubleToBigEndianBytes(input);
+ Assert.assertEquals(1, result.getPositionCount());
+ Assert.assertArrayEquals(expectedOutput, result.getBinary(0).getValues());
+ }
+
+ /** Null rows must remain null in the output; surrounding non-null rows are encoded normally. */
+ @Test
+ public void testToIeee75464BigEndianMultiRowsWithNull() {
+ double[] values = new double[] {1.12, 0.0, -2.23}; // 0.0 is placeholder for null
+ boolean[] isNull = {false, true, false};
+ Column doubleColumn = new DoubleColumn(values.length, Optional.of(isNull), values);
+
+ ColumnTransformer child = mockChildColumnTransformer(doubleColumn);
+ DoubleToBytesColumnTransformer transformer =
+ new DoubleToBytesColumnTransformer(
+ BLOB, child, NumericCodecStrategiesFactory.TO_IEEE754_64_BIG_ENDIAN);
+ transformer.addReferenceCount();
+ transformer.evaluate();
+ Column result = transformer.getColumn();
+
+ byte[] expected1 = doubleToBigEndianBytes(1.12);
+ byte[] expected3 = doubleToBigEndianBytes(-2.23);
+
+ Assert.assertEquals(3, result.getPositionCount());
+ Assert.assertFalse(result.isNull(0));
+ Assert.assertArrayEquals(expected1, result.getBinary(0).getValues());
+ Assert.assertTrue(result.isNull(1));
+ Assert.assertFalse(result.isNull(2));
+ Assert.assertArrayEquals(expected3, result.getBinary(2).getValues());
+ }
+}
diff --git a/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/transformation/dag/column/unary/scalar/IntegerEncoding/ToLittleEndian32ColumnTransformerTest.java b/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/transformation/dag/column/unary/scalar/IntegerEncoding/ToLittleEndian32ColumnTransformerTest.java
new file mode 100644
index 000000000000..2465a3d81d0b
--- /dev/null
+++ b/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/transformation/dag/column/unary/scalar/IntegerEncoding/ToLittleEndian32ColumnTransformerTest.java
@@ -0,0 +1,189 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar.IntegerEncoding;
+
+import org.apache.iotdb.db.queryengine.transformation.dag.column.ColumnTransformer;
+import org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar.IntToBytesColumnTransformer;
+import org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar.factory.NumericCodecStrategiesFactory;
+
+import org.apache.tsfile.block.column.Column;
+import org.apache.tsfile.read.common.block.column.IntColumn;
+import org.junit.Assert;
+import org.junit.Test;
+import org.mockito.Mockito;
+
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+import java.util.Optional;
+
+import static org.apache.tsfile.read.common.type.BlobType.BLOB;
+
+public class ToLittleEndian32ColumnTransformerTest {
+
+ // Mocks a child ColumnTransformer: getColumn() returns the given Column; lifecycle methods are no-ops.
+ private ColumnTransformer mockChildColumnTransformer(Column column) {
+ ColumnTransformer mockColumnTransformer = Mockito.mock(ColumnTransformer.class);
+ Mockito.when(mockColumnTransformer.getColumn()).thenReturn(column);
+ Mockito.doNothing().when(mockColumnTransformer).tryEvaluate();
+ Mockito.doNothing().when(mockColumnTransformer).clearCache();
+ Mockito.doNothing().when(mockColumnTransformer).evaluateWithSelection(Mockito.any());
+ return mockColumnTransformer;
+ }
+
+ // Encodes an int to its 4-byte two's-complement representation in little-endian order.
+ private byte[] intToLittleEndianBytes(int value) {
+ return ByteBuffer.allocate(4).order(ByteOrder.LITTLE_ENDIAN).putInt(value).array();
+ }
+
+ /** A positive int must encode with its least-significant byte first. */
+ @Test
+ public void testToLittleEndian32Positive() {
+ int input = 16909060; // 0x01020304
+ int[] values = new int[] {input};
+ Column intColumn = new IntColumn(values.length, Optional.empty(), values);
+
+ ColumnTransformer childColumnTransformer = mockChildColumnTransformer(intColumn);
+ IntToBytesColumnTransformer transformer =
+ new IntToBytesColumnTransformer(
+ BLOB, childColumnTransformer, NumericCodecStrategiesFactory.TO_LITTLE_ENDIAN_32);
+ transformer.addReferenceCount();
+ transformer.evaluate();
+ Column result = transformer.getColumn();
+
+ byte[] expectedOutput = intToLittleEndianBytes(input); // {4, 3, 2, 1}
+ Assert.assertEquals(1, result.getPositionCount());
+ Assert.assertArrayEquals(expectedOutput, result.getBinary(0).getValues());
+ }
+
+ /** -1 (all bits set) must encode to four 0xFF bytes. */
+ @Test
+ public void testToLittleEndian32Negative() {
+ int input = -1; // 0xFFFFFFFF
+ int[] values = new int[] {input};
+ Column intColumn = new IntColumn(values.length, Optional.empty(), values);
+
+ ColumnTransformer childColumnTransformer = mockChildColumnTransformer(intColumn);
+ IntToBytesColumnTransformer transformer =
+ new IntToBytesColumnTransformer(
+ BLOB, childColumnTransformer, NumericCodecStrategiesFactory.TO_LITTLE_ENDIAN_32);
+ transformer.addReferenceCount();
+ transformer.evaluate();
+ Column result = transformer.getColumn();
+
+ byte[] expectedOutput = intToLittleEndianBytes(input); // {-1, -1, -1, -1}
+ Assert.assertEquals(1, result.getPositionCount());
+ Assert.assertArrayEquals(expectedOutput, result.getBinary(0).getValues());
+ }
+
+ /** Zero must encode to four zero bytes. */
+ @Test
+ public void testToLittleEndian32Zero() {
+ int input = 0; // 0x00000000
+ int[] values = new int[] {input};
+ Column intColumn = new IntColumn(values.length, Optional.empty(), values);
+
+ ColumnTransformer childColumnTransformer = mockChildColumnTransformer(intColumn);
+ IntToBytesColumnTransformer transformer =
+ new IntToBytesColumnTransformer(
+ BLOB, childColumnTransformer, NumericCodecStrategiesFactory.TO_LITTLE_ENDIAN_32);
+ transformer.addReferenceCount();
+ transformer.evaluate();
+ Column result = transformer.getColumn();
+
+ byte[] expectedOutput = intToLittleEndianBytes(input); // {0, 0, 0, 0}
+ Assert.assertEquals(1, result.getPositionCount());
+ Assert.assertArrayEquals(expectedOutput, result.getBinary(0).getValues());
+ }
+
+ /** Integer.MAX_VALUE (0x7FFFFFFF) must encode correctly at the positive boundary. */
+ @Test
+ public void testToLittleEndian32MaxValue() {
+ int input = Integer.MAX_VALUE; // 0x7FFFFFFF
+ int[] values = new int[] {input};
+ Column intColumn = new IntColumn(values.length, Optional.empty(), values);
+
+ ColumnTransformer childColumnTransformer = mockChildColumnTransformer(intColumn);
+ IntToBytesColumnTransformer transformer =
+ new IntToBytesColumnTransformer(
+ BLOB, childColumnTransformer, NumericCodecStrategiesFactory.TO_LITTLE_ENDIAN_32);
+ transformer.addReferenceCount();
+ transformer.evaluate();
+ Column result = transformer.getColumn();
+
+ byte[] expectedOutput = intToLittleEndianBytes(input); // {-1, -1, -1, 127}
+ Assert.assertEquals(1, result.getPositionCount());
+ Assert.assertArrayEquals(expectedOutput, result.getBinary(0).getValues());
+ }
+
+ /** Null rows must remain null in the output; non-null rows are encoded normally. */
+ @Test
+ public void testToLittleEndian32MultiRowsWithNull() {
+ int[] values = new int[] {100, 0, -200}; // Use 0 as a placeholder for null
+ boolean[] valueIsNull = new boolean[] {false, true, false};
+ Column intColumn = new IntColumn(values.length, Optional.of(valueIsNull), values);
+ ColumnTransformer childColumnTransformer = mockChildColumnTransformer(intColumn);
+
+ IntToBytesColumnTransformer transformer =
+ new IntToBytesColumnTransformer(
+ BLOB, childColumnTransformer, NumericCodecStrategiesFactory.TO_LITTLE_ENDIAN_32);
+ transformer.addReferenceCount();
+ transformer.evaluate();
+ Column result = transformer.getColumn();
+
+ byte[] expected1 = intToLittleEndianBytes(100);
+ byte[] expected3 = intToLittleEndianBytes(-200);
+
+ Assert.assertEquals(3, result.getPositionCount());
+ Assert.assertFalse(result.isNull(0));
+ Assert.assertArrayEquals(expected1, result.getBinary(0).getValues());
+ Assert.assertTrue(result.isNull(1));
+ Assert.assertFalse(result.isNull(2));
+ Assert.assertArrayEquals(expected3, result.getBinary(2).getValues());
+ }
+
+ /** Only selected rows are evaluated; unselected rows must come back null. */
+ @Test
+ public void testToLittleEndian32WithSelection() {
+ int[] values = {5, 10, 15};
+ Column intColumn = new IntColumn(values.length, Optional.empty(), values);
+ ColumnTransformer child = mockChildColumnTransformer(intColumn);
+
+ IntToBytesColumnTransformer transformer =
+ new IntToBytesColumnTransformer(
+ BLOB, child, NumericCodecStrategiesFactory.TO_LITTLE_ENDIAN_32);
+ transformer.addReferenceCount();
+
+ // Select and process only the first and third rows.
+ boolean[] selection = {true, false, true};
+ transformer.evaluateWithSelection(selection);
+ Column result = transformer.getColumn();
+
+ byte[] expected1 = intToLittleEndianBytes(5);
+ byte[] expected3 = intToLittleEndianBytes(15);
+
+ Assert.assertEquals(3, result.getPositionCount());
+ Assert.assertFalse(result.isNull(0));
+ Assert.assertArrayEquals(expected1, result.getBinary(0).getValues());
+ // The second row was not selected, so the result should be null.
+ Assert.assertTrue(result.isNull(1));
+ Assert.assertFalse(result.isNull(2));
+ Assert.assertArrayEquals(expected3, result.getBinary(2).getValues());
+ }
+}
diff --git a/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/transformation/dag/column/unary/scalar/IntegerEncoding/ToLittleEndian64ColumnTransformerTest.java b/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/transformation/dag/column/unary/scalar/IntegerEncoding/ToLittleEndian64ColumnTransformerTest.java
new file mode 100644
index 000000000000..941df1328083
--- /dev/null
+++ b/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/transformation/dag/column/unary/scalar/IntegerEncoding/ToLittleEndian64ColumnTransformerTest.java
@@ -0,0 +1,189 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar.IntegerEncoding;
+
+import org.apache.iotdb.db.queryengine.transformation.dag.column.ColumnTransformer;
+import org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar.LongToBytesColumnTransformer;
+import org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar.factory.NumericCodecStrategiesFactory;
+
+import org.apache.tsfile.block.column.Column;
+import org.apache.tsfile.read.common.block.column.LongColumn;
+import org.junit.Assert;
+import org.junit.Test;
+import org.mockito.Mockito;
+
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+import java.util.Optional;
+
+import static org.apache.tsfile.read.common.type.BlobType.BLOB;
+
+public class ToLittleEndian64ColumnTransformerTest {
+
+ // Mocks a child ColumnTransformer: getColumn() returns the given Column; lifecycle methods are no-ops.
+ private ColumnTransformer mockChildColumnTransformer(Column column) {
+ ColumnTransformer mockColumnTransformer = Mockito.mock(ColumnTransformer.class);
+ Mockito.when(mockColumnTransformer.getColumn()).thenReturn(column);
+ Mockito.doNothing().when(mockColumnTransformer).tryEvaluate();
+ Mockito.doNothing().when(mockColumnTransformer).clearCache();
+ Mockito.doNothing().when(mockColumnTransformer).evaluateWithSelection(Mockito.any());
+ return mockColumnTransformer;
+ }
+
+ // Encodes a long to its 8-byte two's-complement representation in little-endian order.
+ private byte[] longToLittleEndianBytes(long value) {
+ return ByteBuffer.allocate(8).order(ByteOrder.LITTLE_ENDIAN).putLong(value).array();
+ }
+
+ /** A positive long must encode with its least-significant byte first. */
+ @Test
+ public void testToLittleEndian64Positive() {
+ long input = 72623859790382856L; // 0x0102030405060708L
+ long[] values = new long[] {input};
+ Column longColumn = new LongColumn(values.length, Optional.empty(), values);
+
+ ColumnTransformer childColumnTransformer = mockChildColumnTransformer(longColumn);
+ LongToBytesColumnTransformer transformer =
+ new LongToBytesColumnTransformer(
+ BLOB, childColumnTransformer, NumericCodecStrategiesFactory.TO_LITTLE_ENDIAN_64);
+ transformer.addReferenceCount();
+ transformer.evaluate();
+ Column result = transformer.getColumn();
+
+ byte[] expectedOutput = longToLittleEndianBytes(input); // {8, 7, 6, 5, 4, 3, 2, 1}
+ Assert.assertEquals(1, result.getPositionCount());
+ Assert.assertArrayEquals(expectedOutput, result.getBinary(0).getValues());
+ }
+
+ /** -1L (all bits set) must encode to eight 0xFF bytes. */
+ @Test
+ public void testToLittleEndian64Negative() {
+ long input = -1L; // 0xFFFFFFFFFFFFFFFFL
+ long[] values = new long[] {input};
+ Column longColumn = new LongColumn(values.length, Optional.empty(), values);
+
+ ColumnTransformer childColumnTransformer = mockChildColumnTransformer(longColumn);
+ LongToBytesColumnTransformer transformer =
+ new LongToBytesColumnTransformer(
+ BLOB, childColumnTransformer, NumericCodecStrategiesFactory.TO_LITTLE_ENDIAN_64);
+ transformer.addReferenceCount();
+ transformer.evaluate();
+ Column result = transformer.getColumn();
+
+ byte[] expectedOutput = longToLittleEndianBytes(input); // {-1, -1, -1, -1, -1, -1, -1, -1}
+ Assert.assertEquals(1, result.getPositionCount());
+ Assert.assertArrayEquals(expectedOutput, result.getBinary(0).getValues());
+ }
+
+ /** Zero must encode to eight zero bytes. */
+ @Test
+ public void testToLittleEndian64Zero() {
+ long input = 0L; // 0x0000000000000000L
+ long[] values = new long[] {input};
+ Column longColumn = new LongColumn(values.length, Optional.empty(), values);
+
+ ColumnTransformer childColumnTransformer = mockChildColumnTransformer(longColumn);
+ LongToBytesColumnTransformer transformer =
+ new LongToBytesColumnTransformer(
+ BLOB, childColumnTransformer, NumericCodecStrategiesFactory.TO_LITTLE_ENDIAN_64);
+ transformer.addReferenceCount();
+ transformer.evaluate();
+ Column result = transformer.getColumn();
+
+ byte[] expectedOutput = longToLittleEndianBytes(input); // {0, 0, 0, 0, 0, 0, 0, 0}
+ Assert.assertEquals(1, result.getPositionCount());
+ Assert.assertArrayEquals(expectedOutput, result.getBinary(0).getValues());
+ }
+
+ /** Long.MAX_VALUE (0x7FFFFFFFFFFFFFFF) must encode correctly at the positive boundary. */
+ @Test
+ public void testToLittleEndian64MaxValue() {
+ long input = Long.MAX_VALUE; // 0x7FFFFFFFFFFFFFFFL
+ long[] values = new long[] {input};
+ Column longColumn = new LongColumn(values.length, Optional.empty(), values);
+
+ ColumnTransformer childColumnTransformer = mockChildColumnTransformer(longColumn);
+ LongToBytesColumnTransformer transformer =
+ new LongToBytesColumnTransformer(
+ BLOB, childColumnTransformer, NumericCodecStrategiesFactory.TO_LITTLE_ENDIAN_64);
+ transformer.addReferenceCount();
+ transformer.evaluate();
+ Column result = transformer.getColumn();
+
+ byte[] expectedOutput = longToLittleEndianBytes(input); // {-1, -1, -1, -1, -1, -1, -1, 127}
+ Assert.assertEquals(1, result.getPositionCount());
+ Assert.assertArrayEquals(expectedOutput, result.getBinary(0).getValues());
+ }
+
+ /** Null rows must remain null in the output; non-null rows are encoded normally. */
+ @Test
+ public void testToLittleEndian64MultiRowsWithNull() {
+ long[] values = new long[] {1000L, 0L, -2000L}; // Use 0L as a placeholder for null
+ boolean[] valueIsNull = new boolean[] {false, true, false};
+ Column longColumn = new LongColumn(values.length, Optional.of(valueIsNull), values);
+ ColumnTransformer childColumnTransformer = mockChildColumnTransformer(longColumn);
+
+ LongToBytesColumnTransformer transformer =
+ new LongToBytesColumnTransformer(
+ BLOB, childColumnTransformer, NumericCodecStrategiesFactory.TO_LITTLE_ENDIAN_64);
+ transformer.addReferenceCount();
+ transformer.evaluate();
+ Column result = transformer.getColumn();
+
+ byte[] expected1 = longToLittleEndianBytes(1000L);
+ byte[] expected3 = longToLittleEndianBytes(-2000L);
+
+ Assert.assertEquals(3, result.getPositionCount());
+ Assert.assertFalse(result.isNull(0));
+ Assert.assertArrayEquals(expected1, result.getBinary(0).getValues());
+ Assert.assertTrue(result.isNull(1));
+ Assert.assertFalse(result.isNull(2));
+ Assert.assertArrayEquals(expected3, result.getBinary(2).getValues());
+ }
+
+ /** Only selected rows are evaluated; unselected rows must come back null. */
+ @Test
+ public void testToLittleEndian64WithSelection() {
+ long[] values = {50L, 100L, 150L};
+ Column longColumn = new LongColumn(values.length, Optional.empty(), values);
+ ColumnTransformer child = mockChildColumnTransformer(longColumn);
+
+ LongToBytesColumnTransformer transformer =
+ new LongToBytesColumnTransformer(
+ BLOB, child, NumericCodecStrategiesFactory.TO_LITTLE_ENDIAN_64);
+ transformer.addReferenceCount();
+
+ // Select and process only the first and third rows.
+ boolean[] selection = {true, false, true};
+ transformer.evaluateWithSelection(selection);
+ Column result = transformer.getColumn();
+
+ byte[] expected1 = longToLittleEndianBytes(50L);
+ byte[] expected3 = longToLittleEndianBytes(150L);
+
+ Assert.assertEquals(3, result.getPositionCount());
+ Assert.assertFalse(result.isNull(0));
+ Assert.assertArrayEquals(expected1, result.getBinary(0).getValues());
+ // The second row was not selected, so the result should be null.
+ Assert.assertTrue(result.isNull(1));
+ Assert.assertFalse(result.isNull(2));
+ Assert.assertArrayEquals(expected3, result.getBinary(2).getValues());
+ }
+}
diff --git a/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/transformation/dag/column/unary/scalar/ReverseColumnTransformerTest.java b/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/transformation/dag/column/unary/scalar/ReverseColumnTransformerTest.java
new file mode 100644
index 000000000000..3c3695f32d28
--- /dev/null
+++ b/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/transformation/dag/column/unary/scalar/ReverseColumnTransformerTest.java
@@ -0,0 +1,216 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar;
+
+import org.apache.iotdb.db.queryengine.transformation.dag.column.ColumnTransformer;
+import org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar.factory.CodecStrategiesFactory;
+
+import org.apache.tsfile.block.column.Column;
+import org.apache.tsfile.common.conf.TSFileConfig;
+import org.apache.tsfile.read.common.block.column.BinaryColumn;
+import org.apache.tsfile.utils.Binary;
+import org.junit.Assert;
+import org.junit.Test;
+import org.mockito.Mockito;
+
+import java.util.Optional;
+
+import static org.apache.tsfile.read.common.type.BinaryType.TEXT;
+import static org.apache.tsfile.read.common.type.BlobType.BLOB;
+
+// Assuming this test file is for a GenericCodecColumnTransformer configured for REVERSE.
+public class ReverseColumnTransformerTest {
+
+ // Mocks a child ColumnTransformer: getColumn() returns the given Column; lifecycle methods are no-ops.
+ private ColumnTransformer mockChildColumnTransformer(Column column) {
+ ColumnTransformer mockColumnTransformer = Mockito.mock(ColumnTransformer.class);
+ Mockito.when(mockColumnTransformer.getColumn()).thenReturn(column);
+ Mockito.doNothing().when(mockColumnTransformer).tryEvaluate();
+ Mockito.doNothing().when(mockColumnTransformer).clearCache();
+ Mockito.doNothing().when(mockColumnTransformer).evaluateWithSelection(Mockito.any());
+ return mockColumnTransformer;
+ }
+
+ // Decodes a hex string to bytes; null/empty input yields an empty array, odd length is rejected.
+ private static byte[] hexStringToByteArray(String s) {
+ if (s == null || s.isEmpty()) {
+ return new byte[0];
+ }
+ int len = s.length();
+ if (len % 2 != 0) {
+ throw new IllegalArgumentException("Hex string must have an even number of characters");
+ }
+ byte[] data = new byte[len / 2];
+ for (int i = 0; i < len; i += 2) {
+ data[i / 2] =
+ (byte) ((Character.digit(s.charAt(i), 16) << 4) + Character.digit(s.charAt(i + 1), 16));
+ }
+ return data;
+ }
+
+ /** TEXT reversal must operate on characters, not bytes, so multi-byte characters stay intact. */
+ @Test
+ public void testReverseTextAsChars() {
+ String originalString = "你好, world";
+ String expectedReversed = "dlrow ,好你";
+
+ Binary[] values =
+ new Binary[] {new Binary(originalString.getBytes(TSFileConfig.STRING_CHARSET))};
+ Column binaryColumn = new BinaryColumn(values.length, Optional.empty(), values);
+
+ ColumnTransformer childColumnTransformer = mockChildColumnTransformer(binaryColumn);
+ GenericCodecColumnTransformer transformer =
+ new GenericCodecColumnTransformer(
+ TEXT, childColumnTransformer, CodecStrategiesFactory.REVERSE_CHARS, "reverse", TEXT);
+ transformer.addReferenceCount();
+ transformer.evaluate();
+ Column result = transformer.getColumn();
+
+ Assert.assertEquals(1, result.getPositionCount());
+ Assert.assertEquals(expectedReversed, result.getBinary(0).toString());
+ }
+
+ /** BLOB reversal must operate on raw bytes, with no character interpretation. */
+ @Test
+ public void testReverseBlobAsBytes() {
+ byte[] originalBytes = hexStringToByteArray("01020304AABB");
+ byte[] expectedReversed = hexStringToByteArray("BBAA04030201");
+
+ Binary[] values = new Binary[] {new Binary(originalBytes)};
+ Column binaryColumn = new BinaryColumn(values.length, Optional.empty(), values);
+
+ ColumnTransformer childColumnTransformer = mockChildColumnTransformer(binaryColumn);
+ GenericCodecColumnTransformer transformer =
+ new GenericCodecColumnTransformer(
+ BLOB, childColumnTransformer, CodecStrategiesFactory.REVERSE_BYTES, "reverse", BLOB);
+ transformer.addReferenceCount();
+ transformer.evaluate();
+ Column result = transformer.getColumn();
+
+ Assert.assertEquals(1, result.getPositionCount());
+ Assert.assertArrayEquals(expectedReversed, result.getBinary(0).getValues());
+ }
+
+ /** Null rows must remain null in the output; non-null rows are reversed normally. */
+ @Test
+ public void testReverseMultiRowsWithNull() {
+ String original1 = "hello";
+ String original2 = "Apache";
+ String expected1 = "olleh";
+ String expected2 = "ehcapA";
+
+ Binary[] values =
+ new Binary[] {
+ new Binary(original1.getBytes(TSFileConfig.STRING_CHARSET)),
+ null,
+ new Binary(original2.getBytes(TSFileConfig.STRING_CHARSET))
+ };
+ boolean[] valueIsNull = new boolean[] {false, true, false};
+ Column binaryColumn = new BinaryColumn(values.length, Optional.of(valueIsNull), values);
+ ColumnTransformer childColumnTransformer = mockChildColumnTransformer(binaryColumn);
+
+ GenericCodecColumnTransformer transformer =
+ new GenericCodecColumnTransformer(
+ TEXT, childColumnTransformer, CodecStrategiesFactory.REVERSE_CHARS, "reverse", TEXT);
+ transformer.addReferenceCount();
+ transformer.evaluate();
+ Column result = transformer.getColumn();
+
+ Assert.assertEquals(3, result.getPositionCount());
+ Assert.assertFalse(result.isNull(0));
+ Assert.assertEquals(expected1, result.getBinary(0).toString());
+ Assert.assertTrue(result.isNull(1));
+ Assert.assertFalse(result.isNull(2));
+ Assert.assertEquals(expected2, result.getBinary(2).toString());
+ }
+
+ /** Only selected rows are evaluated; unselected rows must come back null. */
+ @Test
+ public void testReverseWithSelection() {
+ String original1 = "one";
+ String original2 = "two";
+ String original3 = "three";
+ String expected1 = "eno";
+ String expected3 = "eerht";
+
+ Binary[] values = {
+ new Binary(original1.getBytes(TSFileConfig.STRING_CHARSET)),
+ new Binary(original2.getBytes(TSFileConfig.STRING_CHARSET)),
+ new Binary(original3.getBytes(TSFileConfig.STRING_CHARSET))
+ };
+ Column binaryColumn = new BinaryColumn(values.length, Optional.empty(), values);
+ ColumnTransformer child = mockChildColumnTransformer(binaryColumn);
+
+ GenericCodecColumnTransformer transformer =
+ new GenericCodecColumnTransformer(
+ TEXT, child, CodecStrategiesFactory.REVERSE_CHARS, "reverse", TEXT);
+ transformer.addReferenceCount();
+
+ boolean[] selection = {true, false, true};
+ transformer.evaluateWithSelection(selection);
+ Column result = transformer.getColumn();
+
+ Assert.assertEquals(3, result.getPositionCount());
+ Assert.assertFalse(result.isNull(0));
+ Assert.assertEquals(expected1, result.getBinary(0).toString());
+ Assert.assertTrue(result.isNull(1)); // Not selected, so should be null
+ Assert.assertFalse(result.isNull(2));
+ Assert.assertEquals(expected3, result.getBinary(2).toString());
+ }
+
+ /** An empty TEXT value must reverse to an empty string without error. */
+ @Test
+ public void testReverseEmptyText() {
+ String originalString = "";
+ Binary[] values =
+ new Binary[] {new Binary(originalString.getBytes(TSFileConfig.STRING_CHARSET))};
+ Column binaryColumn = new BinaryColumn(values.length, Optional.empty(), values);
+
+ ColumnTransformer childColumnTransformer = mockChildColumnTransformer(binaryColumn);
+ GenericCodecColumnTransformer transformer =
+ new GenericCodecColumnTransformer(
+ TEXT, childColumnTransformer, CodecStrategiesFactory.REVERSE_CHARS, "reverse", TEXT);
+ transformer.addReferenceCount();
+ transformer.evaluate();
+ Column result = transformer.getColumn();
+
+ Assert.assertEquals(1, result.getPositionCount());
+ Assert.assertEquals(originalString, result.getBinary(0).toString());
+ }
+
+ /** An empty BLOB value must reverse to an empty byte array without error. */
+ @Test
+ public void testReverseEmptyBlob() {
+ byte[] originalBytes = new byte[0];
+ Binary[] values = new Binary[] {new Binary(originalBytes)};
+ Column binaryColumn = new BinaryColumn(values.length, Optional.empty(), values);
+
+ ColumnTransformer childColumnTransformer = mockChildColumnTransformer(binaryColumn);
+ GenericCodecColumnTransformer transformer =
+ new GenericCodecColumnTransformer(
+ BLOB, childColumnTransformer, CodecStrategiesFactory.REVERSE_BYTES, "reverse", BLOB);
+ transformer.addReferenceCount();
+ transformer.evaluate();
+ Column result = transformer.getColumn();
+
+ Assert.assertEquals(1, result.getPositionCount());
+ Assert.assertArrayEquals(originalBytes, result.getBinary(0).getValues());
+ }
+}
diff --git a/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/transformation/dag/column/unary/scalar/base64Encoding/FromBase32ColumnTransformerTest.java b/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/transformation/dag/column/unary/scalar/base64Encoding/FromBase32ColumnTransformerTest.java
new file mode 100644
index 000000000000..78197d3d03c3
--- /dev/null
+++ b/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/transformation/dag/column/unary/scalar/base64Encoding/FromBase32ColumnTransformerTest.java
@@ -0,0 +1,268 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+// It's recommended to place this file in a new package:
+// package org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar.base32Encoding;
+package org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar.base64Encoding;
+
+import org.apache.iotdb.db.exception.sql.SemanticException;
+import org.apache.iotdb.db.queryengine.transformation.dag.column.ColumnTransformer;
+import org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar.GenericCodecColumnTransformer;
+import org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar.factory.CodecStrategiesFactory;
+
+import com.google.common.io.BaseEncoding;
+import org.apache.tsfile.block.column.Column;
+import org.apache.tsfile.common.conf.TSFileConfig;
+import org.apache.tsfile.read.common.block.column.BinaryColumn;
+import org.apache.tsfile.utils.Binary;
+import org.junit.Assert;
+import org.junit.Test;
+import org.mockito.Mockito;
+
+import java.util.Optional;
+
+import static org.apache.tsfile.read.common.type.BinaryType.TEXT;
+import static org.apache.tsfile.read.common.type.BlobType.BLOB;
+import static org.apache.tsfile.read.common.type.StringType.STRING;
+
+public class FromBase32ColumnTransformerTest {
+
+  // To match the production code's encoder/decoder, we use Guava's BaseEncoding.base32()
+  // (RFC 4648 Base32: upper-case alphabet, '=' padding).
+  private static final BaseEncoding GUAVA_BASE32_ENCODING = BaseEncoding.base32();
+
+  // Helper method to mock a child ColumnTransformer that returns a predefined Column.
+  // Only getColumn() is meaningful for these tests; the lifecycle methods are stubbed as no-ops.
+  private ColumnTransformer mockChildColumnTransformer(Column column) {
+    ColumnTransformer mockColumnTransformer = Mockito.mock(ColumnTransformer.class);
+    Mockito.when(mockColumnTransformer.getColumn()).thenReturn(column);
+    Mockito.doNothing().when(mockColumnTransformer).tryEvaluate();
+    Mockito.doNothing().when(mockColumnTransformer).clearCache();
+    Mockito.doNothing().when(mockColumnTransformer).evaluateWithSelection(Mockito.any());
+    return mockColumnTransformer;
+  }
+
+  // Helper method to convert a hex string (two characters per byte) to a byte array.
+  private static byte[] hexStringToByteArray(String s) {
+    int len = s.length();
+    if (len % 2 != 0) {
+      throw new IllegalArgumentException("Hex string must have an even number of characters");
+    }
+    byte[] data = new byte[len / 2];
+    for (int i = 0; i < len; i += 2) {
+      data[i / 2] =
+          (byte) ((Character.digit(s.charAt(i), 16) << 4) + Character.digit(s.charAt(i + 1), 16));
+    }
+    return data;
+  }
+
+  /** Test FROM_BASE32 to a string output. */
+  @Test
+  public void testFromBase32ToString() {
+    String originalString = "Apache IoTDB";
+    String base32Input =
+        GUAVA_BASE32_ENCODING.encode(originalString.getBytes(TSFileConfig.STRING_CHARSET));
+
+    Binary[] values = new Binary[] {new Binary(base32Input.getBytes(TSFileConfig.STRING_CHARSET))};
+    Column binaryColumn = new BinaryColumn(values.length, Optional.empty(), values);
+
+    ColumnTransformer childColumnTransformer = mockChildColumnTransformer(binaryColumn);
+    GenericCodecColumnTransformer transformer =
+        new GenericCodecColumnTransformer(
+            TEXT, childColumnTransformer, CodecStrategiesFactory.FROM_BASE32, "from_base32", TEXT);
+    transformer.addReferenceCount();
+    transformer.evaluate();
+    Column result = transformer.getColumn();
+
+    Assert.assertEquals(1, result.getPositionCount());
+    Assert.assertEquals(originalString, result.getBinary(0).toString());
+  }
+
+  /** Test FROM_BASE32 to a blob output from a hex-represented original. */
+  @Test
+  public void testFromBase32ToBlob() {
+    String originalHex = "41706163686520496f544442"; // "Apache IoTDB"
+    byte[] originalBytes = hexStringToByteArray(originalHex);
+    String base32Input = GUAVA_BASE32_ENCODING.encode(originalBytes);
+
+    Binary[] values = new Binary[] {new Binary(base32Input.getBytes(TSFileConfig.STRING_CHARSET))};
+    Column binaryColumn = new BinaryColumn(values.length, Optional.empty(), values);
+
+    ColumnTransformer childColumnTransformer = mockChildColumnTransformer(binaryColumn);
+    // Return type is BLOB because this test asserts on raw decoded bytes; the input column is TEXT.
+    GenericCodecColumnTransformer transformer =
+        new GenericCodecColumnTransformer(
+            BLOB, childColumnTransformer, CodecStrategiesFactory.FROM_BASE32, "from_base32", TEXT);
+    transformer.addReferenceCount();
+    transformer.evaluate();
+    Column result = transformer.getColumn();
+
+    Assert.assertEquals(1, result.getPositionCount());
+    Assert.assertArrayEquals(originalBytes, result.getBinary(0).getValues());
+  }
+
+  /** Test FROM_BASE32 with multiple rows, including a null value. */
+  @Test
+  public void testFromBase32MultiRowsWithNull() {
+    String original1 = "hello";
+    String original2 = "world";
+    String base32_1 = GUAVA_BASE32_ENCODING.encode(original1.getBytes(TSFileConfig.STRING_CHARSET));
+    String base32_2 = GUAVA_BASE32_ENCODING.encode(original2.getBytes(TSFileConfig.STRING_CHARSET));
+
+    // Middle row is null; the transformer is expected to propagate it untouched.
+    Binary[] values =
+        new Binary[] {
+          new Binary(base32_1.getBytes(TSFileConfig.STRING_CHARSET)),
+          null,
+          new Binary(base32_2.getBytes(TSFileConfig.STRING_CHARSET))
+        };
+    boolean[] valueIsNull = new boolean[] {false, true, false};
+    Column binaryColumn = new BinaryColumn(values.length, Optional.of(valueIsNull), values);
+    ColumnTransformer childColumnTransformer = mockChildColumnTransformer(binaryColumn);
+
+    GenericCodecColumnTransformer transformer =
+        new GenericCodecColumnTransformer(
+            BLOB, childColumnTransformer, CodecStrategiesFactory.FROM_BASE32, "from_base32", TEXT);
+    transformer.addReferenceCount();
+    transformer.evaluate();
+    Column result = transformer.getColumn();
+
+    Assert.assertEquals(3, result.getPositionCount());
+    Assert.assertFalse(result.isNull(0));
+    Assert.assertEquals(original1, result.getBinary(0).toString());
+    Assert.assertTrue(result.isNull(1));
+    Assert.assertFalse(result.isNull(2));
+    Assert.assertEquals(original2, result.getBinary(2).toString());
+  }
+
+  /** Test FROM_BASE32 with a selection array to process only a subset of rows. */
+  @Test
+  public void testFromBase32WithSelection() {
+    String original1 = "Apache";
+    String original2 = "IoTDB";
+    String original3 = "rocks";
+    String base32_1 = GUAVA_BASE32_ENCODING.encode(original1.getBytes(TSFileConfig.STRING_CHARSET));
+    String base32_2 = GUAVA_BASE32_ENCODING.encode(original2.getBytes(TSFileConfig.STRING_CHARSET));
+    String base32_3 = GUAVA_BASE32_ENCODING.encode(original3.getBytes(TSFileConfig.STRING_CHARSET));
+
+    Binary[] values = {
+      new Binary(base32_1.getBytes(TSFileConfig.STRING_CHARSET)),
+      new Binary(base32_2.getBytes(TSFileConfig.STRING_CHARSET)),
+      new Binary(base32_3.getBytes(TSFileConfig.STRING_CHARSET))
+    };
+    Column binaryColumn = new BinaryColumn(values.length, Optional.empty(), values);
+    ColumnTransformer child = mockChildColumnTransformer(binaryColumn);
+
+    GenericCodecColumnTransformer transformer =
+        new GenericCodecColumnTransformer(
+            BLOB, child, CodecStrategiesFactory.FROM_BASE32, "from_base32", TEXT);
+    transformer.addReferenceCount();
+
+    // Select only the first and third rows; the unselected row must come back as null.
+    boolean[] selection = {true, false, true};
+    transformer.evaluateWithSelection(selection);
+    Column result = transformer.getColumn();
+
+    Assert.assertEquals(3, result.getPositionCount());
+    Assert.assertFalse(result.isNull(0));
+    Assert.assertEquals(original1, result.getBinary(0).toString());
+    Assert.assertTrue(result.isNull(1));
+    Assert.assertFalse(result.isNull(2));
+    Assert.assertEquals(original3, result.getBinary(2).toString());
+  }
+
+  /** Test FROM_BASE32 with an empty binary input. */
+  @Test
+  public void testFromBase32EmptyInput() {
+    String base32Input = "";
+    Binary[] values = new Binary[] {new Binary(base32Input.getBytes(TSFileConfig.STRING_CHARSET))};
+    Column binaryColumn = new BinaryColumn(values.length, Optional.empty(), values);
+
+    ColumnTransformer childColumnTransformer = mockChildColumnTransformer(binaryColumn);
+    GenericCodecColumnTransformer transformer =
+        new GenericCodecColumnTransformer(
+            BLOB,
+            childColumnTransformer,
+            CodecStrategiesFactory.FROM_BASE32,
+            "from_base32",
+            STRING);
+    transformer.addReferenceCount();
+    transformer.evaluate();
+    Column result = transformer.getColumn();
+
+    // Decoding an empty string yields an empty byte array, not null.
+    byte[] expectedOutput = "".getBytes(TSFileConfig.STRING_CHARSET);
+    Assert.assertEquals(1, result.getPositionCount());
+    Assert.assertArrayEquals(expectedOutput, result.getBinary(0).getValues());
+  }
+
+  /**
+   * Test FROM_BASE32 with an invalid Base32 input string from a TEXT column. The error message
+   * should display the original string. '1' and '8' are not in the Base32 alphabet.
+   */
+  @Test
+  public void testInvalidBase32InputFromText() {
+    String invalidInput = "NBSWY3DP18";
+    byte[] inputBytes = invalidInput.getBytes(TSFileConfig.STRING_CHARSET);
+    Binary[] values = new Binary[] {new Binary(inputBytes)};
+    Column binaryColumn = new BinaryColumn(values.length, Optional.empty(), values);
+
+    ColumnTransformer childColumnTransformer = mockChildColumnTransformer(binaryColumn);
+    GenericCodecColumnTransformer transformer =
+        new GenericCodecColumnTransformer(
+            BLOB, childColumnTransformer, CodecStrategiesFactory.FROM_BASE32, "from_base32", TEXT);
+    transformer.addReferenceCount();
+
+    try {
+      transformer.evaluate();
+      Assert.fail("Expected SemanticException was not thrown.");
+    } catch (SemanticException e) {
+      String expectedErrorMessage =
+          String.format(
+              "Failed to execute function 'from_base32' due to an invalid input format. Problematic value: %s",
+              invalidInput);
+      Assert.assertEquals(expectedErrorMessage, e.getMessage());
+    }
+  }
+
+  /**
+   * Test FROM_BASE32 with an invalid Base32 input from a BLOB column. The error message should
+   * display the hex representation of the input bytes.
+   */
+  @Test
+  public void testInvalidBase32InputFromBlob() {
+    // The byte for '`' (0x60) is not a valid Base32 character.
+    byte[] invalidBytes = "invalid`input".getBytes(TSFileConfig.STRING_CHARSET);
+    Binary[] values = new Binary[] {new Binary(invalidBytes)};
+    Column binaryColumn = new BinaryColumn(values.length, Optional.empty(), values);
+
+    ColumnTransformer childColumnTransformer = mockChildColumnTransformer(binaryColumn);
+    GenericCodecColumnTransformer transformer =
+        new GenericCodecColumnTransformer(
+            BLOB, childColumnTransformer, CodecStrategiesFactory.FROM_BASE32, "from_base32", BLOB);
+    transformer.addReferenceCount();
+
+    try {
+      transformer.evaluate();
+      Assert.fail("Expected SemanticException was not thrown.");
+    } catch (SemanticException e) {
+      String expectedProblematicValue =
+          "0x" + BaseEncoding.base16().lowerCase().encode(invalidBytes);
+      String expectedErrorMessage =
+          String.format(
+              "Failed to execute function 'from_base32' due to an invalid input format. Problematic value: %s",
+              expectedProblematicValue);
+      Assert.assertEquals(expectedErrorMessage, e.getMessage());
+    }
+  }
+}
diff --git a/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/transformation/dag/column/unary/scalar/base64Encoding/FromBase64ColumnTransformerTest.java b/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/transformation/dag/column/unary/scalar/base64Encoding/FromBase64ColumnTransformerTest.java
new file mode 100644
index 000000000000..16e795273ffc
--- /dev/null
+++ b/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/transformation/dag/column/unary/scalar/base64Encoding/FromBase64ColumnTransformerTest.java
@@ -0,0 +1,271 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar.base64Encoding;
+
+import org.apache.iotdb.db.exception.sql.SemanticException;
+import org.apache.iotdb.db.queryengine.transformation.dag.column.ColumnTransformer;
+import org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar.GenericCodecColumnTransformer;
+import org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar.factory.CodecStrategiesFactory;
+
+import org.apache.tsfile.block.column.Column;
+import org.apache.tsfile.common.conf.TSFileConfig;
+import org.apache.tsfile.read.common.block.column.BinaryColumn;
+import org.apache.tsfile.utils.Binary;
+import org.junit.Assert;
+import org.junit.Test;
+import org.mockito.Mockito;
+
+import java.util.Base64;
+import java.util.Optional;
+
+import static org.apache.tsfile.read.common.type.BinaryType.TEXT;
+import static org.apache.tsfile.read.common.type.BlobType.BLOB;
+import static org.apache.tsfile.read.common.type.StringType.STRING;
+
+public class FromBase64ColumnTransformerTest {
+
+  // Helper method to mock a child ColumnTransformer that returns a predefined Column.
+  // Only getColumn() is meaningful for these tests; the lifecycle methods are stubbed as no-ops.
+  private ColumnTransformer mockChildColumnTransformer(Column column) {
+    ColumnTransformer mockColumnTransformer = Mockito.mock(ColumnTransformer.class);
+    Mockito.when(mockColumnTransformer.getColumn()).thenReturn(column);
+    Mockito.doNothing().when(mockColumnTransformer).tryEvaluate();
+    Mockito.doNothing().when(mockColumnTransformer).clearCache();
+    Mockito.doNothing().when(mockColumnTransformer).evaluateWithSelection(Mockito.any());
+    return mockColumnTransformer;
+  }
+
+  // Helper method to convert a hex string (two characters per byte) to a byte array.
+  private static byte[] hexStringToByteArray(String s) {
+    int len = s.length();
+    if (len % 2 != 0) {
+      throw new IllegalArgumentException("Hex string must have an even number of characters");
+    }
+    byte[] data = new byte[len / 2];
+    for (int i = 0; i < len; i += 2) {
+      data[i / 2] =
+          (byte) ((Character.digit(s.charAt(i), 16) << 4) + Character.digit(s.charAt(i + 1), 16));
+    }
+    return data;
+  }
+
+  /** Test FROM_BASE64 to a string output. */
+  @Test
+  public void testFromBase64ToString() {
+    String originalString = "Apache IoTDB";
+    String base64Input =
+        Base64.getEncoder().encodeToString(originalString.getBytes(TSFileConfig.STRING_CHARSET));
+
+    Binary[] values = new Binary[] {new Binary(base64Input.getBytes(TSFileConfig.STRING_CHARSET))};
+    Column binaryColumn = new BinaryColumn(values.length, Optional.empty(), values);
+
+    ColumnTransformer childColumnTransformer = mockChildColumnTransformer(binaryColumn);
+    // Return type is TEXT (the test asserts the decoded string), matching the Base32 counterpart.
+    GenericCodecColumnTransformer transformer =
+        new GenericCodecColumnTransformer(
+            TEXT, childColumnTransformer, CodecStrategiesFactory.FROM_BASE64, "from_base64", TEXT);
+    transformer.addReferenceCount();
+    transformer.evaluate();
+    Column result = transformer.getColumn();
+
+    Assert.assertEquals(1, result.getPositionCount());
+    Assert.assertEquals(originalString, result.getBinary(0).toString());
+  }
+
+  /** Test FROM_BASE64 to a hex-represented binary output. */
+  @Test
+  public void testFromBase64ToHex() {
+    // "Apache IoTDB" in hex
+    String originalHex = "41706163686520496f544442";
+    byte[] originalBytes = hexStringToByteArray(originalHex);
+    String base64Input = Base64.getEncoder().encodeToString(originalBytes);
+
+    Binary[] values = new Binary[] {new Binary(base64Input.getBytes(TSFileConfig.STRING_CHARSET))};
+    Column binaryColumn = new BinaryColumn(values.length, Optional.empty(), values);
+
+    ColumnTransformer childColumnTransformer = mockChildColumnTransformer(binaryColumn);
+    GenericCodecColumnTransformer transformer =
+        new GenericCodecColumnTransformer(
+            BLOB,
+            childColumnTransformer,
+            CodecStrategiesFactory.FROM_BASE64,
+            "from_base64",
+            STRING);
+    transformer.addReferenceCount();
+    transformer.evaluate();
+    Column result = transformer.getColumn();
+
+    Assert.assertEquals(1, result.getPositionCount());
+    Assert.assertArrayEquals(originalBytes, result.getBinary(0).getValues());
+  }
+
+  /** Test FROM_BASE64 with multiple rows, including a null value. */
+  @Test
+  public void testFromBase64MultiRowsWithNull() {
+    String original1 = "hello";
+    String original2 = "world";
+    String base64_1 =
+        Base64.getEncoder().encodeToString(original1.getBytes(TSFileConfig.STRING_CHARSET));
+    String base64_2 =
+        Base64.getEncoder().encodeToString(original2.getBytes(TSFileConfig.STRING_CHARSET));
+
+    // Middle row is null; the transformer is expected to propagate it untouched.
+    Binary[] values =
+        new Binary[] {
+          new Binary(base64_1.getBytes(TSFileConfig.STRING_CHARSET)),
+          null,
+          new Binary(base64_2.getBytes(TSFileConfig.STRING_CHARSET))
+        };
+    boolean[] valueIsNull = new boolean[] {false, true, false};
+    Column binaryColumn = new BinaryColumn(values.length, Optional.of(valueIsNull), values);
+    ColumnTransformer childColumnTransformer = mockChildColumnTransformer(binaryColumn);
+
+    GenericCodecColumnTransformer transformer =
+        new GenericCodecColumnTransformer(
+            BLOB, childColumnTransformer, CodecStrategiesFactory.FROM_BASE64, "from_base64", TEXT);
+    transformer.addReferenceCount();
+    transformer.evaluate();
+    Column result = transformer.getColumn();
+
+    Assert.assertEquals(3, result.getPositionCount());
+    Assert.assertFalse(result.isNull(0));
+    Assert.assertEquals(original1, result.getBinary(0).toString());
+    Assert.assertTrue(result.isNull(1));
+    Assert.assertFalse(result.isNull(2));
+    Assert.assertEquals(original2, result.getBinary(2).toString());
+  }
+
+  /** Test FROM_BASE64 with a selection array to process only a subset of rows. */
+  @Test
+  public void testFromBase64WithSelection() {
+    String original1 = "Apache";
+    String original2 = "IoTDB";
+    String original3 = "rocks";
+    String base64_1 =
+        Base64.getEncoder().encodeToString(original1.getBytes(TSFileConfig.STRING_CHARSET));
+    String base64_2 =
+        Base64.getEncoder().encodeToString(original2.getBytes(TSFileConfig.STRING_CHARSET));
+    String base64_3 =
+        Base64.getEncoder().encodeToString(original3.getBytes(TSFileConfig.STRING_CHARSET));
+
+    Binary[] values = {
+      new Binary(base64_1.getBytes(TSFileConfig.STRING_CHARSET)),
+      new Binary(base64_2.getBytes(TSFileConfig.STRING_CHARSET)),
+      new Binary(base64_3.getBytes(TSFileConfig.STRING_CHARSET))
+    };
+    Column binaryColumn = new BinaryColumn(values.length, Optional.empty(), values);
+    ColumnTransformer child = mockChildColumnTransformer(binaryColumn);
+
+    GenericCodecColumnTransformer transformer =
+        new GenericCodecColumnTransformer(
+            BLOB, child, CodecStrategiesFactory.FROM_BASE64, "from_base64", TEXT);
+    transformer.addReferenceCount();
+
+    // Select only the first and third rows for processing; the unselected row must be null.
+    boolean[] selection = {true, false, true};
+    transformer.evaluateWithSelection(selection);
+    Column result = transformer.getColumn();
+
+    Assert.assertEquals(3, result.getPositionCount());
+    Assert.assertFalse(result.isNull(0));
+    Assert.assertEquals(original1, result.getBinary(0).toString());
+    Assert.assertTrue(result.isNull(1));
+    Assert.assertFalse(result.isNull(2));
+    Assert.assertEquals(original3, result.getBinary(2).toString());
+  }
+
+  /** Test FROM_BASE64 with an empty binary input. */
+  @Test
+  public void testFromBase64EmptyInput() {
+    String base64Input = "";
+    Binary[] values = new Binary[] {new Binary(base64Input.getBytes(TSFileConfig.STRING_CHARSET))};
+    Column binaryColumn = new BinaryColumn(values.length, Optional.empty(), values);
+
+    ColumnTransformer childColumnTransformer = mockChildColumnTransformer(binaryColumn);
+    GenericCodecColumnTransformer transformer =
+        new GenericCodecColumnTransformer(
+            BLOB, childColumnTransformer, CodecStrategiesFactory.FROM_BASE64, "from_base64", TEXT);
+    transformer.addReferenceCount();
+    transformer.evaluate();
+    Column result = transformer.getColumn();
+
+    // Decoding an empty string yields an empty byte array, not null.
+    byte[] expectedOutput = "".getBytes(TSFileConfig.STRING_CHARSET);
+    Assert.assertEquals(1, result.getPositionCount());
+    Assert.assertArrayEquals(expectedOutput, result.getBinary(0).getValues());
+  }
+
+  /**
+   * Test FROM_BASE64 with an invalid Base64 input string from a TEXT column. The error message
+   * should display the original string (' ' and '!' are not in the Base64 alphabet).
+   */
+  @Test
+  public void testInvalidBase64InputFromText() {
+    String invalidInput = "this is not base64!";
+    byte[] inputBytes = invalidInput.getBytes(TSFileConfig.STRING_CHARSET);
+    Binary[] values = new Binary[] {new Binary(inputBytes)};
+    Column binaryColumn = new BinaryColumn(values.length, Optional.empty(), values);
+
+    ColumnTransformer childColumnTransformer = mockChildColumnTransformer(binaryColumn);
+    // The inputType is TEXT, so the error message should show the original string.
+    GenericCodecColumnTransformer transformer =
+        new GenericCodecColumnTransformer(
+            BLOB, childColumnTransformer, CodecStrategiesFactory.FROM_BASE64, "from_base64", TEXT);
+    transformer.addReferenceCount();
+
+    try {
+      transformer.evaluate();
+      Assert.fail("Expected SemanticException was not thrown.");
+    } catch (SemanticException e) {
+      String expectedErrorMessage =
+          String.format(
+              "Failed to execute function 'from_base64' due to an invalid input format. Problematic value: %s",
+              invalidInput);
+      Assert.assertEquals(expectedErrorMessage, e.getMessage());
+    }
+  }
+
+  /**
+   * Test FROM_BASE64 with an invalid Base64 input from a BLOB column. The error message should
+   * display the hex representation of the input bytes.
+   */
+  @Test
+  public void testInvalidBase64InputFromBlob() {
+    // Use some bytes that are not valid Base64.
+    byte[] invalidBytes = new byte[] {(byte) 0xDE, (byte) 0xAD, (byte) 0xBE, (byte) 0xEF};
+    Binary[] values = new Binary[] {new Binary(invalidBytes)};
+    Column binaryColumn = new BinaryColumn(values.length, Optional.empty(), values);
+
+    ColumnTransformer childColumnTransformer = mockChildColumnTransformer(binaryColumn);
+    // The inputType is BLOB, so the error message should show a hex string.
+    GenericCodecColumnTransformer transformer =
+        new GenericCodecColumnTransformer(
+            BLOB, childColumnTransformer, CodecStrategiesFactory.FROM_BASE64, "from_base64", BLOB);
+    transformer.addReferenceCount();
+
+    try {
+      transformer.evaluate();
+      Assert.fail("Expected SemanticException was not thrown.");
+    } catch (SemanticException e) {
+      String expectedProblematicValue = "0xdeadbeef";
+      String expectedErrorMessage =
+          String.format(
+              "Failed to execute function 'from_base64' due to an invalid input format. Problematic value: %s",
+              expectedProblematicValue);
+      Assert.assertEquals(expectedErrorMessage, e.getMessage());
+    }
+  }
+}
diff --git a/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/transformation/dag/column/unary/scalar/base64Encoding/FromBase64UrlColumnTransformerTest.java b/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/transformation/dag/column/unary/scalar/base64Encoding/FromBase64UrlColumnTransformerTest.java
new file mode 100644
index 000000000000..535d222cd4b8
--- /dev/null
+++ b/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/transformation/dag/column/unary/scalar/base64Encoding/FromBase64UrlColumnTransformerTest.java
@@ -0,0 +1,328 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar.base64Encoding;
+
+import org.apache.iotdb.db.exception.sql.SemanticException;
+import org.apache.iotdb.db.queryengine.transformation.dag.column.ColumnTransformer;
+import org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar.GenericCodecColumnTransformer;
+import org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar.factory.CodecStrategiesFactory;
+
+import org.apache.tsfile.block.column.Column;
+import org.apache.tsfile.common.conf.TSFileConfig;
+import org.apache.tsfile.read.common.block.column.BinaryColumn;
+import org.apache.tsfile.read.common.type.BlobType;
+import org.apache.tsfile.utils.Binary;
+import org.junit.Assert;
+import org.junit.Test;
+import org.mockito.Mockito;
+
+import java.util.Base64;
+import java.util.Optional;
+
+import static org.apache.tsfile.read.common.type.BinaryType.TEXT;
+import static org.apache.tsfile.read.common.type.BlobType.BLOB;
+import static org.apache.tsfile.read.common.type.StringType.STRING;
+
+public class FromBase64UrlColumnTransformerTest {
+
+  // Builds a Mockito stub standing in for the upstream ColumnTransformer: getColumn()
+  // hands back the supplied column and every lifecycle hook is a no-op.
+  private ColumnTransformer mockChildColumnTransformer(Column column) {
+    ColumnTransformer stub = Mockito.mock(ColumnTransformer.class);
+    Mockito.when(stub.getColumn()).thenReturn(column);
+    Mockito.doNothing().when(stub).tryEvaluate();
+    Mockito.doNothing().when(stub).evaluateWithSelection(Mockito.any());
+    Mockito.doNothing().when(stub).clearCache();
+    return stub;
+  }
+
+  // Decodes a hex string (two characters per byte) into its raw byte array.
+  private static byte[] hexStringToByteArray(String s) {
+    if (s.length() % 2 != 0) {
+      throw new IllegalArgumentException("Hex string must have an even number of characters");
+    }
+    byte[] out = new byte[s.length() / 2];
+    for (int pos = 0; pos < out.length; pos++) {
+      int hi = Character.digit(s.charAt(2 * pos), 16);
+      int lo = Character.digit(s.charAt(2 * pos + 1), 16);
+      out[pos] = (byte) ((hi << 4) + lo);
+    }
+    return out;
+  }
+
+  /** FROM_BASE64URL must decode an unpadded URL-safe encoding back to the readable string. */
+  @Test
+  public void testFromBase64UrlToString() {
+    String expected = "Apache IoTDB";
+    String encoded =
+        Base64.getUrlEncoder()
+            .withoutPadding()
+            .encodeToString(expected.getBytes(TSFileConfig.STRING_CHARSET));
+
+    Binary[] inputValues =
+        new Binary[] {new Binary(encoded.getBytes(TSFileConfig.STRING_CHARSET))};
+    Column inputColumn = new BinaryColumn(inputValues.length, Optional.empty(), inputValues);
+
+    ColumnTransformer child = mockChildColumnTransformer(inputColumn);
+    GenericCodecColumnTransformer transformer =
+        new GenericCodecColumnTransformer(
+            BLOB,
+            child,
+            CodecStrategiesFactory.FROM_BASE64URL,
+            "from_base64url",
+            STRING);
+    transformer.addReferenceCount();
+    transformer.evaluate();
+    Column result = transformer.getColumn();
+
+    Assert.assertEquals(1, result.getPositionCount());
+    Assert.assertEquals(expected, result.getBinary(0).toString());
+  }
+
+  /** FROM_BASE64URL must recover the raw bytes of a hex-specified payload. */
+  @Test
+  public void testFromBase64UrlToHex() {
+    byte[] payload = hexStringToByteArray("41706163686520496f544442"); // "Apache IoTDB"
+    String encoded = Base64.getUrlEncoder().withoutPadding().encodeToString(payload);
+
+    Binary[] inputValues =
+        new Binary[] {new Binary(encoded.getBytes(TSFileConfig.STRING_CHARSET))};
+    Column inputColumn = new BinaryColumn(inputValues.length, Optional.empty(), inputValues);
+
+    ColumnTransformer child = mockChildColumnTransformer(inputColumn);
+    GenericCodecColumnTransformer transformer =
+        new GenericCodecColumnTransformer(
+            BLOB,
+            child,
+            CodecStrategiesFactory.FROM_BASE64URL,
+            "from_base64url",
+            STRING);
+    transformer.addReferenceCount();
+    transformer.evaluate();
+    Column result = transformer.getColumn();
+
+    Assert.assertEquals(1, result.getPositionCount());
+    Assert.assertArrayEquals(payload, result.getBinary(0).getValues());
+  }
+
+  /** Test specifically for URL-safe characters ('-' and '_'). */
+  @Test
+  public void testFromBase64UrlSpecialCharacters() {
+    String base64UrlInput = "-_-_";
+    // In the URL-safe alphabet '-' = 62 and '_' = 63, so "-_-_" is four 6-bit groups
+    // (24 bits) that decode to the three bytes 0xfb 0xff 0xbf.
+    byte[] expectedBytes = hexStringToByteArray("fbffbf");
+
+    Binary[] values =
+        new Binary[] {new Binary(base64UrlInput.getBytes(TSFileConfig.STRING_CHARSET))};
+    Column binaryColumn = new BinaryColumn(values.length, Optional.empty(), values);
+
+    ColumnTransformer childColumnTransformer = mockChildColumnTransformer(binaryColumn);
+    GenericCodecColumnTransformer transformer =
+        new GenericCodecColumnTransformer(
+            BLOB,
+            childColumnTransformer,
+            CodecStrategiesFactory.FROM_BASE64URL,
+            "from_base64url",
+            TEXT);
+    transformer.addReferenceCount();
+    transformer.evaluate();
+    Column result = transformer.getColumn();
+
+    Assert.assertEquals(1, result.getPositionCount());
+    Assert.assertArrayEquals(expectedBytes, result.getBinary(0).getValues());
+  }
+
+  /** Test FROM_BASE64URL with multiple rows, including a null value. */
+  @Test
+  public void testFromBase64UrlMultiRowsWithNull() {
+    // "hello?" forces a '_' into the URL-safe encoding, exercising the non-standard alphabet.
+    String original1 = "hello?";
+    String original2 = "rocks";
+    String base64Url1 =
+        Base64.getUrlEncoder()
+            .withoutPadding()
+            .encodeToString(original1.getBytes(TSFileConfig.STRING_CHARSET));
+    String base64Url2 =
+        Base64.getUrlEncoder()
+            .withoutPadding()
+            .encodeToString(original2.getBytes(TSFileConfig.STRING_CHARSET));
+
+    // Middle row is null; the transformer is expected to propagate it untouched.
+    Binary[] values =
+        new Binary[] {
+          new Binary(base64Url1.getBytes(TSFileConfig.STRING_CHARSET)),
+          null,
+          new Binary(base64Url2.getBytes(TSFileConfig.STRING_CHARSET))
+        };
+    boolean[] valueIsNull = new boolean[] {false, true, false};
+    Column binaryColumn = new BinaryColumn(values.length, Optional.of(valueIsNull), values);
+    ColumnTransformer childColumnTransformer = mockChildColumnTransformer(binaryColumn);
+
+    GenericCodecColumnTransformer transformer =
+        new GenericCodecColumnTransformer(
+            BLOB,
+            childColumnTransformer,
+            CodecStrategiesFactory.FROM_BASE64URL,
+            "from_base64url",
+            TEXT);
+    transformer.addReferenceCount();
+    transformer.evaluate();
+    Column result = transformer.getColumn();
+
+    Assert.assertEquals(3, result.getPositionCount());
+    Assert.assertFalse(result.isNull(0));
+    Assert.assertEquals(original1, result.getBinary(0).toString());
+    Assert.assertTrue(result.isNull(1));
+    Assert.assertFalse(result.isNull(2));
+    Assert.assertEquals(original2, result.getBinary(2).toString());
+  }
+
+  /** Test FROM_BASE64URL with a selection array to process only a subset of rows. */
+  @Test
+  public void testFromBase64UrlWithSelection() {
+    String original1 = "Apache";
+    // "IoTDB?" forces a URL-safe character into the middle (unselected) row's encoding.
+    String original2 = "IoTDB?";
+    String original3 = "rocks";
+    String base64Url1 =
+        Base64.getUrlEncoder()
+            .withoutPadding()
+            .encodeToString(original1.getBytes(TSFileConfig.STRING_CHARSET));
+    String base64Url2 =
+        Base64.getUrlEncoder()
+            .withoutPadding()
+            .encodeToString(original2.getBytes(TSFileConfig.STRING_CHARSET));
+    String base64Url3 =
+        Base64.getUrlEncoder()
+            .withoutPadding()
+            .encodeToString(original3.getBytes(TSFileConfig.STRING_CHARSET));
+
+    Binary[] values = {
+      new Binary(base64Url1.getBytes(TSFileConfig.STRING_CHARSET)),
+      new Binary(base64Url2.getBytes(TSFileConfig.STRING_CHARSET)),
+      new Binary(base64Url3.getBytes(TSFileConfig.STRING_CHARSET))
+    };
+    Column binaryColumn = new BinaryColumn(values.length, Optional.empty(), values);
+    ColumnTransformer child = mockChildColumnTransformer(binaryColumn);
+
+    GenericCodecColumnTransformer transformer =
+        new GenericCodecColumnTransformer(
+            BLOB, child, CodecStrategiesFactory.FROM_BASE64URL, "from_base64url", TEXT);
+    transformer.addReferenceCount();
+
+    // Select only the first and third rows; the unselected row must come back as null.
+    boolean[] selection = {true, false, true};
+    transformer.evaluateWithSelection(selection);
+    Column result = transformer.getColumn();
+
+    Assert.assertEquals(3, result.getPositionCount());
+    Assert.assertFalse(result.isNull(0));
+    Assert.assertEquals(original1, result.getBinary(0).toString());
+    Assert.assertTrue(result.isNull(1));
+    Assert.assertFalse(result.isNull(2));
+    Assert.assertEquals(original3, result.getBinary(2).toString());
+  }
+
+ /** Test FROM_BASE64URL with an empty binary input. */
+ @Test
+ public void testFromBase64UrlEmptyInput() {
+ String base64UrlInput = "";
+ Binary[] values =
+ new Binary[] {new Binary(base64UrlInput.getBytes(TSFileConfig.STRING_CHARSET))};
+ Column binaryColumn = new BinaryColumn(values.length, Optional.empty(), values);
+
+ ColumnTransformer childColumnTransformer = mockChildColumnTransformer(binaryColumn);
+ GenericCodecColumnTransformer transformer =
+ new GenericCodecColumnTransformer(
+ BLOB,
+ childColumnTransformer,
+ CodecStrategiesFactory.FROM_BASE64URL,
+ "from_base64url",
+ TEXT);
+ transformer.addReferenceCount();
+ transformer.evaluate();
+ Column result = transformer.getColumn();
+
+ byte[] expectedOutput = "".getBytes(TSFileConfig.STRING_CHARSET);
+ Assert.assertEquals(1, result.getPositionCount());
+ Assert.assertArrayEquals(expectedOutput, result.getBinary(0).getValues());
+ }
+
+ /**
+ * Test FROM_BASE64URL with an invalid Base64URL input string from a TEXT column. The error
+ * message should display the original string.
+ */
+ @Test
+ public void testInvalidBase64UrlInputFromText() {
+ String invalidInput = "this is not base64url!";
+ byte[] inputBytes = invalidInput.getBytes(TSFileConfig.STRING_CHARSET);
+ Binary[] values = new Binary[] {new Binary(inputBytes)};
+ Column binaryColumn = new BinaryColumn(values.length, Optional.empty(), values);
+
+ ColumnTransformer childColumnTransformer = mockChildColumnTransformer(binaryColumn);
+ GenericCodecColumnTransformer transformer =
+ new GenericCodecColumnTransformer(
+ BLOB,
+ childColumnTransformer,
+ CodecStrategiesFactory.FROM_BASE64URL,
+ "from_base64url",
+ TEXT);
+ transformer.addReferenceCount();
+
+ try {
+ transformer.evaluate();
+ Assert.fail("Expected SemanticException was not thrown.");
+ } catch (SemanticException e) {
+ String expectedErrorMessage =
+ String.format(
+ "Failed to execute function 'from_base64url' due to an invalid input format. Problematic value: %s",
+ invalidInput);
+ Assert.assertEquals(expectedErrorMessage, e.getMessage());
+ }
+ }
+
+ /**
+ * Test FROM_BASE64URL with an invalid Base64URL input from a BLOB column. The error message
+ * should display the hex representation of the input bytes.
+ */
+ @Test
+ public void testInvalidBase64UrlInputFromBlob() {
+ byte[] invalidBytes = new byte[] {(byte) 0xDE, (byte) 0xAD, (byte) 0xBE, (byte) 0xEF};
+ Binary[] values = new Binary[] {new Binary(invalidBytes)};
+ Column binaryColumn = new BinaryColumn(values.length, Optional.empty(), values);
+
+ ColumnTransformer childColumnTransformer = mockChildColumnTransformer(binaryColumn);
+ GenericCodecColumnTransformer transformer =
+ new GenericCodecColumnTransformer(
+ BLOB,
+ childColumnTransformer,
+ CodecStrategiesFactory.FROM_BASE64URL,
+ "from_base64url",
+ BlobType.BLOB);
+ transformer.addReferenceCount();
+
+ try {
+ transformer.evaluate();
+ Assert.fail("Expected SemanticException was not thrown.");
+ } catch (SemanticException e) {
+ String expectedProblematicValue = "0xdeadbeef";
+ String expectedErrorMessage =
+ String.format(
+ "Failed to execute function 'from_base64url' due to an invalid input format. Problematic value: %s",
+ expectedProblematicValue);
+ Assert.assertEquals(expectedErrorMessage, e.getMessage());
+ }
+ }
+}
diff --git a/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/transformation/dag/column/unary/scalar/base64Encoding/ToBase32ColumnTransformerTest.java b/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/transformation/dag/column/unary/scalar/base64Encoding/ToBase32ColumnTransformerTest.java
new file mode 100644
index 000000000000..f89cf121f6c6
--- /dev/null
+++ b/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/transformation/dag/column/unary/scalar/base64Encoding/ToBase32ColumnTransformerTest.java
@@ -0,0 +1,196 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+// NOTE: this Base32 test currently lives in the base64Encoding package alongside the other
+// codec tests; consider moving it to a dedicated base32Encoding package in a follow-up.
+package org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar.base64Encoding;
+
+import org.apache.iotdb.db.queryengine.transformation.dag.column.ColumnTransformer;
+import org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar.GenericCodecColumnTransformer;
+import org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar.factory.CodecStrategiesFactory;
+
+import com.google.common.io.BaseEncoding;
+import org.apache.tsfile.block.column.Column;
+import org.apache.tsfile.common.conf.TSFileConfig;
+import org.apache.tsfile.read.common.block.column.BinaryColumn;
+import org.apache.tsfile.utils.Binary;
+import org.junit.Assert;
+import org.junit.Test;
+import org.mockito.Mockito;
+
+import java.util.Optional;
+
+import static org.apache.tsfile.read.common.type.BinaryType.TEXT;
+import static org.apache.tsfile.read.common.type.StringType.STRING;
+
+public class ToBase32ColumnTransformerTest {
+
+ // To match the production code's encoder, we use Guava's BaseEncoding.base32().
+ private static final BaseEncoding GUAVA_BASE32_ENCODING = BaseEncoding.base32();
+
+ // Helper method to mock a child ColumnTransformer that returns a predefined Column.
+ private ColumnTransformer mockChildColumnTransformer(Column column) {
+ ColumnTransformer mockColumnTransformer = Mockito.mock(ColumnTransformer.class);
+ Mockito.when(mockColumnTransformer.getColumn()).thenReturn(column);
+ Mockito.doNothing().when(mockColumnTransformer).tryEvaluate();
+ Mockito.doNothing().when(mockColumnTransformer).clearCache();
+ Mockito.doNothing().when(mockColumnTransformer).evaluateWithSelection(Mockito.any());
+ return mockColumnTransformer;
+ }
+
+ // Helper method to convert a hex string to a byte array.
+ private static byte[] hexStringToByteArray(String s) {
+ int len = s.length();
+ if (len % 2 != 0) {
+ throw new IllegalArgumentException("Hex string must have an even number of characters");
+ }
+ byte[] data = new byte[len / 2];
+ for (int i = 0; i < len; i += 2) {
+ data[i / 2] =
+ (byte) ((Character.digit(s.charAt(i), 16) << 4) + Character.digit(s.charAt(i + 1), 16));
+ }
+ return data;
+ }
+
+ /** Test TO_BASE32 from a string input. */
+ @Test
+ public void testToBase32FromString() {
+ String input = "Apache IoTDB";
+ byte[] inputBytes = input.getBytes(TSFileConfig.STRING_CHARSET);
+ Binary[] values = new Binary[] {new Binary(inputBytes)};
+ Column binaryColumn = new BinaryColumn(values.length, Optional.empty(), values);
+
+ ColumnTransformer childColumnTransformer = mockChildColumnTransformer(binaryColumn);
+ GenericCodecColumnTransformer transformer =
+ new GenericCodecColumnTransformer(
+ STRING, childColumnTransformer, CodecStrategiesFactory.TO_BASE32, "to_base32", TEXT);
+ transformer.addReferenceCount();
+ transformer.evaluate();
+ Column result = transformer.getColumn();
+
+ String expectedOutput = GUAVA_BASE32_ENCODING.encode(inputBytes);
+ Assert.assertEquals(1, result.getPositionCount());
+ Assert.assertEquals(expectedOutput, result.getBinary(0).toString());
+ }
+
+ /** Test TO_BASE32 from a hex-represented binary input. */
+ @Test
+ public void testToBase32FromHex() {
+ // "Apache IoTDB" in hex
+ String inputHex = "41706163686520496f544442";
+ byte[] inputBytes = hexStringToByteArray(inputHex);
+ Binary[] values = new Binary[] {new Binary(inputBytes)};
+ Column binaryColumn = new BinaryColumn(values.length, Optional.empty(), values);
+
+ ColumnTransformer childColumnTransformer = mockChildColumnTransformer(binaryColumn);
+ GenericCodecColumnTransformer transformer =
+ new GenericCodecColumnTransformer(
+ STRING, childColumnTransformer, CodecStrategiesFactory.TO_BASE32, "to_base32", TEXT);
+ transformer.addReferenceCount();
+ transformer.evaluate();
+ Column result = transformer.getColumn();
+
+ String expectedOutput =
+ GUAVA_BASE32_ENCODING.encode(inputBytes); // IFYGCY3IMUQES32UIRBA====
+ Assert.assertEquals(1, result.getPositionCount());
+ Assert.assertEquals(expectedOutput, result.getBinary(0).toString());
+ }
+
+ /** Test TO_BASE32 with multiple rows, including a null value. */
+ @Test
+ public void testToBase32MultiRowsWithNull() {
+ byte[] bytes1 = "hello".getBytes(TSFileConfig.STRING_CHARSET);
+ byte[] bytes2 = "world".getBytes(TSFileConfig.STRING_CHARSET);
+
+ Binary[] values = new Binary[] {new Binary(bytes1), null, new Binary(bytes2)};
+ boolean[] valueIsNull = new boolean[] {false, true, false};
+ Column binaryColumn = new BinaryColumn(values.length, Optional.of(valueIsNull), values);
+ ColumnTransformer childColumnTransformer = mockChildColumnTransformer(binaryColumn);
+
+ GenericCodecColumnTransformer transformer =
+ new GenericCodecColumnTransformer(
+ STRING, childColumnTransformer, CodecStrategiesFactory.TO_BASE32, "to_base32", TEXT);
+ transformer.addReferenceCount();
+ transformer.evaluate();
+ Column result = transformer.getColumn();
+
+ String expected1 = GUAVA_BASE32_ENCODING.encode(bytes1); // NBSWY3DP
+ String expected3 = GUAVA_BASE32_ENCODING.encode(bytes2); // O5XXE3DE
+
+ Assert.assertEquals(3, result.getPositionCount());
+ Assert.assertFalse(result.isNull(0));
+ Assert.assertEquals(expected1, result.getBinary(0).toString());
+ Assert.assertTrue(result.isNull(1));
+ Assert.assertFalse(result.isNull(2));
+ Assert.assertEquals(expected3, result.getBinary(2).toString());
+ }
+
+ /** Test TO_BASE32 with a selection array to process only a subset of rows. */
+ @Test
+ public void testToBase32WithSelection() {
+ byte[] bytes1 = "Apache".getBytes(TSFileConfig.STRING_CHARSET);
+ byte[] bytes2 = "IoTDB".getBytes(TSFileConfig.STRING_CHARSET);
+ byte[] bytes3 = "rocks".getBytes(TSFileConfig.STRING_CHARSET);
+
+ Binary[] values = {new Binary(bytes1), new Binary(bytes2), new Binary(bytes3)};
+ Column binaryColumn = new BinaryColumn(values.length, Optional.empty(), values);
+ ColumnTransformer child = mockChildColumnTransformer(binaryColumn);
+
+ GenericCodecColumnTransformer transformer =
+ new GenericCodecColumnTransformer(
+ STRING, child, CodecStrategiesFactory.TO_BASE32, "to_base32", TEXT);
+ transformer.addReferenceCount();
+
+ // Select only the second and third rows for processing.
+ boolean[] selection = {false, true, true};
+ transformer.evaluateWithSelection(selection);
+ Column result = transformer.getColumn();
+
+ String expected2 = GUAVA_BASE32_ENCODING.encode(bytes2); // JFXVIRCC
+ String expected3 = GUAVA_BASE32_ENCODING.encode(bytes3); // OJXWG23T
+
+ Assert.assertEquals(3, result.getPositionCount());
+ // The first row was not selected, so it should be null in the result.
+ Assert.assertTrue(result.isNull(0));
+ Assert.assertFalse(result.isNull(1));
+ Assert.assertEquals(expected2, result.getBinary(1).toString());
+ Assert.assertFalse(result.isNull(2));
+ Assert.assertEquals(expected3, result.getBinary(2).toString());
+ }
+
+ /** Test TO_BASE32 with an empty binary input. */
+ @Test
+ public void testToBase32EmptyInput() {
+ byte[] inputBytes = "".getBytes(TSFileConfig.STRING_CHARSET);
+ Binary[] values = new Binary[] {new Binary(inputBytes)};
+ Column binaryColumn = new BinaryColumn(values.length, Optional.empty(), values);
+
+ ColumnTransformer childColumnTransformer = mockChildColumnTransformer(binaryColumn);
+ GenericCodecColumnTransformer transformer =
+ new GenericCodecColumnTransformer(
+ STRING, childColumnTransformer, CodecStrategiesFactory.TO_BASE32, "to_base32", TEXT);
+ transformer.addReferenceCount();
+ transformer.evaluate();
+ Column result = transformer.getColumn();
+
+ String expectedOutput = GUAVA_BASE32_ENCODING.encode(inputBytes); // ""
+ Assert.assertEquals(1, result.getPositionCount());
+ Assert.assertEquals(expectedOutput, result.getBinary(0).toString());
+ }
+}
diff --git a/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/transformation/dag/column/unary/scalar/base64Encoding/ToBase64ColumnTransformerTest.java b/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/transformation/dag/column/unary/scalar/base64Encoding/ToBase64ColumnTransformerTest.java
new file mode 100644
index 000000000000..04eae176abf8
--- /dev/null
+++ b/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/transformation/dag/column/unary/scalar/base64Encoding/ToBase64ColumnTransformerTest.java
@@ -0,0 +1,190 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar.base64Encoding;
+
+import org.apache.iotdb.db.queryengine.transformation.dag.column.ColumnTransformer;
+import org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar.GenericCodecColumnTransformer;
+import org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar.factory.CodecStrategiesFactory;
+
+import org.apache.tsfile.block.column.Column;
+import org.apache.tsfile.common.conf.TSFileConfig;
+import org.apache.tsfile.read.common.block.column.BinaryColumn;
+import org.apache.tsfile.utils.Binary;
+import org.junit.Assert;
+import org.junit.Test;
+import org.mockito.Mockito;
+
+import java.util.Base64;
+import java.util.Optional;
+
+import static org.apache.tsfile.read.common.type.BinaryType.TEXT;
+import static org.apache.tsfile.read.common.type.StringType.STRING;
+
+public class ToBase64ColumnTransformerTest {
+
+ // Helper method to mock a child ColumnTransformer that returns a predefined Column.
+ private ColumnTransformer mockChildColumnTransformer(Column column) {
+ ColumnTransformer mockColumnTransformer = Mockito.mock(ColumnTransformer.class);
+ Mockito.when(mockColumnTransformer.getColumn()).thenReturn(column);
+ Mockito.doNothing().when(mockColumnTransformer).tryEvaluate();
+ Mockito.doNothing().when(mockColumnTransformer).clearCache();
+ Mockito.doNothing().when(mockColumnTransformer).evaluateWithSelection(Mockito.any());
+ return mockColumnTransformer;
+ }
+
+ // Helper method to convert a hex string to a byte array.
+ private static byte[] hexStringToByteArray(String s) {
+ int len = s.length();
+ if (len % 2 != 0) {
+ throw new IllegalArgumentException("Hex string must have an even number of characters");
+ }
+ byte[] data = new byte[len / 2];
+ for (int i = 0; i < len; i += 2) {
+ data[i / 2] =
+ (byte) ((Character.digit(s.charAt(i), 16) << 4) + Character.digit(s.charAt(i + 1), 16));
+ }
+ return data;
+ }
+
+ /** Test TO_BASE64 from a string input. */
+ @Test
+ public void testToBase64FromString() {
+ String input = "Apache IoTDB";
+ byte[] inputBytes = input.getBytes(TSFileConfig.STRING_CHARSET);
+ Binary[] values = new Binary[] {new Binary(inputBytes)};
+ Column binaryColumn = new BinaryColumn(values.length, Optional.empty(), values);
+
+ ColumnTransformer childColumnTransformer = mockChildColumnTransformer(binaryColumn);
+ GenericCodecColumnTransformer transformer =
+ new GenericCodecColumnTransformer(
+ STRING, childColumnTransformer, CodecStrategiesFactory.TO_BASE64, "to_base64", TEXT);
+ transformer.addReferenceCount();
+ transformer.evaluate();
+ Column result = transformer.getColumn();
+
+ String expectedOutput = Base64.getEncoder().encodeToString(inputBytes);
+ Assert.assertEquals(1, result.getPositionCount());
+ Assert.assertEquals(expectedOutput, result.getBinary(0).toString());
+ }
+
+ /** Test TO_BASE64 from a hex-represented binary input. */
+ @Test
+ public void testToBase64FromHex() {
+ // "Apache IoTDB" in hex
+ String inputHex = "41706163686520496f544442";
+ byte[] inputBytes = hexStringToByteArray(inputHex);
+ Binary[] values = new Binary[] {new Binary(inputBytes)};
+ Column binaryColumn = new BinaryColumn(values.length, Optional.empty(), values);
+
+ ColumnTransformer childColumnTransformer = mockChildColumnTransformer(binaryColumn);
+ GenericCodecColumnTransformer transformer =
+ new GenericCodecColumnTransformer(
+ STRING, childColumnTransformer, CodecStrategiesFactory.TO_BASE64, "to_base64", TEXT);
+ transformer.addReferenceCount();
+ transformer.evaluate();
+ Column result = transformer.getColumn();
+
+ String expectedOutput = Base64.getEncoder().encodeToString(inputBytes); // "QXBhY2hlIElvVERC"
+ Assert.assertEquals(1, result.getPositionCount());
+ Assert.assertEquals(expectedOutput, result.getBinary(0).toString());
+ }
+
+ /** Test TO_BASE64 with multiple rows, including a null value. */
+ @Test
+ public void testToBase64MultiRowsWithNull() {
+ byte[] bytes1 = "hello".getBytes(TSFileConfig.STRING_CHARSET);
+ byte[] bytes2 = "world".getBytes(TSFileConfig.STRING_CHARSET);
+
+ Binary[] values = new Binary[] {new Binary(bytes1), null, new Binary(bytes2)};
+ boolean[] valueIsNull = new boolean[] {false, true, false};
+ Column binaryColumn = new BinaryColumn(values.length, Optional.of(valueIsNull), values);
+ ColumnTransformer childColumnTransformer = mockChildColumnTransformer(binaryColumn);
+
+ GenericCodecColumnTransformer transformer =
+ new GenericCodecColumnTransformer(
+ STRING, childColumnTransformer, CodecStrategiesFactory.TO_BASE64, "to_base64", TEXT);
+ transformer.addReferenceCount();
+ transformer.evaluate();
+ Column result = transformer.getColumn();
+
+ String expected1 = Base64.getEncoder().encodeToString(bytes1); // "aGVsbG8="
+ String expected3 = Base64.getEncoder().encodeToString(bytes2); // "d29ybGQ="
+
+ Assert.assertEquals(3, result.getPositionCount());
+ Assert.assertFalse(result.isNull(0));
+ Assert.assertEquals(expected1, result.getBinary(0).toString());
+ Assert.assertTrue(result.isNull(1));
+ Assert.assertFalse(result.isNull(2));
+ Assert.assertEquals(expected3, result.getBinary(2).toString());
+ }
+
+ /** Test TO_BASE64 with a selection array to process only a subset of rows. */
+ @Test
+ public void testToBase64WithSelection() {
+ byte[] bytes1 = "Apache".getBytes(TSFileConfig.STRING_CHARSET);
+ byte[] bytes2 = "IoTDB".getBytes(TSFileConfig.STRING_CHARSET);
+ byte[] bytes3 = "rocks".getBytes(TSFileConfig.STRING_CHARSET);
+
+ Binary[] values = {new Binary(bytes1), new Binary(bytes2), new Binary(bytes3)};
+ Column binaryColumn = new BinaryColumn(values.length, Optional.empty(), values);
+ ColumnTransformer child = mockChildColumnTransformer(binaryColumn);
+
+ GenericCodecColumnTransformer transformer =
+ new GenericCodecColumnTransformer(
+ STRING, child, CodecStrategiesFactory.TO_BASE64, "to_base64", TEXT);
+ transformer.addReferenceCount();
+
+ // Select only the second and third rows for processing.
+ boolean[] selection = {false, true, true};
+ transformer.evaluateWithSelection(selection);
+ Column result = transformer.getColumn();
+
+ String expected2 = Base64.getEncoder().encodeToString(bytes2); // "SW9UREI="
+ String expected3 = Base64.getEncoder().encodeToString(bytes3); // "cm9ja3M="
+
+ Assert.assertEquals(3, result.getPositionCount());
+ // The first row was not selected, so it should be null in the result.
+ Assert.assertTrue(result.isNull(0));
+ Assert.assertFalse(result.isNull(1));
+ Assert.assertEquals(expected2, result.getBinary(1).toString());
+ Assert.assertFalse(result.isNull(2));
+ Assert.assertEquals(expected3, result.getBinary(2).toString());
+ }
+
+ /** Test TO_BASE64 with an empty binary input. */
+ @Test
+ public void testToBase64EmptyInput() {
+ byte[] inputBytes = "".getBytes(TSFileConfig.STRING_CHARSET);
+ Binary[] values = new Binary[] {new Binary(inputBytes)};
+ Column binaryColumn = new BinaryColumn(values.length, Optional.empty(), values);
+
+ ColumnTransformer childColumnTransformer = mockChildColumnTransformer(binaryColumn);
+ GenericCodecColumnTransformer transformer =
+ new GenericCodecColumnTransformer(
+ STRING, childColumnTransformer, CodecStrategiesFactory.TO_BASE64, "to_base64", TEXT);
+ transformer.addReferenceCount();
+ transformer.evaluate();
+ Column result = transformer.getColumn();
+
+ String expectedOutput = Base64.getEncoder().encodeToString(inputBytes); // ""
+ Assert.assertEquals(1, result.getPositionCount());
+ Assert.assertEquals(expectedOutput, result.getBinary(0).toString());
+ }
+}
diff --git a/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/transformation/dag/column/unary/scalar/base64Encoding/ToBase64UrlColumnTransformerTest.java b/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/transformation/dag/column/unary/scalar/base64Encoding/ToBase64UrlColumnTransformerTest.java
new file mode 100644
index 000000000000..db79311f311c
--- /dev/null
+++ b/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/transformation/dag/column/unary/scalar/base64Encoding/ToBase64UrlColumnTransformerTest.java
@@ -0,0 +1,235 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar.base64Encoding;
+
+import org.apache.iotdb.db.queryengine.transformation.dag.column.ColumnTransformer;
+import org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar.GenericCodecColumnTransformer;
+import org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar.factory.CodecStrategiesFactory;
+
+import org.apache.tsfile.block.column.Column;
+import org.apache.tsfile.common.conf.TSFileConfig;
+import org.apache.tsfile.read.common.block.column.BinaryColumn;
+import org.apache.tsfile.utils.Binary;
+import org.junit.Assert;
+import org.junit.Test;
+import org.mockito.Mockito;
+
+import java.util.Base64;
+import java.util.Optional;
+
+import static org.apache.tsfile.read.common.type.BinaryType.TEXT;
+import static org.apache.tsfile.read.common.type.StringType.STRING;
+
+public class ToBase64UrlColumnTransformerTest {
+
+ // Helper method to mock a child ColumnTransformer that returns a predefined Column.
+ private ColumnTransformer mockChildColumnTransformer(Column column) {
+ ColumnTransformer mockColumnTransformer = Mockito.mock(ColumnTransformer.class);
+ Mockito.when(mockColumnTransformer.getColumn()).thenReturn(column);
+ Mockito.doNothing().when(mockColumnTransformer).tryEvaluate();
+ Mockito.doNothing().when(mockColumnTransformer).clearCache();
+ Mockito.doNothing().when(mockColumnTransformer).evaluateWithSelection(Mockito.any());
+ return mockColumnTransformer;
+ }
+
+ // Helper method to convert a hex string to a byte array.
+ private static byte[] hexStringToByteArray(String s) {
+ int len = s.length();
+ if (len % 2 != 0) {
+ throw new IllegalArgumentException("Hex string must have an even number of characters");
+ }
+ byte[] data = new byte[len / 2];
+ for (int i = 0; i < len; i += 2) {
+ data[i / 2] =
+ (byte) ((Character.digit(s.charAt(i), 16) << 4) + Character.digit(s.charAt(i + 1), 16));
+ }
+ return data;
+ }
+
+ /** Test TO_BASE64URL from a string input. */
+ @Test
+ public void testToBase64UrlFromString() {
+ String input = "Apache IoTDB";
+ byte[] inputBytes = input.getBytes(TSFileConfig.STRING_CHARSET);
+ Binary[] values = new Binary[] {new Binary(inputBytes)};
+ Column binaryColumn = new BinaryColumn(values.length, Optional.empty(), values);
+
+ ColumnTransformer childColumnTransformer = mockChildColumnTransformer(binaryColumn);
+ GenericCodecColumnTransformer transformer =
+ new GenericCodecColumnTransformer(
+ STRING,
+ childColumnTransformer,
+ CodecStrategiesFactory.TO_BASE64URL,
+ "to_base64url",
+ STRING);
+ transformer.addReferenceCount();
+ transformer.evaluate();
+ Column result = transformer.getColumn();
+
+ String expectedOutput = Base64.getUrlEncoder().encodeToString(inputBytes);
+ Assert.assertEquals(1, result.getPositionCount());
+ Assert.assertEquals(expectedOutput, result.getBinary(0).toString());
+ }
+
+ /** Test TO_BASE64URL from a hex-represented binary input. */
+ @Test
+ public void testToBase64UrlFromHex() {
+ // "Apache IoTDB" in hex
+ String inputHex = "41706163686520496f544442";
+ byte[] inputBytes = hexStringToByteArray(inputHex);
+ Binary[] values = new Binary[] {new Binary(inputBytes)};
+ Column binaryColumn = new BinaryColumn(values.length, Optional.empty(), values);
+
+ ColumnTransformer childColumnTransformer = mockChildColumnTransformer(binaryColumn);
+ GenericCodecColumnTransformer transformer =
+ new GenericCodecColumnTransformer(
+ STRING,
+ childColumnTransformer,
+ CodecStrategiesFactory.TO_BASE64URL,
+ "to_base64url",
+ STRING);
+ transformer.addReferenceCount();
+ transformer.evaluate();
+ Column result = transformer.getColumn();
+
+ String expectedOutput = Base64.getUrlEncoder().encodeToString(inputBytes); // "QXBhY2hlIElvVERC"
+ Assert.assertEquals(1, result.getPositionCount());
+ Assert.assertEquals(expectedOutput, result.getBinary(0).toString());
+ }
+
+ /** Test specifically for URL-safe characters ('-' and '_'). */
+ @Test
+ public void testToBase64UrlSpecialCharacters() {
+ // This specific byte sequence produces '+' and '/' in standard Base64.
+ // Binary: 11111011 11111111 10111111
+ // 6-bit groups: 111110 (62), 111111 (63), 111110 (62), 111111 (63)
+ // Standard Base64: +/+/
+ // URL-safe Base64: -_-_
+ byte[] inputBytes = hexStringToByteArray("fbffbf");
+ Binary[] values = new Binary[] {new Binary(inputBytes)};
+ Column binaryColumn = new BinaryColumn(values.length, Optional.empty(), values);
+
+ ColumnTransformer childColumnTransformer = mockChildColumnTransformer(binaryColumn);
+ GenericCodecColumnTransformer transformer =
+ new GenericCodecColumnTransformer(
+ TEXT,
+ childColumnTransformer,
+ CodecStrategiesFactory.TO_BASE64URL,
+ "to_base64url",
+ TEXT);
+ transformer.addReferenceCount();
+ transformer.evaluate();
+ Column result = transformer.getColumn();
+
+ String expectedOutput = "-_-_";
+ Assert.assertEquals(1, result.getPositionCount());
+ Assert.assertEquals(expectedOutput, result.getBinary(0).toString());
+ }
+
+ /** Test TO_BASE64URL with multiple rows, including a null value. */
+ @Test
+ public void testToBase64UrlMultiRowsWithNull() {
+ byte[] bytes1 = "hello?".getBytes(TSFileConfig.STRING_CHARSET); // Contains char that becomes /
+ byte[] bytes2 = "world".getBytes(TSFileConfig.STRING_CHARSET);
+
+ Binary[] values = new Binary[] {new Binary(bytes1), null, new Binary(bytes2)};
+ boolean[] valueIsNull = new boolean[] {false, true, false};
+ Column binaryColumn = new BinaryColumn(values.length, Optional.of(valueIsNull), values);
+ ColumnTransformer childColumnTransformer = mockChildColumnTransformer(binaryColumn);
+
+ GenericCodecColumnTransformer transformer =
+ new GenericCodecColumnTransformer(
+ STRING,
+ childColumnTransformer,
+ CodecStrategiesFactory.TO_BASE64URL,
+ "to_base64url",
+ TEXT);
+ transformer.addReferenceCount();
+ transformer.evaluate();
+ Column result = transformer.getColumn();
+
+ String expected1 = Base64.getUrlEncoder().withoutPadding().encodeToString(bytes1);
+ String expected3 = Base64.getUrlEncoder().withoutPadding().encodeToString(bytes2);
+
+ Assert.assertEquals(3, result.getPositionCount());
+ Assert.assertFalse(result.isNull(0));
+ Assert.assertEquals(expected1, result.getBinary(0).toString());
+ Assert.assertTrue(result.isNull(1));
+ Assert.assertFalse(result.isNull(2));
+ Assert.assertEquals(expected3, result.getBinary(2).toString());
+ }
+
+ /** Test TO_BASE64URL with a selection array to process only a subset of rows. */
+ @Test
+ public void testToBase64UrlWithSelection() {
+ byte[] bytes1 = "Apache".getBytes(TSFileConfig.STRING_CHARSET);
+ byte[] bytes2 = "IoTDB?".getBytes(TSFileConfig.STRING_CHARSET);
+ byte[] bytes3 = "rocks".getBytes(TSFileConfig.STRING_CHARSET);
+
+ Binary[] values = {new Binary(bytes1), new Binary(bytes2), new Binary(bytes3)};
+ Column binaryColumn = new BinaryColumn(values.length, Optional.empty(), values);
+ ColumnTransformer child = mockChildColumnTransformer(binaryColumn);
+
+ GenericCodecColumnTransformer transformer =
+ new GenericCodecColumnTransformer(
+ STRING, child, CodecStrategiesFactory.TO_BASE64URL, "to_base64url", TEXT);
+ transformer.addReferenceCount();
+
+ // Select only the second and third rows for processing.
+ boolean[] selection = {false, true, true};
+ transformer.evaluateWithSelection(selection);
+ Column result = transformer.getColumn();
+
+ String expected2 = Base64.getUrlEncoder().withoutPadding().encodeToString(bytes2);
+ String expected3 = Base64.getUrlEncoder().withoutPadding().encodeToString(bytes3);
+
+ Assert.assertEquals(3, result.getPositionCount());
+ // The first row was not selected, so it should be null in the result.
+ Assert.assertTrue(result.isNull(0));
+ Assert.assertFalse(result.isNull(1));
+ Assert.assertEquals(expected2, result.getBinary(1).toString());
+ Assert.assertFalse(result.isNull(2));
+ Assert.assertEquals(expected3, result.getBinary(2).toString());
+ }
+
+ /** Test TO_BASE64URL with an empty binary input. */
+ @Test
+ public void testToBase64UrlEmptyInput() {
+ byte[] inputBytes = "".getBytes(TSFileConfig.STRING_CHARSET);
+ Binary[] values = new Binary[] {new Binary(inputBytes)};
+ Column binaryColumn = new BinaryColumn(values.length, Optional.empty(), values);
+
+ ColumnTransformer childColumnTransformer = mockChildColumnTransformer(binaryColumn);
+ GenericCodecColumnTransformer transformer =
+ new GenericCodecColumnTransformer(
+ STRING,
+ childColumnTransformer,
+ CodecStrategiesFactory.TO_BASE64URL,
+ "to_base64url",
+ TEXT);
+ transformer.addReferenceCount();
+ transformer.evaluate();
+ Column result = transformer.getColumn();
+
+ String expectedOutput = Base64.getUrlEncoder().encodeToString(inputBytes); // ""
+ Assert.assertEquals(1, result.getPositionCount());
+ Assert.assertEquals(expectedOutput, result.getBinary(0).toString());
+ }
+}
diff --git a/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/transformation/dag/column/unary/scalar/hashing/Crc32ColumnTransformerTest.java b/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/transformation/dag/column/unary/scalar/hashing/Crc32ColumnTransformerTest.java
new file mode 100644
index 000000000000..78cf7dace0b4
--- /dev/null
+++ b/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/transformation/dag/column/unary/scalar/hashing/Crc32ColumnTransformerTest.java
@@ -0,0 +1,200 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+// Unit tests for the CRC32Transformer scalar column transformer.
+package org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar.hashing;
+
+import org.apache.iotdb.db.queryengine.transformation.dag.column.ColumnTransformer;
+import org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar.CRC32Transformer;
+
+import org.apache.tsfile.block.column.Column;
+import org.apache.tsfile.common.conf.TSFileConfig;
+import org.apache.tsfile.read.common.block.column.BinaryColumn;
+import org.apache.tsfile.read.common.type.LongType;
+import org.apache.tsfile.utils.Binary;
+import org.junit.Assert;
+import org.junit.Test;
+import org.mockito.Mockito;
+
+import java.util.Optional;
+import java.util.zip.CRC32;
+
+public class Crc32ColumnTransformerTest {
+
+  /** Computes the reference CRC32 checksum used as the expected value in assertions. */
+  private long calculateCrc32(byte[] bytes) {
+    CRC32 crc32 = new CRC32();
+    crc32.update(bytes);
+    return crc32.getValue();
+  }
+
+  /**
+   * Mocks a child ColumnTransformer that always returns the given column, so the transformer
+   * under test can be evaluated in isolation.
+   */
+  private ColumnTransformer mockChildColumnTransformer(Column column) {
+    ColumnTransformer mockColumnTransformer = Mockito.mock(ColumnTransformer.class);
+    Mockito.when(mockColumnTransformer.getColumn()).thenReturn(column);
+    Mockito.doNothing().when(mockColumnTransformer).tryEvaluate();
+    Mockito.doNothing().when(mockColumnTransformer).clearCache();
+    Mockito.doNothing().when(mockColumnTransformer).evaluateWithSelection(Mockito.any());
+    return mockColumnTransformer;
+  }
+
+  /** Builds the transformer under test on top of a mocked child producing {@code inputColumn}. */
+  private CRC32Transformer newTransformer(Column inputColumn) {
+    return new CRC32Transformer(LongType.INT64, mockChildColumnTransformer(inputColumn));
+  }
+
+  /**
+   * Converts a hex string to a byte array so arbitrary BLOB inputs can be written in a
+   * human-readable form.
+   *
+   * @throws IllegalArgumentException if the string has odd length or contains a non-hex character
+   */
+  private static byte[] hexStringToByteArray(String s) {
+    int len = s.length();
+    if (len % 2 != 0) {
+      throw new IllegalArgumentException("Hex string must have an even number of characters");
+    }
+    byte[] data = new byte[len / 2];
+    for (int i = 0; i < len; i += 2) {
+      int high = Character.digit(s.charAt(i), 16);
+      int low = Character.digit(s.charAt(i + 1), 16);
+      if (high < 0 || low < 0) {
+        // Character.digit returns -1 for non-hex input; fail fast instead of silently
+        // producing a corrupted byte.
+        throw new IllegalArgumentException("Invalid hex character in: " + s);
+      }
+      data[i / 2] = (byte) ((high << 4) + low);
+    }
+    return data;
+  }
+
+  /** Test CRC32 from a STRING-like input. Output should be INT64. */
+  @Test
+  public void testCrc32FromString() {
+    byte[] inputBytes = "Apache IoTDB".getBytes(TSFileConfig.STRING_CHARSET);
+    Binary[] values = new Binary[] {new Binary(inputBytes)};
+    Column binaryColumn = new BinaryColumn(values.length, Optional.empty(), values);
+
+    CRC32Transformer transformer = newTransformer(binaryColumn);
+    transformer.addReferenceCount();
+    transformer.evaluate();
+    Column result = transformer.getColumn();
+
+    Assert.assertEquals(1, result.getPositionCount());
+    Assert.assertEquals(calculateCrc32(inputBytes), result.getLong(0));
+  }
+
+  /** Test CRC32 from a BLOB-like input (represented by hex). Output should be INT64. */
+  @Test
+  public void testCrc32FromBlob() {
+    // Hex representation of the string "Apache IoTDB".
+    byte[] inputBytes = hexStringToByteArray("41706163686520496f544442");
+    Binary[] values = new Binary[] {new Binary(inputBytes)};
+    Column binaryColumn = new BinaryColumn(values.length, Optional.empty(), values);
+
+    CRC32Transformer transformer = newTransformer(binaryColumn);
+    transformer.addReferenceCount();
+    transformer.evaluate();
+    Column result = transformer.getColumn();
+
+    Assert.assertEquals(1, result.getPositionCount());
+    Assert.assertEquals(calculateCrc32(inputBytes), result.getLong(0));
+  }
+
+  /** Test CRC32 with multiple rows, including a null value. */
+  @Test
+  public void testCrc32MultiRowsWithNull() {
+    byte[] bytes1 = "hello".getBytes(TSFileConfig.STRING_CHARSET);
+    byte[] bytes2 = "world".getBytes(TSFileConfig.STRING_CHARSET);
+
+    Binary[] values = new Binary[] {new Binary(bytes1), null, new Binary(bytes2)};
+    boolean[] valueIsNull = new boolean[] {false, true, false};
+    Column binaryColumn = new BinaryColumn(values.length, Optional.of(valueIsNull), values);
+
+    CRC32Transformer transformer = newTransformer(binaryColumn);
+    transformer.addReferenceCount();
+    transformer.evaluate();
+    Column result = transformer.getColumn();
+
+    Assert.assertEquals(3, result.getPositionCount());
+    Assert.assertFalse(result.isNull(0));
+    Assert.assertEquals(calculateCrc32(bytes1), result.getLong(0));
+    // The null input row must propagate as a null output row.
+    Assert.assertTrue(result.isNull(1));
+    Assert.assertFalse(result.isNull(2));
+    Assert.assertEquals(calculateCrc32(bytes2), result.getLong(2));
+  }
+
+  /** Test CRC32 with a selection array to process only a subset of rows. */
+  @Test
+  public void testCrc32WithSelection() {
+    byte[] bytes1 = "Apache".getBytes(TSFileConfig.STRING_CHARSET);
+    byte[] bytes2 = "IoTDB".getBytes(TSFileConfig.STRING_CHARSET);
+    byte[] bytes3 = "rocks".getBytes(TSFileConfig.STRING_CHARSET);
+
+    Binary[] values = {new Binary(bytes1), new Binary(bytes2), new Binary(bytes3)};
+    Column binaryColumn = new BinaryColumn(values.length, Optional.empty(), values);
+
+    CRC32Transformer transformer = newTransformer(binaryColumn);
+    transformer.addReferenceCount();
+
+    // Select only the first and third rows for processing.
+    boolean[] selection = {true, false, true};
+    transformer.evaluateWithSelection(selection);
+    Column result = transformer.getColumn();
+
+    Assert.assertEquals(3, result.getPositionCount());
+    Assert.assertFalse(result.isNull(0));
+    Assert.assertEquals(calculateCrc32(bytes1), result.getLong(0));
+    // The second row was NOT selected, so its corresponding output should be null.
+    Assert.assertTrue(result.isNull(1));
+    Assert.assertFalse(result.isNull(2));
+    Assert.assertEquals(calculateCrc32(bytes3), result.getLong(2));
+  }
+
+  /** Test CRC32 with an empty binary input. The result should be 0. */
+  @Test
+  public void testCrc32EmptyInput() {
+    byte[] inputBytes = "".getBytes(TSFileConfig.STRING_CHARSET);
+    Binary[] values = new Binary[] {new Binary(inputBytes)};
+    Column binaryColumn = new BinaryColumn(values.length, Optional.empty(), values);
+
+    CRC32Transformer transformer = newTransformer(binaryColumn);
+    transformer.addReferenceCount();
+    transformer.evaluate();
+    Column result = transformer.getColumn();
+
+    Assert.assertEquals(1, result.getPositionCount());
+    // The CRC32 of an empty input is always 0.
+    Assert.assertEquals(0L, result.getLong(0));
+  }
+}
diff --git a/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/transformation/dag/column/unary/scalar/hashing/Md5ColumnTransformerTest.java b/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/transformation/dag/column/unary/scalar/hashing/Md5ColumnTransformerTest.java
new file mode 100644
index 000000000000..ac357616e364
--- /dev/null
+++ b/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/transformation/dag/column/unary/scalar/hashing/Md5ColumnTransformerTest.java
@@ -0,0 +1,225 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+// Unit tests for the MD5 codec strategy evaluated through
+// GenericCodecColumnTransformer; grouped under the "hashing" test package.
+package org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar.hashing;
+
+import org.apache.iotdb.db.queryengine.transformation.dag.column.ColumnTransformer;
+import org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar.GenericCodecColumnTransformer;
+import org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar.factory.CodecStrategiesFactory;
+
+import com.google.common.hash.Hashing;
+import org.apache.tsfile.block.column.Column;
+import org.apache.tsfile.common.conf.TSFileConfig;
+import org.apache.tsfile.read.common.block.column.BinaryColumn;
+import org.apache.tsfile.read.common.type.BlobType;
+import org.apache.tsfile.utils.Binary;
+import org.junit.Assert;
+import org.junit.Test;
+import org.mockito.Mockito;
+
+import java.util.Optional;
+
+import static org.apache.tsfile.read.common.type.StringType.STRING;
+
+public class Md5ColumnTransformerTest {
+
+  /** Returns a mocked upstream transformer whose getColumn() always yields {@code column}. */
+  private ColumnTransformer mockChildColumnTransformer(Column column) {
+    ColumnTransformer upstream = Mockito.mock(ColumnTransformer.class);
+    Mockito.when(upstream.getColumn()).thenReturn(column);
+    Mockito.doNothing().when(upstream).tryEvaluate();
+    Mockito.doNothing().when(upstream).clearCache();
+    Mockito.doNothing().when(upstream).evaluateWithSelection(Mockito.any());
+    return upstream;
+  }
+
+  /** Computes the reference MD5 digest used as the expected value in assertions. */
+  private static byte[] md5Of(byte[] data) {
+    return Hashing.md5().hashBytes(data).asBytes();
+  }
+
+  /**
+   * Decodes a hex string into bytes. Used both to define arbitrary BLOB inputs and to spell
+   * out well-known digest values in a human-readable form.
+   */
+  private static byte[] hexStringToByteArray(String s) {
+    if (s.length() % 2 != 0) {
+      throw new IllegalArgumentException("Hex string must have an even number of characters");
+    }
+    byte[] decoded = new byte[s.length() / 2];
+    for (int pos = 0; pos < decoded.length; pos++) {
+      decoded[pos] =
+          (byte)
+              ((Character.digit(s.charAt(2 * pos), 16) << 4)
+                  + Character.digit(s.charAt(2 * pos + 1), 16));
+    }
+    return decoded;
+  }
+
+  /** MD5 over a STRING input must yield the standard 16-byte digest as a BLOB. */
+  @Test
+  public void testMd5FromString() {
+    byte[] payload = "Apache IoTDB".getBytes(TSFileConfig.STRING_CHARSET);
+    Column input = new BinaryColumn(1, Optional.empty(), new Binary[] {new Binary(payload)});
+
+    GenericCodecColumnTransformer transformer =
+        new GenericCodecColumnTransformer(
+            BlobType.BLOB,
+            mockChildColumnTransformer(input),
+            CodecStrategiesFactory.MD5,
+            "md5",
+            STRING);
+    transformer.addReferenceCount();
+    transformer.evaluate();
+
+    Column result = transformer.getColumn();
+    Assert.assertEquals(1, result.getPositionCount());
+    Assert.assertArrayEquals(md5Of(payload), result.getBinary(0).getValues());
+  }
+
+  /** MD5 over a BLOB input (written as hex) must yield the standard digest as a BLOB. */
+  @Test
+  public void testMd5FromBlob() {
+    // Hex encoding of the string "Apache IoTDB".
+    byte[] payload = hexStringToByteArray("41706163686520496f544442");
+    Column input = new BinaryColumn(1, Optional.empty(), new Binary[] {new Binary(payload)});
+
+    GenericCodecColumnTransformer transformer =
+        new GenericCodecColumnTransformer(
+            BlobType.BLOB,
+            mockChildColumnTransformer(input),
+            CodecStrategiesFactory.MD5,
+            "md5",
+            BlobType.BLOB);
+    transformer.addReferenceCount();
+    transformer.evaluate();
+
+    Column result = transformer.getColumn();
+    Assert.assertEquals(1, result.getPositionCount());
+    Assert.assertArrayEquals(md5Of(payload), result.getBinary(0).getValues());
+  }
+
+  /** A null input row must propagate as a null output row; other rows are hashed normally. */
+  @Test
+  public void testMd5MultiRowsWithNull() {
+    byte[] first = "hello".getBytes(TSFileConfig.STRING_CHARSET);
+    byte[] third = "world".getBytes(TSFileConfig.STRING_CHARSET);
+    Binary[] rows = new Binary[] {new Binary(first), null, new Binary(third)};
+    Column input = new BinaryColumn(3, Optional.of(new boolean[] {false, true, false}), rows);
+
+    GenericCodecColumnTransformer transformer =
+        new GenericCodecColumnTransformer(
+            BlobType.BLOB,
+            mockChildColumnTransformer(input),
+            CodecStrategiesFactory.MD5,
+            "md5",
+            STRING);
+    transformer.addReferenceCount();
+    transformer.evaluate();
+
+    Column result = transformer.getColumn();
+    Assert.assertEquals(3, result.getPositionCount());
+    Assert.assertFalse(result.isNull(0));
+    Assert.assertArrayEquals(md5Of(first), result.getBinary(0).getValues());
+    Assert.assertTrue(result.isNull(1));
+    Assert.assertFalse(result.isNull(2));
+    Assert.assertArrayEquals(md5Of(third), result.getBinary(2).getValues());
+  }
+
+  /** Rows excluded by the selection mask must come back as nulls. */
+  @Test
+  public void testMd5WithSelection() {
+    byte[] first = "Apache".getBytes(TSFileConfig.STRING_CHARSET);
+    byte[] second = "IoTDB".getBytes(TSFileConfig.STRING_CHARSET);
+    byte[] third = "rocks".getBytes(TSFileConfig.STRING_CHARSET);
+    Binary[] rows = {new Binary(first), new Binary(second), new Binary(third)};
+    Column input = new BinaryColumn(3, Optional.empty(), rows);
+
+    GenericCodecColumnTransformer transformer =
+        new GenericCodecColumnTransformer(
+            BlobType.BLOB,
+            mockChildColumnTransformer(input),
+            CodecStrategiesFactory.MD5,
+            "md5",
+            STRING);
+    transformer.addReferenceCount();
+    // Only rows 0 and 2 are selected; row 1 must stay null in the output.
+    transformer.evaluateWithSelection(new boolean[] {true, false, true});
+
+    Column result = transformer.getColumn();
+    Assert.assertEquals(3, result.getPositionCount());
+    Assert.assertFalse(result.isNull(0));
+    Assert.assertArrayEquals(md5Of(first), result.getBinary(0).getValues());
+    Assert.assertTrue(result.isNull(1));
+    Assert.assertFalse(result.isNull(2));
+    Assert.assertArrayEquals(md5Of(third), result.getBinary(2).getValues());
+  }
+
+  /** MD5 of empty input must equal the well-known digest d41d8cd98f00b204e9800998ecf8427e. */
+  @Test
+  public void testMd5EmptyInput() {
+    byte[] payload = "".getBytes(TSFileConfig.STRING_CHARSET);
+    Column input = new BinaryColumn(1, Optional.empty(), new Binary[] {new Binary(payload)});
+
+    GenericCodecColumnTransformer transformer =
+        new GenericCodecColumnTransformer(
+            BlobType.BLOB,
+            mockChildColumnTransformer(input),
+            CodecStrategiesFactory.MD5,
+            "md5",
+            STRING);
+    transformer.addReferenceCount();
+    transformer.evaluate();
+
+    Column result = transformer.getColumn();
+    Assert.assertEquals(1, result.getPositionCount());
+    Assert.assertArrayEquals(
+        hexStringToByteArray("d41d8cd98f00b204e9800998ecf8427e"), result.getBinary(0).getValues());
+  }
+}
diff --git a/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/transformation/dag/column/unary/scalar/hashing/Murmur3ColumnTransformerTest.java b/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/transformation/dag/column/unary/scalar/hashing/Murmur3ColumnTransformerTest.java
new file mode 100644
index 000000000000..d4d7d675a26f
--- /dev/null
+++ b/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/transformation/dag/column/unary/scalar/hashing/Murmur3ColumnTransformerTest.java
@@ -0,0 +1,224 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+// Unit tests for the MURMUR3 codec strategy evaluated through
+// GenericCodecColumnTransformer; grouped under the "hashing" test package.
+package org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar.hashing;
+
+import org.apache.iotdb.db.queryengine.transformation.dag.column.ColumnTransformer;
+import org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar.GenericCodecColumnTransformer;
+import org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar.factory.CodecStrategiesFactory;
+
+import com.google.common.hash.Hashing;
+import org.apache.tsfile.block.column.Column;
+import org.apache.tsfile.common.conf.TSFileConfig;
+import org.apache.tsfile.read.common.block.column.BinaryColumn;
+import org.apache.tsfile.read.common.type.BlobType;
+import org.apache.tsfile.utils.Binary;
+import org.junit.Assert;
+import org.junit.Test;
+import org.mockito.Mockito;
+
+import java.util.Optional;
+
+import static org.apache.tsfile.read.common.type.StringType.STRING;
+
+public class Murmur3ColumnTransformerTest {
+
+  /** Returns a mocked upstream transformer whose getColumn() always yields {@code column}. */
+  private ColumnTransformer mockChildColumnTransformer(Column column) {
+    ColumnTransformer upstream = Mockito.mock(ColumnTransformer.class);
+    Mockito.when(upstream.getColumn()).thenReturn(column);
+    Mockito.doNothing().when(upstream).tryEvaluate();
+    Mockito.doNothing().when(upstream).clearCache();
+    Mockito.doNothing().when(upstream).evaluateWithSelection(Mockito.any());
+    return upstream;
+  }
+
+  /** Computes the reference Murmur3_128 hash used as the expected value in assertions. */
+  private static byte[] murmur3Of(byte[] data) {
+    return Hashing.murmur3_128().hashBytes(data).asBytes();
+  }
+
+  /**
+   * Decodes a hex string into bytes so arbitrary BLOB inputs can be written in a
+   * human-readable form.
+   */
+  private static byte[] hexStringToByteArray(String s) {
+    if (s.length() % 2 != 0) {
+      throw new IllegalArgumentException("Hex string must have an even number of characters");
+    }
+    byte[] decoded = new byte[s.length() / 2];
+    for (int pos = 0; pos < decoded.length; pos++) {
+      decoded[pos] =
+          (byte)
+              ((Character.digit(s.charAt(2 * pos), 16) << 4)
+                  + Character.digit(s.charAt(2 * pos + 1), 16));
+    }
+    return decoded;
+  }
+
+  /** MURMUR3 over a STRING input must yield the 16-byte Murmur3_128 hash as a BLOB. */
+  @Test
+  public void testMurmur3FromString() {
+    byte[] payload = "Apache IoTDB".getBytes(TSFileConfig.STRING_CHARSET);
+    Column input = new BinaryColumn(1, Optional.empty(), new Binary[] {new Binary(payload)});
+
+    GenericCodecColumnTransformer transformer =
+        new GenericCodecColumnTransformer(
+            BlobType.BLOB,
+            mockChildColumnTransformer(input),
+            CodecStrategiesFactory.MURMUR3,
+            "murmur3",
+            STRING);
+    transformer.addReferenceCount();
+    transformer.evaluate();
+
+    Column result = transformer.getColumn();
+    Assert.assertEquals(1, result.getPositionCount());
+    Assert.assertArrayEquals(murmur3Of(payload), result.getBinary(0).getValues());
+  }
+
+  /** MURMUR3 over a BLOB input (written as hex) must yield the Murmur3_128 hash as a BLOB. */
+  @Test
+  public void testMurmur3FromBlob() {
+    // Hex encoding of the string "Apache IoTDB".
+    byte[] payload = hexStringToByteArray("41706163686520496f544442");
+    Column input = new BinaryColumn(1, Optional.empty(), new Binary[] {new Binary(payload)});
+
+    GenericCodecColumnTransformer transformer =
+        new GenericCodecColumnTransformer(
+            BlobType.BLOB,
+            mockChildColumnTransformer(input),
+            CodecStrategiesFactory.MURMUR3,
+            "murmur3",
+            BlobType.BLOB);
+    transformer.addReferenceCount();
+    transformer.evaluate();
+
+    Column result = transformer.getColumn();
+    Assert.assertEquals(1, result.getPositionCount());
+    Assert.assertArrayEquals(murmur3Of(payload), result.getBinary(0).getValues());
+  }
+
+  /** A null input row must propagate as a null output row; other rows are hashed normally. */
+  @Test
+  public void testMurmur3MultiRowsWithNull() {
+    byte[] first = "hello".getBytes(TSFileConfig.STRING_CHARSET);
+    byte[] third = "world".getBytes(TSFileConfig.STRING_CHARSET);
+    Binary[] rows = new Binary[] {new Binary(first), null, new Binary(third)};
+    Column input = new BinaryColumn(3, Optional.of(new boolean[] {false, true, false}), rows);
+
+    GenericCodecColumnTransformer transformer =
+        new GenericCodecColumnTransformer(
+            BlobType.BLOB,
+            mockChildColumnTransformer(input),
+            CodecStrategiesFactory.MURMUR3,
+            "murmur3",
+            STRING);
+    transformer.addReferenceCount();
+    transformer.evaluate();
+
+    Column result = transformer.getColumn();
+    Assert.assertEquals(3, result.getPositionCount());
+    Assert.assertFalse(result.isNull(0));
+    Assert.assertArrayEquals(murmur3Of(first), result.getBinary(0).getValues());
+    Assert.assertTrue(result.isNull(1));
+    Assert.assertFalse(result.isNull(2));
+    Assert.assertArrayEquals(murmur3Of(third), result.getBinary(2).getValues());
+  }
+
+  /** Rows excluded by the selection mask must come back as nulls. */
+  @Test
+  public void testMurmur3WithSelection() {
+    byte[] first = "Apache".getBytes(TSFileConfig.STRING_CHARSET);
+    byte[] second = "IoTDB".getBytes(TSFileConfig.STRING_CHARSET);
+    byte[] third = "rocks".getBytes(TSFileConfig.STRING_CHARSET);
+    Binary[] rows = {new Binary(first), new Binary(second), new Binary(third)};
+    Column input = new BinaryColumn(3, Optional.empty(), rows);
+
+    GenericCodecColumnTransformer transformer =
+        new GenericCodecColumnTransformer(
+            BlobType.BLOB,
+            mockChildColumnTransformer(input),
+            CodecStrategiesFactory.MURMUR3,
+            "murmur3",
+            STRING);
+    transformer.addReferenceCount();
+    // Only rows 0 and 2 are selected; row 1 must stay null in the output.
+    transformer.evaluateWithSelection(new boolean[] {true, false, true});
+
+    Column result = transformer.getColumn();
+    Assert.assertEquals(3, result.getPositionCount());
+    Assert.assertFalse(result.isNull(0));
+    Assert.assertArrayEquals(murmur3Of(first), result.getBinary(0).getValues());
+    Assert.assertTrue(result.isNull(1));
+    Assert.assertFalse(result.isNull(2));
+    Assert.assertArrayEquals(murmur3Of(third), result.getBinary(2).getValues());
+  }
+
+  /** Murmur3_128 of empty input with the default seed is 16 zero bytes. */
+  @Test
+  public void testMurmur3EmptyInput() {
+    byte[] payload = "".getBytes(TSFileConfig.STRING_CHARSET);
+    Column input = new BinaryColumn(1, Optional.empty(), new Binary[] {new Binary(payload)});
+
+    GenericCodecColumnTransformer transformer =
+        new GenericCodecColumnTransformer(
+            BlobType.BLOB,
+            mockChildColumnTransformer(input),
+            CodecStrategiesFactory.MURMUR3,
+            "murmur3",
+            STRING);
+    transformer.addReferenceCount();
+    transformer.evaluate();
+
+    Column result = transformer.getColumn();
+    Assert.assertEquals(1, result.getPositionCount());
+    // 128 bits = 16 bytes, all zero for empty input with seed 0.
+    Assert.assertArrayEquals(new byte[16], result.getBinary(0).getValues());
+  }
+}
diff --git a/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/transformation/dag/column/unary/scalar/hashing/Sha1ColumnTransformerTest.java b/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/transformation/dag/column/unary/scalar/hashing/Sha1ColumnTransformerTest.java
new file mode 100644
index 000000000000..c74f13e89165
--- /dev/null
+++ b/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/transformation/dag/column/unary/scalar/hashing/Sha1ColumnTransformerTest.java
@@ -0,0 +1,225 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+// Unit tests for the SHA-1 codec strategy applied through
+// GenericCodecColumnTransformer (see CodecStrategiesFactory.SHA1).
+package org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar.hashing;
+
+import org.apache.iotdb.db.queryengine.transformation.dag.column.ColumnTransformer;
+import org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar.GenericCodecColumnTransformer;
+import org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar.factory.CodecStrategiesFactory;
+
+import com.google.common.hash.Hashing;
+import org.apache.tsfile.block.column.Column;
+import org.apache.tsfile.common.conf.TSFileConfig;
+import org.apache.tsfile.read.common.block.column.BinaryColumn;
+import org.apache.tsfile.read.common.type.BlobType;
+import org.apache.tsfile.utils.Binary;
+import org.junit.Assert;
+import org.junit.Test;
+import org.mockito.Mockito;
+
+import java.util.Optional;
+
+import static org.apache.tsfile.read.common.type.StringType.STRING;
+
+public class Sha1ColumnTransformerTest {
+
+ // Helper method to mock a child ColumnTransformer that returns a predefined Column.
+ private ColumnTransformer mockChildColumnTransformer(Column column) {
+ ColumnTransformer mockColumnTransformer = Mockito.mock(ColumnTransformer.class);
+ Mockito.when(mockColumnTransformer.getColumn()).thenReturn(column);
+ Mockito.doNothing().when(mockColumnTransformer).tryEvaluate();
+ Mockito.doNothing().when(mockColumnTransformer).clearCache();
+ Mockito.doNothing().when(mockColumnTransformer).evaluateWithSelection(Mockito.any());
+ return mockColumnTransformer;
+ }
+
+ /**
+ * Helper method to convert a hex string to a byte array. This is crucial for tests involving
+ * binary data for two reasons: 1. It allows defining arbitrary binary inputs (like BLOBs) in a
+ * human-readable format. 2. It allows defining expected binary outputs (like standard hash
+ * values) from their readable hex representations for assertions.
+ */
+ private static byte[] hexStringToByteArray(String s) {
+ int len = s.length();
+ if (len % 2 != 0) {
+ throw new IllegalArgumentException("Hex string must have an even number of characters");
+ }
+ byte[] data = new byte[len / 2];
+ for (int i = 0; i < len; i += 2) {
+ data[i / 2] =
+ (byte) ((Character.digit(s.charAt(i), 16) << 4) + Character.digit(s.charAt(i + 1), 16));
+ }
+ return data;
+ }
+
+ /** Test SHA1 from a STRING input. Output should be BLOB. */
+ @Test
+ public void testSha1FromString() {
+ String input = "Apache IoTDB";
+ byte[] inputBytes = input.getBytes(TSFileConfig.STRING_CHARSET);
+ Binary[] values = new Binary[] {new Binary(inputBytes)};
+ Column binaryColumn = new BinaryColumn(values.length, Optional.empty(), values);
+
+ ColumnTransformer childColumnTransformer = mockChildColumnTransformer(binaryColumn);
+ GenericCodecColumnTransformer transformer =
+ new GenericCodecColumnTransformer(
+ BlobType.BLOB, // returnType
+ childColumnTransformer,
+ CodecStrategiesFactory.SHA1,
+ "sha1",
+ STRING // inputType
+ );
+ transformer.addReferenceCount();
+ transformer.evaluate();
+ Column result = transformer.getColumn();
+
+ byte[] expectedHash = Hashing.sha1().hashBytes(inputBytes).asBytes();
+ Assert.assertEquals(1, result.getPositionCount());
+ Assert.assertArrayEquals(expectedHash, result.getBinary(0).getValues());
+ }
+
+ /** Test SHA1 from a BLOB input (represented by hex). Output should be BLOB. */
+ @Test
+ public void testSha1FromBlob() {
+ String inputHex = "41706163686520496f544442"; // "Apache IoTDB" in hex
+ byte[] inputBytes = hexStringToByteArray(inputHex);
+ Binary[] values = new Binary[] {new Binary(inputBytes)};
+ Column binaryColumn = new BinaryColumn(values.length, Optional.empty(), values);
+
+ ColumnTransformer childColumnTransformer = mockChildColumnTransformer(binaryColumn);
+ GenericCodecColumnTransformer transformer =
+ new GenericCodecColumnTransformer(
+ BlobType.BLOB, // returnType
+ childColumnTransformer,
+ CodecStrategiesFactory.SHA1,
+ "sha1",
+ BlobType.BLOB // inputType: BLOB, to simulate a true binary input
+ );
+ transformer.addReferenceCount();
+ transformer.evaluate();
+ Column result = transformer.getColumn();
+
+ byte[] expectedHash = Hashing.sha1().hashBytes(inputBytes).asBytes(); // Guava reference value
+ Assert.assertEquals(1, result.getPositionCount());
+ Assert.assertArrayEquals(expectedHash, result.getBinary(0).getValues());
+ }
+
+ /** Test SHA1 with multiple rows, including a null value. */
+ @Test
+ public void testSha1MultiRowsWithNull() {
+ byte[] bytes1 = "hello".getBytes(TSFileConfig.STRING_CHARSET);
+ byte[] bytes2 = "world".getBytes(TSFileConfig.STRING_CHARSET);
+
+ Binary[] values = new Binary[] {new Binary(bytes1), null, new Binary(bytes2)};
+ boolean[] valueIsNull = new boolean[] {false, true, false};
+ Column binaryColumn = new BinaryColumn(values.length, Optional.of(valueIsNull), values);
+ ColumnTransformer childColumnTransformer = mockChildColumnTransformer(binaryColumn);
+
+ GenericCodecColumnTransformer transformer =
+ new GenericCodecColumnTransformer(
+ BlobType.BLOB, // returnType
+ childColumnTransformer,
+ CodecStrategiesFactory.SHA1,
+ "sha1",
+ STRING // inputType
+ );
+ transformer.addReferenceCount();
+ transformer.evaluate();
+ Column result = transformer.getColumn();
+
+ byte[] expected1 = Hashing.sha1().hashBytes(bytes1).asBytes();
+ byte[] expected3 = Hashing.sha1().hashBytes(bytes2).asBytes();
+
+ Assert.assertEquals(3, result.getPositionCount());
+ Assert.assertFalse(result.isNull(0));
+ Assert.assertArrayEquals(expected1, result.getBinary(0).getValues());
+ Assert.assertTrue(result.isNull(1));
+ Assert.assertFalse(result.isNull(2));
+ Assert.assertArrayEquals(expected3, result.getBinary(2).getValues());
+ }
+
+ /** Test SHA1 with a selection array to process only a subset of rows. */
+ @Test
+ public void testSha1WithSelection() {
+ byte[] bytes1 = "Apache".getBytes(TSFileConfig.STRING_CHARSET);
+ byte[] bytes2 = "IoTDB".getBytes(TSFileConfig.STRING_CHARSET);
+ byte[] bytes3 = "rocks".getBytes(TSFileConfig.STRING_CHARSET);
+
+ Binary[] values = {new Binary(bytes1), new Binary(bytes2), new Binary(bytes3)};
+ Column binaryColumn = new BinaryColumn(values.length, Optional.empty(), values);
+ ColumnTransformer child = mockChildColumnTransformer(binaryColumn);
+
+ GenericCodecColumnTransformer transformer =
+ new GenericCodecColumnTransformer(
+ BlobType.BLOB, // returnType
+ child,
+ CodecStrategiesFactory.SHA1,
+ "sha1",
+ STRING // inputType
+ );
+ transformer.addReferenceCount();
+
+ boolean[] selection = {true, false, true}; // select only rows 0 and 2 for processing
+ transformer.evaluateWithSelection(selection);
+ Column result = transformer.getColumn();
+
+ byte[] expected1 = Hashing.sha1().hashBytes(bytes1).asBytes();
+ byte[] expected3 = Hashing.sha1().hashBytes(bytes3).asBytes();
+
+ Assert.assertEquals(3, result.getPositionCount());
+ Assert.assertFalse(result.isNull(0));
+ Assert.assertArrayEquals(expected1, result.getBinary(0).getValues());
+ Assert.assertTrue(result.isNull(1)); // unselected row must come back as null
+ Assert.assertFalse(result.isNull(2));
+ Assert.assertArrayEquals(expected3, result.getBinary(2).getValues());
+ }
+
+ /**
+ * Test SHA1 with an empty binary input. The result is compared against the well-known, standard
+ * hash for an empty string.
+ */
+ @Test
+ public void testSha1EmptyInput() {
+ byte[] inputBytes = "".getBytes(TSFileConfig.STRING_CHARSET);
+ Binary[] values = new Binary[] {new Binary(inputBytes)};
+ Column binaryColumn = new BinaryColumn(values.length, Optional.empty(), values);
+
+ ColumnTransformer childColumnTransformer = mockChildColumnTransformer(binaryColumn);
+ GenericCodecColumnTransformer transformer =
+ new GenericCodecColumnTransformer(
+ BlobType.BLOB, // returnType
+ childColumnTransformer,
+ CodecStrategiesFactory.SHA1,
+ "sha1",
+ STRING // inputType
+ );
+ transformer.addReferenceCount();
+ transformer.evaluate();
+ Column result = transformer.getColumn();
+
+ // The known SHA-1 hash of an empty string.
+ String knownEmptyHashHex = "da39a3ee5e6b4b0d3255bfef95601890afd80709";
+ byte[] expectedHash = hexStringToByteArray(knownEmptyHashHex);
+
+ Assert.assertEquals(1, result.getPositionCount());
+ Assert.assertArrayEquals(expectedHash, result.getBinary(0).getValues());
+ }
+}
diff --git a/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/transformation/dag/column/unary/scalar/hashing/Sha256ColumnTransformerTest.java b/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/transformation/dag/column/unary/scalar/hashing/Sha256ColumnTransformerTest.java
new file mode 100644
index 000000000000..c0e177d77436
--- /dev/null
+++ b/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/transformation/dag/column/unary/scalar/hashing/Sha256ColumnTransformerTest.java
@@ -0,0 +1,229 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+// Unit tests for the SHA-256 codec strategy applied through
+// GenericCodecColumnTransformer (see CodecStrategiesFactory.SHA256).
+package org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar.hashing;
+
+import org.apache.iotdb.db.queryengine.transformation.dag.column.ColumnTransformer;
+import org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar.GenericCodecColumnTransformer;
+import org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar.factory.CodecStrategiesFactory;
+
+import com.google.common.hash.Hashing;
+import org.apache.tsfile.block.column.Column;
+import org.apache.tsfile.common.conf.TSFileConfig;
+import org.apache.tsfile.read.common.block.column.BinaryColumn;
+import org.apache.tsfile.read.common.type.BlobType;
+import org.apache.tsfile.utils.Binary;
+import org.junit.Assert;
+import org.junit.Test;
+import org.mockito.Mockito;
+
+import java.util.Optional;
+
+import static org.apache.tsfile.read.common.type.StringType.STRING;
+
+public class Sha256ColumnTransformerTest {
+
+ // Helper method to mock a child ColumnTransformer that returns a predefined Column.
+ private ColumnTransformer mockChildColumnTransformer(Column column) {
+ ColumnTransformer mockColumnTransformer = Mockito.mock(ColumnTransformer.class);
+ Mockito.when(mockColumnTransformer.getColumn()).thenReturn(column);
+ Mockito.doNothing().when(mockColumnTransformer).tryEvaluate();
+ Mockito.doNothing().when(mockColumnTransformer).clearCache();
+ Mockito.doNothing().when(mockColumnTransformer).evaluateWithSelection(Mockito.any());
+ return mockColumnTransformer;
+ }
+
+ /**
+ * Helper method to convert a hex string to a byte array. This is crucial for tests involving
+ * binary data for two reasons: 1. It allows defining arbitrary binary inputs (like BLOBs) in a
+ * human-readable format. 2. It allows defining expected binary outputs (like standard hash
+ * values) from their readable hex representations for assertions.
+ */
+ private static byte[] hexStringToByteArray(String s) {
+ int len = s.length();
+ if (len % 2 != 0) {
+ throw new IllegalArgumentException("Hex string must have an even number of characters");
+ }
+ byte[] data = new byte[len / 2];
+ for (int i = 0; i < len; i += 2) {
+ data[i / 2] =
+ (byte) ((Character.digit(s.charAt(i), 16) << 4) + Character.digit(s.charAt(i + 1), 16));
+ }
+ return data;
+ }
+
+ /** Test SHA256 from a STRING input. Output should be BLOB. */
+ @Test
+ public void testSha256FromString() {
+ String input = "Apache IoTDB";
+ byte[] inputBytes = input.getBytes(TSFileConfig.STRING_CHARSET);
+ Binary[] values = new Binary[] {new Binary(inputBytes)};
+ Column binaryColumn = new BinaryColumn(values.length, Optional.empty(), values);
+
+ ColumnTransformer childColumnTransformer = mockChildColumnTransformer(binaryColumn);
+ GenericCodecColumnTransformer transformer =
+ new GenericCodecColumnTransformer(
+ BlobType.BLOB, // returnType
+ childColumnTransformer,
+ CodecStrategiesFactory.SHA256,
+ "sha256",
+ STRING // inputType
+ );
+ transformer.addReferenceCount();
+ transformer.evaluate();
+ Column result = transformer.getColumn();
+
+ byte[] expectedHash = Hashing.sha256().hashBytes(inputBytes).asBytes();
+ Assert.assertEquals(1, result.getPositionCount());
+ Assert.assertArrayEquals(expectedHash, result.getBinary(0).getValues());
+ }
+
+ /** Test SHA256 from a BLOB input (represented by hex). Output should be BLOB. */
+ @Test
+ public void testSha256FromBlob() {
+ // "Apache IoTDB" in hex
+ String inputHex = "41706163686520496f544442";
+ byte[] inputBytes = hexStringToByteArray(inputHex);
+ Binary[] values = new Binary[] {new Binary(inputBytes)};
+ Column binaryColumn = new BinaryColumn(values.length, Optional.empty(), values);
+
+ ColumnTransformer childColumnTransformer = mockChildColumnTransformer(binaryColumn);
+ // Set inputType to BLOB to correctly simulate a blob input
+ GenericCodecColumnTransformer transformer =
+ new GenericCodecColumnTransformer(
+ BlobType.BLOB, // returnType
+ childColumnTransformer,
+ CodecStrategiesFactory.SHA256,
+ "sha256",
+ BlobType.BLOB // inputType
+ );
+ transformer.addReferenceCount();
+ transformer.evaluate();
+ Column result = transformer.getColumn();
+
+ byte[] expectedHash = Hashing.sha256().hashBytes(inputBytes).asBytes();
+ Assert.assertEquals(1, result.getPositionCount());
+ Assert.assertArrayEquals(expectedHash, result.getBinary(0).getValues());
+ }
+
+ /** Test SHA256 with multiple rows, including a null value. */
+ @Test
+ public void testSha256MultiRowsWithNull() {
+ byte[] bytes1 = "hello".getBytes(TSFileConfig.STRING_CHARSET);
+ byte[] bytes2 = "world".getBytes(TSFileConfig.STRING_CHARSET);
+
+ Binary[] values = new Binary[] {new Binary(bytes1), null, new Binary(bytes2)};
+ boolean[] valueIsNull = new boolean[] {false, true, false};
+ Column binaryColumn = new BinaryColumn(values.length, Optional.of(valueIsNull), values);
+ ColumnTransformer childColumnTransformer = mockChildColumnTransformer(binaryColumn);
+
+ GenericCodecColumnTransformer transformer =
+ new GenericCodecColumnTransformer(
+ BlobType.BLOB, // returnType
+ childColumnTransformer,
+ CodecStrategiesFactory.SHA256,
+ "sha256",
+ STRING // inputType
+ );
+ transformer.addReferenceCount();
+ transformer.evaluate();
+ Column result = transformer.getColumn();
+
+ byte[] expected1 = Hashing.sha256().hashBytes(bytes1).asBytes();
+ byte[] expected3 = Hashing.sha256().hashBytes(bytes2).asBytes();
+
+ Assert.assertEquals(3, result.getPositionCount());
+ Assert.assertFalse(result.isNull(0));
+ Assert.assertArrayEquals(expected1, result.getBinary(0).getValues());
+ Assert.assertTrue(result.isNull(1));
+ Assert.assertFalse(result.isNull(2));
+ Assert.assertArrayEquals(expected3, result.getBinary(2).getValues());
+ }
+
+ /** Test SHA256 with a selection array to process only a subset of rows. */
+ @Test
+ public void testSha256WithSelection() {
+ byte[] bytes1 = "Apache".getBytes(TSFileConfig.STRING_CHARSET);
+ byte[] bytes2 = "IoTDB".getBytes(TSFileConfig.STRING_CHARSET);
+ byte[] bytes3 = "rocks".getBytes(TSFileConfig.STRING_CHARSET);
+
+ Binary[] values = {new Binary(bytes1), new Binary(bytes2), new Binary(bytes3)};
+ Column binaryColumn = new BinaryColumn(values.length, Optional.empty(), values);
+ ColumnTransformer child = mockChildColumnTransformer(binaryColumn);
+
+ GenericCodecColumnTransformer transformer =
+ new GenericCodecColumnTransformer(
+ BlobType.BLOB, // returnType
+ child,
+ CodecStrategiesFactory.SHA256,
+ "sha256",
+ STRING // inputType
+ );
+ transformer.addReferenceCount();
+
+ // Select only the second and third rows for processing.
+ boolean[] selection = {false, true, true};
+ transformer.evaluateWithSelection(selection);
+ Column result = transformer.getColumn();
+
+ byte[] expected2 = Hashing.sha256().hashBytes(bytes2).asBytes();
+ byte[] expected3 = Hashing.sha256().hashBytes(bytes3).asBytes();
+
+ Assert.assertEquals(3, result.getPositionCount());
+ // The first row was not selected, so it should be null in the result.
+ Assert.assertTrue(result.isNull(0));
+ Assert.assertFalse(result.isNull(1));
+ Assert.assertArrayEquals(expected2, result.getBinary(1).getValues());
+ Assert.assertFalse(result.isNull(2));
+ Assert.assertArrayEquals(expected3, result.getBinary(2).getValues());
+ }
+
+ /**
+ * Test SHA256 with an empty binary input. The result is compared against the well-known, standard
+ * hash for an empty string. This verifies the correctness of the hash implementation itself.
+ */
+ @Test
+ public void testSha256EmptyInput() {
+ byte[] inputBytes = "".getBytes(TSFileConfig.STRING_CHARSET);
+ Binary[] values = new Binary[] {new Binary(inputBytes)};
+ Column binaryColumn = new BinaryColumn(values.length, Optional.empty(), values);
+
+ ColumnTransformer childColumnTransformer = mockChildColumnTransformer(binaryColumn);
+ GenericCodecColumnTransformer transformer =
+ new GenericCodecColumnTransformer(
+ BlobType.BLOB, // returnType
+ childColumnTransformer,
+ CodecStrategiesFactory.SHA256,
+ "sha256",
+ STRING // inputType
+ );
+ transformer.addReferenceCount();
+ transformer.evaluate();
+ Column result = transformer.getColumn();
+
+ // The known SHA-256 hash of an empty string (or zero-length byte array).
+ String knownEmptyHashHex = "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855";
+ byte[] expectedHash = hexStringToByteArray(knownEmptyHashHex);
+
+ Assert.assertEquals(1, result.getPositionCount());
+ Assert.assertArrayEquals(expectedHash, result.getBinary(0).getValues());
+ }
+}
diff --git a/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/transformation/dag/column/unary/scalar/hashing/Sha512ColumnTransformerTest.java b/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/transformation/dag/column/unary/scalar/hashing/Sha512ColumnTransformerTest.java
new file mode 100644
index 000000000000..9b65799b908c
--- /dev/null
+++ b/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/transformation/dag/column/unary/scalar/hashing/Sha512ColumnTransformerTest.java
@@ -0,0 +1,224 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar.hashing;
+
+import org.apache.iotdb.db.queryengine.transformation.dag.column.ColumnTransformer;
+import org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar.GenericCodecColumnTransformer;
+import org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar.factory.CodecStrategiesFactory;
+
+import com.google.common.hash.Hashing;
+import org.apache.tsfile.block.column.Column;
+import org.apache.tsfile.common.conf.TSFileConfig;
+import org.apache.tsfile.read.common.block.column.BinaryColumn;
+import org.apache.tsfile.read.common.type.BlobType;
+import org.apache.tsfile.utils.Binary;
+import org.junit.Assert;
+import org.junit.Test;
+import org.mockito.Mockito;
+
+import java.util.Optional;
+
+import static org.apache.tsfile.read.common.type.StringType.STRING;
+
+public class Sha512ColumnTransformerTest {
+
+ // Helper method to mock a child ColumnTransformer that returns a predefined Column.
+ private ColumnTransformer mockChildColumnTransformer(Column column) {
+ ColumnTransformer mockColumnTransformer = Mockito.mock(ColumnTransformer.class);
+ Mockito.when(mockColumnTransformer.getColumn()).thenReturn(column);
+ Mockito.doNothing().when(mockColumnTransformer).tryEvaluate();
+ Mockito.doNothing().when(mockColumnTransformer).clearCache();
+ Mockito.doNothing().when(mockColumnTransformer).evaluateWithSelection(Mockito.any());
+ return mockColumnTransformer;
+ }
+
+ /**
+ * Helper method to convert a hex string to a byte array. This is crucial for tests involving
+ * binary data for two reasons: 1. It allows defining arbitrary binary inputs (like BLOBs) in a
+ * human-readable format. 2. It allows defining expected binary outputs (like standard hash
+ * values) from their readable hex representations for assertions.
+ */
+ private static byte[] hexStringToByteArray(String s) {
+ int len = s.length();
+ if (len % 2 != 0) {
+ throw new IllegalArgumentException("Hex string must have an even number of characters");
+ }
+ byte[] data = new byte[len / 2];
+ for (int i = 0; i < len; i += 2) {
+ data[i / 2] =
+ (byte) ((Character.digit(s.charAt(i), 16) << 4) + Character.digit(s.charAt(i + 1), 16));
+ }
+ return data;
+ }
+
+ /** Test SHA512 from a STRING input. Output should be BLOB. */
+ @Test
+ public void testSha512FromString() {
+ String input = "Apache IoTDB";
+ byte[] inputBytes = input.getBytes(TSFileConfig.STRING_CHARSET);
+ Binary[] values = new Binary[] {new Binary(inputBytes)};
+ Column binaryColumn = new BinaryColumn(values.length, Optional.empty(), values);
+
+ ColumnTransformer childColumnTransformer = mockChildColumnTransformer(binaryColumn);
+ GenericCodecColumnTransformer transformer =
+ new GenericCodecColumnTransformer(
+ BlobType.BLOB, // returnType
+ childColumnTransformer,
+ CodecStrategiesFactory.SHA512,
+ "sha512",
+ STRING // inputType
+ );
+ transformer.addReferenceCount();
+ transformer.evaluate();
+ Column result = transformer.getColumn();
+
+ byte[] expectedHash = Hashing.sha512().hashBytes(inputBytes).asBytes();
+ Assert.assertEquals(1, result.getPositionCount());
+ Assert.assertArrayEquals(expectedHash, result.getBinary(0).getValues());
+ }
+
+ /** Test SHA512 from a BLOB input (represented by hex). Output should be BLOB. */
+ @Test
+ public void testSha512FromBlob() {
+ String inputHex = "41706163686520496f544442"; // "Apache IoTDB" in hex
+ byte[] inputBytes = hexStringToByteArray(inputHex);
+ Binary[] values = new Binary[] {new Binary(inputBytes)};
+ Column binaryColumn = new BinaryColumn(values.length, Optional.empty(), values);
+
+ ColumnTransformer childColumnTransformer = mockChildColumnTransformer(binaryColumn);
+ GenericCodecColumnTransformer transformer =
+ new GenericCodecColumnTransformer(
+ BlobType.BLOB, // returnType
+ childColumnTransformer,
+ CodecStrategiesFactory.SHA512,
+ "sha512",
+ BlobType.BLOB // inputType: BLOB, to simulate a true binary input
+ );
+ transformer.addReferenceCount();
+ transformer.evaluate();
+ Column result = transformer.getColumn();
+
+ byte[] expectedHash = Hashing.sha512().hashBytes(inputBytes).asBytes(); // Guava reference value
+ Assert.assertEquals(1, result.getPositionCount());
+ Assert.assertArrayEquals(expectedHash, result.getBinary(0).getValues());
+ }
+
+ /** Test SHA512 with multiple rows, including a null value. */
+ @Test
+ public void testSha512MultiRowsWithNull() {
+ byte[] bytes1 = "hello".getBytes(TSFileConfig.STRING_CHARSET);
+ byte[] bytes2 = "world".getBytes(TSFileConfig.STRING_CHARSET);
+
+ Binary[] values = new Binary[] {new Binary(bytes1), null, new Binary(bytes2)};
+ boolean[] valueIsNull = new boolean[] {false, true, false};
+ Column binaryColumn = new BinaryColumn(values.length, Optional.of(valueIsNull), values);
+ ColumnTransformer childColumnTransformer = mockChildColumnTransformer(binaryColumn);
+
+ GenericCodecColumnTransformer transformer =
+ new GenericCodecColumnTransformer(
+ BlobType.BLOB, // returnType
+ childColumnTransformer,
+ CodecStrategiesFactory.SHA512,
+ "sha512",
+ STRING // inputType
+ );
+ transformer.addReferenceCount();
+ transformer.evaluate();
+ Column result = transformer.getColumn();
+
+ byte[] expected1 = Hashing.sha512().hashBytes(bytes1).asBytes();
+ byte[] expected3 = Hashing.sha512().hashBytes(bytes2).asBytes();
+
+ Assert.assertEquals(3, result.getPositionCount());
+ Assert.assertFalse(result.isNull(0));
+ Assert.assertArrayEquals(expected1, result.getBinary(0).getValues());
+ Assert.assertTrue(result.isNull(1));
+ Assert.assertFalse(result.isNull(2));
+ Assert.assertArrayEquals(expected3, result.getBinary(2).getValues());
+ }
+
+ /** Test SHA512 with a selection array to process only a subset of rows. */
+ @Test
+ public void testSha512WithSelection() {
+ byte[] bytes1 = "Apache".getBytes(TSFileConfig.STRING_CHARSET);
+ byte[] bytes2 = "IoTDB".getBytes(TSFileConfig.STRING_CHARSET);
+ byte[] bytes3 = "rocks".getBytes(TSFileConfig.STRING_CHARSET);
+
+ Binary[] values = {new Binary(bytes1), new Binary(bytes2), new Binary(bytes3)};
+ Column binaryColumn = new BinaryColumn(values.length, Optional.empty(), values);
+ ColumnTransformer child = mockChildColumnTransformer(binaryColumn);
+
+ GenericCodecColumnTransformer transformer =
+ new GenericCodecColumnTransformer(
+ BlobType.BLOB, // returnType
+ child,
+ CodecStrategiesFactory.SHA512,
+ "sha512",
+ STRING // inputType
+ );
+ transformer.addReferenceCount();
+
+ boolean[] selection = {true, false, true}; // select only rows 0 and 2 for processing
+ transformer.evaluateWithSelection(selection);
+ Column result = transformer.getColumn();
+
+ byte[] expected1 = Hashing.sha512().hashBytes(bytes1).asBytes();
+ byte[] expected3 = Hashing.sha512().hashBytes(bytes3).asBytes();
+
+ Assert.assertEquals(3, result.getPositionCount());
+ Assert.assertFalse(result.isNull(0));
+ Assert.assertArrayEquals(expected1, result.getBinary(0).getValues());
+ Assert.assertTrue(result.isNull(1)); // unselected row must come back as null
+ Assert.assertFalse(result.isNull(2));
+ Assert.assertArrayEquals(expected3, result.getBinary(2).getValues());
+ }
+
+ /**
+ * Test SHA512 with an empty binary input. The result is compared against the well-known, standard
+ * hash for an empty string.
+ */
+ @Test
+ public void testSha512EmptyInput() {
+ byte[] inputBytes = "".getBytes(TSFileConfig.STRING_CHARSET);
+ Binary[] values = new Binary[] {new Binary(inputBytes)};
+ Column binaryColumn = new BinaryColumn(values.length, Optional.empty(), values);
+
+ ColumnTransformer childColumnTransformer = mockChildColumnTransformer(binaryColumn);
+ GenericCodecColumnTransformer transformer =
+ new GenericCodecColumnTransformer(
+ BlobType.BLOB, // returnType
+ childColumnTransformer,
+ CodecStrategiesFactory.SHA512,
+ "sha512",
+ STRING // inputType
+ );
+ transformer.addReferenceCount();
+ transformer.evaluate();
+ Column result = transformer.getColumn();
+
+ // The known SHA-512 hash of an empty string.
+ String knownEmptyHashHex =
+ "cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e";
+ byte[] expectedHash = hexStringToByteArray(knownEmptyHashHex);
+
+ Assert.assertEquals(1, result.getPositionCount());
+ Assert.assertArrayEquals(expectedHash, result.getBinary(0).getValues());
+ }
+}
diff --git a/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/transformation/dag/column/unary/scalar/hashing/SpookyHashV2_32ColumnTransformerTest.java b/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/transformation/dag/column/unary/scalar/hashing/SpookyHashV2_32ColumnTransformerTest.java
new file mode 100644
index 000000000000..eabf81d1ddba
--- /dev/null
+++ b/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/transformation/dag/column/unary/scalar/hashing/SpookyHashV2_32ColumnTransformerTest.java
@@ -0,0 +1,183 @@
+package org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar.hashing;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.iotdb.db.queryengine.transformation.dag.column.ColumnTransformer;
+import org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar.GenericCodecColumnTransformer;
+import org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar.factory.CodecStrategiesFactory;
+
+import org.apache.tsfile.block.column.Column;
+import org.apache.tsfile.read.common.block.column.BinaryColumn;
+import org.apache.tsfile.utils.Binary;
+import org.junit.Assert;
+import org.junit.Test;
+import org.mockito.Mockito;
+
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+import java.util.Optional;
+
+import static org.apache.tsfile.read.common.type.BinaryType.TEXT;
+import static org.apache.tsfile.read.common.type.BlobType.BLOB;
+
+public class SpookyHashV2_32ColumnTransformerTest {
+
+  private static final String FUNCTION_NAME = "spooky_hash_v2_32";
+
+  // Helper method to mock a child ColumnTransformer that returns a predefined Column; the
+  // lifecycle methods (tryEvaluate/clearCache/evaluateWithSelection) are stubbed as no-ops.
+  private ColumnTransformer mockChildColumnTransformer(Column column) {
+    ColumnTransformer mockColumnTransformer = Mockito.mock(ColumnTransformer.class);
+    Mockito.when(mockColumnTransformer.getColumn()).thenReturn(column);
+    Mockito.doNothing().when(mockColumnTransformer).tryEvaluate();
+    Mockito.doNothing().when(mockColumnTransformer).clearCache();
+    Mockito.doNothing().when(mockColumnTransformer).evaluateWithSelection(Mockito.any());
+    return mockColumnTransformer;
+  }
+
+  // Helper method to convert an integer to a big-endian 4-byte array (the transformer emits the
+  // 32-bit hash as a 4-byte big-endian BLOB).
+  private byte[] intToBytes(int value) {
+    return ByteBuffer.allocate(4).order(ByteOrder.BIG_ENDIAN).putInt(value).array();
+  }
+
+  /** Test spooky_hash_v2_32 on a TEXT/STRING column. */
+  @Test
+  public void testSpookyHashOnText() {
+    // Expected hash for 'hello' is 0xd382e6ca
+    int expectedHash = 0xd382e6ca;
+    Binary[] values = new Binary[] {new Binary("hello".getBytes())};
+    Column textColumn = new BinaryColumn(values.length, Optional.empty(), values);
+
+    ColumnTransformer child = mockChildColumnTransformer(textColumn);
+    GenericCodecColumnTransformer transformer =
+        new GenericCodecColumnTransformer(
+            BLOB, child, CodecStrategiesFactory.spooky_hash_v2_32, FUNCTION_NAME, TEXT);
+
+    transformer.addReferenceCount();
+    transformer.evaluate();
+    Column result = transformer.getColumn();
+
+    Assert.assertEquals(1, result.getPositionCount());
+    Assert.assertFalse(result.isNull(0));
+    Assert.assertArrayEquals(intToBytes(expectedHash), result.getBinary(0).getValues());
+  }
+
+  /** Test spooky_hash_v2_32 on a BLOB column. */
+  @Test
+  public void testSpookyHashOnBlob() {
+    // Expected hash for blob x'74657374' ('test') is 0xec0d8b75
+    int expectedHash = 0xec0d8b75;
+    byte[] inputBytes = new byte[] {(byte) 0x74, (byte) 0x65, (byte) 0x73, (byte) 0x74};
+    Binary[] values = new Binary[] {new Binary(inputBytes)};
+    Column blobColumn = new BinaryColumn(values.length, Optional.empty(), values);
+
+    ColumnTransformer child = mockChildColumnTransformer(blobColumn);
+    GenericCodecColumnTransformer transformer =
+        new GenericCodecColumnTransformer(
+            BLOB, child, CodecStrategiesFactory.spooky_hash_v2_32, FUNCTION_NAME, BLOB);
+
+    transformer.addReferenceCount();
+    transformer.evaluate();
+    Column result = transformer.getColumn();
+
+    Assert.assertEquals(1, result.getPositionCount());
+    Assert.assertFalse(result.isNull(0));
+    Assert.assertArrayEquals(intToBytes(expectedHash), result.getBinary(0).getValues());
+  }
+
+  /** Test spooky_hash_v2_32 on a column with null values. */
+  @Test
+  public void testSpookyHashWithNull() {
+    // Expected hash for 'world' is 0xaf3fbe25 (low 32 bits of the 64-bit value 0x1cdfd95caf3fbe25L)
+    int expectedHash = 0xaf3fbe25;
+    Binary[] values = new Binary[] {null, new Binary("world".getBytes())};
+    boolean[] isNull = {true, false};
+    Column textColumn = new BinaryColumn(values.length, Optional.of(isNull), values);
+
+    ColumnTransformer child = mockChildColumnTransformer(textColumn);
+    GenericCodecColumnTransformer transformer =
+        new GenericCodecColumnTransformer(
+            BLOB, child, CodecStrategiesFactory.spooky_hash_v2_32, FUNCTION_NAME, TEXT);
+
+    transformer.addReferenceCount();
+    transformer.evaluate();
+    Column result = transformer.getColumn();
+
+    Assert.assertEquals(2, result.getPositionCount());
+    Assert.assertTrue(result.isNull(0));
+    Assert.assertFalse(result.isNull(1));
+    Assert.assertArrayEquals(intToBytes(expectedHash), result.getBinary(1).getValues());
+  }
+
+  /** Test spooky_hash_v2_32 with a selection array. */
+  @Test
+  public void testSpookyHashWithSelection() {
+    // Hash for 'A' is 0xbec890ba (low 32 bits of the 64-bit value 0x317e0d51bec890baL)
+    // Hash for 'C' is 0x9bec4de2 (low 32 bits of the 64-bit value 0x2c358e019bec4de2L)
+    int hashA = 0xbec890ba;
+    int hashC = 0x9bec4de2;
+    Binary[] values = {
+      new Binary("A".getBytes()), new Binary("B".getBytes()), new Binary("C".getBytes())
+    };
+    Column textColumn = new BinaryColumn(values.length, Optional.empty(), values);
+    ColumnTransformer child = mockChildColumnTransformer(textColumn);
+    GenericCodecColumnTransformer transformer =
+        new GenericCodecColumnTransformer(
+            BLOB, child, CodecStrategiesFactory.spooky_hash_v2_32, FUNCTION_NAME, TEXT);
+
+    transformer.addReferenceCount();
+
+    // Select only the first and third elements
+    boolean[] selection = {true, false, true};
+    transformer.evaluateWithSelection(selection);
+    Column result = transformer.getColumn();
+
+    Assert.assertEquals(3, result.getPositionCount());
+    // First element is processed
+    Assert.assertFalse(result.isNull(0));
+    Assert.assertArrayEquals(intToBytes(hashA), result.getBinary(0).getValues());
+    // Second element is skipped (should be null)
+    Assert.assertTrue(result.isNull(1));
+    // Third element is processed
+    Assert.assertFalse(result.isNull(2));
+    Assert.assertArrayEquals(intToBytes(hashC), result.getBinary(2).getValues());
+  }
+
+  /** Test spooky_hash_v2_32 on an empty string. */
+  @Test
+  public void testSpookyHashEmptyString() {
+    // Expected hash for '' (empty string) with default seed is 0x6bf50919
+    int expectedHash = 0x6bf50919;
+    Binary[] values = new Binary[] {new Binary("".getBytes())};
+    Column textColumn = new BinaryColumn(values.length, Optional.empty(), values);
+
+    ColumnTransformer child = mockChildColumnTransformer(textColumn);
+    GenericCodecColumnTransformer transformer =
+        new GenericCodecColumnTransformer(
+            BLOB, child, CodecStrategiesFactory.spooky_hash_v2_32, FUNCTION_NAME, TEXT);
+
+    transformer.addReferenceCount();
+    transformer.evaluate();
+    Column result = transformer.getColumn();
+
+    Assert.assertEquals(1, result.getPositionCount());
+    Assert.assertFalse(result.isNull(0));
+    Assert.assertArrayEquals(intToBytes(expectedHash), result.getBinary(0).getValues());
+  }
+}
diff --git a/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/transformation/dag/column/unary/scalar/hashing/SpookyHashV2_64ColumnTransformerTest.java b/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/transformation/dag/column/unary/scalar/hashing/SpookyHashV2_64ColumnTransformerTest.java
new file mode 100644
index 000000000000..aa374a9031de
--- /dev/null
+++ b/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/transformation/dag/column/unary/scalar/hashing/SpookyHashV2_64ColumnTransformerTest.java
@@ -0,0 +1,183 @@
+package org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar.hashing;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.iotdb.db.queryengine.transformation.dag.column.ColumnTransformer;
+import org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar.GenericCodecColumnTransformer;
+import org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar.factory.CodecStrategiesFactory;
+
+import org.apache.tsfile.block.column.Column;
+import org.apache.tsfile.read.common.block.column.BinaryColumn;
+import org.apache.tsfile.utils.Binary;
+import org.junit.Assert;
+import org.junit.Test;
+import org.mockito.Mockito;
+
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+import java.util.Optional;
+
+import static org.apache.tsfile.read.common.type.BinaryType.TEXT;
+import static org.apache.tsfile.read.common.type.BlobType.BLOB;
+
+public class SpookyHashV2_64ColumnTransformerTest {
+
+  private static final String FUNCTION_NAME = "spooky_hash_v2_64";
+
+  // Helper method to mock a child ColumnTransformer that returns a predefined Column; the
+  // lifecycle methods (tryEvaluate/clearCache/evaluateWithSelection) are stubbed as no-ops.
+  private ColumnTransformer mockChildColumnTransformer(Column column) {
+    ColumnTransformer mockColumnTransformer = Mockito.mock(ColumnTransformer.class);
+    Mockito.when(mockColumnTransformer.getColumn()).thenReturn(column);
+    Mockito.doNothing().when(mockColumnTransformer).tryEvaluate();
+    Mockito.doNothing().when(mockColumnTransformer).clearCache();
+    Mockito.doNothing().when(mockColumnTransformer).evaluateWithSelection(Mockito.any());
+    return mockColumnTransformer;
+  }
+
+  // Helper method to convert a long to a big-endian 8-byte array (the transformer emits the
+  // 64-bit hash as an 8-byte big-endian BLOB).
+  private byte[] longToBytes(long value) {
+    return ByteBuffer.allocate(8).order(ByteOrder.BIG_ENDIAN).putLong(value).array();
+  }
+
+  /** Test spooky_hash_v2_64 on a TEXT/STRING column. */
+  @Test
+  public void testSpookyHashOnText() {
+    // Expected hash for 'hello' is 0x3768826ad382e6caL
+    long expectedHash = 0x3768826ad382e6caL;
+    Binary[] values = new Binary[] {new Binary("hello".getBytes())};
+    Column textColumn = new BinaryColumn(values.length, Optional.empty(), values);
+
+    ColumnTransformer child = mockChildColumnTransformer(textColumn);
+    GenericCodecColumnTransformer transformer =
+        new GenericCodecColumnTransformer(
+            BLOB, child, CodecStrategiesFactory.spooky_hash_v2_64, FUNCTION_NAME, TEXT);
+
+    transformer.addReferenceCount();
+    transformer.evaluate();
+    Column result = transformer.getColumn();
+
+    Assert.assertEquals(1, result.getPositionCount());
+    Assert.assertFalse(result.isNull(0));
+    Assert.assertArrayEquals(longToBytes(expectedHash), result.getBinary(0).getValues());
+  }
+
+  /** Test spooky_hash_v2_64 on a BLOB column. */
+  @Test
+  public void testSpookyHashOnBlob() {
+    // Expected hash for blob x'74657374' ('test') is 0x7b01e8bcec0d8b75L
+    long expectedHash = 0x7b01e8bcec0d8b75L;
+    byte[] inputBytes = new byte[] {(byte) 0x74, (byte) 0x65, (byte) 0x73, (byte) 0x74};
+    Binary[] values = new Binary[] {new Binary(inputBytes)};
+    Column blobColumn = new BinaryColumn(values.length, Optional.empty(), values);
+
+    ColumnTransformer child = mockChildColumnTransformer(blobColumn);
+    GenericCodecColumnTransformer transformer =
+        new GenericCodecColumnTransformer(
+            BLOB, child, CodecStrategiesFactory.spooky_hash_v2_64, FUNCTION_NAME, BLOB);
+
+    transformer.addReferenceCount();
+    transformer.evaluate();
+    Column result = transformer.getColumn();
+
+    Assert.assertEquals(1, result.getPositionCount());
+    Assert.assertFalse(result.isNull(0));
+    Assert.assertArrayEquals(longToBytes(expectedHash), result.getBinary(0).getValues());
+  }
+
+  /** Test spooky_hash_v2_64 on a column with null values. */
+  @Test
+  public void testSpookyHashWithNull() {
+    // Expected hash for 'world' is 0x1cdfd95caf3fbe25L
+    long expectedHash = 0x1cdfd95caf3fbe25L;
+    Binary[] values = new Binary[] {null, new Binary("world".getBytes())};
+    boolean[] isNull = {true, false};
+    Column textColumn = new BinaryColumn(values.length, Optional.of(isNull), values);
+
+    ColumnTransformer child = mockChildColumnTransformer(textColumn);
+    GenericCodecColumnTransformer transformer =
+        new GenericCodecColumnTransformer(
+            BLOB, child, CodecStrategiesFactory.spooky_hash_v2_64, FUNCTION_NAME, TEXT);
+
+    transformer.addReferenceCount();
+    transformer.evaluate();
+    Column result = transformer.getColumn();
+
+    Assert.assertEquals(2, result.getPositionCount());
+    Assert.assertTrue(result.isNull(0));
+    Assert.assertFalse(result.isNull(1));
+    Assert.assertArrayEquals(longToBytes(expectedHash), result.getBinary(1).getValues());
+  }
+
+  /** Test spooky_hash_v2_64 with a selection array. */
+  @Test
+  public void testSpookyHashWithSelection() {
+    // Hash for 'A' is 0x317e0d51bec890ba
+    // Hash for 'C' is 0x2c358e019bec4de2
+    long hashA = 0x317e0d51bec890baL;
+    long hashC = 0x2c358e019bec4de2L;
+    Binary[] values = {
+      new Binary("A".getBytes()), new Binary("B".getBytes()), new Binary("C".getBytes())
+    };
+    Column textColumn = new BinaryColumn(values.length, Optional.empty(), values);
+    ColumnTransformer child = mockChildColumnTransformer(textColumn);
+    GenericCodecColumnTransformer transformer =
+        new GenericCodecColumnTransformer(
+            BLOB, child, CodecStrategiesFactory.spooky_hash_v2_64, FUNCTION_NAME, TEXT);
+
+    transformer.addReferenceCount();
+
+    // Select only the first and third elements
+    boolean[] selection = {true, false, true};
+    transformer.evaluateWithSelection(selection);
+    Column result = transformer.getColumn();
+
+    Assert.assertEquals(3, result.getPositionCount());
+    // First element is processed
+    Assert.assertFalse(result.isNull(0));
+    Assert.assertArrayEquals(longToBytes(hashA), result.getBinary(0).getValues());
+    // Second element is skipped (should be null)
+    Assert.assertTrue(result.isNull(1));
+    // Third element is processed
+    Assert.assertFalse(result.isNull(2));
+    Assert.assertArrayEquals(longToBytes(hashC), result.getBinary(2).getValues());
+  }
+
+  /** Test spooky_hash_v2_64 on an empty string. */
+  @Test
+  public void testSpookyHashEmptyString() {
+    // Expected hash for '' (empty string) with default seed (0) is 0x232706fc6bf50919
+    long expectedHash = 0x232706fc6bf50919L;
+    Binary[] values = new Binary[] {new Binary("".getBytes())};
+    Column textColumn = new BinaryColumn(values.length, Optional.empty(), values);
+
+    ColumnTransformer child = mockChildColumnTransformer(textColumn);
+    GenericCodecColumnTransformer transformer =
+        new GenericCodecColumnTransformer(
+            BLOB, child, CodecStrategiesFactory.spooky_hash_v2_64, FUNCTION_NAME, TEXT);
+
+    transformer.addReferenceCount();
+    transformer.evaluate();
+    Column result = transformer.getColumn();
+
+    Assert.assertEquals(1, result.getPositionCount());
+    Assert.assertFalse(result.isNull(0));
+    Assert.assertArrayEquals(longToBytes(expectedHash), result.getBinary(0).getValues());
+  }
+}
diff --git a/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/transformation/dag/column/unary/scalar/hashing/XxHash64ColumnTransformerTest.java b/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/transformation/dag/column/unary/scalar/hashing/XxHash64ColumnTransformerTest.java
new file mode 100644
index 000000000000..ab00d0105aa3
--- /dev/null
+++ b/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/transformation/dag/column/unary/scalar/hashing/XxHash64ColumnTransformerTest.java
@@ -0,0 +1,238 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+// Unit tests for the xxhash64 codec strategy applied through GenericCodecColumnTransformer.
+// (This file lives in the dedicated "hashing" test package declared below.)
+package org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar.hashing;
+
+import org.apache.iotdb.db.queryengine.transformation.dag.column.ColumnTransformer;
+import org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar.GenericCodecColumnTransformer;
+import org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar.factory.CodecStrategiesFactory;
+
+import net.jpountz.xxhash.XXHash64;
+import net.jpountz.xxhash.XXHashFactory;
+import org.apache.tsfile.block.column.Column;
+import org.apache.tsfile.common.conf.TSFileConfig;
+import org.apache.tsfile.read.common.block.column.BinaryColumn;
+import org.apache.tsfile.read.common.type.BlobType;
+import org.apache.tsfile.utils.Binary;
+import org.junit.Assert;
+import org.junit.Test;
+import org.mockito.Mockito;
+
+import java.nio.ByteBuffer;
+import java.util.Optional;
+
+import static org.apache.tsfile.read.common.type.StringType.STRING;
+
+public class XxHash64ColumnTransformerTest {
+
+  // lz4-java's xxHash64 implementation serves as the reference oracle for expected values.
+  private final XXHash64 xxHash64 = XXHashFactory.fastestInstance().hash64();
+  private static final long SEED = 0L;
+
+  // Helper method to mock a child ColumnTransformer that returns a predefined Column; the
+  // lifecycle methods (tryEvaluate/clearCache/evaluateWithSelection) are stubbed as no-ops.
+  private ColumnTransformer mockChildColumnTransformer(Column column) {
+    ColumnTransformer mockColumnTransformer = Mockito.mock(ColumnTransformer.class);
+    Mockito.when(mockColumnTransformer.getColumn()).thenReturn(column);
+    Mockito.doNothing().when(mockColumnTransformer).tryEvaluate();
+    Mockito.doNothing().when(mockColumnTransformer).clearCache();
+    Mockito.doNothing().when(mockColumnTransformer).evaluateWithSelection(Mockito.any());
+    return mockColumnTransformer;
+  }
+
+  /**
+   * Helper method to convert a hex string to a byte array. This is crucial for tests involving
+   * binary data for two reasons: 1. It allows defining arbitrary binary inputs (like BLOBs) in a
+   * human-readable format. 2. It allows defining expected binary outputs (like standard hash
+   * values) from their readable hex representations for assertions.
+   */
+  private static byte[] hexStringToByteArray(String s) {
+    int len = s.length();
+    if (len % 2 != 0) {
+      throw new IllegalArgumentException("Hex string must have an even number of characters");
+    }
+    byte[] data = new byte[len / 2];
+    for (int i = 0; i < len; i += 2) {
+      data[i / 2] =
+          (byte) ((Character.digit(s.charAt(i), 16) << 4) + Character.digit(s.charAt(i + 1), 16));
+    }
+    return data;
+  }
+
+  /** Test XXHASH64 from a STRING input. Output should be BLOB. */
+  @Test
+  public void testXxHash64FromString() {
+    String input = "Apache IoTDB";
+    byte[] inputBytes = input.getBytes(TSFileConfig.STRING_CHARSET);
+    Binary[] values = new Binary[] {new Binary(inputBytes)};
+    Column binaryColumn = new BinaryColumn(values.length, Optional.empty(), values);
+
+    ColumnTransformer childColumnTransformer = mockChildColumnTransformer(binaryColumn);
+    GenericCodecColumnTransformer transformer =
+        new GenericCodecColumnTransformer(
+            BlobType.BLOB, // returnType
+            childColumnTransformer,
+            CodecStrategiesFactory.XXHASH64,
+            "xxhash64",
+            STRING // inputType
+            );
+    transformer.addReferenceCount();
+    transformer.evaluate();
+    Column result = transformer.getColumn();
+
+    // ByteBuffer defaults to big-endian, so the expected BLOB is the hash in big-endian order.
+    long hash = xxHash64.hash(inputBytes, 0, inputBytes.length, SEED);
+    byte[] expectedHash = ByteBuffer.allocate(8).putLong(hash).array();
+
+    Assert.assertEquals(1, result.getPositionCount());
+    Assert.assertArrayEquals(expectedHash, result.getBinary(0).getValues());
+  }
+
+  /** Test XXHASH64 from a BLOB input (represented by hex). Output should be BLOB. */
+  @Test
+  public void testXxHash64FromBlob() {
+    String inputHex = "41706163686520496f544442"; // "Apache IoTDB" as raw bytes
+    byte[] inputBytes = hexStringToByteArray(inputHex);
+    Binary[] values = new Binary[] {new Binary(inputBytes)};
+    Column binaryColumn = new BinaryColumn(values.length, Optional.empty(), values);
+
+    ColumnTransformer childColumnTransformer = mockChildColumnTransformer(binaryColumn);
+    GenericCodecColumnTransformer transformer =
+        new GenericCodecColumnTransformer(
+            BlobType.BLOB, // returnType
+            childColumnTransformer,
+            CodecStrategiesFactory.XXHASH64,
+            "xxhash64",
+            BlobType.BLOB // inputType
+            );
+    transformer.addReferenceCount();
+    transformer.evaluate();
+    Column result = transformer.getColumn();
+
+    long hash = xxHash64.hash(inputBytes, 0, inputBytes.length, SEED);
+    byte[] expectedHash = ByteBuffer.allocate(8).putLong(hash).array();
+
+    Assert.assertEquals(1, result.getPositionCount());
+    Assert.assertArrayEquals(expectedHash, result.getBinary(0).getValues());
+  }
+
+  /** Test XXHASH64 with multiple rows, including a null value. */
+  @Test
+  public void testXxHash64MultiRowsWithNull() {
+    byte[] bytes1 = "hello".getBytes(TSFileConfig.STRING_CHARSET);
+    byte[] bytes2 = "world".getBytes(TSFileConfig.STRING_CHARSET);
+
+    Binary[] values = new Binary[] {new Binary(bytes1), null, new Binary(bytes2)};
+    boolean[] valueIsNull = new boolean[] {false, true, false};
+    Column binaryColumn = new BinaryColumn(values.length, Optional.of(valueIsNull), values);
+    ColumnTransformer childColumnTransformer = mockChildColumnTransformer(binaryColumn);
+
+    GenericCodecColumnTransformer transformer =
+        new GenericCodecColumnTransformer(
+            BlobType.BLOB, // returnType
+            childColumnTransformer,
+            CodecStrategiesFactory.XXHASH64,
+            "xxhash64",
+            STRING // inputType
+            );
+    transformer.addReferenceCount();
+    transformer.evaluate();
+    Column result = transformer.getColumn();
+
+    long hash1 = xxHash64.hash(bytes1, 0, bytes1.length, SEED);
+    byte[] expected1 = ByteBuffer.allocate(8).putLong(hash1).array();
+    long hash3 = xxHash64.hash(bytes2, 0, bytes2.length, SEED);
+    byte[] expected3 = ByteBuffer.allocate(8).putLong(hash3).array();
+
+    Assert.assertEquals(3, result.getPositionCount());
+    Assert.assertFalse(result.isNull(0));
+    Assert.assertArrayEquals(expected1, result.getBinary(0).getValues());
+    // The null input row must propagate as a null output row.
+    Assert.assertTrue(result.isNull(1));
+    Assert.assertFalse(result.isNull(2));
+    Assert.assertArrayEquals(expected3, result.getBinary(2).getValues());
+  }
+
+  /** Test XXHASH64 with a selection array to process only a subset of rows. */
+  @Test
+  public void testXxHash64WithSelection() {
+    byte[] bytes1 = "Apache".getBytes(TSFileConfig.STRING_CHARSET);
+    byte[] bytes2 = "IoTDB".getBytes(TSFileConfig.STRING_CHARSET);
+    byte[] bytes3 = "rocks".getBytes(TSFileConfig.STRING_CHARSET);
+
+    Binary[] values = {new Binary(bytes1), new Binary(bytes2), new Binary(bytes3)};
+    Column binaryColumn = new BinaryColumn(values.length, Optional.empty(), values);
+    ColumnTransformer child = mockChildColumnTransformer(binaryColumn);
+
+    GenericCodecColumnTransformer transformer =
+        new GenericCodecColumnTransformer(
+            BlobType.BLOB, // returnType
+            child,
+            CodecStrategiesFactory.XXHASH64,
+            "xxhash64",
+            STRING // inputType
+            );
+    transformer.addReferenceCount();
+
+    // Only rows 0 and 2 are selected; row 1 should come back null.
+    boolean[] selection = {true, false, true};
+    transformer.evaluateWithSelection(selection);
+    Column result = transformer.getColumn();
+
+    long hash1 = xxHash64.hash(bytes1, 0, bytes1.length, SEED);
+    byte[] expected1 = ByteBuffer.allocate(8).putLong(hash1).array();
+    long hash3 = xxHash64.hash(bytes3, 0, bytes3.length, SEED);
+    byte[] expected3 = ByteBuffer.allocate(8).putLong(hash3).array();
+
+    Assert.assertEquals(3, result.getPositionCount());
+    Assert.assertFalse(result.isNull(0));
+    Assert.assertArrayEquals(expected1, result.getBinary(0).getValues());
+    Assert.assertTrue(result.isNull(1));
+    Assert.assertFalse(result.isNull(2));
+    Assert.assertArrayEquals(expected3, result.getBinary(2).getValues());
+  }
+
+  /**
+   * Test XXHASH64 with an empty binary input. The result is compared against the well-known,
+   * standard hash for an empty string.
+   */
+  @Test
+  public void testXxHash64EmptyInput() {
+    byte[] inputBytes = "".getBytes(TSFileConfig.STRING_CHARSET);
+    Binary[] values = new Binary[] {new Binary(inputBytes)};
+    Column binaryColumn = new BinaryColumn(values.length, Optional.empty(), values);
+
+    ColumnTransformer childColumnTransformer = mockChildColumnTransformer(binaryColumn);
+    GenericCodecColumnTransformer transformer =
+        new GenericCodecColumnTransformer(
+            BlobType.BLOB, // returnType
+            childColumnTransformer,
+            CodecStrategiesFactory.XXHASH64,
+            "xxhash64",
+            STRING // inputType
+            );
+    transformer.addReferenceCount();
+    transformer.evaluate();
+    Column result = transformer.getColumn();
+
+    // The known xxHash64 of an empty string with seed 0.
+    long knownEmptyHashLong = 0xEF46DB3751D8E999L;
+    byte[] expectedHash = ByteBuffer.allocate(8).putLong(knownEmptyHashLong).array();
+
+    Assert.assertEquals(1, result.getPositionCount());
+    Assert.assertArrayEquals(expectedHash, result.getBinary(0).getValues());
+  }
+}
diff --git a/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/transformation/dag/column/unary/scalar/hexEncoding/FromHexColumnTransformerTest.java b/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/transformation/dag/column/unary/scalar/hexEncoding/FromHexColumnTransformerTest.java
new file mode 100644
index 000000000000..ac885515ecab
--- /dev/null
+++ b/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/transformation/dag/column/unary/scalar/hexEncoding/FromHexColumnTransformerTest.java
@@ -0,0 +1,239 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+// Unit tests for the from_hex codec strategy applied through GenericCodecColumnTransformer.
+// (This file lives in the dedicated "hexEncoding" test package declared below.)
+package org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar.hexEncoding;
+
+import org.apache.iotdb.db.queryengine.transformation.dag.column.ColumnTransformer;
+import org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar.GenericCodecColumnTransformer;
+import org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar.factory.CodecStrategiesFactory;
+
+import org.apache.tsfile.block.column.Column;
+import org.apache.tsfile.common.conf.TSFileConfig;
+import org.apache.tsfile.read.common.block.column.BinaryColumn;
+import org.apache.tsfile.read.common.type.BlobType;
+import org.apache.tsfile.utils.Binary;
+import org.junit.Assert;
+import org.junit.Test;
+import org.mockito.Mockito;
+
+import java.util.Optional;
+
+import static org.apache.tsfile.read.common.type.StringType.STRING;
+
+public class FromHexColumnTransformerTest {
+
+  // Helper method to mock a child ColumnTransformer that returns a predefined Column; the
+  // lifecycle methods (tryEvaluate/clearCache/evaluateWithSelection) are stubbed as no-ops.
+  private ColumnTransformer mockChildColumnTransformer(Column column) {
+    ColumnTransformer mockColumnTransformer = Mockito.mock(ColumnTransformer.class);
+    Mockito.when(mockColumnTransformer.getColumn()).thenReturn(column);
+    Mockito.doNothing().when(mockColumnTransformer).tryEvaluate();
+    Mockito.doNothing().when(mockColumnTransformer).clearCache();
+    Mockito.doNothing().when(mockColumnTransformer).evaluateWithSelection(Mockito.any());
+    return mockColumnTransformer;
+  }
+
+  /** Test FROM_HEX from a valid hex STRING input. Output should be BLOB. */
+  @Test
+  public void testFromHexFromString() {
+    String inputText = "Apache IoTDB";
+    String inputHexString = "41706163686520496f544442"; // "Apache IoTDB" in hex
+
+    byte[] inputHexBytes = inputHexString.getBytes(TSFileConfig.STRING_CHARSET);
+    Binary[] values = new Binary[] {new Binary(inputHexBytes)};
+    Column binaryColumn = new BinaryColumn(values.length, Optional.empty(), values);
+
+    ColumnTransformer childColumnTransformer = mockChildColumnTransformer(binaryColumn);
+    GenericCodecColumnTransformer transformer =
+        new GenericCodecColumnTransformer(
+            BlobType.BLOB, // returnType is BLOB
+            childColumnTransformer,
+            CodecStrategiesFactory.FROM_HEX,
+            "from_hex",
+            STRING // inputType
+            );
+    transformer.addReferenceCount();
+    transformer.evaluate();
+    Column result = transformer.getColumn();
+
+    // Decoding the hex form must reproduce the original text's raw bytes.
+    byte[] expectedBytes = inputText.getBytes(TSFileConfig.STRING_CHARSET);
+
+    Assert.assertEquals(1, result.getPositionCount());
+    Assert.assertArrayEquals(expectedBytes, result.getBinary(0).getValues());
+  }
+
+  /**
+   * Test that FROM_HEX throws an exception for an input string with an odd number of characters.
+   */
+  @Test
+  public void testFromHexWithOddLengthInput() {
+    String invalidInput = "123";
+    Binary[] values = {new Binary(invalidInput.getBytes(TSFileConfig.STRING_CHARSET))};
+    Column binaryColumn = new BinaryColumn(values.length, Optional.empty(), values);
+
+    ColumnTransformer child = mockChildColumnTransformer(binaryColumn);
+    GenericCodecColumnTransformer transformer =
+        new GenericCodecColumnTransformer(
+            BlobType.BLOB, child, CodecStrategiesFactory.FROM_HEX, "from_hex", STRING);
+    transformer.addReferenceCount();
+
+    try {
+      transformer.evaluate();
+      Assert.fail("Expected an exception to be thrown for odd-length hex string");
+    } catch (RuntimeException e) {
+      // The error message format is pinned here; update this test if the wording changes.
+      String expectedMessage =
+          String.format(
+              "Failed to execute function 'from_hex' due to an invalid input format. Problematic value: %s",
+              invalidInput);
+      Assert.assertEquals(expectedMessage, e.getMessage());
+    }
+  }
+
+  /** Test that FROM_HEX throws an exception for an input string with non-hex characters. */
+  @Test
+  public void testFromHexWithNonHexCharsInput() {
+    String invalidInput = "gg";
+    Binary[] values = {new Binary(invalidInput.getBytes(TSFileConfig.STRING_CHARSET))};
+    Column binaryColumn = new BinaryColumn(values.length, Optional.empty(), values);
+
+    ColumnTransformer child = mockChildColumnTransformer(binaryColumn);
+    GenericCodecColumnTransformer transformer =
+        new GenericCodecColumnTransformer(
+            BlobType.BLOB, child, CodecStrategiesFactory.FROM_HEX, "from_hex", STRING);
+    transformer.addReferenceCount();
+
+    try {
+      transformer.evaluate();
+      Assert.fail("Expected an exception to be thrown for non-hex characters in input");
+    } catch (RuntimeException e) {
+      // The error message format is pinned here; update this test if the wording changes.
+      String expectedMessage =
+          String.format(
+              "Failed to execute function 'from_hex' due to an invalid input format. Problematic value: %s",
+              invalidInput);
+      Assert.assertEquals(expectedMessage, e.getMessage());
+    }
+  }
+
+  /** Test FROM_HEX with multiple rows, including a null value. */
+  @Test
+  public void testFromHexMultiRowsWithNull() {
+    String hex1 = "68656c6c6f"; // "hello"
+    String hex2 = "776f726c64"; // "world"
+
+    Binary[] values = {
+      new Binary(hex1.getBytes(TSFileConfig.STRING_CHARSET)),
+      null,
+      new Binary(hex2.getBytes(TSFileConfig.STRING_CHARSET))
+    };
+    boolean[] valueIsNull = new boolean[] {false, true, false};
+    Column binaryColumn = new BinaryColumn(values.length, Optional.of(valueIsNull), values);
+    ColumnTransformer childColumnTransformer = mockChildColumnTransformer(binaryColumn);
+
+    GenericCodecColumnTransformer transformer =
+        new GenericCodecColumnTransformer(
+            BlobType.BLOB, // returnType is BLOB
+            childColumnTransformer,
+            CodecStrategiesFactory.FROM_HEX,
+            "from_hex",
+            STRING // inputType
+            );
+    transformer.addReferenceCount();
+    transformer.evaluate();
+    Column result = transformer.getColumn();
+
+    byte[] expectedBytes1 = "hello".getBytes(TSFileConfig.STRING_CHARSET);
+    byte[] expectedBytes3 = "world".getBytes(TSFileConfig.STRING_CHARSET);
+
+    Assert.assertEquals(3, result.getPositionCount());
+    Assert.assertFalse(result.isNull(0));
+    Assert.assertArrayEquals(expectedBytes1, result.getBinary(0).getValues());
+    // The null input row must propagate as a null output row.
+    Assert.assertTrue(result.isNull(1));
+    Assert.assertFalse(result.isNull(2));
+    Assert.assertArrayEquals(expectedBytes3, result.getBinary(2).getValues());
+  }
+
+ /** Test FROM_HEX with a selection array to process only a subset of rows. */
+ @Test
+ public void testFromHexWithSelection() {
+ String hex1 = "417061636865"; // "Apache"
+ String hex2 = "496f544442"; // "IoTDB"
+ String hex3 = "726f636b73"; // "rocks"
+
+ Binary[] values = {
+ new Binary(hex1.getBytes(TSFileConfig.STRING_CHARSET)),
+ new Binary(hex2.getBytes(TSFileConfig.STRING_CHARSET)),
+ new Binary(hex3.getBytes(TSFileConfig.STRING_CHARSET))
+ };
+ Column binaryColumn = new BinaryColumn(values.length, Optional.empty(), values);
+ ColumnTransformer child = mockChildColumnTransformer(binaryColumn);
+
+ GenericCodecColumnTransformer transformer =
+ new GenericCodecColumnTransformer(
+ BlobType.BLOB, // returnType is BLOB
+ child,
+ CodecStrategiesFactory.FROM_HEX,
+ "from_hex",
+ STRING // inputType
+ );
+ transformer.addReferenceCount();
+
+ // Only rows 0 and 2 are selected; the unselected row 1 is expected to surface
+ // as null in the output even though its input value is non-null.
+ boolean[] selection = {true, false, true};
+ transformer.evaluateWithSelection(selection);
+ Column result = transformer.getColumn();
+
+ byte[] expectedBytes1 = "Apache".getBytes(TSFileConfig.STRING_CHARSET);
+ byte[] expectedBytes3 = "rocks".getBytes(TSFileConfig.STRING_CHARSET);
+
+ Assert.assertEquals(3, result.getPositionCount());
+ Assert.assertFalse(result.isNull(0));
+ Assert.assertArrayEquals(expectedBytes1, result.getBinary(0).getValues());
+ Assert.assertTrue(result.isNull(1));
+ Assert.assertFalse(result.isNull(2));
+ Assert.assertArrayEquals(expectedBytes3, result.getBinary(2).getValues());
+ }
+
+ /** Test FROM_HEX with an empty string input. The result should be an empty BLOB. */
+ @Test
+ public void testFromHexEmptyInput() {
+ // Boundary case: zero characters is a valid (even-length) hex encoding.
+ String inputHexString = "";
+ byte[] inputHexBytes = inputHexString.getBytes(TSFileConfig.STRING_CHARSET);
+ Binary[] values = new Binary[] {new Binary(inputHexBytes)};
+ Column binaryColumn = new BinaryColumn(values.length, Optional.empty(), values);
+
+ ColumnTransformer childColumnTransformer = mockChildColumnTransformer(binaryColumn);
+ GenericCodecColumnTransformer transformer =
+ new GenericCodecColumnTransformer(
+ BlobType.BLOB, // returnType is BLOB
+ childColumnTransformer,
+ CodecStrategiesFactory.FROM_HEX,
+ "from_hex",
+ STRING // inputType
+ );
+ transformer.addReferenceCount();
+ transformer.evaluate();
+ Column result = transformer.getColumn();
+
+ // The decoded version of an empty string is an empty byte array (not null).
+ byte[] expectedBytes = new byte[0];
+
+ Assert.assertEquals(1, result.getPositionCount());
+ Assert.assertArrayEquals(expectedBytes, result.getBinary(0).getValues());
+ }
+}
diff --git a/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/transformation/dag/column/unary/scalar/hexEncoding/ToHexColumnTransformerTest.java b/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/transformation/dag/column/unary/scalar/hexEncoding/ToHexColumnTransformerTest.java
new file mode 100644
index 000000000000..b95ffe32b086
--- /dev/null
+++ b/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/transformation/dag/column/unary/scalar/hexEncoding/ToHexColumnTransformerTest.java
@@ -0,0 +1,222 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar.hexEncoding;
+
+import org.apache.iotdb.db.queryengine.transformation.dag.column.ColumnTransformer;
+import org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar.GenericCodecColumnTransformer;
+import org.apache.iotdb.db.queryengine.transformation.dag.column.unary.scalar.factory.CodecStrategiesFactory;
+
+import com.google.common.io.BaseEncoding;
+import org.apache.tsfile.block.column.Column;
+import org.apache.tsfile.common.conf.TSFileConfig;
+import org.apache.tsfile.read.common.block.column.BinaryColumn;
+import org.apache.tsfile.read.common.type.BlobType;
+import org.apache.tsfile.utils.Binary;
+import org.junit.Assert;
+import org.junit.Test;
+import org.mockito.Mockito;
+
+import java.util.Optional;
+
+import static org.apache.tsfile.read.common.type.StringType.STRING;
+
+/**
+ * Unit tests for {@link GenericCodecColumnTransformer} configured with the TO_HEX codec strategy,
+ * covering STRING and BLOB inputs, multi-row null handling, selection vectors, and empty input.
+ */
+public class ToHexColumnTransformerTest {
+
+ // Helper method to mock a child ColumnTransformer that returns a predefined Column.
+ // The lifecycle methods are stubbed as no-ops so the transformer under test always
+ // reads the canned column supplied here.
+ private ColumnTransformer mockChildColumnTransformer(Column column) {
+ ColumnTransformer mockColumnTransformer = Mockito.mock(ColumnTransformer.class);
+ Mockito.when(mockColumnTransformer.getColumn()).thenReturn(column);
+ Mockito.doNothing().when(mockColumnTransformer).tryEvaluate();
+ Mockito.doNothing().when(mockColumnTransformer).clearCache();
+ Mockito.doNothing().when(mockColumnTransformer).evaluateWithSelection(Mockito.any());
+ return mockColumnTransformer;
+ }
+
+ /**
+ * Helper method to convert a hex string to a byte array. This is useful for creating BLOB inputs
+ * for testing.
+ *
+ * @param s the hex string to decode; must have even length and only [0-9a-fA-F] characters
+ * @return the decoded bytes
+ * @throws IllegalArgumentException if the string has odd length or contains non-hex characters
+ */
+ private static byte[] hexStringToByteArray(String s) {
+ int len = s.length();
+ if (len % 2 != 0) {
+ throw new IllegalArgumentException("Hex string must have an even number of characters");
+ }
+ byte[] data = new byte[len / 2];
+ for (int i = 0; i < len; i += 2) {
+ int hi = Character.digit(s.charAt(i), 16);
+ int lo = Character.digit(s.charAt(i + 1), 16);
+ // Character.digit returns -1 for non-hex characters; without this check the
+ // helper silently folded the -1s into garbage bytes instead of failing fast.
+ if (hi < 0 || lo < 0) {
+ throw new IllegalArgumentException("Non-hex character in input: " + s);
+ }
+ data[i / 2] = (byte) ((hi << 4) + lo);
+ }
+ return data;
+ }
+
+ /** Test TO_HEX from a STRING input. Output should be STRING. */
+ @Test
+ public void testToHexFromString() {
+ String input = "Apache IoTDB";
+ byte[] inputBytes = input.getBytes(TSFileConfig.STRING_CHARSET);
+ Binary[] values = new Binary[] {new Binary(inputBytes)};
+ Column binaryColumn = new BinaryColumn(values.length, Optional.empty(), values);
+
+ ColumnTransformer childColumnTransformer = mockChildColumnTransformer(binaryColumn);
+ GenericCodecColumnTransformer transformer =
+ new GenericCodecColumnTransformer(
+ STRING, // returnType is STRING
+ childColumnTransformer,
+ CodecStrategiesFactory.TO_HEX,
+ "to_hex",
+ STRING // inputType
+ );
+ transformer.addReferenceCount();
+ transformer.evaluate();
+ Column result = transformer.getColumn();
+
+ // Expectation computed independently with Guava's lowercase base16 codec.
+ String expectedHexString = BaseEncoding.base16().lowerCase().encode(inputBytes);
+ byte[] expectedBytes = expectedHexString.getBytes(TSFileConfig.STRING_CHARSET);
+
+ Assert.assertEquals(1, result.getPositionCount());
+ Assert.assertArrayEquals(expectedBytes, result.getBinary(0).getValues());
+ }
+
+ /** Test TO_HEX from a BLOB input. Output should be STRING. */
+ @Test
+ public void testToHexFromBlob() {
+ // Round-trip check: decoding this hex string and re-encoding it must yield the
+ // original string, so the expected output is the input literal itself.
+ String inputHex = "010203deadbeef";
+ byte[] inputBytes = hexStringToByteArray(inputHex);
+ Binary[] values = new Binary[] {new Binary(inputBytes)};
+ Column binaryColumn = new BinaryColumn(values.length, Optional.empty(), values);
+
+ ColumnTransformer childColumnTransformer = mockChildColumnTransformer(binaryColumn);
+ GenericCodecColumnTransformer transformer =
+ new GenericCodecColumnTransformer(
+ STRING, // returnType is STRING
+ childColumnTransformer,
+ CodecStrategiesFactory.TO_HEX,
+ "to_hex",
+ BlobType.BLOB // inputType
+ );
+ transformer.addReferenceCount();
+ transformer.evaluate();
+ Column result = transformer.getColumn();
+
+ // The expected output is the hex string itself.
+ byte[] expectedBytes = inputHex.getBytes(TSFileConfig.STRING_CHARSET);
+
+ Assert.assertEquals(1, result.getPositionCount());
+ Assert.assertArrayEquals(expectedBytes, result.getBinary(0).getValues());
+ }
+
+ /** Test TO_HEX with multiple rows, including a null value. */
+ @Test
+ public void testToHexMultiRowsWithNull() {
+ byte[] bytes1 = "hello".getBytes(TSFileConfig.STRING_CHARSET);
+ byte[] bytes2 = "world".getBytes(TSFileConfig.STRING_CHARSET);
+
+ // Row 1 is null via the valueIsNull mask; the transformer must propagate it as-is.
+ Binary[] values = new Binary[] {new Binary(bytes1), null, new Binary(bytes2)};
+ boolean[] valueIsNull = new boolean[] {false, true, false};
+ Column binaryColumn = new BinaryColumn(values.length, Optional.of(valueIsNull), values);
+ ColumnTransformer childColumnTransformer = mockChildColumnTransformer(binaryColumn);
+
+ GenericCodecColumnTransformer transformer =
+ new GenericCodecColumnTransformer(
+ STRING, // returnType is STRING
+ childColumnTransformer,
+ CodecStrategiesFactory.TO_HEX,
+ "to_hex",
+ STRING // inputType
+ );
+ transformer.addReferenceCount();
+ transformer.evaluate();
+ Column result = transformer.getColumn();
+
+ String expectedHex1 = BaseEncoding.base16().lowerCase().encode(bytes1);
+ String expectedHex3 = BaseEncoding.base16().lowerCase().encode(bytes2);
+
+ Assert.assertEquals(3, result.getPositionCount());
+ Assert.assertFalse(result.isNull(0));
+ Assert.assertEquals(expectedHex1, result.getBinary(0).toString());
+ Assert.assertTrue(result.isNull(1));
+ Assert.assertFalse(result.isNull(2));
+ Assert.assertEquals(expectedHex3, result.getBinary(2).toString());
+ }
+
+ /** Test TO_HEX with a selection array to process only a subset of rows. */
+ @Test
+ public void testToHexWithSelection() {
+ byte[] bytes1 = "Apache".getBytes(TSFileConfig.STRING_CHARSET);
+ byte[] bytes2 = "IoTDB".getBytes(TSFileConfig.STRING_CHARSET);
+ byte[] bytes3 = "rocks".getBytes(TSFileConfig.STRING_CHARSET);
+
+ Binary[] values = {new Binary(bytes1), new Binary(bytes2), new Binary(bytes3)};
+ Column binaryColumn = new BinaryColumn(values.length, Optional.empty(), values);
+ ColumnTransformer child = mockChildColumnTransformer(binaryColumn);
+
+ GenericCodecColumnTransformer transformer =
+ new GenericCodecColumnTransformer(
+ STRING, // returnType is STRING
+ child,
+ CodecStrategiesFactory.TO_HEX,
+ "to_hex",
+ STRING // inputType
+ );
+ transformer.addReferenceCount();
+
+ // Only rows 0 and 2 are selected; the unselected row 1 must come back null.
+ boolean[] selection = {true, false, true};
+ transformer.evaluateWithSelection(selection);
+ Column result = transformer.getColumn();
+
+ String expectedHex1 = BaseEncoding.base16().lowerCase().encode(bytes1);
+ String expectedHex3 = BaseEncoding.base16().lowerCase().encode(bytes3);
+
+ Assert.assertEquals(3, result.getPositionCount());
+ Assert.assertFalse(result.isNull(0));
+ Assert.assertEquals(expectedHex1, result.getBinary(0).toString());
+ Assert.assertTrue(result.isNull(1));
+ Assert.assertFalse(result.isNull(2));
+ Assert.assertEquals(expectedHex3, result.getBinary(2).toString());
+ }
+
+ /** Test TO_HEX with an empty input. The result should be an empty string. */
+ @Test
+ public void testToHexEmptyInput() {
+ // Boundary case: an empty byte array encodes to an empty string.
+ byte[] inputBytes = "".getBytes(TSFileConfig.STRING_CHARSET);
+ Binary[] values = new Binary[] {new Binary(inputBytes)};
+ Column binaryColumn = new BinaryColumn(values.length, Optional.empty(), values);
+
+ ColumnTransformer childColumnTransformer = mockChildColumnTransformer(binaryColumn);
+ GenericCodecColumnTransformer transformer =
+ new GenericCodecColumnTransformer(
+ STRING, // returnType is STRING
+ childColumnTransformer,
+ CodecStrategiesFactory.TO_HEX,
+ "to_hex",
+ STRING // inputType
+ );
+ transformer.addReferenceCount();
+ transformer.evaluate();
+ Column result = transformer.getColumn();
+
+ // The hex representation of an empty byte array is an empty string.
+ String expectedHexString = "";
+ byte[] expectedBytes = expectedHexString.getBytes(TSFileConfig.STRING_CHARSET);
+
+ Assert.assertEquals(1, result.getPositionCount());
+ Assert.assertArrayEquals(expectedBytes, result.getBinary(0).getValues());
+ }
+}
diff --git a/iotdb-core/node-commons/src/main/java/org/apache/iotdb/commons/udf/builtin/relational/TableBuiltinScalarFunction.java b/iotdb-core/node-commons/src/main/java/org/apache/iotdb/commons/udf/builtin/relational/TableBuiltinScalarFunction.java
index dfad12adafcf..e20ca4dd0737 100644
--- a/iotdb-core/node-commons/src/main/java/org/apache/iotdb/commons/udf/builtin/relational/TableBuiltinScalarFunction.java
+++ b/iotdb-core/node-commons/src/main/java/org/apache/iotdb/commons/udf/builtin/relational/TableBuiltinScalarFunction.java
@@ -75,6 +75,42 @@ public enum TableBuiltinScalarFunction {
BITWISE_LEFT_SHIFT("bitwise_left_shift"),
BITWISE_RIGHT_SHIFT("bitwise_right_shift"),
BITWISE_RIGHT_SHIFT_ARITHMETIC("bitwise_right_shift_arithmetic"),
+ TO_BASE64("to_base64"),
+ FROM_BASE64("from_base64"),
+ TO_BASE64URL("to_base64url"),
+ FROM_BASE64URL("from_base64url"),
+ TO_BASE32("to_base32"),
+ FROM_BASE32("from_base32"),
+ SHA256("sha256"),
+ SHA512("sha512"),
+ SHA1("sha1"),
+ MD5("md5"),
+ XXHASH64("xxhash64"),
+ MURMUR3("murmur3"),
+ TO_HEX("to_hex"),
+ FROM_HEX("from_hex"),
+ REVERSE("reverse"),
+ HMAC_MD5("hmac_md5"),
+ HMAC_SHA1("hmac_sha1"),
+ HMAC_SHA256("hmac_sha256"),
+ HMAC_SHA512("hmac_sha512"),
+ TO_BIG_ENDIAN_32("to_big_endian_32"),
+ FROM_BIG_ENDIAN_32("from_big_endian_32"),
+ TO_BIG_ENDIAN_64("to_big_endian_64"),
+ FROM_BIG_ENDIAN_64("from_big_endian_64"),
+ TO_LITTLE_ENDIAN_32("to_little_endian_32"),
+ FROM_LITTLE_ENDIAN_32("from_little_endian_32"),
+ TO_LITTLE_ENDIAN_64("to_little_endian_64"),
+ FROM_LITTLE_ENDIAN_64("from_little_endian_64"),
+ TO_IEEE754_32("to_ieee754_32"),
+ FROM_IEEE754_32("from_ieee754_32"),
+ TO_IEEE754_64("to_ieee754_64"),
+ FROM_IEEE754_64("from_ieee754_64"),
+ CRC32("crc32"),
+ SPOOKY_HASH_V2_32("spooky_hash_v2_32"),
+ SPOOKY_HASH_V2_64("spooky_hash_v2_64"),
+ LPAD("lpad"),
+ RPAD("rpad"),
;
private final String functionName;