From f13286f9d0baab33157683839aa87d9f854a019e Mon Sep 17 00:00:00 2001 From: An Qi Date: Wed, 15 Jan 2025 13:18:55 +0800 Subject: [PATCH 1/4] fix(filesystem): RestIT.testDelete --- .../parquet/db/lsm/buffer/chunk/IndexedChunk.java | 5 +++-- .../parquet/db/lsm/buffer/chunk/NoIndexChunk.java | 10 ++++++++-- .../struct/legacy/parquet/util/arrow/ArrowVectors.java | 2 +- 3 files changed, 12 insertions(+), 5 deletions(-) diff --git a/dataSource/filesystem/src/main/java/cn/edu/tsinghua/iginx/filesystem/struct/legacy/parquet/db/lsm/buffer/chunk/IndexedChunk.java b/dataSource/filesystem/src/main/java/cn/edu/tsinghua/iginx/filesystem/struct/legacy/parquet/db/lsm/buffer/chunk/IndexedChunk.java index 1d05577976..fbdca55a8d 100644 --- a/dataSource/filesystem/src/main/java/cn/edu/tsinghua/iginx/filesystem/struct/legacy/parquet/db/lsm/buffer/chunk/IndexedChunk.java +++ b/dataSource/filesystem/src/main/java/cn/edu/tsinghua/iginx/filesystem/struct/legacy/parquet/db/lsm/buffer/chunk/IndexedChunk.java @@ -45,10 +45,10 @@ protected IndexedChunk(@WillCloseWhenClosed Chunk chunk, BufferAllocator allocat @Override public synchronized Snapshot snapshot(BufferAllocator allocator) { Snapshot snapshot = super.snapshot(allocator); - if (ArrowVectors.isSorted(snapshot.keys)) { + IntVector indexes = indexOf(snapshot, allocator); + if (indexes == null) { return snapshot; } - IntVector indexes = indexOf(snapshot, allocator); return new IndexedSnapshot(snapshot, indexes); } @@ -69,6 +69,7 @@ public synchronized void close() { super.close(); } + @Nullable protected abstract IntVector indexOf(Snapshot snapshot, BufferAllocator allocator); protected abstract void updateIndex(Snapshot data, int offset); diff --git a/dataSource/filesystem/src/main/java/cn/edu/tsinghua/iginx/filesystem/struct/legacy/parquet/db/lsm/buffer/chunk/NoIndexChunk.java b/dataSource/filesystem/src/main/java/cn/edu/tsinghua/iginx/filesystem/struct/legacy/parquet/db/lsm/buffer/chunk/NoIndexChunk.java index 52e0beccc1..d27f23137c 
100644 --- a/dataSource/filesystem/src/main/java/cn/edu/tsinghua/iginx/filesystem/struct/legacy/parquet/db/lsm/buffer/chunk/NoIndexChunk.java +++ b/dataSource/filesystem/src/main/java/cn/edu/tsinghua/iginx/filesystem/struct/legacy/parquet/db/lsm/buffer/chunk/NoIndexChunk.java @@ -43,9 +43,15 @@ public NoIndexChunk(@WillCloseWhenClosed Chunk chunk, BufferAllocator allocator) @Override protected IntVector indexOf(Snapshot snapshot, BufferAllocator allocator) { + if (tombstone.get(0).isEmpty()) { + if (ArrowVectors.isStrictlyOrdered(snapshot.keys)) { + return null; + } + } + IntVector indexes = ArrowVectors.stableSortIndexes(snapshot.keys, allocator); ArrowVectors.dedupSortedIndexes(snapshot.keys, indexes); - if (!tombstone.isEmpty()) { + if (!tombstone.get(0).isEmpty()) { ArrowVectors.filter(indexes, i -> !isDeleted(snapshot, i)); } return indexes; @@ -69,6 +75,6 @@ protected void updateIndex(Snapshot data, int offset) { @Override protected void deleteIndex(RangeSet rangeSet) { tombstone.computeIfAbsent(valueCount, k -> TreeRangeSet.create()).addAll(rangeSet); - tombstone.values().forEach(r -> r.removeAll(rangeSet)); + tombstone.values().forEach(r -> r.addAll(rangeSet)); } } diff --git a/dataSource/filesystem/src/main/java/cn/edu/tsinghua/iginx/filesystem/struct/legacy/parquet/util/arrow/ArrowVectors.java b/dataSource/filesystem/src/main/java/cn/edu/tsinghua/iginx/filesystem/struct/legacy/parquet/util/arrow/ArrowVectors.java index f36615f25a..7015d0af1b 100644 --- a/dataSource/filesystem/src/main/java/cn/edu/tsinghua/iginx/filesystem/struct/legacy/parquet/util/arrow/ArrowVectors.java +++ b/dataSource/filesystem/src/main/java/cn/edu/tsinghua/iginx/filesystem/struct/legacy/parquet/util/arrow/ArrowVectors.java @@ -116,7 +116,7 @@ public static V like(V vector, BufferAllocator allocator return (V) vector.getTransferPair(allocator).getTo(); } - public static boolean isSorted(ValueVector vector) { + public static boolean isStrictlyOrdered(ValueVector vector) { 
VectorValueComparator comparator = DefaultVectorComparators.createDefaultComparator(vector); comparator.attachVector(vector); From 662823a82d0c6df036e4632d3709685ebff95cd0 Mon Sep 17 00:00:00 2001 From: An Qi Date: Wed, 15 Jan 2025 13:20:44 +0800 Subject: [PATCH 2/4] chore(conf): change default value of `data.config.write.buffer.timeout` --- conf/config.properties | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/conf/config.properties b/conf/config.properties index 61618098bc..66a1cdc4c9 100644 --- a/conf/config.properties +++ b/conf/config.properties @@ -40,7 +40,7 @@ password=root # 数据库列表,使用','分隔不同实例 #storageEngineList=127.0.0.1#6667#iotdb12#username=root#password=root#sessionPoolSize=20#has_data=false#is_read_only=false #storageEngineList=127.0.0.1#8086#influxdb#url=http://localhost:8086/#token=your-token#organization=your-organization#has_data=false -storageEngineList=127.0.0.1#6668#filesystem#iginx_port=6888#has_data=false#is_read_only=false#dir=data#data.config.write.buffer.size=104857600#data.config.write.buffer.timeout=0#dummy_dir=dummy#dummy.struct=LegacyFilesystem#dummy.config.chunk_size_in_bytes=1048576#dummy.config.memory_pool_size=100#client.connectPool.maxTotal=100 +storageEngineList=127.0.0.1#6668#filesystem#iginx_port=6888#has_data=false#is_read_only=false#dir=data#data.config.write.buffer.size=104857600#data.config.write.buffer.timeout=1s#dummy_dir=dummy#dummy.struct=LegacyFilesystem#dummy.config.chunk_size_in_bytes=1048576#dummy.config.memory_pool_size=100#client.connectPool.maxTotal=100 #storageEngineList=127.0.0.1#5432#relational#engine=postgresql#username=postgres#password=postgres#has_data=false #storageEngineList=127.0.0.1#3306#relational#engine=mysql#username=root#has_data=false #storageEngineList=127.0.0.1#27017#mongodb#uri="mongodb://127.0.0.1:27017/?maxPoolSize=200&maxIdleTimeMS=60000&waitQueueTimeoutMS=50000"#has_data=false#schema.sample.size=1000#dummy.sample.size=0 From 911b29608bb550ba299b1fba40f8c863ad3d5d45 Mon Sep 
17 00:00:00 2001 From: ZM <12236590+shinyano@users.noreply.github.com> Date: Mon, 20 Jan 2025 11:51:43 +0800 Subject: [PATCH 3/4] feat(ci): test free-threading python 3.13.0 (#494) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Add tests for python3.13t on: win & mac & linux iotdb & filesystem Note: The testAddStorageEngine test in PySessionIT is skipped because it relies on fastparquet module which does not support python 3.13 yet 顺便解决IoTDB启动时使用了非JDK8导致出错的问题 剩余一个3.13t测试在macOS、FS情况下出错的问题,在PR#555中解决 --- .github/actions/confWriter/action.yml | 32 ++++- .github/actions/dbRunner/action.yml | 2 + .github/actions/dependence/action.yml | 73 ++++++++++- .github/actions/iginxRunner/action.yml | 5 +- .../service/freeThreadPython/action.yml | 115 ++++++++++++++++++ .github/actions/tpchSingleTest/action.yml | 10 +- .../scripts/dataSources/restart/influxdb.sh | 9 +- .../dataSources/restart/influxdb_macos.sh | 9 +- .../dataSources/restart/influxdb_windows.sh | 9 +- .github/scripts/dataSources/restart/iotdb.sh | 4 +- .../dataSources/restart/iotdb_macos.sh | 6 +- .../dataSources/restart/iotdb_windows.sh | 2 +- .../scripts/dataSources/restart/mongodb.sh | 2 +- .../dataSources/restart/mongodb_macos.sh | 2 +- .../dataSources/restart/mongodb_windows.sh | 2 +- .github/scripts/dataSources/restart/mysql.sh | 2 +- .../dataSources/restart/mysql_macos.sh | 2 +- .../dataSources/restart/mysql_windows.sh | 2 +- .../scripts/dataSources/restart/postgresql.sh | 2 +- .../dataSources/restart/postgresql_macos.sh | 2 +- .../dataSources/restart/postgresql_windows.sh | 2 +- .github/scripts/dataSources/restart/redis.sh | 2 +- .../dataSources/restart/redis_macos.sh | 2 +- .../dataSources/restart/redis_windows.sh | 2 +- .../scripts/dataSources/shutdown/influxdb.sh | 2 +- .../dataSources/shutdown/influxdb_macos.sh | 2 +- .../dataSources/shutdown/influxdb_windows.sh | 2 +- .github/scripts/dataSources/shutdown/iotdb.sh | 2 +- .../dataSources/shutdown/iotdb_macos.sh 
| 2 +- .../dataSources/shutdown/iotdb_windows.sh | 2 +- .../scripts/dataSources/shutdown/mongodb.sh | 2 +- .../dataSources/shutdown/mongodb_macos.sh | 2 +- .../dataSources/shutdown/mongodb_windows.sh | 2 +- .github/scripts/dataSources/shutdown/mysql.sh | 2 +- .../dataSources/shutdown/mysql_macos.sh | 2 +- .../dataSources/shutdown/mysql_windows.sh | 2 +- .../dataSources/shutdown/postgresql.sh | 2 +- .../dataSources/shutdown/postgresql_macos.sh | 2 +- .../shutdown/postgresql_windows.sh | 2 +- .github/scripts/dataSources/shutdown/redis.sh | 2 +- .../dataSources/shutdown/redis_macos.sh | 2 +- .../dataSources/shutdown/redis_windows.sh | 2 +- .../scripts/dataSources/startup/filesystem.sh | 6 +- .../scripts/dataSources/startup/influxdb.sh | 2 +- .../dataSources/startup/influxdb_macos.sh | 2 +- .../dataSources/startup/influxdb_windows.sh | 2 +- .../scripts/dataSources/startup/iotdb12.sh | 4 +- .../dataSources/startup/iotdb12_macos.sh | 4 +- .../dataSources/startup/iotdb12_windows.sh | 2 +- .../scripts/dataSources/update/influxdb.sh | 2 +- .../dataSources/update/influxdb_macos.sh | 2 +- .../dataSources/update/influxdb_windows.sh | 2 +- .github/scripts/dataSources/update/iotdb.sh | 2 +- .../scripts/dataSources/update/iotdb_macos.sh | 2 +- .../dataSources/update/iotdb_windows.sh | 2 +- .github/scripts/dataSources/update/mysql.sh | 2 +- .../scripts/dataSources/update/mysql_macos.sh | 2 +- .../dataSources/update/mysql_windows.sh | 2 +- .../scripts/dataSources/update/postgresql.sh | 2 +- .../dataSources/update/postgresql_macos.sh | 2 +- .../dataSources/update/postgresql_windows.sh | 2 +- .github/scripts/iginx/iginx.sh | 2 +- .github/scripts/iginx/iginx_macos.sh | 2 +- .github/scripts/iginx/iginx_udf_path.sh | 2 +- .github/scripts/iginx/iginx_windows.sh | 8 +- .github/workflows/DB-CE.yml | 10 +- .github/workflows/assembly-test.yml | 4 +- .github/workflows/case-regression.yml | 6 +- .github/workflows/free-thread-test.yml | 87 +++++++++++++ .github/workflows/full-test-suite.yml | 2 
+- .github/workflows/remote-test.yml | 6 +- .../standalone-test-no-optimizer.yml | 8 +- .github/workflows/standalone-test.yml | 20 +-- .github/workflows/standard-test-suite.yml | 4 + .github/workflows/tpc-h.yml | 4 +- .github/workflows/unit-test.yml | 2 +- .../physical/storage/StorageManager.java | 4 +- .../function/manager/FunctionManager.java | 12 ++ .../shared/function/udf/python/PyUDF.java | 2 + .../src/main/resources/pemja-0.5-SNAPSHOT.jar | Bin 20105 -> 20210 bytes .../edu/tsinghua/iginx/utils/ShellRunner.java | 32 ++++- .../expansion/BaseCapacityExpansionIT.java | 2 + .../expansion/utils/SQLTestTools.java | 10 +- .../integration/func/session/PySessionIT.java | 83 +++++++------ .../iginx/integration/func/udf/UDFIT.java | 9 ++ test/src/test/resources/testConfig.properties | 2 +- 86 files changed, 552 insertions(+), 153 deletions(-) create mode 100644 .github/actions/service/freeThreadPython/action.yml create mode 100644 .github/workflows/free-thread-test.yml diff --git a/.github/actions/confWriter/action.yml b/.github/actions/confWriter/action.yml index d920560e51..bac48185b3 100644 --- a/.github/actions/confWriter/action.yml +++ b/.github/actions/confWriter/action.yml @@ -33,6 +33,10 @@ inputs: description: "the path of IGinX root directory" required: false default: "." 
+ use-free-thread-python: + description: "use python3.13 free-threading mode" + required: false + default: "false" zookeeper-port: description: "zookeeper service port" required: false @@ -67,12 +71,36 @@ runs: paths: ${{ inputs.Root-Dir-Path }}/core/target/iginx-core-*/conf/config.properties statements: s/needInitBasicUDFFunctions=false/needInitBasicUDFFunctions=true/g + - name: Find PythonCMD && Set Free-threading Support ENV + id: find_python_cmd + shell: bash -el {0} + run: | + if [[ "${{ inputs.use-free-thread-python }}" == "true" ]]; then + if [ "$RUNNER_OS" == "Windows" ]; then + pythonPath=$(echo "$FT_PYTHON_PATH" | sed 's/\\/\//g') + pythonCMD="${pythonPath}/python3.13t.exe" + echo "TORCH_SUPPORTED=false" >> $GITHUB_ENV + else + pythonCMD='python3.13t' + echo "TORCH_SUPPORTED=false" >> $GITHUB_ENV + fi + else + if [ "$RUNNER_OS" == "Windows" ]; then + pythonCMD=python + else + pythonCMD=python3 + fi + fi + # have to set env to make it work in ut tests + echo "pythonCMD=${pythonCMD}" >> $GITHUB_ENV + echo "pythonCMD=${pythonCMD}" >> $GITHUB_OUTPUT + echo ${pythonCMD} + - name: Change Python CMD - if: runner.os == 'Windows' uses: ./.github/actions/edit with: paths: ${{ inputs.Root-Dir-Path }}/core/target/iginx-core-*/conf/config.properties - statements: s/pythonCMD=python3/pythonCMD=python/g + statements: s|pythonCMD=python3|pythonCMD=${{ steps.find_python_cmd.outputs.pythonCMD }}|g - if: inputs.Close-Optimizer=='true' name: Close All Optimizer diff --git a/.github/actions/dbRunner/action.yml b/.github/actions/dbRunner/action.yml index 1d720d6a19..26ca49ce95 100644 --- a/.github/actions/dbRunner/action.yml +++ b/.github/actions/dbRunner/action.yml @@ -63,6 +63,8 @@ runs: name: Run DB shell: bash run: | + java -version + which java if [ "$RUNNER_OS" == "Linux" ]; then chmod +x "${GITHUB_WORKSPACE}/.github/scripts/dataSources/startup/iotdb12.sh" "${GITHUB_WORKSPACE}/.github/scripts/dataSources/startup/iotdb12.sh" ${{ inputs.IoTDB12-ports }} diff --git 
a/.github/actions/dependence/action.yml b/.github/actions/dependence/action.yml index 41da09b283..57a55cad42 100644 --- a/.github/actions/dependence/action.yml +++ b/.github/actions/dependence/action.yml @@ -4,7 +4,7 @@ inputs: python-version: description: "python version" required: false - default: 3.9 + default: 3.11 java: description: "java version" required: false @@ -22,6 +22,17 @@ inputs: description: "is docker needed in this test" required: false default: "false" + free-thread-python-required: + description: "is free-threading python needed" + required: false + default: "false" + iginx-conda-flag: + description: "whether to use conda" + required: false + default: "false" + iginx-conda-env: + description: "conda env name" + required: false runs: using: "composite" @@ -34,6 +45,19 @@ runs: tzutil /s "China Standard Time" echo "JAVA_OPTS=-Xmx4g -Xms2g" >> %GITHUB_ENV% + # after this setup, every shell using login mode will be in conda env + # conda should be set at very beginning because this action will delete ~/.bashrc & ~/.bash_profile + # and regenerate them by initializing conda + - if: inputs.scope=='all' && inputs.iginx-conda-flag == 'true' + name: Setup Miniconda + uses: conda-incubator/setup-miniconda@v3.1.0 + with: + miniconda-version: ${{ inputs.conda-version }} + python-version: ${{ inputs.python-version }} + activate-environment: ${{ inputs.iginx-conda-env }} + channel-priority: strict + run-post: "false" + # we use a patched version of lima(slower) because colima cannot start on M1 chips and the docker task is lightweight. 
- if: runner.os == 'macOS' && inputs.docker-required=='true' name: Install Docker on MacOS @@ -60,17 +84,38 @@ runs: echo "requests" >> requirements.txt echo "torch" >> requirements.txt - - if: inputs.scope=='all' + - if: inputs.scope=='all' && inputs.iginx-conda-flag != 'true' name: Set up Python ${{ inputs.python-version }} uses: actions/setup-python@v5 with: python-version: ${{ inputs.python-version }} cache: "pip" # caching pip dependencies + # when using login mode shell, setup-python does not take effect. + # thus manually add python path to PATH + - if: inputs.scope=='all' && inputs.iginx-conda-flag != 'true' + name: Add System Python to PATH + shell: bash + run: | + PYTHON_PATH=$(dirname $(which python)) + echo $PYTHON_PATH + echo "IGINX_PYTHON_PATH=$PYTHON_PATH" >> $GITHUB_ENV + - if: inputs.scope=='all' name: Install Python dependencies - shell: bash - run: pip install -r requirements.txt + shell: bash -el {0} + run: | + if [ ! -z "$IGINX_PYTHON_PATH" ]; then + export PATH="$IGINX_PYTHON_PATH:$PATH" + fi + echo $PATH + which python + python -m pip install --upgrade pip + pip install -r requirements.txt + + - if: inputs.free-thread-python-required=='true' + name: Install free-thread python + uses: ./.github/actions/service/freeThreadPython - name: Set up JDK ${{ inputs.java }} uses: actions/setup-java@v4 @@ -79,6 +124,26 @@ runs: distribution: ${{ runner.os == 'macOS' && matrix.java == '8' && 'liberica' || 'temurin' }} cache: "maven" + - name: Check Java & Python Version + shell: bash + run: | + java -version + which java + python --version + which python + + - name: Check Java & Python Version in Conda and set conda env name + if: inputs.iginx-conda-flag == 'true' + shell: bash -el {0} + run: | + java -version + which java + python --version + which python + # to access conda env in scripts + echo "IGINX_CONDA_ENV=${{ inputs.iginx-conda-env }}" >> $GITHUB_ENV + echo "IGINX_CONDA_FLAG=${{ inputs.iginx-conda-flag }}" >> $GITHUB_ENV + - name: Get project 
info id: project uses: ./.github/actions/project diff --git a/.github/actions/iginxRunner/action.yml b/.github/actions/iginxRunner/action.yml index 1b69407ddf..c0b60767ca 100644 --- a/.github/actions/iginxRunner/action.yml +++ b/.github/actions/iginxRunner/action.yml @@ -16,7 +16,7 @@ runs: # TODO: use iginx binaries in assembly rather than core - if: inputs.if-test-udf=='true' name: Test UDF Path - shell: bash + shell: bash -el {0} run: | if [ "$RUNNER_OS" == "Linux" ]; then sudo sed -i 's/needInitBasicUDFFunctions=false/needInitBasicUDFFunctions=true/g' ${GITHUB_WORKSPACE}/core/target/iginx-core-${VERSION}/conf/config.properties @@ -34,8 +34,9 @@ runs: - if: inputs.if-test-udf=='false' && inputs.if-stop=='false' name: Start IGinX - shell: bash + shell: bash -el {0} run: | + which python if [ "$RUNNER_OS" == "Linux" ]; then chmod +x "${GITHUB_WORKSPACE}/.github/scripts/iginx/iginx.sh" "${GITHUB_WORKSPACE}/.github/scripts/iginx/iginx.sh" 6888 7888 diff --git a/.github/actions/service/freeThreadPython/action.yml b/.github/actions/service/freeThreadPython/action.yml new file mode 100644 index 0000000000..6e473ce15f --- /dev/null +++ b/.github/actions/service/freeThreadPython/action.yml @@ -0,0 +1,115 @@ +name: "Install_Free-thread_Python" +description: "Install Free-thread Python" + +runs: + using: "composite" + steps: + - name: Set Python variables + id: set-python-vars + shell: bash + run: | + if [ "$RUNNER_OS" == "Windows" ]; then + echo "PYTHON_INSTALLER_URL=https://www.python.org/ftp/python/3.13.0/python-3.13.0-amd64.exe" >> $GITHUB_ENV + echo "PYTHON_INSTALLER_PATH=python-installer.exe" >> $GITHUB_ENV + echo "CACHE_KEY=python-windows-3.13.0" >> $GITHUB_ENV + elif [ "$RUNNER_OS" == "macOS" ]; then + echo "PYTHON_INSTALLER_URL=https://www.python.org/ftp/python/3.13.0/python-3.13.0-macos11.pkg" >> $GITHUB_ENV + echo "PYTHON_INSTALLER_PATH=python-installer.pkg" >> $GITHUB_ENV + echo "CACHE_KEY=python-macos-3.13.0" >> $GITHUB_ENV + fi + + - name: Restore Python 
installer + if: runner.os!='Linux' + id: restore-cache-python + uses: actions/cache/restore@v3 + with: + path: ${{ env.PYTHON_INSTALLER_PATH }} + key: ${{ env.CACHE_KEY }} + + - name: Download Python installer + if: steps.restore-cache-python.outputs.cache-hit != 'true' && runner.os=='Windows' + shell: powershell + run: | + Invoke-WebRequest -Uri $env:PYTHON_INSTALLER_URL -OutFile $env:PYTHON_INSTALLER_PATH + + # have to wait until installation finishes + - if: runner.os=='Windows' + name: Setup Python3.13 on Windows + shell: powershell + run: | + Start-Process -FilePath "$(Join-Path -Path $PWD -ChildPath $env:PYTHON_INSTALLER_PATH)" -ArgumentList "/quiet","Include_freethreaded=1" -Wait + $pythonPath = "$env:LocalAppData\Programs\Python\Python313" + Get-ChildItem $pythonPath + $env:Path += ";$pythonPath" + echo "$pythonPath" | Out-File -FilePath $env:GITHUB_PATH -Encoding utf8 -Append + echo "FT_PYTHON_PATH=$pythonPath" | Out-File -FilePath $env:GITHUB_ENV -Append + python3.13t -VV + + - if: runner.os=='Linux' + name: Setup Python3.13 on Linux + shell: bash + run: | + sudo add-apt-repository ppa:deadsnakes + sudo apt-get update + sudo apt-get install python3.13-nogil python3.13-dev + python3.13t -VV + + - name: Download Python installer + if: steps.restore-cache-python.outputs.cache-hit != 'true' && runner.os=='macOS' + shell: bash + run: | + curl -o $PYTHON_INSTALLER_PATH $PYTHON_INSTALLER_URL + + - if: runner.os=='macOS' + name: Setup Python3.13 on MacOS + shell: bash + run: | + cat > ./choicechanges.plist <<EOF + <?xml version="1.0" encoding="UTF-8"?> + <plist version="1.0"> + <array> + <dict> + <key>attributeSetting</key> + <integer>1</integer> + <key>choiceAttribute</key> + <string>selected</string> + <key>choiceIdentifier</key> + <string>org.python.Python.PythonTFramework-3.13</string> + </dict> + </array> + </plist> + EOF + sudo installer -pkg ./$PYTHON_INSTALLER_PATH \ + -applyChoiceChangesXML ./choicechanges.plist \ + -target / + python3.13t -VV + export PATH="$PATH:/Library/Frameworks/PythonT.framework/Versions/3.13/bin" + + - name: Cache Python installer + if: runner.os!='Linux' && steps.restore-cache-python.outputs.cache-hit != 'true' + id:
cache-python + uses: actions/cache/save@v3 + with: + path: ${{ env.PYTHON_INSTALLER_PATH }} + key: ${{ env.CACHE_KEY }} + + - name: Install pip + if: runner.os=='Linux' + shell: bash + run: | + curl -O https://bootstrap.pypa.io/get-pip.py + sudo python3.13t get-pip.py + + # pandas 2.2.3 cannot be directly installed on windows yet. use dev wheel file + - name: Install dependencies for tests + shell: bash + run: | + curl -L -O https://github.com/IGinX-THU/IGinX-resources/raw/refs/heads/main/resources/python/pandas-3.0.0.dev0+1654.g32a97a969a-cp313-cp313t-win_amd64.whl + ls -l + if [ "$RUNNER_OS" == "Windows" ]; then + python3.13t -m pip install numpy thrift pemjax + python3.13t -m pip install pandas*.whl + else + sudo python3.13t -m pip install pandas==2.2.3 numpy thrift pemjax + fi diff --git a/.github/actions/tpchSingleTest/action.yml b/.github/actions/tpchSingleTest/action.yml index 2d1ad3cfd2..50b7502df9 100644 --- a/.github/actions/tpchSingleTest/action.yml +++ b/.github/actions/tpchSingleTest/action.yml @@ -18,12 +18,12 @@ runs: - name: Run SQL Warmup on New IGinX if: inputs.status != 'ok' - shell: bash + shell: bash -el {0} run: mvn test -q -Dtest=SQLWarmupIT -DfailIfNoTests=false -P-format - name: Run TPCH Test on New IGinX if: inputs.status != 'ok' - shell: bash + shell: bash -el {0} run: mvn test -q -Dtest=TPCHNewIT -DfailIfNoTests=false -P-format - name: Show New IGinX log @@ -39,7 +39,7 @@ runs: - name: Start Old IGinX if: inputs.status != 'ok' - shell: bash + shell: bash -el {0} working-directory: IGinX run: | chmod +x core/target/iginx-core-*/sbin/start_iginx.sh @@ -48,12 +48,12 @@ runs: - name: Run SQL Warmup on Old IGinX if: inputs.status != 'ok' - shell: bash + shell: bash -el {0} run: mvn test -q -Dtest=SQLWarmupIT -DfailIfNoTests=false -P-format - name: Run TPCH Test on Old IGinX if: inputs.status != 'ok' - shell: bash + shell: bash -el {0} run: mvn test -q -Dtest=TPCHOldIT -DfailIfNoTests=false -P-format - name: Show Old IGinX log diff --git 
a/.github/scripts/dataSources/restart/influxdb.sh b/.github/scripts/dataSources/restart/influxdb.sh index 772a3bbc62..031f1bb9b0 100644 --- a/.github/scripts/dataSources/restart/influxdb.sh +++ b/.github/scripts/dataSources/restart/influxdb.sh @@ -1,4 +1,4 @@ -#!/bin/sh +#!/bin/bash # # IGinX - the polystore system with high performance # Copyright (C) Tsinghua University @@ -22,6 +22,11 @@ port=$1 echo "Starting InfluxDB on port $port" -sudo sh -c "cd influxdb2-2.0.7-linux-amd64-$port/; nohup ./influxd run --bolt-path=~/.influxdbv2/influxd.bolt --engine-path=~/.influxdbv2/engine --http-bind-address=:$port --query-memory-bytes=20971520 --query-concurrency=2 > influxdb_$port.log 2>&1 &" +sudo sh -c "cd influxdb2-2.0.7-linux-amd64-$port/; nohup ./influxd run --bolt-path=~/.influxdbv2/influxd.bolt --engine-path=~/.influxdbv2/engine --http-bind-address=:$port --query-memory-bytes=300971520 --query-concurrency=2 > influxdb_$port.log 2>&1 &" sleep 10 + +sudo lsof -i:$port +if [ $? -eq 1 ]; then + echo "No process is listening on port $port" +fi \ No newline at end of file diff --git a/.github/scripts/dataSources/restart/influxdb_macos.sh b/.github/scripts/dataSources/restart/influxdb_macos.sh index ea8b075769..b2a3327977 100644 --- a/.github/scripts/dataSources/restart/influxdb_macos.sh +++ b/.github/scripts/dataSources/restart/influxdb_macos.sh @@ -1,4 +1,4 @@ -#!/bin/sh +#!/bin/bash # # IGinX - the polystore system with high performance # Copyright (C) Tsinghua University @@ -22,5 +22,10 @@ port=$1 echo "Starting InfluxDB on port $port" -sudo sh -c "cd influxdb2-2.0.7-darwin-amd64-$port/; nohup ./influxd run --bolt-path=~/.influxdbv2/influxd.bolt --engine-path=~/.influxdbv2/engine --http-bind-address=:$port --query-memory-bytes=20971520 --query-concurrency=2 > influxdb_$port.log 2>&1 &" +sudo -E sh -c "cd influxdb2-2.0.7-darwin-amd64-$port/; nohup ./influxd run --bolt-path=~/.influxdbv2/influxd.bolt --engine-path=~/.influxdbv2/engine --http-bind-address=:$port 
--query-memory-bytes=300971520 --query-concurrency=2 > influxdb_$port.log 2>&1 &" sleep 10 + +sudo lsof -i:$port +if [ $? -eq 1 ]; then + echo "No process is listening on port $port" +fi \ No newline at end of file diff --git a/.github/scripts/dataSources/restart/influxdb_windows.sh b/.github/scripts/dataSources/restart/influxdb_windows.sh index daa128a903..617cd94a1b 100644 --- a/.github/scripts/dataSources/restart/influxdb_windows.sh +++ b/.github/scripts/dataSources/restart/influxdb_windows.sh @@ -1,4 +1,4 @@ -#!/bin/sh +#!/bin/bash # # IGinX - the polystore system with high performance # Copyright (C) Tsinghua University @@ -23,9 +23,14 @@ port=$1 echo "Starting InfluxDB on port $port" pathPrefix="influxdb2-2.0.7-windows-amd64-$port" -arguments="-ArgumentList 'run', '--bolt-path=$pathPrefix/.influxdbv2/influxd.bolt', '--engine-path=$pathPrefix/.influxdbv2/engine', '--http-bind-address=:$port', '--query-memory-bytes=20971520', '--query-concurrency=2'" +arguments="-ArgumentList 'run', '--bolt-path=$pathPrefix/.influxdbv2/influxd.bolt', '--engine-path=$pathPrefix/.influxdbv2/engine', '--http-bind-address=:$port', '--query-memory-bytes=300971520', '--query-concurrency=2'" redirect="-RedirectStandardOutput '$pathPrefix/logs/db.log' -RedirectStandardError '$pathPrefix/logs/db-error.log'" powershell -command "Start-Process -FilePath 'influxdb2-2.0.7-windows-amd64-$port/influxd' $arguments -NoNewWindow $redirect" sleep 3 + +sudo lsof -i:$port +if [ $? 
-eq 1 ]; then + echo "No process is listening on port $port" +fi \ No newline at end of file diff --git a/.github/scripts/dataSources/restart/iotdb.sh b/.github/scripts/dataSources/restart/iotdb.sh index d675f6f35e..ded650167b 100644 --- a/.github/scripts/dataSources/restart/iotdb.sh +++ b/.github/scripts/dataSources/restart/iotdb.sh @@ -1,4 +1,4 @@ -#!/bin/sh +#!/bin/bash # # IGinX - the polystore system with high performance # Copyright (C) Tsinghua University @@ -22,7 +22,7 @@ set -e port=$1 cd apache-iotdb-0.12.6-server-bin-$port/ sudo sysctl -w net.core.somaxconn=65535 -sudo sh -c "nohup sbin/start-server.sh >run.log 2>&1 &" +sudo -E sh -c "nohup sbin/start-server.sh >run.log 2>&1 &" sleep 3 sudo lsof -i:$port diff --git a/.github/scripts/dataSources/restart/iotdb_macos.sh b/.github/scripts/dataSources/restart/iotdb_macos.sh index b2b810d364..5e515deade 100644 --- a/.github/scripts/dataSources/restart/iotdb_macos.sh +++ b/.github/scripts/dataSources/restart/iotdb_macos.sh @@ -1,4 +1,4 @@ -#!/bin/sh +#!/bin/bash # # IGinX - the polystore system with high performance # Copyright (C) Tsinghua University @@ -23,10 +23,10 @@ set -e port=$1 cd apache-iotdb-0.12.6-server-bin-$port/ sudo sysctl -w kern.ipc.somaxconn=65535 -sudo sh -c "nohup sbin/start-server.sh >run.log 2>&1 &" +sudo -E sh -c "nohup sbin/start-server.sh >run.log 2>&1 &" sleep 3 sudo lsof -i:$port if [ $? 
-eq 1 ]; then echo "No process is listening on port $port" -fi \ No newline at end of file +fi diff --git a/.github/scripts/dataSources/restart/iotdb_windows.sh b/.github/scripts/dataSources/restart/iotdb_windows.sh index 9e131e2ea5..fee4f6a9fb 100644 --- a/.github/scripts/dataSources/restart/iotdb_windows.sh +++ b/.github/scripts/dataSources/restart/iotdb_windows.sh @@ -1,4 +1,4 @@ -#!/bin/sh +#!/bin/bash # # IGinX - the polystore system with high performance # Copyright (C) Tsinghua University diff --git a/.github/scripts/dataSources/restart/mongodb.sh b/.github/scripts/dataSources/restart/mongodb.sh index 531c4a92f0..69987cac9d 100644 --- a/.github/scripts/dataSources/restart/mongodb.sh +++ b/.github/scripts/dataSources/restart/mongodb.sh @@ -1,4 +1,4 @@ -#!/bin/sh +#!/bin/bash # # IGinX - the polystore system with high performance # Copyright (C) Tsinghua University diff --git a/.github/scripts/dataSources/restart/mongodb_macos.sh b/.github/scripts/dataSources/restart/mongodb_macos.sh index 5161bc1b50..807c132f64 100644 --- a/.github/scripts/dataSources/restart/mongodb_macos.sh +++ b/.github/scripts/dataSources/restart/mongodb_macos.sh @@ -1,4 +1,4 @@ -#!/bin/sh +#!/bin/bash # # IGinX - the polystore system with high performance # Copyright (C) Tsinghua University diff --git a/.github/scripts/dataSources/restart/mongodb_windows.sh b/.github/scripts/dataSources/restart/mongodb_windows.sh index 427dbf80c6..23510cdb8b 100644 --- a/.github/scripts/dataSources/restart/mongodb_windows.sh +++ b/.github/scripts/dataSources/restart/mongodb_windows.sh @@ -1,4 +1,4 @@ -#!/bin/sh +#!/bin/bash # # IGinX - the polystore system with high performance # Copyright (C) Tsinghua University diff --git a/.github/scripts/dataSources/restart/mysql.sh b/.github/scripts/dataSources/restart/mysql.sh index 56894c829e..02b5318595 100644 --- a/.github/scripts/dataSources/restart/mysql.sh +++ b/.github/scripts/dataSources/restart/mysql.sh @@ -1,4 +1,4 @@ -#!/bin/sh +#!/bin/bash # # IGinX - 
the polystore system with high performance # Copyright (C) Tsinghua University diff --git a/.github/scripts/dataSources/restart/mysql_macos.sh b/.github/scripts/dataSources/restart/mysql_macos.sh index 1aca0d98bc..3f83666090 100644 --- a/.github/scripts/dataSources/restart/mysql_macos.sh +++ b/.github/scripts/dataSources/restart/mysql_macos.sh @@ -1,4 +1,4 @@ -#!/bin/sh +#!/bin/bash # # IGinX - the polystore system with high performance # Copyright (C) Tsinghua University diff --git a/.github/scripts/dataSources/restart/mysql_windows.sh b/.github/scripts/dataSources/restart/mysql_windows.sh index 2d62cfd2a2..f2f5be88ed 100644 --- a/.github/scripts/dataSources/restart/mysql_windows.sh +++ b/.github/scripts/dataSources/restart/mysql_windows.sh @@ -1,4 +1,4 @@ -#!/bin/sh +#!/bin/bash # # IGinX - the polystore system with high performance # Copyright (C) Tsinghua University diff --git a/.github/scripts/dataSources/restart/postgresql.sh b/.github/scripts/dataSources/restart/postgresql.sh index cb1e5cc338..c5af3513a7 100644 --- a/.github/scripts/dataSources/restart/postgresql.sh +++ b/.github/scripts/dataSources/restart/postgresql.sh @@ -1,4 +1,4 @@ -#!/bin/sh +#!/bin/bash # # IGinX - the polystore system with high performance # Copyright (C) Tsinghua University diff --git a/.github/scripts/dataSources/restart/postgresql_macos.sh b/.github/scripts/dataSources/restart/postgresql_macos.sh index f135ff4f30..558afa1813 100644 --- a/.github/scripts/dataSources/restart/postgresql_macos.sh +++ b/.github/scripts/dataSources/restart/postgresql_macos.sh @@ -1,4 +1,4 @@ -#!/bin/sh +#!/bin/bash # # IGinX - the polystore system with high performance # Copyright (C) Tsinghua University diff --git a/.github/scripts/dataSources/restart/postgresql_windows.sh b/.github/scripts/dataSources/restart/postgresql_windows.sh index 0491352e5e..f98c3efd04 100644 --- a/.github/scripts/dataSources/restart/postgresql_windows.sh +++ b/.github/scripts/dataSources/restart/postgresql_windows.sh @@ -1,4 
+1,4 @@ -#!/bin/sh +#!/bin/bash # # IGinX - the polystore system with high performance # Copyright (C) Tsinghua University diff --git a/.github/scripts/dataSources/restart/redis.sh b/.github/scripts/dataSources/restart/redis.sh index 0c0ac1bc58..9a1b6737ec 100644 --- a/.github/scripts/dataSources/restart/redis.sh +++ b/.github/scripts/dataSources/restart/redis.sh @@ -1,4 +1,4 @@ -#!/bin/sh +#!/bin/bash # # IGinX - the polystore system with high performance # Copyright (C) Tsinghua University diff --git a/.github/scripts/dataSources/restart/redis_macos.sh b/.github/scripts/dataSources/restart/redis_macos.sh index 0c0ac1bc58..9a1b6737ec 100644 --- a/.github/scripts/dataSources/restart/redis_macos.sh +++ b/.github/scripts/dataSources/restart/redis_macos.sh @@ -1,4 +1,4 @@ -#!/bin/sh +#!/bin/bash # # IGinX - the polystore system with high performance # Copyright (C) Tsinghua University diff --git a/.github/scripts/dataSources/restart/redis_windows.sh b/.github/scripts/dataSources/restart/redis_windows.sh index b6f22aa51a..6521318c1c 100644 --- a/.github/scripts/dataSources/restart/redis_windows.sh +++ b/.github/scripts/dataSources/restart/redis_windows.sh @@ -1,4 +1,4 @@ -#!/bin/sh +#!/bin/bash # # IGinX - the polystore system with high performance # Copyright (C) Tsinghua University diff --git a/.github/scripts/dataSources/shutdown/influxdb.sh b/.github/scripts/dataSources/shutdown/influxdb.sh index 8ceb9f04ae..7fbb98b005 100644 --- a/.github/scripts/dataSources/shutdown/influxdb.sh +++ b/.github/scripts/dataSources/shutdown/influxdb.sh @@ -1,4 +1,4 @@ -#!/bin/sh +#!/bin/bash # # IGinX - the polystore system with high performance # Copyright (C) Tsinghua University diff --git a/.github/scripts/dataSources/shutdown/influxdb_macos.sh b/.github/scripts/dataSources/shutdown/influxdb_macos.sh index 711606f4d6..5787ad0456 100644 --- a/.github/scripts/dataSources/shutdown/influxdb_macos.sh +++ b/.github/scripts/dataSources/shutdown/influxdb_macos.sh @@ -1,4 +1,4 @@ 
-#!/bin/sh +#!/bin/bash # # IGinX - the polystore system with high performance # Copyright (C) Tsinghua University diff --git a/.github/scripts/dataSources/shutdown/influxdb_windows.sh b/.github/scripts/dataSources/shutdown/influxdb_windows.sh index 9341131f09..4f2d2a1369 100644 --- a/.github/scripts/dataSources/shutdown/influxdb_windows.sh +++ b/.github/scripts/dataSources/shutdown/influxdb_windows.sh @@ -1,4 +1,4 @@ -#!/bin/sh +#!/bin/bash # # IGinX - the polystore system with high performance # Copyright (C) Tsinghua University diff --git a/.github/scripts/dataSources/shutdown/iotdb.sh b/.github/scripts/dataSources/shutdown/iotdb.sh index a23d222d52..26663eca78 100644 --- a/.github/scripts/dataSources/shutdown/iotdb.sh +++ b/.github/scripts/dataSources/shutdown/iotdb.sh @@ -1,4 +1,4 @@ -#!/bin/sh +#!/bin/bash # # IGinX - the polystore system with high performance # Copyright (C) Tsinghua University diff --git a/.github/scripts/dataSources/shutdown/iotdb_macos.sh b/.github/scripts/dataSources/shutdown/iotdb_macos.sh index a23d222d52..26663eca78 100644 --- a/.github/scripts/dataSources/shutdown/iotdb_macos.sh +++ b/.github/scripts/dataSources/shutdown/iotdb_macos.sh @@ -1,4 +1,4 @@ -#!/bin/sh +#!/bin/bash # # IGinX - the polystore system with high performance # Copyright (C) Tsinghua University diff --git a/.github/scripts/dataSources/shutdown/iotdb_windows.sh b/.github/scripts/dataSources/shutdown/iotdb_windows.sh index a7da9348f6..163e1050fa 100644 --- a/.github/scripts/dataSources/shutdown/iotdb_windows.sh +++ b/.github/scripts/dataSources/shutdown/iotdb_windows.sh @@ -1,4 +1,4 @@ -#!/bin/sh +#!/bin/bash # # IGinX - the polystore system with high performance # Copyright (C) Tsinghua University diff --git a/.github/scripts/dataSources/shutdown/mongodb.sh b/.github/scripts/dataSources/shutdown/mongodb.sh index a444cb22d1..83b72dcd95 100644 --- a/.github/scripts/dataSources/shutdown/mongodb.sh +++ b/.github/scripts/dataSources/shutdown/mongodb.sh @@ -1,4 +1,4 @@ 
-#!/bin/sh +#!/bin/bash # # IGinX - the polystore system with high performance # Copyright (C) Tsinghua University diff --git a/.github/scripts/dataSources/shutdown/mongodb_macos.sh b/.github/scripts/dataSources/shutdown/mongodb_macos.sh index a444cb22d1..83b72dcd95 100644 --- a/.github/scripts/dataSources/shutdown/mongodb_macos.sh +++ b/.github/scripts/dataSources/shutdown/mongodb_macos.sh @@ -1,4 +1,4 @@ -#!/bin/sh +#!/bin/bash # # IGinX - the polystore system with high performance # Copyright (C) Tsinghua University diff --git a/.github/scripts/dataSources/shutdown/mongodb_windows.sh b/.github/scripts/dataSources/shutdown/mongodb_windows.sh index d1efef5901..9ebaf0586a 100644 --- a/.github/scripts/dataSources/shutdown/mongodb_windows.sh +++ b/.github/scripts/dataSources/shutdown/mongodb_windows.sh @@ -1,4 +1,4 @@ -#!/bin/sh +#!/bin/bash # # IGinX - the polystore system with high performance # Copyright (C) Tsinghua University diff --git a/.github/scripts/dataSources/shutdown/mysql.sh b/.github/scripts/dataSources/shutdown/mysql.sh index 483ad8788d..3d901119e2 100644 --- a/.github/scripts/dataSources/shutdown/mysql.sh +++ b/.github/scripts/dataSources/shutdown/mysql.sh @@ -1,4 +1,4 @@ -#!/bin/sh +#!/bin/bash # # IGinX - the polystore system with high performance # Copyright (C) Tsinghua University diff --git a/.github/scripts/dataSources/shutdown/mysql_macos.sh b/.github/scripts/dataSources/shutdown/mysql_macos.sh index 483ad8788d..3d901119e2 100644 --- a/.github/scripts/dataSources/shutdown/mysql_macos.sh +++ b/.github/scripts/dataSources/shutdown/mysql_macos.sh @@ -1,4 +1,4 @@ -#!/bin/sh +#!/bin/bash # # IGinX - the polystore system with high performance # Copyright (C) Tsinghua University diff --git a/.github/scripts/dataSources/shutdown/mysql_windows.sh b/.github/scripts/dataSources/shutdown/mysql_windows.sh index ac126f3967..a845a8f618 100644 --- a/.github/scripts/dataSources/shutdown/mysql_windows.sh +++ 
b/.github/scripts/dataSources/shutdown/mysql_windows.sh @@ -1,4 +1,4 @@ -#!/bin/sh +#!/bin/bash # # IGinX - the polystore system with high performance # Copyright (C) Tsinghua University diff --git a/.github/scripts/dataSources/shutdown/postgresql.sh b/.github/scripts/dataSources/shutdown/postgresql.sh index 628174f7ab..351573586b 100644 --- a/.github/scripts/dataSources/shutdown/postgresql.sh +++ b/.github/scripts/dataSources/shutdown/postgresql.sh @@ -1,4 +1,4 @@ -#!/bin/sh +#!/bin/bash # # IGinX - the polystore system with high performance # Copyright (C) Tsinghua University diff --git a/.github/scripts/dataSources/shutdown/postgresql_macos.sh b/.github/scripts/dataSources/shutdown/postgresql_macos.sh index 628174f7ab..351573586b 100644 --- a/.github/scripts/dataSources/shutdown/postgresql_macos.sh +++ b/.github/scripts/dataSources/shutdown/postgresql_macos.sh @@ -1,4 +1,4 @@ -#!/bin/sh +#!/bin/bash # # IGinX - the polystore system with high performance # Copyright (C) Tsinghua University diff --git a/.github/scripts/dataSources/shutdown/postgresql_windows.sh b/.github/scripts/dataSources/shutdown/postgresql_windows.sh index 11c2711171..6956cf0606 100644 --- a/.github/scripts/dataSources/shutdown/postgresql_windows.sh +++ b/.github/scripts/dataSources/shutdown/postgresql_windows.sh @@ -1,4 +1,4 @@ -#!/bin/sh +#!/bin/bash # # IGinX - the polystore system with high performance # Copyright (C) Tsinghua University diff --git a/.github/scripts/dataSources/shutdown/redis.sh b/.github/scripts/dataSources/shutdown/redis.sh index 31afbabf1e..05d6140b5a 100644 --- a/.github/scripts/dataSources/shutdown/redis.sh +++ b/.github/scripts/dataSources/shutdown/redis.sh @@ -1,4 +1,4 @@ -#!/bin/sh +#!/bin/bash # # IGinX - the polystore system with high performance # Copyright (C) Tsinghua University diff --git a/.github/scripts/dataSources/shutdown/redis_macos.sh b/.github/scripts/dataSources/shutdown/redis_macos.sh index 31afbabf1e..05d6140b5a 100644 --- 
a/.github/scripts/dataSources/shutdown/redis_macos.sh +++ b/.github/scripts/dataSources/shutdown/redis_macos.sh @@ -1,4 +1,4 @@ -#!/bin/sh +#!/bin/bash # # IGinX - the polystore system with high performance # Copyright (C) Tsinghua University diff --git a/.github/scripts/dataSources/shutdown/redis_windows.sh b/.github/scripts/dataSources/shutdown/redis_windows.sh index 31afbabf1e..05d6140b5a 100644 --- a/.github/scripts/dataSources/shutdown/redis_windows.sh +++ b/.github/scripts/dataSources/shutdown/redis_windows.sh @@ -1,4 +1,4 @@ -#!/bin/sh +#!/bin/bash # # IGinX - the polystore system with high performance # Copyright (C) Tsinghua University diff --git a/.github/scripts/dataSources/startup/filesystem.sh b/.github/scripts/dataSources/startup/filesystem.sh index b49c4000bd..4fecd474e6 100644 --- a/.github/scripts/dataSources/startup/filesystem.sh +++ b/.github/scripts/dataSources/startup/filesystem.sh @@ -1,4 +1,4 @@ -#!/bin/sh +#!/bin/bash # # IGinX - the polystore system with high performance # Copyright (C) Tsinghua University @@ -44,7 +44,9 @@ sed -i"" -e "s/#write.buffer.size=[0-9]*/#write.buffer.size=1048576/g" $7 sed -i"" -e "s/#client.connectPool.maxTotal=[0-9]*/#client.connectPool.maxTotal=2/g" $7 -if [ "$8" = "etcd" ]; then +sed -i"" -e "s|pythonCMD=python3|pythonCMD=$8|g" $7 + +if [ "$9" = "etcd" ]; then sed -i"" -e 's/^metaStorage=.*$/metaStorage=etcd/g' $7 sed -i"" -e 's/^zookeeperConnectionString=/#zookeeperConnectionString=/g' $7 sed -i"" -e 's/^#etcdEndpoints=/etcdEndpoints=/g' $7 diff --git a/.github/scripts/dataSources/startup/influxdb.sh b/.github/scripts/dataSources/startup/influxdb.sh index 53f8c44aa2..de9529934a 100644 --- a/.github/scripts/dataSources/startup/influxdb.sh +++ b/.github/scripts/dataSources/startup/influxdb.sh @@ -44,5 +44,5 @@ do # target path is also used in update/ script sh -c "sudo cp -r influxdb2-2.0.7-linux-amd64/ influxdb2-2.0.7-linux-amd64-$port/" - sudo sh -c "cd influxdb2-2.0.7-linux-amd64-$port/; nohup ./influxd run 
--bolt-path=~/.influxdbv2/influxd.bolt --engine-path=~/.influxdbv2/engine --http-bind-address=:$port --query-memory-bytes=20971520 --query-concurrency=2 & echo \$! > influxdb.pid" + sudo -E sh -c "cd influxdb2-2.0.7-linux-amd64-$port/; nohup ./influxd run --bolt-path=~/.influxdbv2/influxd.bolt --engine-path=~/.influxdbv2/engine --http-bind-address=:$port --query-memory-bytes=300971520 --query-concurrency=2 & echo \$! > influxdb.pid" done \ No newline at end of file diff --git a/.github/scripts/dataSources/startup/influxdb_macos.sh b/.github/scripts/dataSources/startup/influxdb_macos.sh index 2ba8b67af2..8a07855dd2 100644 --- a/.github/scripts/dataSources/startup/influxdb_macos.sh +++ b/.github/scripts/dataSources/startup/influxdb_macos.sh @@ -44,5 +44,5 @@ do # target path is also used in update/ script sh -c "sudo cp -r influxdb2-2.0.7-darwin-amd64/ influxdb2-2.0.7-darwin-amd64-$port/" - sudo sh -c "cd influxdb2-2.0.7-darwin-amd64-$port/; nohup ./influxd run --bolt-path=~/.influxdbv2/influxd.bolt --engine-path=~/.influxdbv2/engine --http-bind-address=:$port --query-memory-bytes=20971520 --query-concurrency=2 & echo \$! > influxdb.pid" + sudo -E sh -c "cd influxdb2-2.0.7-darwin-amd64-$port/; nohup ./influxd run --bolt-path=~/.influxdbv2/influxd.bolt --engine-path=~/.influxdbv2/engine --http-bind-address=:$port --query-memory-bytes=300971520 --query-concurrency=2 & echo \$! 
> influxdb.pid" done \ No newline at end of file diff --git a/.github/scripts/dataSources/startup/influxdb_windows.sh b/.github/scripts/dataSources/startup/influxdb_windows.sh index f015a9ffd9..316ef19d1c 100644 --- a/.github/scripts/dataSources/startup/influxdb_windows.sh +++ b/.github/scripts/dataSources/startup/influxdb_windows.sh @@ -54,7 +54,7 @@ do pathPrefix="influxdb2-2.0.7-windows-amd64-$port" - arguments="-ArgumentList 'run', '--bolt-path=$pathPrefix/.influxdbv2/influxd.bolt', '--engine-path=$pathPrefix/.influxdbv2/engine', '--http-bind-address=:$port', '--query-memory-bytes=20971520', '--query-concurrency=2'" + arguments="-ArgumentList 'run', '--bolt-path=$pathPrefix/.influxdbv2/influxd.bolt', '--engine-path=$pathPrefix/.influxdbv2/engine', '--http-bind-address=:$port', '--query-memory-bytes=300971520', '--query-concurrency=2'" redirect="-RedirectStandardOutput '$pathPrefix/logs/db.log' -RedirectStandardError '$pathPrefix/logs/db-error.log'" diff --git a/.github/scripts/dataSources/startup/iotdb12.sh b/.github/scripts/dataSources/startup/iotdb12.sh index 0e11971ab7..caf6d33991 100644 --- a/.github/scripts/dataSources/startup/iotdb12.sh +++ b/.github/scripts/dataSources/startup/iotdb12.sh @@ -1,4 +1,4 @@ -#!/bin/sh +#!/bin/bash # # IGinX - the polystore system with high performance # Copyright (C) Tsinghua University @@ -37,5 +37,5 @@ do sh -c "sudo sed -i 's/6667/$port/g' apache-iotdb-0.12.6-server-bin-$port/conf/iotdb-engine.properties" - sudo sh -c "cd apache-iotdb-0.12.6-server-bin-$port/; nohup sbin/start-server.sh &" + sudo -E sh -c "cd apache-iotdb-0.12.6-server-bin-$port/; nohup sbin/start-server.sh &" done diff --git a/.github/scripts/dataSources/startup/iotdb12_macos.sh b/.github/scripts/dataSources/startup/iotdb12_macos.sh index 5298b95bf6..7133ba3470 100644 --- a/.github/scripts/dataSources/startup/iotdb12_macos.sh +++ b/.github/scripts/dataSources/startup/iotdb12_macos.sh @@ -1,4 +1,4 @@ -#!/bin/sh +#!/bin/bash # # IGinX - the polystore 
system with high performance # Copyright (C) Tsinghua University @@ -37,5 +37,5 @@ do sh -c "sudo sed -i '' 's/6667/$port/' apache-iotdb-0.12.6-server-bin-$port/conf/iotdb-engine.properties" - sudo sh -c "cd apache-iotdb-0.12.6-server-bin-$port/; nohup sbin/start-server.sh &" + sudo -E sh -c "cd apache-iotdb-0.12.6-server-bin-$port/; nohup sbin/start-server.sh &" done diff --git a/.github/scripts/dataSources/startup/iotdb12_windows.sh b/.github/scripts/dataSources/startup/iotdb12_windows.sh index acd6cab6e3..516fb51d37 100644 --- a/.github/scripts/dataSources/startup/iotdb12_windows.sh +++ b/.github/scripts/dataSources/startup/iotdb12_windows.sh @@ -1,4 +1,4 @@ -#!/bin/sh +#!/bin/bash # # IGinX - the polystore system with high performance # Copyright (C) Tsinghua University diff --git a/.github/scripts/dataSources/update/influxdb.sh b/.github/scripts/dataSources/update/influxdb.sh index df5737b298..6c5c998eec 100644 --- a/.github/scripts/dataSources/update/influxdb.sh +++ b/.github/scripts/dataSources/update/influxdb.sh @@ -1,4 +1,4 @@ -#!/bin/sh +#!/bin/bash # # IGinX - the polystore system with high performance # Copyright (C) Tsinghua University diff --git a/.github/scripts/dataSources/update/influxdb_macos.sh b/.github/scripts/dataSources/update/influxdb_macos.sh index 8daa987745..8b96524cb5 100644 --- a/.github/scripts/dataSources/update/influxdb_macos.sh +++ b/.github/scripts/dataSources/update/influxdb_macos.sh @@ -1,4 +1,4 @@ -#!/bin/sh +#!/bin/bash # # IGinX - the polystore system with high performance # Copyright (C) Tsinghua University diff --git a/.github/scripts/dataSources/update/influxdb_windows.sh b/.github/scripts/dataSources/update/influxdb_windows.sh index 160a36c4f2..2886b86dd1 100644 --- a/.github/scripts/dataSources/update/influxdb_windows.sh +++ b/.github/scripts/dataSources/update/influxdb_windows.sh @@ -1,4 +1,4 @@ -#!/bin/sh +#!/bin/bash # # IGinX - the polystore system with high performance # Copyright (C) Tsinghua University diff --git 
a/.github/scripts/dataSources/update/iotdb.sh b/.github/scripts/dataSources/update/iotdb.sh index d4ccd6d608..f3714869fb 100644 --- a/.github/scripts/dataSources/update/iotdb.sh +++ b/.github/scripts/dataSources/update/iotdb.sh @@ -1,4 +1,4 @@ -#!/bin/sh +#!/bin/bash # # IGinX - the polystore system with high performance # Copyright (C) Tsinghua University diff --git a/.github/scripts/dataSources/update/iotdb_macos.sh b/.github/scripts/dataSources/update/iotdb_macos.sh index d4ccd6d608..f3714869fb 100644 --- a/.github/scripts/dataSources/update/iotdb_macos.sh +++ b/.github/scripts/dataSources/update/iotdb_macos.sh @@ -1,4 +1,4 @@ -#!/bin/sh +#!/bin/bash # # IGinX - the polystore system with high performance # Copyright (C) Tsinghua University diff --git a/.github/scripts/dataSources/update/iotdb_windows.sh b/.github/scripts/dataSources/update/iotdb_windows.sh index a60e18c78a..f28bf316b5 100644 --- a/.github/scripts/dataSources/update/iotdb_windows.sh +++ b/.github/scripts/dataSources/update/iotdb_windows.sh @@ -1,4 +1,4 @@ -#!/bin/sh +#!/bin/bash # # IGinX - the polystore system with high performance # Copyright (C) Tsinghua University diff --git a/.github/scripts/dataSources/update/mysql.sh b/.github/scripts/dataSources/update/mysql.sh index e5fb32035a..3aff98aa71 100644 --- a/.github/scripts/dataSources/update/mysql.sh +++ b/.github/scripts/dataSources/update/mysql.sh @@ -1,4 +1,4 @@ -#!/bin/sh +#!/bin/bash # # IGinX - the polystore system with high performance # Copyright (C) Tsinghua University diff --git a/.github/scripts/dataSources/update/mysql_macos.sh b/.github/scripts/dataSources/update/mysql_macos.sh index e5fb32035a..3aff98aa71 100644 --- a/.github/scripts/dataSources/update/mysql_macos.sh +++ b/.github/scripts/dataSources/update/mysql_macos.sh @@ -1,4 +1,4 @@ -#!/bin/sh +#!/bin/bash # # IGinX - the polystore system with high performance # Copyright (C) Tsinghua University diff --git a/.github/scripts/dataSources/update/mysql_windows.sh 
b/.github/scripts/dataSources/update/mysql_windows.sh index e5fb32035a..3aff98aa71 100644 --- a/.github/scripts/dataSources/update/mysql_windows.sh +++ b/.github/scripts/dataSources/update/mysql_windows.sh @@ -1,4 +1,4 @@ -#!/bin/sh +#!/bin/bash # # IGinX - the polystore system with high performance # Copyright (C) Tsinghua University diff --git a/.github/scripts/dataSources/update/postgresql.sh b/.github/scripts/dataSources/update/postgresql.sh index d32940c16c..545aae6dc5 100644 --- a/.github/scripts/dataSources/update/postgresql.sh +++ b/.github/scripts/dataSources/update/postgresql.sh @@ -1,4 +1,4 @@ -#!/bin/sh +#!/bin/bash # # IGinX - the polystore system with high performance # Copyright (C) Tsinghua University diff --git a/.github/scripts/dataSources/update/postgresql_macos.sh b/.github/scripts/dataSources/update/postgresql_macos.sh index d32940c16c..545aae6dc5 100644 --- a/.github/scripts/dataSources/update/postgresql_macos.sh +++ b/.github/scripts/dataSources/update/postgresql_macos.sh @@ -1,4 +1,4 @@ -#!/bin/sh +#!/bin/bash # # IGinX - the polystore system with high performance # Copyright (C) Tsinghua University diff --git a/.github/scripts/dataSources/update/postgresql_windows.sh b/.github/scripts/dataSources/update/postgresql_windows.sh index 9525bcc34d..7b44dd2f20 100644 --- a/.github/scripts/dataSources/update/postgresql_windows.sh +++ b/.github/scripts/dataSources/update/postgresql_windows.sh @@ -1,4 +1,4 @@ -#!/bin/sh +#!/bin/bash # # IGinX - the polystore system with high performance # Copyright (C) Tsinghua University diff --git a/.github/scripts/iginx/iginx.sh b/.github/scripts/iginx/iginx.sh index 9091b0241c..ee9cf74cb8 100644 --- a/.github/scripts/iginx/iginx.sh +++ b/.github/scripts/iginx/iginx.sh @@ -1,4 +1,4 @@ -#!/bin/sh +#!/bin/bash # # IGinX - the polystore system with high performance # Copyright (C) Tsinghua University diff --git a/.github/scripts/iginx/iginx_macos.sh b/.github/scripts/iginx/iginx_macos.sh index 24702790db..ec90ad61ad 
100644 --- a/.github/scripts/iginx/iginx_macos.sh +++ b/.github/scripts/iginx/iginx_macos.sh @@ -1,4 +1,4 @@ -#!/bin/sh +#!/bin/bash # # IGinX - the polystore system with high performance # Copyright (C) Tsinghua University diff --git a/.github/scripts/iginx/iginx_udf_path.sh b/.github/scripts/iginx/iginx_udf_path.sh index 71a7b074af..5ae587106b 100644 --- a/.github/scripts/iginx/iginx_udf_path.sh +++ b/.github/scripts/iginx/iginx_udf_path.sh @@ -1,4 +1,4 @@ -#!/bin/sh +#!/bin/bash # # IGinX - the polystore system with high performance # Copyright (C) Tsinghua University diff --git a/.github/scripts/iginx/iginx_windows.sh b/.github/scripts/iginx/iginx_windows.sh index efabcb8711..3162fc1027 100644 --- a/.github/scripts/iginx/iginx_windows.sh +++ b/.github/scripts/iginx/iginx_windows.sh @@ -1,4 +1,4 @@ -#!/bin/sh +#!/bin/bash # # IGinX - the polystore system with high performance # Copyright (C) Tsinghua University @@ -32,7 +32,11 @@ sed -i 's/-Xmx%MAX_HEAP_SIZE% -Xms%MAX_HEAP_SIZE%/-Xmx4g -Xms4g -XX:MaxMetaspace echo "starting iginx on windows..." 
-powershell -Command "Start-Process -FilePath '$batPath' -NoNewWindow -RedirectStandardOutput 'iginx-$1.log' -RedirectStandardError 'iginx-$1-error.log'" +if [[ "$IGINX_CONDA_FLAG" == "true" ]]; then + powershell -Command "conda activate $IGINX_CONDA_ENV;Start-Process -FilePath '$batPath' -NoNewWindow -RedirectStandardOutput 'iginx-$1.log' -RedirectStandardError 'iginx-$1-error.log'" +else + powershell -Command "Start-Process -FilePath '$batPath' -NoNewWindow -RedirectStandardOutput 'iginx-$1.log' -RedirectStandardError 'iginx-$1-error.log'" +fi sh -c "sleep 3" diff --git a/.github/workflows/DB-CE.yml b/.github/workflows/DB-CE.yml index 0b1f42105e..108d613f08 100644 --- a/.github/workflows/DB-CE.yml +++ b/.github/workflows/DB-CE.yml @@ -11,7 +11,7 @@ on: description: "The python version to run the test on" type: string required: false - default: '["3.9"]' + default: '["3.11"]' os-matrix: description: "The operating system to run the test on" type: string @@ -68,6 +68,8 @@ jobs: with: python-version: ${{ matrix.python-version }} java: ${{ matrix.java }} + iginx-conda-flag: "true" + iginx-conda-env: "iginxEnv" - name: Run Metadata uses: ./.github/actions/metadataRunner @@ -101,14 +103,14 @@ jobs: Test-Way: ${{ matrix.test-way }} - name: Write DB history data - shell: bash + shell: bash -el {0} run: mvn test -q -Dtest=${{ matrix.DB-name }}HistoryDataGenerator#${{ matrix.test-way }} -DfailIfNoTests=false -P-format - name: Start IGinX uses: ./.github/actions/iginxRunner - name: ${{ matrix.test-way }} IT - shell: bash + shell: bash -el {0} run: mvn test -q -Dtest=${{ matrix.DB-name }}CapacityExpansionIT#${{ matrix.test-way }} -DfailIfNoTests=false -P-format - name: Clear history data @@ -116,7 +118,7 @@ jobs: run: mvn test -q -Dtest=${{ matrix.DB-name }}HistoryDataGenerator#clearHistoryData -DfailIfNoTests=false -P-format - name: ${{ matrix.test-way }} Normal IT - shell: bash + shell: bash -el {0} run: mvn test -q -Dtest=${FUNCTEST} -DfailIfNoTests=false -P-format - 
name: Show IGinX log diff --git a/.github/workflows/assembly-test.yml b/.github/workflows/assembly-test.yml index 8764846b12..7bdf0f5e2d 100644 --- a/.github/workflows/assembly-test.yml +++ b/.github/workflows/assembly-test.yml @@ -11,7 +11,7 @@ on: description: "The python version to run the test on" type: string required: false - default: '["3.9"]' + default: '["3.11"]' os-matrix: description: "The operating system to run the test on" type: string @@ -76,7 +76,7 @@ jobs: fi - name: Run Tests - shell: bash + shell: bash -el {0} run: mvn test -Dtest=${{ env.FUNCTEST }} -DfailIfNoTests=false -P-format - name: Check Whether Logs Contains Error diff --git a/.github/workflows/case-regression.yml b/.github/workflows/case-regression.yml index 4fefbf62fe..dc70a5c7f8 100644 --- a/.github/workflows/case-regression.yml +++ b/.github/workflows/case-regression.yml @@ -12,7 +12,7 @@ on: description: "The python version to run the test on" type: string required: false - default: '["3.9"]' + default: '["3.11"]' os-matrix: description: "The operating system to run the test on" type: string @@ -42,6 +42,8 @@ jobs: with: python-version: ${{ matrix.python-version }} java: ${{ matrix.java }} + iginx-conda-flag: "true" + iginx-conda-env: "iginxEnv" - name: Run Metadata uses: ./.github/actions/metadataRunner @@ -68,7 +70,7 @@ jobs: - name: TestMixCluster if: always() - shell: bash + shell: bash -el {0} run: | mvn test -q -Dtest=MixClusterShowColumnsRegressionTest -DfailIfNoTests=false -P-format diff --git a/.github/workflows/free-thread-test.yml b/.github/workflows/free-thread-test.yml new file mode 100644 index 0000000000..48f91b12c6 --- /dev/null +++ b/.github/workflows/free-thread-test.yml @@ -0,0 +1,87 @@ +name: "Free-threading Python Test" + +on: + workflow_call: + inputs: + java-matrix: + description: "The java version to run the test on" + type: string + required: false + default: '["8"]' + os-matrix: + description: "The operating system to run the test on" + type: string + 
required: false + default: '["ubuntu-latest", "macos-latest", "windows-latest"]' + metadata-matrix: + description: "The metadata to run the test on" + type: string + required: false + default: '["zookeeper", "etcd"]' + db-matrix: + description: "The database to run the test on" + type: string + required: false + default: '["IoTDB12", "FileSystem"]' + +jobs: + Free-threading-Python-Test: + timeout-minutes: 40 + strategy: + fail-fast: false + matrix: + java: ${{ fromJSON(inputs.java-matrix) }} + os: ${{ fromJSON(inputs.os-matrix) }} + metadata: ${{ fromJSON(inputs.metadata-matrix) }} + DB-name: ${{ fromJSON(inputs.db-matrix) }} + runs-on: ${{ matrix.os }} + steps: + - uses: actions/checkout@v4 + # use free-threading python instead of stable version + - name: Environment dependence + uses: ./.github/actions/dependence + with: + java: ${{ matrix.java }} + free-thread-python-required: "true" + scope: "only-java" + + - name: Run Metadata + uses: ./.github/actions/metadataRunner + with: + metadata: ${{ matrix.metadata }} + + - name: Run DB + uses: ./.github/actions/dbRunner + with: + DB-name: ${{ matrix.DB-name }} + + - name: Install IGinX with Maven + shell: bash + run: | + mvn clean package -DskipTests -P-format -q + + - name: Change IGinX config + uses: ./.github/actions/confWriter + with: + DB-name: ${{ matrix.DB-name }} + Set-Filter-Fragment-OFF: "true" + Metadata: ${{ matrix.metadata }} + use-free-thread-python: "true" + + - name: Start IGinX + uses: ./.github/actions/iginxRunner + + - name: TestController IT + if: always() + shell: bash + env: + METADATA_STORAGE: ${{ matrix.metadata }} + run: | + chmod +x "${GITHUB_WORKSPACE}/.github/scripts/test/test_union.sh" + mvn test -q -Dtest=Controller -DfailIfNoTests=false -P-format + + - name: Show IGinX log + if: always() + shell: bash + run: | + cat iginx-*.log diff --git a/.github/workflows/full-test-suite.yml b/.github/workflows/full-test-suite.yml index c13906632b..c7d89ce9e3 100644 --- 
a/.github/workflows/full-test-suite.yml +++ b/.github/workflows/full-test-suite.yml @@ -24,5 +24,5 @@ jobs: uses: ./.github/workflows/DB-CE.yml remote-test: uses: ./.github/workflows/remote-test.yml - assemebly-test: + assembly-test: uses: ./.github/workflows/assembly-test.yml diff --git a/.github/workflows/remote-test.yml b/.github/workflows/remote-test.yml index 65d36c2561..ecb73c12bb 100644 --- a/.github/workflows/remote-test.yml +++ b/.github/workflows/remote-test.yml @@ -11,7 +11,7 @@ on: description: "The python version to run the test on" type: string required: false - default: '["3.9"]' + default: '["3.11"]' os-matrix: description: "The operating system to run the test on" type: string @@ -56,6 +56,8 @@ jobs: python-version: ${{ matrix.python-version }} java: ${{ matrix.java }} docker-required: true + iginx-conda-flag: "true" + iginx-conda-env: "iginxEnv" - if: runner.os == 'Windows' name: Set up Docker Firewall on Windows @@ -127,7 +129,7 @@ jobs: fi - name: Run Tests - shell: bash + shell: bash -el {0} run: | mvn test -q -Dtest=${FUNCTEST} -DfailIfNoTests=false -P-format diff --git a/.github/workflows/standalone-test-no-optimizer.yml b/.github/workflows/standalone-test-no-optimizer.yml index 963329f43c..e44ee0f997 100644 --- a/.github/workflows/standalone-test-no-optimizer.yml +++ b/.github/workflows/standalone-test-no-optimizer.yml @@ -12,7 +12,7 @@ on: description: "The python version to run the test on" type: string required: false - default: '["3.9"]' + default: '["3.11"]' os-matrix: description: "The operating system to run the test on" type: string @@ -32,7 +32,7 @@ on: description: "The timeout minutes for the job" type: number required: false - default: 35 + default: 40 jobs: Union-DB-Test-Push_Down: @@ -53,6 +53,8 @@ jobs: with: python-version: ${{ matrix.python-version }} java: ${{ matrix.java }} + iginx-conda-flag: "true" + iginx-conda-env: "iginxEnv" - if: runner.os == 'Windows' name: Set JAVA_OPTS @@ -85,7 +87,7 @@ jobs: - name: 
TestController IT if: always() - shell: bash + shell: bash -el {0} env: METADATA_STORAGE: ${{ matrix.metadata }} run: | diff --git a/.github/workflows/standalone-test.yml b/.github/workflows/standalone-test.yml index bda6f53835..e7ee003684 100644 --- a/.github/workflows/standalone-test.yml +++ b/.github/workflows/standalone-test.yml @@ -12,7 +12,7 @@ on: description: "The python version to run the test on" type: string required: false - default: '["3.9"]' + default: '["3.11"]' os-matrix: description: "The operating system to run the test on" type: string @@ -32,7 +32,7 @@ on: description: "The timeout minutes for the job" type: number required: false - default: 40 + default: 45 jobs: Union-DB-Test: @@ -53,6 +53,8 @@ jobs: with: python-version: ${{ matrix.python-version }} java: ${{ matrix.java }} + iginx-conda-flag: "true" + iginx-conda-env: "iginxEnv" - name: Run Metadata uses: ./.github/actions/metadataRunner @@ -83,7 +85,7 @@ jobs: - name: Run UDF path test if: always() - shell: bash + shell: bash -el {0} run: | mvn test -q -Dtest=UDFPathIT -DfailIfNoTests=false -P-format if [ "$RUNNER_OS" == "Linux" ]; then @@ -101,7 +103,7 @@ jobs: # 1. test relative path # 2. 
prepare for restart test - name: Run Transform Job tests - shell: bash + shell: bash -el {0} run: | mvn test -q -Dtest=TransformJobPathIT#prepare -DfailIfNoTests=false -P-format if [ "$RUNNER_OS" == "Linux" ]; then @@ -128,7 +130,7 @@ jobs: if-test-udf: "true" - name: Verify Transform Job - shell: bash + shell: bash -el {0} run: | mvn test -q -Dtest=TransformJobRestartIT#verifyJobExists -DfailIfNoTests=false -P-format @@ -153,7 +155,7 @@ jobs: # TODO: extract it to a separate job to test # large image export only tested in FileSystem - name: Test Client Export File - shell: bash + shell: bash -el {0} run: | if [[ "${{ matrix.DB-name }}" == "FileSystem" ]]; then mvn test -q -Dtest=ExportFileIT -DfailIfNoTests=false -P-format @@ -175,7 +177,7 @@ jobs: - name: Test Client Import File if: always() - shell: bash + shell: bash -el {0} run: | mvn test -q -Dtest=ImportFileIT -DfailIfNoTests=false -P-format @@ -187,7 +189,7 @@ jobs: - name: TestController IT if: always() - shell: bash + shell: bash -el {0} env: METADATA_STORAGE: ${{ matrix.metadata }} run: | @@ -215,7 +217,7 @@ jobs: - name: FilterFragmentRuleTest IT if: always() - shell: bash + shell: bash -el {0} run: | chmod +x "${GITHUB_WORKSPACE}/.github/scripts/test/test_union.sh" mvn test -q -Dtest=SQLSessionIT#testFilterFragmentOptimizer -DfailIfNoTests=false -P-format diff --git a/.github/workflows/standard-test-suite.yml b/.github/workflows/standard-test-suite.yml index d250f6f675..a0251346ad 100644 --- a/.github/workflows/standard-test-suite.yml +++ b/.github/workflows/standard-test-suite.yml @@ -42,6 +42,10 @@ jobs: metadata-matrix: '["zookeeper"]' assembly-test: uses: ./.github/workflows/assembly-test.yml + free-thread-test: + uses: ./.github/workflows/free-thread-test.yml + with: + metadata-matrix: '["zookeeper"]' tpc-h-regression-test: uses: ./.github/workflows/tpc-h.yml with: diff --git a/.github/workflows/tpc-h.yml b/.github/workflows/tpc-h.yml index 5ad4c82afa..08a8242fcb 100644 --- 
a/.github/workflows/tpc-h.yml +++ b/.github/workflows/tpc-h.yml @@ -12,7 +12,7 @@ on: description: "The python version to run the test on" type: string required: false - default: '["3.9"]' + default: '["3.11"]' os-matrix: description: "The operating system to run the test on" type: string @@ -64,6 +64,8 @@ jobs: with: python-version: ${{ matrix.python-version }} java: ${{ matrix.java }} + iginx-conda-flag: "true" + iginx-conda-env: "iginxEnv" - name: Display System Info shell: bash diff --git a/.github/workflows/unit-test.yml b/.github/workflows/unit-test.yml index eede84419b..adeb4a3782 100644 --- a/.github/workflows/unit-test.yml +++ b/.github/workflows/unit-test.yml @@ -12,7 +12,7 @@ on: description: "The python version to run the test on" type: string required: false - default: '["3.9"]' + default: '["3.11"]' os-matrix: description: "The operating system to run the test on" type: string diff --git a/core/src/main/java/cn/edu/tsinghua/iginx/engine/physical/storage/StorageManager.java b/core/src/main/java/cn/edu/tsinghua/iginx/engine/physical/storage/StorageManager.java index 732e60b382..16fc4d7640 100644 --- a/core/src/main/java/cn/edu/tsinghua/iginx/engine/physical/storage/StorageManager.java +++ b/core/src/main/java/cn/edu/tsinghua/iginx/engine/physical/storage/StorageManager.java @@ -99,7 +99,9 @@ public static Pair getBoundaryOfStorage( return null; } finally { try { - storage.release(); + if (storage != null) { + storage.release(); + } } catch (Exception e) { LOGGER.error("release session pool failure!", e); } diff --git a/core/src/main/java/cn/edu/tsinghua/iginx/engine/shared/function/manager/FunctionManager.java b/core/src/main/java/cn/edu/tsinghua/iginx/engine/shared/function/manager/FunctionManager.java index f1c4cae304..e45b3497f1 100644 --- a/core/src/main/java/cn/edu/tsinghua/iginx/engine/shared/function/manager/FunctionManager.java +++ b/core/src/main/java/cn/edu/tsinghua/iginx/engine/shared/function/manager/FunctionManager.java @@ -20,6 +20,7 @@ 
package cn.edu.tsinghua.iginx.engine.shared.function.manager; import static cn.edu.tsinghua.iginx.utils.ShellRunner.runCommand; +import static cn.edu.tsinghua.iginx.utils.ShellRunner.runCommandAndGetResult; import cn.edu.tsinghua.iginx.conf.Config; import cn.edu.tsinghua.iginx.conf.ConfigDescriptor; @@ -68,6 +69,17 @@ public class FunctionManager { private FunctionManager() { this.functions = new HashMap<>(); + LOGGER.debug("main thread: using pythonCMD: {}", PythonCMD); + if (LOGGER.isDebugEnabled()) { + try { + String sitePath = + runCommandAndGetResult( + "", PythonCMD, "-c", "import sysconfig; print(sysconfig.get_paths()['purelib'])"); + LOGGER.debug("main thread: python site path: {}", sitePath); + } catch (Exception e) { + LOGGER.debug("failed to get purelib path", e); + } + } this.initSystemFunctions(); if (config.isNeedInitBasicUDFFunctions()) { this.initBasicUDFFunctions(); diff --git a/core/src/main/java/cn/edu/tsinghua/iginx/engine/shared/function/udf/python/PyUDF.java b/core/src/main/java/cn/edu/tsinghua/iginx/engine/shared/function/udf/python/PyUDF.java index a6f35ab6a7..2736a5dc98 100644 --- a/core/src/main/java/cn/edu/tsinghua/iginx/engine/shared/function/udf/python/PyUDF.java +++ b/core/src/main/java/cn/edu/tsinghua/iginx/engine/shared/function/udf/python/PyUDF.java @@ -45,6 +45,8 @@ public PyUDF(String moduleName, String className) { public void close(String funcName, PythonInterpreter interpreter) { try { interpreter.exec(String.format("import sys; sys.modules.pop('%s', None)", moduleName)); + } catch (NullPointerException e) { + LOGGER.error("Did not find module {} for function {}", moduleName, funcName); } catch (Exception e) { LOGGER.error("Remove module for udf {} failed:", funcName, e); } diff --git a/dependency/src/main/resources/pemja-0.5-SNAPSHOT.jar b/dependency/src/main/resources/pemja-0.5-SNAPSHOT.jar index fa5c6734468bf85c1cfe56fd31d0d77dbb00d5cb..73bc4c00ed676866c3490feb939c6d78a6ef5482 100644 GIT binary patch delta 6258 
zcmZ9QWmuHm*2e+q7zvS(l9ZP2QebB2?v!qZkY=QNU})(<8l*dzP?l0@N|9kz{zV`iPUH6K}L_W<#2B^qCLw$jSiHV6s6lkdn2MD459pzz9ahdz? zm_URq@Hf;bAtR8Vq%#!pC)g)>Rip9@hVry4#(I?ic!rUn1P0U)q9;6ivK3>AKG|ya zuxtO(5CB=Mr_+6sKe+$YqQ8BT_82Oj3f5%2x%kJB#Mb}MB_^DLPx%>~Z=t^GvtEzX z`rq5)%e^ph!%^|^JDiAU=;2sBq|X@ZA#nlWDNV^04gqDQyGF>4!!!Bw)ozE?Zu0!i zjLW-`v-$Hh4)%IXT#7U1@(txaZidyrdySS$eVi0idhfGK!x;1nZtmAsT*q(5Jihfm z-eoKMpS5weqnMxdO{~ZRa{?tYWCW(_^Sije|7JBzVCEmzIGsXx;^Im1AE>c zRWaOu#C#q49OwA%^aKO@L8X`arjm3uaTQv&rmxnW?OBEH#O$fxn=NOts&!(;@?g+w z44Q1$IMx=&UP_E@zg%}ZZoqy}^8eH3>)Rg&?T{FnlwiKLB7WR>j|X)uxh@ZcYMwZ$ zJ+{l=KfZRqFN}PQKYG+UmcV|{?!EGz3-xFfJ-@?#7!UJte&5@2djEKOlJ$HFMS~-l zg+)V^x!0+rD<8XoMYKevNU^BoxTLEPyPbtsQ)93RtwaZ8+(WC$rX}4GMpY#(o;(Ds zqfNf)ajEJGGa}IFk%P)C{p@~EyW6b_Y4I(KkNqL85yhwWHZQU+Ql~KTLhXuD+`d;) z#y)OrFe5w$#TMw1*HG8;)sSZHGq>#6?kJf^R%P1a2%Lb&O9!Tp`sc)a3AP_`jS-EE z3{f!I{&u!5Vcb2tvn2V43`^ox0qr&A|mImu_8U z=>f^M++fa*^3E>F3Of{_y_30n&cwb9)1OvQPq13dN0muRbX=4bX3h~J`|?VxSxjLm zU-rqLLG2is?4>f> z`efvL+y~|UmULkbUpge!MamKwJ7C};0q6nJbPb6MwiT#jm@r1plBFr8K3CyIeBV40 zuw2rfRObEqMzBeQN6VQ|5LUr!B%*nPT zB9pvvpe3n|w6-nt8dYqapOobO=*ld)N<4vzhJW!|*oJfOA>db4 z`qWS^HW^;)Tvgtx4c_y+6$8GC_9Py58&nX^ z1VOq5kqrLvVV2pq5h=U`vVpj2=6jV{Fw*B%(dN;U8a&BxNAmDOzR5skqHe9U=}q!U ziwCj!W9tuLjj>*=9HwM$EUU~PnK%3uYvrI;FR3`cx+os>St!;r-)dy6wTmV3I zzi5pp0Q$o8-ZB5|EN52Wg%jKk{Os0JJUme>R#>1}w-?qeU9V}(jR4T|LL{5neA7H7 z-G1_D%ITGT{~S&+M1Oc4u39@+uWcnr+3sa9a~&sI%lv{pa&S*WwAb?_ONqYv9f?vH z`lijWQZzr8ajty#RBk$8T)hp&PDBxU#>Vt6T1QJ}G-LtaFy1SUDU=VQa&EmL^?B-j zS~v**d?>e6I4Nbd*X0AwTeEYT&u_OX9g8~B9f5jl-weVt$CH=BZ0gK6?e!F4**#YA zni>hC`o^m?npYP;X0XecV-znF3Br~rgTUNA-{g>OH4zz{jdU&ADK8LUCAhM{9Mb}lWDJw^r<1%-a>uwv_#-4^|!DdYiz{oHhep)hcUCUDP+{ zsebDaow^f!NtVP>l4oNy4BE3zcY0A2Z@UxA+Nm+*Kqr=1MFZC^H&31(QSitP?&zQ|c!T7HF#nMY0k9K+W_EB0M zcIwz3O;ZG;wPs@R3D}3JK;``Ct-DVSHWjj@^OoPLbkt<)@&s>^Ktsmx87fQZ`Zqn{ ztNbFPXR^(T4XN1F6&bY-gR3}awXV1u)zxjBZF*hRuDl|Onh($|&9WSQ_j1P_1up|K zu5wP3F*#CGFC%9qftK8YgFv&z5w-HekHYHZ!~7}_*3k?KWPvPxCN%Nj{O_WM4m%l5-{rJ@3Onz97 
zZgY;t8L(56#!|1+9O;@}en-n99#QzKPt^;{wu2Adqhr}ndsPsiJV)$i7{(nM*8TgF zz}l-Yi32QegUyLRDlS;bD;<2TKsnyBZ{!+%Rn9F9E-e+%1?|JO=oqQ|v{x^eJ2omM zcAum6Rc#Ua&d=w^-uBF$>q}kKag|J818misYL-luGo!M8@F|$g^EKCy?^hA91YLab z3}%!F?9cGP%j#fKQN-Hwm(UuJh?D9x)CoPr{j$)_J`UR! zYK+OqH|@CS(1jkV&_ZJBE^*()Qsp&d+oO(g1WwAE8#eEZxPrC9KOYQC(*6hr3rBA} z!#$A-{S!ptk&%TLqmlv)-~U-GCmtz%UH4)l@c#h+>3U+dX*9X4px>cEogo4j~ro`2nV8B3czpB-zR@Ov|dme%Mv^)z@$oqH?5T8STc6T{SwH!zYV`Q z9q1cCP3tNrnP3~Ar)YGg(BHZ-lSbGPF6IWb1eRmjXL}=iC~su#qW;DU?w@>D+YoKm zFZi3a&TWT5)CVVu`UdfGm?=pbrNuJ&a>&%^3shj&&!VNmrKNqr_%MO%SnIB#lD@W8 z)OSbo@Qm_GLUG`dSpN^K-1}G33yTUjiaK3*`lxUMJ7*>PZZF6S z5@?TZbYz&2`1nOsPjeX@h!ulbd_Z)qJ98*YVL=S;abgV2-pGg=@AX{hd>|fsY^}Ma zP<_BD<0H8!?_pvtJn3;$8b)Ow9BPZ%If;V3?BX;QpxFK;EBbRFCf3Ly4_F) z$;?;lS5w{LOe!LrW8dHYMPbqaHt~otHEl>Z6N}Ns7Z7-X_`~wNj7hPUM6wd@MnAPq z8^LBS$$w%vu5n~}y`VenL$-wRnPL&4B(TGpS*zjPdqn>wem}~^)eRx3?#;2p4IR)d zDvK57Iv3drG9J3)pjRBQ+oSb6!mORzh8*A>xc(0OA;grwD#UrU)7r|~KFwuivz;+v zu5H<$I~*|;fKJ8~rnJqT7CS7Af{hBt$?j4U4YEx0Mao8KXXj57r-}dUF=qm|BJdOZ zLf}T>U||v_vdpNET?4krF}cPxsP?xg3qb43Tv{CR!P0S+jTDFwM#9Hmae5^gZ{24w zs7Qf3J2E=P z!Xd5A*f(K>{UREogE}@DEkY4l*7vAX2eve?sf35DftYU#K~~S? 
zExGRVD2J+#VkW?9Q64qTdC0gBrQD*@689?L-B^$lof+^?O8)Q)FmnbFh2D968>3lX0I7id+Sg zAZBJxy8Dboz+B>iU}6q5{Q#OoKt@B%IHz&Y#bq=%f3VruaE-bbv-8jVBSt&v`C5A}Qi`o2TbP=BB3s(uMQER(f0NVmiD_QR zs~0$S@EslO!?IoI|7j6j?>X0Keq2g#A!CX7RF!_sr^W~!T4(QqZg^SRF@J*@_d87# z)J;Q)tG!A#g_JkZlGpk88Fl>#lM;&J&3D`Jy|(XroR9Sn4KRN_>Di#?wi^dqIJ1f;17&l<;uLL6wWCnz)?@=D6r& z{NzLu)!Xn~(4Q;&1@sJ_WbG^^q&#~kpGzd1A8C#tsB&TAWl)I6)nubXDaL|I-4hlb z_kTUeY5z)QEwLmcVJA7xe&K)9&C;|@oVKCg)A%llJ|{$uw*XDbJS$%9^^nR?p@kHe zJXV@2AD4)b*bLSAip@G*h!j!~2lk8m#sVN%-q=su`8z}1r_&Vwa=@LX50XQCUTV{OS zfjTl|Npcga`J&G`qR}{ck=Ndjs8mQu2bn&` zb`MY}GcvzX7y!nUOx=QS3y&s^gQikM!aqHlFmZE+h@+*-)j5%nM1?|wJuRb~_~eu6 z^Ucz_kadmLGjcMX^`{odf_oC>{Kg5RN6K>FouRSv%nN7Evtjb$$ahwNOYq~82Y19g z7}R~=>zt{Ljju?zhqg#svj6gr9;s={tIPN{G;c;%)!x!V(&ppJjY_Oep~9GC8OOxA z$77SM>}hMi+=Vn9k~$0>Q9?&2>1s@wTi#$E2dxLb<{E}QP$>(Z4qYfxbnrzdI_ltSTE<4)xl5x1M zMX+NQwMx~&*R3kH8ef<9e5xiJ_g6G{zi!ROPp*s%3omfyTquA)juj4)k~DYOOD?6z zK+&as()0b0{y3ko2aM*VL2#GXKHC`}D(vfHB)2yu9 z{C)tT8LvV6!%EKEg**Zj_wLu^rKQ;9rp>~2pZZjJ%=X8$8nBJ_KDyeGC*dt);U}-S z88z?wXxo{$IWCY^A&MbD-3s&%1$I{#%Fq<=AC}{l_wl+76Q}MezqC{h^fs4e`cGwP z{be?alWK|Ln8g+?a`fbRh#c6W>UuH}tFkM|ye2esTG+npW3OkAUB9mJs~!eiJQFez zCGFn-HAY&&AkcS9Sf{!13wF<7P8a1*+JDOZ_%84;r2XE+8=O{SVtPJvUag6STL|4; zY_U^d{|MExJ$oqJyx%S2@#&lLQ9DhydTTY)plJHYrH$*7?pVWs%EX?7p-ehkvDj<< zs^xdh^3~d@ZF*gXlUN*GdwWU$<)FbM9f9m4%3uAcl;W7g5*-O?lI;KYBc%UYLP$sn zO|qF!T}co4H1$qM0{%1Wo&~;rI>G_*J%Kbx_6cAh(I*%KDLercSndhz{|(JxqbG$z z&fp1r<^D+?k<)lm=;igFAV?na1lRIvPoSj05BO_5|DV>H5U(Ic^OyA36Hxwn$G@8H ze=QY>gd+vYrvmtjoa9Q~oK0!Q+p ztziey_%~KapoH^~-6Olj`#X#I<{`Ah^5J1%LoWOI--a~qzP}H#<`aFW&*ggt_9ve4 zz5meIDkvc#S4(f*xq|8T1Z3MWOZwOh8?`GX$>hZN>Ne;LP8aLBBQ;- z-LE_eTYdFXC*5&1xu0@L?w2&bK?((zRD5rAUo@I8-D!pD%?yGa>E_c65p+%$Bkf!H zfg|9N>!Z2ggFLbisI%-aXVLH~u&mHZc5k0A$#eZS<`_QLE6y*aE1yUf^iSfWmpiK{ zExV5f`;IuSg7LkNeXp<3F_*PZVkkFP6W4#yaa|Ertmeorz+?=Lc}T#lY3(&@NX~on z9>0=pEA}fjD4IuqpOy2KYg@&N$NJFnVBfOyRp4y|b+A#VOyw0Dev`jExJQQTDnA~a 
zeDjs>{CCylxd-RHMaX?j_;3AVE6#hfZl&AAg}bkuAq8tZ*CUHE`X^w{CBu{7y0{sDzv4TCvjjS%&FjeXTC5o}r(~=t3SG9loz_!#H z=;3GO*mvIl4-Ny)oQCEp3w@-a8elE=;9&3*shthV_kd94kEv)L--&Kpfs8CpH zjd~}VtF~2VEY+RFBiO#gicang4b!twnn(7_*$4Dzljs<-gW=G3Dx+GN?gTo{BeFv@ zhXYigqgM20^V3*{FtIsIE=6`CR<l6---dmNIQT+SBtj2!6bLzN_#NbJ~>85 zt0Sow94RA8zcKoT`noe?afp9Su;f&^Y?y@@g7zmvcSMnbhKyBmB`toXp$o1oYELvv z1$jwwdiysg2nX`hjVe~o?Bx}!wGpbP`N{!9nKMz~t1;vDa<7?_B(LoT?I|AgIBE^2 zI{jYS{td(eBgi#H!Z(4w!Va6w=N6F9>d+@CvVZ2Xr}F*9=ZZs>@79c-SaGg5DP+S+ z94jSoAcGL&Po(+}6I4t$K+aY|2J=ROXgyIoN(N}qA|>{EzhbJBVIw;9C-<1$Dq0`$mJwb5@@GcVbo<_F+Q zH3Xl2l!v?CXSUXEb0;B-nm*g5m;aztc4)dsDJWWOf3}2nIhNrEU4K_Gq_u~&_kD9D z%eH<N4+kML%191=H=QUc4|p6}**Khhmw*h6eOWqZB` zyW$dQ*hfm&U)SojUL0rp^ojsQU12p-CwL(SWJsHxB8|`7LKgc^4z5+wQb?e@V6pn?{*Q0yVdd& zgUj>x7F@3bE3oGDlq3Nc)C@q~xP@xSB4N#-pulg_nElCEM`Jph;T~~6(2mG7 zD%&nW&iDfU1Y!I4asMgZ3hMM{qf7U2W>(f`0eiKgx?S-!KoP7rtOwZ&K4E2hG9?hpqPpGk{#{!)wF)_c+fVyPVhgg&&)=XV zWg68vDeXYSl`cI)hecS+mAJkmm3>GfAPWKGa0i z8M)D%GV8o2IkEUN-F<1&BHcI9%iFE8(XBxuZGVKBuuYenZ6<46KR^(`UCtA@P{iT; zF`!ElX}+zP_OzRZJl!D`a>ygQUikh_lQY@VyWJ|S7pNs908g4A7w$sMm|k{2(zho91z7=MJ9Y+kDe zYcb7UmMJ*r|fi8;7Z2hR3y1)=DRz`Mh^m z&5VK=ZYy&JVRhkF`!m@9kl2sm4wSi0OOw8?AAFKfF(fU;G-R zaJS2DuzQ=ZJ>%_oZHDG64Kkd028+tr8S$#_aw(sMUasnZvv1j@SAwV4JcH1LQ*p46rA*4nfNWG&659uj?a2nBKNPy{X5?_df7^SxQ^ zHV>wLtQ&|W9mQ*2u&%d#La)K3a2?S9Or2q0p2jba0TGCEZ2}9I+#nxWmBQBcCJPCt zB+1Y32)G3^6WKC^WQQ0>g2iT*Mw7Lo8ORvLZaDZ**SB?Goa}G$?G2H3lNW-yV$iW$ zLrfWoxRztCE0mb1-%kM2qH&J(EhM$dnEC^rat+uT$bE>*2kp6AMcSUc@Hg29r2A>D zT=@SGEfOg1tz82cE_GCrf)U_XdIer^-Qr1%d|3;GRdKb44%06AVRLBqBeCW)fT7h3 zZGstdkh;x$kT+V>$??4Bq{RnZ^PV>Um+g}Emb7!^RLXjgqX3~H@9C%`%5G4=J$u90 zb8r~+xORt<&zZhr~ zAzXO=ElBEv&fAIdz?$fis{HOPsqP~x-9*V-sURY5@fZ0 zTh=z^nANYfLbg)8#B!xLxFb0};!LsiwR%DA3ja2xEQ%tX44A52kXxIwhC}9l5e*tu?6VavWR&Z%|&H7)Ty18Pe%^_671%pVK4%>}NYu8$)c);9eF_eyB+y(MiYfFlE9i5~azB0$46OTAiH{GI z>Bgk>ocP(C0XYSx`aZQLVcr?0&j?vz1ICw477x3^49ltVepYD72ph)L2ntwCs8Y_u`TWOX4KmGCEDpfcc^$VhWTss$Fx*)8$G+Q{E8 
zZZPzde?>V(e^ph|SIuLN9B^175m%nKhh_njr3_5U0-w1X>oMVhCaaxM?(mCC(F;|; zL0saGLek$D7d>oGAZj89G=2A^ib-^H=+v>FFx+iJ5Og>Rs|5*Doe{y+ z1S5x!6LBjr&V^Yt`ilMI`lg6#c1iZiIjM%bu}+%}DiT|m@KnT~4NMd7G~+zmO;+oL zMzyxueP1Qo6)A2yx^BIza9iDA_hY-2HwVD7Mi=237mG)T!1xvY>*U$d^8QxL_8n2e zwBz-CNDw0MYl6orQZ#?4ObYw#n#B-_i^p$9o-ze6U%mUyD6Ibaj%(5z+G@3a=(-Rb zQN_1P5`@3pF*JG332s_M_#J1HL765SA$J2?P8y-@z-Y06WCjOffY=4A(n)|4PwWhE z`s;`jPsdCa{pffp2`%uR^{f@P6-4;8N1*N75>u6y!v;nrtF!6YP_G(*m&@O>lZ-^X za0SzHuY+0d)3?ukz9w-oS9k+UE2J*=klGwDJoD|I=wSSYiK6@$AgCx+inQ6gHJo4e z^{Ov5l|s}l4%~w?Z1}RXs%;QOj>ySc`U3nEJ)`}RALmwakfwjYeHtCAwi*_832od6 z{bLlAE@~8%|8)c4mNF6#zu9>*I#2)j0Vw}(f?FKofi*-Q#@ z#MA(5RP7Wt`27VhZ4tIk>P88WcS@F&Y?+r7;z8$gYs=3wmVgZu3tW=Gz6^}5O?6D& z7!CCJpcP)$PBzeub@EvD_!w30_doB`5+GZPSbRZ7QsFKfc8pzY6mG;Kz7aL8;OL`D zG9XuMdp_c%n#oL2X{>~yO9T$TVbNSoG@FwLn?4`2X-*8849z#&z5AIX>@~zC)blCU zGl~<~0Z4OF#|`giWO*l-T9~Re6mFYNC{*TR`^M`{waj*VLzglj0_{;6VScX$dspRZ z+r4@!=Uc_`AnC_-o|5Z~i2&S;@wt`ls~~^s&@=p=Pu29gjI#GN<)Ps2ME9q>o$bVl z9Xn{FPsy;i!aK6p=I>ex0CLSS%yd;^qzKBva{X`^=Z50z9d3m}oKSMxXwV%4C0^k6 z<^Aht3k?oS_%&L5OP0lccU4vQ*XJw#vAi{7<2~0yo#!iS)cKm)ziFPn3R$E0-wMdxiY_Bzs(@ZpD#FI6O;O z-|36>%gFcNvxdW8G}IE`Mwb9kowI_Lyp05U1iP{`yaG;GZ^Ra`1Y8X+`Nc;`Q>H;T zoLh;|-qrhi*`G5x4Gfu?YoP)aLJ^X|z_-ALpkBQndjtdpya+k4gCRM&IreBf;*eA; zKnak67iR)v(z-Yfliw8Idt{PQpJZ=P&aB|CyPC9_%CqLwvOi|bF5X*#1_YKq7llz0 z#(2J?pL_fkqxs#{jtdX`zsy*9RZ>IZ;9)V!%kfwhXh_ql&`_q(Qu zV`u7slt&Xv>TH7Gl26zy9~4wml<9J=v^*Orre7)BBoq0T(O|iPEcq+*C-bFo!1#|l z1FhyeBr2ne<4Lp5_GS&H5yXbrNRb4H-8$w34n=;-dWyPLH%<1kX8zMGfT@eu{7Vap z0^(52=<48906~knqeDCd4T?#g<$Zz*t!>0yU$=jvtPu!yho}a0mG?K9B4kOuG zX~|)c-fa@FLD>|x1&@sd|AYmx+|O-04b@=`PU#kbmx=pWtT0o{8=VuFp*L}VW@l2J zA*D;eUdNEYBR|sY$cQ7tP24)mRTrp|g&;uo{aB@c(}+M!p^!iDuEOMA5+BDK9Y~zq zLH?_ig7Qf6ZNLb{O8{E)FR?ti7v%bOr;D&w)>9X72FDxAcifz>4M92rxX-+cX0%If zHXC2gX`<5ha()1sjr#ySAZ^GGT;+RVO+T&PM0dDs0m_N~eqN#gTvxT&A1S~pQx zqc4yDG94cIzPs!5e%jDg0ofL&)QEQ;?Fv#}kkm|qX5_9U&{q|)A@!eBTURRCRX_QL zhZ02xt}UePEL4^yvSp6qAcB}pGHW)kPMpJ(iMFlZ^il9~7H#=CU8K^G7!NYz$uCZ3 
zB*wzdhaz5RFM2t>IZi3IE>4mEQZSi0-~9bkQ3lRTd&^7KZoZcp?O7SVHG>X8ND5| zCu`WkrYEkY#Q4SsEY*bXukUUyNh8KZAfAl}PoPu2=ujaGGM+-uN$yUN_Eh`wUXSKS zo6}hVcNCkQ>a&SO^46uFyS{=Mu{?V55`zwcNrD!w9w9^(Mz6&1kbJB;q^5O;0*kv| zU@nig)@|b@RRkZqWK;4RRDpK;PXK*0XTzfR!8Oy(g6GJ7iO@%Q_6Sv3?gg>+MwO$yGudX|!BO)SuYI@f0rhE$%TT+3EHG;)L6_AR!FJ|hlUg5; zBWSJ$4fg%yU{R{nuvLbuDe(9AlMtUt%QUGUJ5n6iQ(T$edwc1%zcaZWwG1`R>4w8e zpGB}G?9EJVeCG{WbZqtL+&JfVW$|b~QMwN@kU{#b-24t7`1(hV(psCG^&g?2= (3, 13);"); + return (boolean) interpreter.get("tooNew"); + } + @Test public void testAQuery() { String result = ""; try { - logger.info("Test A query"); + LOGGER.info("Test A query"); result = runPythonScript("query"); - logger.info(result); + LOGGER.info(result); } catch (IOException | InterruptedException e) { e.printStackTrace(); throw new RuntimeException(e); @@ -174,9 +180,9 @@ public void testDownSampleQuery() { String result = ""; try { // 设置Python脚本路径 - logger.info("Test downsample query"); + LOGGER.info("Test downsample query"); result = runPythonScript("downsampleQuery"); - logger.info(result); + LOGGER.info(result); } catch (IOException | InterruptedException e) { e.printStackTrace(); throw new RuntimeException(e); @@ -198,9 +204,9 @@ public void testDownSampleQueryNoInterval() { String result = ""; try { // 设置Python脚本路径 - logger.info("Test downsample query without time interval"); + LOGGER.info("Test downsample query without time interval"); result = runPythonScript("downsampleQueryNoInterval"); - logger.info(result); + LOGGER.info(result); } catch (IOException | InterruptedException e) { e.printStackTrace(); throw new RuntimeException(e); @@ -224,9 +230,9 @@ public void testDownSampleQueryNoInterval() { public void testShowColumnsQuery() { String result = ""; try { - logger.info("Test show columns query"); + LOGGER.info("Test show columns query"); result = runPythonScript("showColumns"); - logger.info(result); + LOGGER.info(result); } catch (IOException | InterruptedException e) { e.printStackTrace(); throw new 
RuntimeException(e); @@ -247,9 +253,9 @@ public void testShowColumnsQuery() { public void testAggregateQuery() { String result = ""; try { - logger.info("Test aggregate query"); + LOGGER.info("Test aggregate query"); result = runPythonScript("aggregateQuery"); - logger.info(result); + LOGGER.info(result); } catch (IOException | InterruptedException e) { e.printStackTrace(); throw new RuntimeException(e); @@ -268,9 +274,9 @@ public void testAggregateQuery() { public void testLastQuery() { String result = ""; try { - logger.info("Test last query"); + LOGGER.info("Test last query"); result = runPythonScript("lastQuery"); - logger.info(result); + LOGGER.info(result); } catch (IOException | InterruptedException e) { e.printStackTrace(); throw new RuntimeException(e); @@ -292,9 +298,9 @@ public void testDeleteColumn() { } String result = ""; try { - logger.info("Test delete column query"); + LOGGER.info("Test delete column query"); result = runPythonScript("deleteColumn"); - logger.info(result); + LOGGER.info(result); } catch (IOException | InterruptedException e) { e.printStackTrace(); throw new RuntimeException(e); @@ -313,10 +319,13 @@ public void testDeleteColumn() { @Test public void testAddStorageEngine() { String output = ""; + // if python >=3.13, fastparquet is not supported(for now). 
+ Assume.assumeFalse( + "Test skipped: Python >= 3.13, fastparquet is not supported.", pythonNewerThan313()); try { - logger.info("add storage engine"); + LOGGER.info("add storage engine"); output = runPythonScript("addStorageEngine"); - logger.info(output); + LOGGER.info(output); } catch (IOException | InterruptedException e) { e.printStackTrace(); throw new RuntimeException(e); @@ -346,9 +355,9 @@ public void testAddStorageEngine() { public void testInsert() { String result = ""; try { - logger.info("insert data"); + LOGGER.info("insert data"); result = runPythonScript("insert"); - logger.info(result); + LOGGER.info(result); } catch (IOException | InterruptedException e) { e.printStackTrace(); throw new RuntimeException(e); @@ -401,9 +410,9 @@ public void testInsert() { public void testInsertDF() { String result = ""; try { - logger.info("insert dataframe"); + LOGGER.info("insert dataframe"); result = runPythonScript("insertDF"); - logger.info(result); + LOGGER.info(result); } catch (IOException | InterruptedException e) { e.printStackTrace(); throw new RuntimeException(e); @@ -432,9 +441,9 @@ public void testDeleteRow() { } String result = ""; try { - logger.info("delete row"); + LOGGER.info("delete row"); result = runPythonScript("deleteRow"); - logger.info(result); + LOGGER.info(result); } catch (IOException | InterruptedException e) { e.printStackTrace(); throw new RuntimeException(e); @@ -461,9 +470,9 @@ public void testDeleteRow() { public void testDebugInfo() { String result = ""; try { - logger.info("get debug info"); + LOGGER.info("get debug info"); result = runPythonScript("getDebugInfo"); - logger.info(result); + LOGGER.info(result); } catch (IOException | InterruptedException e) { e.printStackTrace(); throw new RuntimeException(e); @@ -479,9 +488,9 @@ public void testLoadCSV() { } String result = ""; try { - logger.info("load csv"); + LOGGER.info("load csv"); result = runPythonScript("loadCSV"); - logger.info(result); + LOGGER.info(result); } catch 
(IOException | InterruptedException e) { e.printStackTrace(); throw new RuntimeException(e); @@ -505,9 +514,9 @@ public void testLoadCSV() { public void testLoadDirectory() { String result = ""; try { - logger.info("load directory"); + LOGGER.info("load directory"); result = runPythonScript("loadDirectory"); - logger.info(result); + LOGGER.info(result); } catch (IOException | InterruptedException e) { e.printStackTrace(); throw new RuntimeException(e); @@ -516,7 +525,7 @@ public void testLoadDirectory() { List expected = Arrays.asList(" key dir.a dir.b", "0 0 b'1' b'4'"); String[] lines = result.split("\n"); List resultLines = Arrays.asList(lines); - logger.info(resultLines.toString()); + LOGGER.info(resultLines.toString()); assertTrue(resultLines.size() >= 2); assertEquals(expected, resultLines.subList(resultLines.size() - 2, resultLines.size())); } @@ -525,9 +534,9 @@ public void testLoadDirectory() { public void testExport() { List result = new ArrayList<>(); try { - logger.info("export to file"); + LOGGER.info("export to file"); String tmp = runPythonScript("exportToFile"); - logger.info(tmp); + LOGGER.info(tmp); } catch (IOException | InterruptedException e) { e.printStackTrace(); throw new RuntimeException(e); @@ -582,7 +591,7 @@ public void clearData() { return; } try { - logger.info("Clear all data after executing pysession tests."); + LOGGER.info("Clear all data after executing pysession tests."); String output = runPythonScript("deleteAll"); } catch (IOException | InterruptedException e) { e.printStackTrace(); diff --git a/test/src/test/java/cn/edu/tsinghua/iginx/integration/func/udf/UDFIT.java b/test/src/test/java/cn/edu/tsinghua/iginx/integration/func/udf/UDFIT.java index 42144722e7..b47a90084a 100644 --- a/test/src/test/java/cn/edu/tsinghua/iginx/integration/func/udf/UDFIT.java +++ b/test/src/test/java/cn/edu/tsinghua/iginx/integration/func/udf/UDFIT.java @@ -42,6 +42,7 @@ import org.apache.commons.lang3.RandomStringUtils; import org.junit.After; 
import org.junit.AfterClass; +import org.junit.Assume; import org.junit.Before; import org.junit.BeforeClass; import org.junit.Test; @@ -219,6 +220,11 @@ public void testDropTask() { taskToBeRemoved.add(udfName); tool.execute(String.format(DROP_SQL, udfName)); + try { + Thread.sleep(1000); // needed in some tests(redis no+no yes+no) + } catch (InterruptedException e) { + LOGGER.error("Thread sleep error.", e); + } // dropped udf cannot be queried assertFalse(tool.isUDFRegistered(udfName)); taskToBeRemoved.clear(); @@ -1411,6 +1417,9 @@ public void testModuleInstallFail() { @Test public void tensorUDFTest() { + boolean torchSupported = System.getenv().getOrDefault("TORCH_SUPPORTED", "true").equals("true"); + Assume.assumeTrue( + "tensorUDFTest is skipped because pytorch is not supported(python>3.12).", torchSupported); String name = "tensorTest"; String filePath = String.join( diff --git a/test/src/test/resources/testConfig.properties b/test/src/test/resources/testConfig.properties index 3482757e76..2905303da9 100644 --- a/test/src/test/resources/testConfig.properties +++ b/test/src/test/resources/testConfig.properties @@ -26,7 +26,7 @@ MySQL-config=isSupportDiffTypeHistoryData=true,isSupportKey=false,isAbleToClearD # DataSources Test Config IoTDB12_mock=127.0.0.1#6667#IoTDB12#username=root#password=root#sessionPoolSize=20#has_data=false#is_read_only=false InfluxDB_mock=127.0.0.1#8086#InfluxDB#url=http://localhost:8086/#token=testToken#organization=testOrg#has_data=false -FileSystem_mock=127.0.0.1#6668#filesystem#iginx_port=6888#has_data=false#is_read_only=false#dir=data#data.config.write.buffer.size=104857600#data.config.write.buffer.timeout=0 +FileSystem_mock=127.0.0.1#6668#filesystem#iginx_port=0#has_data=false#is_read_only=false#dir=data#data.config.write.buffer.size=104857600#data.config.write.buffer.timeout=0 PostgreSQL_mock=127.0.0.1#5432#relational#engine=postgresql#username=postgres#password=postgres#has_data=false 
MongoDB_mock=127.0.0.1#27017#MongoDB#has_data=false Redis_mock=127.0.0.1#6379#Redis#has_data=false#is_read_only=false#timeout=5000 From 5f48626b84be2c2089127f6831986d05ca18dfdc Mon Sep 17 00:00:00 2001 From: jzl18thu Date: Tue, 21 Jan 2025 15:35:39 +0800 Subject: [PATCH 4/4] feat(core): "LOAD DATA" support large csv & adjust "ADD STORAGEENGINE" extra params (#555) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit LOAD DATA语句支持导入100MB以上的大文件 ADD STORAGEENGINE以及ALTER STORAGEENGINE语句的额外参数的key和value改为用=连接 ——修正macos上IGinX启动的Java环境问题 --- .github/actions/context/action.yml | 17 ++ .github/scripts/iginx/iginx_macos.sh | 4 + .github/scripts/test/cli/test_infile.sh | 4 + .github/scripts/test/cli/test_infile_macos.sh | 2 + .../scripts/test/cli/test_infile_windows.sh | 4 + .github/scripts/test/cli/test_outfile.sh | 2 +- .../scripts/test/cli/test_outfile_macos.sh | 2 +- .../scripts/test/cli/test_outfile_windows.sh | 2 +- .../tsinghua/iginx/client/IginxClient.java | 50 +++- conf/config.properties | 2 +- .../cn/edu/tsinghua/iginx/IginxWorker.java | 36 ++- .../iginx/engine/StatementExecutor.java | 21 +- .../iginx/engine/shared/RequestContext.java | 2 +- .../tsinghua/iginx/sql/IginXSqlVisitor.java | 14 +- .../edu/tsinghua/iginx/sql/SQLConstant.java | 2 + .../cn/edu/tsinghua/iginx/sql/ParseTest.java | 2 +- .../iginx/session/SQLSessionExample.java | 2 +- jdbc/src/test/java/BatchTest.java | 2 +- .../edu/tsinghua/iginx/session/Session.java | 10 +- session_py/iginx/iginx_pyclient/session.py | 23 +- .../iginx_pyclient/thrift/rpc/IService-remote | 7 + .../iginx_pyclient/thrift/rpc/IService.py | 189 ++++++++++++ .../iginx/iginx_pyclient/thrift/rpc/ttypes.py | 270 +++++++++++++++++- .../integration/client/ImportFileIT.java | 41 +++ .../expansion/BaseCapacityExpansionIT.java | 18 +- .../FileSystemCapacityExpansionIT.java | 2 +- .../influxdb/InfluxDBCapacityExpansionIT.java | 4 +- .../iotdb/IoTDB12CapacityExpansionIT.java | 6 +- 
.../mysql/MySQLCapacityExpansionIT.java | 2 +- .../PostgreSQLCapacityExpansionIT.java | 4 +- thrift/src/main/thrift/rpc.thrift | 20 +- 31 files changed, 689 insertions(+), 77 deletions(-) diff --git a/.github/actions/context/action.yml b/.github/actions/context/action.yml index fb8a4bc509..5ed0e77d06 100644 --- a/.github/actions/context/action.yml +++ b/.github/actions/context/action.yml @@ -60,10 +60,27 @@ runs: "${GITHUB_WORKSPACE}/.github/scripts/test/cli/test_outfile_macos.sh" "${{ inputs.DB-name }}" ${VERSION} fi + - if: inputs.shell=='client-after' + name: Install wget and 7zip + shell: bash + run: | + if [ "$RUNNER_OS" == "Linux" ]; then + sudo apt-get install -y p7zip-full + sudo apt-get install -y wget + elif [ "$RUNNER_OS" == "Windows" ]; then + choco install 7zip -y + choco install wget -y + elif [ "$RUNNER_OS" == "macOS" ]; then + brew install p7zip + brew install wget + fi + - if: inputs.shell=='client-after' name: Pre Test Client Import File shell: bash run: | + wget https://github.com/IGinX-THU/IGinX-resources/raw/main/resources/bigcsv.7z + 7z x bigcsv.7z if [ "$RUNNER_OS" == "Linux" ]; then chmod +x "${GITHUB_WORKSPACE}/.github/scripts/test/cli/test_infile.sh" "${GITHUB_WORKSPACE}/.github/scripts/test/cli/test_infile.sh" ${VERSION} diff --git a/.github/scripts/iginx/iginx_macos.sh b/.github/scripts/iginx/iginx_macos.sh index ec90ad61ad..d2c83ceaef 100644 --- a/.github/scripts/iginx/iginx_macos.sh +++ b/.github/scripts/iginx/iginx_macos.sh @@ -26,6 +26,10 @@ sed -i "" -E "s/#iginx_port=[0-9]+#/#iginx_port=$1#/g" core/target/iginx-core-*/ sed -i "" -E "s/restPort=[0-9]+/restPort=$2/g" core/target/iginx-core-*/conf/config.properties +export JAVA_HOME=$JAVA_HOME_8_ARM64 + +echo "JAVA_HOME is set to $JAVA_HOME" + sh -c "chmod +x core/target/iginx-core-*/sbin/start_iginx.sh" sh -c "nohup core/target/iginx-core-*/sbin/start_iginx.sh > iginx-$1.log 2>&1 &" diff --git a/.github/scripts/test/cli/test_infile.sh b/.github/scripts/test/cli/test_infile.sh index 
784cc3299d..c28186ee3c 100644 --- a/.github/scripts/test/cli/test_infile.sh +++ b/.github/scripts/test/cli/test_infile.sh @@ -35,3 +35,7 @@ bash -c "sed -i '1ikey,d m,b,[c],a' 'test/src/test/resources/fileReadAndWrite/cs COMMAND1='LOAD DATA FROM INFILE "'"test/src/test/resources/fileReadAndWrite/csv/test1"'" AS CSV INTO t1 AT 10;' bash -c "echo '$COMMAND1' | xargs -0 -t -i ${SCRIPT_COMMAND}" + +COMMAND2='LOAD DATA FROM INFILE "'"test_bigcsv.csv"'" AS CSV INTO bigcsv;' + +bash -c "echo '$COMMAND2' | xargs -0 -t -i ${SCRIPT_COMMAND}" diff --git a/.github/scripts/test/cli/test_infile_macos.sh b/.github/scripts/test/cli/test_infile_macos.sh index 5a5122b7b6..68971dae3f 100644 --- a/.github/scripts/test/cli/test_infile_macos.sh +++ b/.github/scripts/test/cli/test_infile_macos.sh @@ -29,3 +29,5 @@ sh -c "cat test/src/test/resources/fileReadAndWrite/csv/test.csv > test/src/test sh -c "sed -i '' '1s/^/key,d m,b,[c],a\n/' test/src/test/resources/fileReadAndWrite/csv/test1" sh -c "echo 'LOAD DATA FROM INFILE "'"test/src/test/resources/fileReadAndWrite/csv/test1"'" AS CSV INTO t1 AT 10;' | xargs -0 -t -I F sh client/target/iginx-client-$1/sbin/start_cli.sh -e 'F'" + +sh -c "echo 'LOAD DATA FROM INFILE "'"test_bigcsv.csv"'" AS CSV INTO bigcsv;' | xargs -0 -t -I F sh client/target/iginx-client-$1/sbin/start_cli.sh -e 'F'" diff --git a/.github/scripts/test/cli/test_infile_windows.sh b/.github/scripts/test/cli/test_infile_windows.sh index ccf7d77125..d1ef96bccf 100644 --- a/.github/scripts/test/cli/test_infile_windows.sh +++ b/.github/scripts/test/cli/test_infile_windows.sh @@ -31,3 +31,7 @@ sed -i "1ikey,d m,b,[c],a" "test/src/test/resources/fileReadAndWrite/csv/test1" COMMAND1='LOAD DATA FROM INFILE "'"test/src/test/resources/fileReadAndWrite/csv/test1"'" AS CSV INTO t1 at 10;' bash -c "client/target/iginx-client-$1/sbin/start_cli.bat -e '$COMMAND1'" + +COMMAND2='LOAD DATA FROM INFILE "'"test_bigcsv.csv"'" AS CSV INTO bigcsv;' + +bash -c 
"client/target/iginx-client-$1/sbin/start_cli.bat -e '$COMMAND2'" diff --git a/.github/scripts/test/cli/test_outfile.sh b/.github/scripts/test/cli/test_outfile.sh index 6cce23937b..2c31a829dd 100644 --- a/.github/scripts/test/cli/test_outfile.sh +++ b/.github/scripts/test/cli/test_outfile.sh @@ -53,7 +53,7 @@ for file in test/src/test/resources/fileReadAndWrite/byteDummy/*; do mv "$file" "${file}.ext" done -bash -c "echo 'ADD STORAGEENGINE ("'"127.0.0.1"'", 6670, "'"filesystem"'", "'"dummy_dir:test/src/test/resources/fileReadAndWrite/byteDummy,iginx_port:6888,has_data:true,is_read_only:true"'");show columns byteDummy.*;' | ${SCRIPT_COMMAND}" +bash -c "echo 'ADD STORAGEENGINE ("'"127.0.0.1"'", 6670, "'"filesystem"'", "'"dummy_dir=test/src/test/resources/fileReadAndWrite/byteDummy,iginx_port=6888,has_data=true,is_read_only=true"'");show columns byteDummy.*;' | ${SCRIPT_COMMAND}" bash -c "echo 'select * from byteDummy into outfile "'"test/src/test/resources/fileReadAndWrite/byteStreamExport"'" as stream;' | ${SCRIPT_COMMAND}" diff --git a/.github/scripts/test/cli/test_outfile_macos.sh b/.github/scripts/test/cli/test_outfile_macos.sh index 45448ec9e0..7543caadb5 100644 --- a/.github/scripts/test/cli/test_outfile_macos.sh +++ b/.github/scripts/test/cli/test_outfile_macos.sh @@ -53,7 +53,7 @@ for file in test/src/test/resources/fileReadAndWrite/byteDummy/*; do mv "$file" "${file}.ext" done -bash -c "echo 'ADD STORAGEENGINE ("'"127.0.0.1"'", 6670, "'"filesystem"'", "'"dummy_dir:test/src/test/resources/fileReadAndWrite/byteDummy,iginx_port:6888,has_data:true,is_read_only:true"'");show columns byteDummy.*;' | ${SCRIPT_COMMAND}" +bash -c "echo 'ADD STORAGEENGINE ("'"127.0.0.1"'", 6670, "'"filesystem"'", "'"dummy_dir=test/src/test/resources/fileReadAndWrite/byteDummy,iginx_port=6888,has_data=true,is_read_only=true"'");show columns byteDummy.*;' | ${SCRIPT_COMMAND}" bash -c "echo 'select * from byteDummy into outfile 
"'"test/src/test/resources/fileReadAndWrite/byteStreamExport"'" as stream;' | ${SCRIPT_COMMAND}" diff --git a/.github/scripts/test/cli/test_outfile_windows.sh b/.github/scripts/test/cli/test_outfile_windows.sh index c7f949b4e7..4ef239cb73 100644 --- a/.github/scripts/test/cli/test_outfile_windows.sh +++ b/.github/scripts/test/cli/test_outfile_windows.sh @@ -57,7 +57,7 @@ for file in test/src/test/resources/fileReadAndWrite/byteDummy/*; do mv "$file" "${file}.ext" done -bash -c "client/target/iginx-client-$2/sbin/start_cli.bat -e 'ADD STORAGEENGINE ("'"127.0.0.1"'", 6670, "'"filesystem"'", "'"dummy_dir:test/src/test/resources/fileReadAndWrite/byteDummy,iginx_port:6888,has_data:true,is_read_only:true"'");show columns byteDummy.*;'" +bash -c "client/target/iginx-client-$2/sbin/start_cli.bat -e 'ADD STORAGEENGINE ("'"127.0.0.1"'", 6670, "'"filesystem"'", "'"dummy_dir=test/src/test/resources/fileReadAndWrite/byteDummy,iginx_port=6888,has_data=true,is_read_only=true"'");show columns byteDummy.*;'" bash -c "client/target/iginx-client-$2/sbin/start_cli.bat -e 'select * from byteDummy into outfile "'"test/src/test/resources/fileReadAndWrite/byteStreamExport"'" as stream;'" diff --git a/client/src/main/java/cn/edu/tsinghua/iginx/client/IginxClient.java b/client/src/main/java/cn/edu/tsinghua/iginx/client/IginxClient.java index a2787729b0..600e0192dc 100644 --- a/client/src/main/java/cn/edu/tsinghua/iginx/client/IginxClient.java +++ b/client/src/main/java/cn/edu/tsinghua/iginx/client/IginxClient.java @@ -28,6 +28,7 @@ import cn.edu.tsinghua.iginx.session.Session; import cn.edu.tsinghua.iginx.session.SessionExecuteSqlResult; import cn.edu.tsinghua.iginx.thrift.ExportCSV; +import cn.edu.tsinghua.iginx.thrift.FileChunk; import cn.edu.tsinghua.iginx.thrift.LoadUDFResp; import cn.edu.tsinghua.iginx.utils.FormatUtils; import cn.edu.tsinghua.iginx.utils.Pair; @@ -40,7 +41,6 @@ import java.util.*; import org.apache.commons.cli.*; import org.apache.commons.csv.CSVPrinter; -import 
org.apache.commons.io.FileUtils; import org.jline.reader.Completer; import org.jline.reader.LineReader; import org.jline.reader.LineReaderBuilder; @@ -583,8 +583,9 @@ private static void processExportCsv(QueryDataSet res) throws SessionException, String path = exportCSV.getExportCsvPath(); if (!path.endsWith(".csv")) { - throw new InvalidParameterException( + System.out.println( "The file name must end with [.csv], " + path + " doesn't satisfy the requirement!"); + return; } File file = new File(path); @@ -592,7 +593,8 @@ private static void processExportCsv(QueryDataSet res) throws SessionException, Files.deleteIfExists(Paths.get(file.getPath())); Files.createFile(Paths.get(file.getPath())); if (!file.isFile()) { - throw new InvalidParameterException(path + " is not a file!"); + System.out.println(path + " is not a file!"); + return; } try { @@ -610,14 +612,17 @@ private static void processExportCsv(QueryDataSet res) throws SessionException, printer.flush(); printer.close(); } catch (IOException e) { - throw new RuntimeException( + System.out.println( "Encounter an error when writing csv file " + path + ", because " + e.getMessage()); + return; } res.close(); System.out.println("Successfully write csv file: \"" + file.getAbsolutePath() + "\"."); } - private static void processLoadCsv(String sql) throws SessionException, IOException { + private static void processLoadCsv(String sql) throws SessionException { + int CHUNK_SIZE = 1024 * 1024; + SessionExecuteSqlResult res = session.executeSql(sql); String path = res.getLoadCsvPath(); @@ -629,15 +634,40 @@ private static void processLoadCsv(String sql) throws SessionException, IOExcept File file = new File(path); if (!file.exists()) { - throw new InvalidParameterException(path + " does not exist!"); + System.out.println(path + " does not exist!"); + return; } if (!file.isFile()) { - throw new InvalidParameterException(path + " is not a file!"); + System.out.println(path + " is not a file!"); + return; + } + + long 
offset = 0; + String fileName = System.currentTimeMillis() + ".csv"; + try (RandomAccessFile raf = new RandomAccessFile(file, "r")) { + raf.seek(offset); + byte[] buffer = new byte[CHUNK_SIZE]; + int bytesRead; + while ((bytesRead = raf.read(buffer)) != -1) { + byte[] dataToSend; + if (bytesRead < CHUNK_SIZE) { // if the last chunk is smaller than CHUNK_SIZE, send only the bytes actually read + dataToSend = new byte[bytesRead]; + System.arraycopy(buffer, 0, dataToSend, 0, bytesRead); + } else { + dataToSend = buffer; + } + ByteBuffer data = ByteBuffer.wrap(dataToSend); + FileChunk chunk = new FileChunk(fileName, offset, data, bytesRead); + session.uploadFileChunk(chunk); + offset += bytesRead; + } + } catch (IOException e) { + System.out.println( + "Encounter an error when reading file " + path + ", because " + e.getMessage()); + return; + } - byte[] bytes = FileUtils.readFileToByteArray(file); - ByteBuffer csvFile = ByteBuffer.wrap(bytes); - Pair, Long> pair = session.executeLoadCSV(sql, csvFile); + Pair, Long> pair = session.executeLoadCSV(sql, fileName); List columns = pair.k; long recordsNum = pair.v; diff --git a/conf/config.properties b/conf/config.properties index 61618098bc..05a145f5c6 100644 --- a/conf/config.properties +++ b/conf/config.properties @@ -44,7 +44,7 @@ storageEngineList=127.0.0.1#6668#filesystem#iginx_port=6888#has_data=false#is_re #storageEngineList=127.0.0.1#5432#relational#engine=postgresql#username=postgres#password=postgres#has_data=false #storageEngineList=127.0.0.1#3306#relational#engine=mysql#username=root#has_data=false #storageEngineList=127.0.0.1#27017#mongodb#uri="mongodb://127.0.0.1:27017/?maxPoolSize=200&maxIdleTimeMS=60000&waitQueueTimeoutMS=50000"#has_data=false#schema.sample.size=1000#dummy.sample.size=0 -#storageEngineList=127.0.0.1#6379#redis#has_data=false#is_read_only=false#timeout=10000#data_db=1#dummy_db=0 +#storageEngineList=127.0.0.1#6379#redis#has_data=false#is_read_only=false#timeout=20000#data_db=1#dummy_db=0 # 底层数据库类名
databaseClassNames=iotdb12=cn.edu.tsinghua.iginx.iotdb.IoTDBStorage,influxdb=cn.edu.tsinghua.iginx.influxdb.InfluxDBStorage,filesystem=cn.edu.tsinghua.iginx.filesystem.FileSystemStorage,relational=cn.edu.tsinghua.iginx.relational.RelationalStorage,mongodb=cn.edu.tsinghua.iginx.mongodb.MongoDBStorage,redis=cn.edu.tsinghua.iginx.redis.RedisStorage diff --git a/core/src/main/java/cn/edu/tsinghua/iginx/IginxWorker.java b/core/src/main/java/cn/edu/tsinghua/iginx/IginxWorker.java index 5137a037e1..c5e116482b 100644 --- a/core/src/main/java/cn/edu/tsinghua/iginx/IginxWorker.java +++ b/core/src/main/java/cn/edu/tsinghua/iginx/IginxWorker.java @@ -53,6 +53,7 @@ import cn.edu.tsinghua.iginx.utils.*; import java.io.File; import java.io.IOException; +import java.io.RandomAccessFile; import java.nio.ByteBuffer; import java.nio.file.*; import java.util.*; @@ -843,7 +844,7 @@ public FetchResultsResp fetchResults(FetchResultsReq req) { public LoadCSVResp loadCSV(LoadCSVReq req) { StatementExecutor executor = StatementExecutor.getInstance(); RequestContext ctx = contextBuilder.build(req); - ctx.setLoadCSVFileByteBuffer(req.csvFile); + ctx.setLoadCSVFileName(req.csvFileName); executor.execute(ctx); return ctx.getResult().getLoadCSVResp(); } @@ -1350,4 +1351,37 @@ private IRuleCollection getRuleCollection() // 强制转换为接口类型 return (IRuleCollection) enumInstance; } + + @Override + public UploadFileResp uploadFileChunk(UploadFileReq req) { + FileChunk chunk = req.getFileChunk(); + Status status = new Status(); + + String filename = chunk.fileName; + if (filename.contains("..") || filename.contains("/") || filename.contains("\\")) { + status.setCode(RpcUtils.FAILURE.code); + status.setMessage("Invalid filename"); + return new UploadFileResp(status); + } + + String filepath = String.join(File.separator, System.getProperty("java.io.tmpdir"), filename); + try { + File file = new File(filepath); + try (RandomAccessFile raf = new RandomAccessFile(file, "rw")) { + raf.seek(chunk.offset); + 
raf.write(chunk.data.array()); + LOGGER.debug( + "write {} bytes to file {} at offset {}", + chunk.data.array().length, + file, + chunk.offset); + } + status.setCode(RpcUtils.SUCCESS.code); + return new UploadFileResp(status); + } catch (IOException e) { + status.setCode(RpcUtils.FAILURE.code); + status.setMessage("File chunk upload failed. Caused by: " + e.getMessage()); + return new UploadFileResp(status); + } + } } diff --git a/core/src/main/java/cn/edu/tsinghua/iginx/engine/StatementExecutor.java b/core/src/main/java/cn/edu/tsinghua/iginx/engine/StatementExecutor.java index 7c70c0a786..050d43a79c 100644 --- a/core/src/main/java/cn/edu/tsinghua/iginx/engine/StatementExecutor.java +++ b/core/src/main/java/cn/edu/tsinghua/iginx/engine/StatementExecutor.java @@ -69,7 +69,6 @@ import cn.edu.tsinghua.iginx.utils.*; import cn.hutool.core.io.CharsetDetector; import java.io.File; -import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStreamReader; import java.lang.reflect.InvocationTargetException; @@ -452,7 +451,7 @@ private void processInsertFromFile(RequestContext ctx) if (Objects.requireNonNull(importFile.getType()) == FileType.CSV) { ImportCsv importCsv = (ImportCsv) importFile; - if (ctx.getLoadCSVFileByteBuffer() == null) { + if (ctx.getLoadCSVFileName() == null || ctx.getLoadCSVFileName().isEmpty()) { ctx.setResult(new Result(RpcUtils.SUCCESS)); ctx.getResult().setLoadCSVPath(importCsv.getFilepath()); } else { @@ -476,20 +475,11 @@ private void loadValuesSpecFromCsv( String keyCol) throws IOException { final int BATCH_SIZE = config.getBatchSizeImportCsv(); - File tmpCSV = File.createTempFile("temp", ".csv"); - - try (FileOutputStream fos = new FileOutputStream(tmpCSV)) { - fos.write(ctx.getLoadCSVFileByteBuffer().array()); - fos.flush(); - } catch (IOException e) { - throw new RuntimeException( - "Encounter an error when writing file " - + tmpCSV.getCanonicalPath() - + ", because " - + e.getMessage()); - } - + String filepath = + 
String.join(File.separator, System.getProperty("java.io.tmpdir"), ctx.getLoadCSVFileName()); + File tmpCSV = new File(filepath); long count = 0; + LOGGER.info("Begin to load data from csv file: {}", tmpCSV.getCanonicalPath()); try { CSVParser parser = importCsv @@ -662,6 +652,7 @@ private void loadValuesSpecFromCsv( insertStatement.setBitmaps(bitmaps); // do the actual insert + LOGGER.info("Inserting {} rows, {} rows completed", recordsSize, count); RequestContext subInsertContext = new RequestContext(ctx.getSessionId(), insertStatement); process(subInsertContext); diff --git a/core/src/main/java/cn/edu/tsinghua/iginx/engine/shared/RequestContext.java b/core/src/main/java/cn/edu/tsinghua/iginx/engine/shared/RequestContext.java index f4d90c3b66..2a20d7df3a 100644 --- a/core/src/main/java/cn/edu/tsinghua/iginx/engine/shared/RequestContext.java +++ b/core/src/main/java/cn/edu/tsinghua/iginx/engine/shared/RequestContext.java @@ -58,7 +58,7 @@ public class RequestContext { private PhysicalTask physicalTree; - private ByteBuffer loadCSVFileByteBuffer; + private String loadCSVFileName; private ByteBuffer UDFModuleByteBuffer; diff --git a/core/src/main/java/cn/edu/tsinghua/iginx/sql/IginXSqlVisitor.java b/core/src/main/java/cn/edu/tsinghua/iginx/sql/IginXSqlVisitor.java index ae48feae2f..48ebe53709 100644 --- a/core/src/main/java/cn/edu/tsinghua/iginx/sql/IginXSqlVisitor.java +++ b/core/src/main/java/cn/edu/tsinghua/iginx/sql/IginXSqlVisitor.java @@ -2063,20 +2063,20 @@ private SelectStatement buildSubStatement( private Map parseExtra(StringLiteralContext ctx) { Map map = new HashMap<>(); String extra = ctx.getText().trim(); - if (extra.length() == 0 || extra.equals(SQLConstant.DOUBLE_QUOTES)) { + if (extra.isEmpty() + || extra.equals(SQLConstant.DOUBLE_QUOTES) + || extra.equals(SQLConstant.SINGLE_QUOTES)) { return map; } extra = extra.substring(1, extra.length() - 1); String[] kvStr = extra.split(SQLConstant.COMMA); for (String kv : kvStr) { - String[] kvArray = 
kv.split(SQLConstant.COLON); + String[] kvArray = kv.split(SQLConstant.EQUAL); if (kvArray.length != 2) { - if (kv.contains("url")) { - map.put("url", kv.substring(kv.indexOf(":") + 1)); - } else if (kv.contains("dir")) { + if (kv.contains("dir")) { // for windows absolute path - String dirType = kv.substring(0, kv.indexOf(":")).trim(); - String dirPath = kv.substring(kv.indexOf(":") + 1).trim(); + String dirType = kv.substring(0, kv.indexOf(SQLConstant.EQUAL)).trim(); + String dirPath = kv.substring(kv.indexOf(SQLConstant.EQUAL) + 1).trim(); map.put(dirType, dirPath); } continue; diff --git a/core/src/main/java/cn/edu/tsinghua/iginx/sql/SQLConstant.java b/core/src/main/java/cn/edu/tsinghua/iginx/sql/SQLConstant.java index 5c3308165c..1e8f53ec21 100644 --- a/core/src/main/java/cn/edu/tsinghua/iginx/sql/SQLConstant.java +++ b/core/src/main/java/cn/edu/tsinghua/iginx/sql/SQLConstant.java @@ -25,11 +25,13 @@ public class SQLConstant { public static final String PLUS = "+"; public static final String COMMA = ","; public static final String COLON = ":"; + public static final String EQUAL = "="; public static final String LBRACE = "{"; public static final String RBRACE = "}"; public static final String L_PARENTHESES = "("; public static final String R_PARENTHESES = ")"; public static final String QUOTE = "\""; + public static final String SINGLE_QUOTES = "''"; public static final String DOUBLE_QUOTES = "\"\""; public static final String KEY = "key"; diff --git a/core/src/test/java/cn/edu/tsinghua/iginx/sql/ParseTest.java b/core/src/test/java/cn/edu/tsinghua/iginx/sql/ParseTest.java index 8ca746b591..725b60876b 100644 --- a/core/src/test/java/cn/edu/tsinghua/iginx/sql/ParseTest.java +++ b/core/src/test/java/cn/edu/tsinghua/iginx/sql/ParseTest.java @@ -253,7 +253,7 @@ public void testParseShowReplication() { @Test public void testParseAddStorageEngine() { String addStorageEngineStr = - "ADD STORAGEENGINE (\"127.0.0.1\", 6667, \"iotdb12\", \"username: root, password: 
root\"), ('127.0.0.1', 6668, 'influxdb', 'key1: val1, key2: val2');"; + "ADD STORAGEENGINE (\"127.0.0.1\", 6667, \"iotdb12\", \"username=root, password=root\"), ('127.0.0.1', 6668, 'influxdb', 'key1=val1, key2=val2');"; AddStorageEngineStatement statement = (AddStorageEngineStatement) TestUtils.buildStatement(addStorageEngineStr); diff --git a/example/src/main/java/cn/edu/tsinghua/iginx/session/SQLSessionExample.java b/example/src/main/java/cn/edu/tsinghua/iginx/session/SQLSessionExample.java index eb3c1c1ce4..9e1cc1b989 100644 --- a/example/src/main/java/cn/edu/tsinghua/iginx/session/SQLSessionExample.java +++ b/example/src/main/java/cn/edu/tsinghua/iginx/session/SQLSessionExample.java @@ -60,7 +60,7 @@ public class SQLSessionExample { private static final String deleteColumns = "DELETE COLUMNS us.d1.s2, us.d1.s4;"; private static final String addStorageEngines = - "ADD STORAGEENGINE (\"127.0.0.1\", 6667, \"iotdb12\", \"username: root, password: root\"), (\"127.0.0.1\", 6668, \"influxdb\", \"key: val\");"; + "ADD STORAGEENGINE (\"127.0.0.1\", 6667, \"iotdb12\", \"username=root, password=root\"), (\"127.0.0.1\", 6668, \"influxdb\", \"key=val\");"; private static final String countPoints = "COUNT POINTS;"; private static final String showReplication = "SHOW REPLICA NUMBER;"; diff --git a/jdbc/src/test/java/BatchTest.java b/jdbc/src/test/java/BatchTest.java index da42681494..bcb65e137e 100644 --- a/jdbc/src/test/java/BatchTest.java +++ b/jdbc/src/test/java/BatchTest.java @@ -38,7 +38,7 @@ public void testBatch() throws SQLException { "INSERT INTO test.batch (a, b, c) values (3, 3.1, \"three\");", "DELETE FROM test.batch.c WHERE c = \"two\"", "DELETE FROM test.batch.c WHERE c = \"three\"", - "ADD STORAGEENGINE (\"127.0.0.1\", 6667, IOTDB, \"{\"hello\": \"world\"}\");")); + "ADD STORAGEENGINE (\"127.0.0.1\", 6667, IOTDB, \"{\"hello\"=\"world\"}\");")); IginXStatement statement = new IginXStatement(null, null); diff --git 
a/session/src/main/java/cn/edu/tsinghua/iginx/session/Session.java b/session/src/main/java/cn/edu/tsinghua/iginx/session/Session.java index f26d37bb09..16fb45ca78 100644 --- a/session/src/main/java/cn/edu/tsinghua/iginx/session/Session.java +++ b/session/src/main/java/cn/edu/tsinghua/iginx/session/Session.java @@ -1059,9 +1059,15 @@ Pair fetchResult(long queryId, int fetchSize) throws Se return new Pair<>(ref.resp.getQueryDataSet(), ref.resp.isHasMoreResults()); } - public Pair, Long> executeLoadCSV(String statement, ByteBuffer csvFile) + public void uploadFileChunk(FileChunk chunk) throws SessionException { + UploadFileReq req = new UploadFileReq(sessionId, chunk); + Reference ref = new Reference<>(); + executeWithCheck(() -> (ref.resp = client.uploadFileChunk(req)).status); + } + + public Pair, Long> executeLoadCSV(String statement, String fileName) throws SessionException { - LoadCSVReq req = new LoadCSVReq(sessionId, statement, csvFile); + LoadCSVReq req = new LoadCSVReq(sessionId, statement, fileName); Reference ref = new Reference<>(); executeWithCheck(() -> (ref.resp = client.loadCSV(req)).status); diff --git a/session_py/iginx/iginx_pyclient/session.py b/session_py/iginx/iginx_pyclient/session.py index 20fb9ece84..5945cfd76e 100644 --- a/session_py/iginx/iginx_pyclient/session.py +++ b/session_py/iginx/iginx_pyclient/session.py @@ -21,6 +21,7 @@ import csv import logging import os.path +import time from datetime import datetime import pandas as pd @@ -58,7 +59,7 @@ DebugInfoReq, LoadCSVReq, - StorageEngine, DataType, + StorageEngine, DataType, FileChunk, UploadFileReq, ) from .time_series import TimeSeries from .utils.bitmap import Bitmap @@ -529,11 +530,23 @@ def load_csv(self, statement): if not path.endswith(".csv"): raise ValueError(f"The file name must end with [.csv], {path} doesn't satisfy the requirement!") + chunk_size = 1024 * 1024 + filename = str(time.time() * 1000) + ".csv" with open(file, 'rb') as f: - bytes_content = f.read() # 读取文件内容为bytes - 
- csv_content = bytes_content - req = LoadCSVReq(sessionId=self.__session_id, statement=statement, csvFile=csv_content) + offset = 0 + while chunk := f.read(chunk_size): + chunk_data = FileChunk( + fileName=filename, + offset=offset, + data=chunk, + chunkSize=len(chunk) + ) + req = UploadFileReq(sessionId=self.__session_id, fileChunk=chunk_data) + resp = self.__client.uploadFileChunk(req) + Session.verify_status(resp.status) + offset += len(chunk) + + req = LoadCSVReq(sessionId=self.__session_id, statement=statement, csvFileName=filename) resp = self.__client.loadCSV(req) Session.verify_status(resp.status) return resp diff --git a/session_py/iginx/iginx_pyclient/thrift/rpc/IService-remote b/session_py/iginx/iginx_pyclient/thrift/rpc/IService-remote index d3df2ac87c..19f184e674 100644 --- a/session_py/iginx/iginx_pyclient/thrift/rpc/IService-remote +++ b/session_py/iginx/iginx_pyclient/thrift/rpc/IService-remote @@ -64,6 +64,7 @@ if len(sys.argv) <= 1 or sys.argv[1] == '--help': print(' ShowSessionIDResp showSessionID(ShowSessionIDReq req)') print(' ShowRulesResp showRules(ShowRulesReq req)') print(' Status setRules(SetRulesReq req)') + print(' UploadFileResp uploadFileChunk(UploadFileReq req)') print('') sys.exit(0) @@ -383,6 +384,12 @@ elif cmd == 'setRules': sys.exit(1) pp.pprint(client.setRules(eval(args[0]),)) +elif cmd == 'uploadFileChunk': + if len(args) != 1: + print('uploadFileChunk requires 1 args') + sys.exit(1) + pp.pprint(client.uploadFileChunk(eval(args[0]),)) + else: print('Unrecognized method %s' % cmd) sys.exit(1) diff --git a/session_py/iginx/iginx_pyclient/thrift/rpc/IService.py b/session_py/iginx/iginx_pyclient/thrift/rpc/IService.py index 093311745d..80800b851a 100644 --- a/session_py/iginx/iginx_pyclient/thrift/rpc/IService.py +++ b/session_py/iginx/iginx_pyclient/thrift/rpc/IService.py @@ -359,6 +359,14 @@ def setRules(self, req): """ pass + def uploadFileChunk(self, req): + """ + Parameters: + - req + + """ + pass + class Client(Iface): def 
__init__(self, iprot, oprot=None): @@ -1647,6 +1655,38 @@ def recv_setRules(self): return result.success raise TApplicationException(TApplicationException.MISSING_RESULT, "setRules failed: unknown result") + def uploadFileChunk(self, req): + """ + Parameters: + - req + + """ + self.send_uploadFileChunk(req) + return self.recv_uploadFileChunk() + + def send_uploadFileChunk(self, req): + self._oprot.writeMessageBegin('uploadFileChunk', TMessageType.CALL, self._seqid) + args = uploadFileChunk_args() + args.req = req + args.write(self._oprot) + self._oprot.writeMessageEnd() + self._oprot.trans.flush() + + def recv_uploadFileChunk(self): + iprot = self._iprot + (fname, mtype, rseqid) = iprot.readMessageBegin() + if mtype == TMessageType.EXCEPTION: + x = TApplicationException() + x.read(iprot) + iprot.readMessageEnd() + raise x + result = uploadFileChunk_result() + result.read(iprot) + iprot.readMessageEnd() + if result.success is not None: + return result.success + raise TApplicationException(TApplicationException.MISSING_RESULT, "uploadFileChunk failed: unknown result") + class Processor(Iface, TProcessor): def __init__(self, handler): @@ -1692,6 +1732,7 @@ def __init__(self, handler): self._processMap["showSessionID"] = Processor.process_showSessionID self._processMap["showRules"] = Processor.process_showRules self._processMap["setRules"] = Processor.process_setRules + self._processMap["uploadFileChunk"] = Processor.process_uploadFileChunk self._on_message_begin = None def on_message_begin(self, func): @@ -2634,6 +2675,29 @@ def process_setRules(self, seqid, iprot, oprot): oprot.writeMessageEnd() oprot.trans.flush() + def process_uploadFileChunk(self, seqid, iprot, oprot): + args = uploadFileChunk_args() + args.read(iprot) + iprot.readMessageEnd() + result = uploadFileChunk_result() + try: + result.success = self._handler.uploadFileChunk(args.req) + msg_type = TMessageType.REPLY + except TTransport.TTransportException: + raise + except TApplicationException as ex: + 
logging.exception('TApplication exception in handler') + msg_type = TMessageType.EXCEPTION + result = ex + except Exception: + logging.exception('Unexpected exception in handler') + msg_type = TMessageType.EXCEPTION + result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error') + oprot.writeMessageBegin("uploadFileChunk", msg_type, seqid) + result.write(oprot) + oprot.writeMessageEnd() + oprot.trans.flush() + # HELPER FUNCTIONS AND STRUCTURES @@ -7635,5 +7699,130 @@ def __ne__(self, other): setRules_result.thrift_spec = ( (0, TType.STRUCT, 'success', [Status, None], None, ), # 0 ) + + +class uploadFileChunk_args(object): + """ + Attributes: + - req + + """ + + + def __init__(self, req=None,): + self.req = req + + def read(self, iprot): + if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: + iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) + return + iprot.readStructBegin() + while True: + (fname, ftype, fid) = iprot.readFieldBegin() + if ftype == TType.STOP: + break + if fid == 1: + if ftype == TType.STRUCT: + self.req = UploadFileReq() + self.req.read(iprot) + else: + iprot.skip(ftype) + else: + iprot.skip(ftype) + iprot.readFieldEnd() + iprot.readStructEnd() + + def write(self, oprot): + if oprot._fast_encode is not None and self.thrift_spec is not None: + oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) + return + oprot.writeStructBegin('uploadFileChunk_args') + if self.req is not None: + oprot.writeFieldBegin('req', TType.STRUCT, 1) + self.req.write(oprot) + oprot.writeFieldEnd() + oprot.writeFieldStop() + oprot.writeStructEnd() + + def validate(self): + return + + def __repr__(self): + L = ['%s=%r' % (key, value) + for key, value in self.__dict__.items()] + return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) + + def __eq__(self, other): + return isinstance(other, self.__class__) and self.__dict__ == 
other.__dict__ + + def __ne__(self, other): + return not (self == other) +all_structs.append(uploadFileChunk_args) +uploadFileChunk_args.thrift_spec = ( + None, # 0 + (1, TType.STRUCT, 'req', [UploadFileReq, None], None, ), # 1 +) + + +class uploadFileChunk_result(object): + """ + Attributes: + - success + + """ + + + def __init__(self, success=None,): + self.success = success + + def read(self, iprot): + if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: + iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) + return + iprot.readStructBegin() + while True: + (fname, ftype, fid) = iprot.readFieldBegin() + if ftype == TType.STOP: + break + if fid == 0: + if ftype == TType.STRUCT: + self.success = UploadFileResp() + self.success.read(iprot) + else: + iprot.skip(ftype) + else: + iprot.skip(ftype) + iprot.readFieldEnd() + iprot.readStructEnd() + + def write(self, oprot): + if oprot._fast_encode is not None and self.thrift_spec is not None: + oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) + return + oprot.writeStructBegin('uploadFileChunk_result') + if self.success is not None: + oprot.writeFieldBegin('success', TType.STRUCT, 0) + self.success.write(oprot) + oprot.writeFieldEnd() + oprot.writeFieldStop() + oprot.writeStructEnd() + + def validate(self): + return + + def __repr__(self): + L = ['%s=%r' % (key, value) + for key, value in self.__dict__.items()] + return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) + + def __eq__(self, other): + return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ + + def __ne__(self, other): + return not (self == other) +all_structs.append(uploadFileChunk_result) +uploadFileChunk_result.thrift_spec = ( + (0, TType.STRUCT, 'success', [UploadFileResp, None], None, ), # 0 +) fix_spec(all_structs) del all_structs diff --git a/session_py/iginx/iginx_pyclient/thrift/rpc/ttypes.py 
b/session_py/iginx/iginx_pyclient/thrift/rpc/ttypes.py index 0832bbf298..ffdbfdec13 100644 --- a/session_py/iginx/iginx_pyclient/thrift/rpc/ttypes.py +++ b/session_py/iginx/iginx_pyclient/thrift/rpc/ttypes.py @@ -6297,15 +6297,15 @@ class LoadCSVReq(object): Attributes: - sessionId - statement - - csvFile + - csvFileName """ - def __init__(self, sessionId=None, statement=None, csvFile=None,): + def __init__(self, sessionId=None, statement=None, csvFileName=None,): self.sessionId = sessionId self.statement = statement - self.csvFile = csvFile + self.csvFileName = csvFileName def read(self, iprot): if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: @@ -6328,7 +6328,7 @@ def read(self, iprot): iprot.skip(ftype) elif fid == 3: if ftype == TType.STRING: - self.csvFile = iprot.readBinary() + self.csvFileName = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() else: iprot.skip(ftype) else: @@ -6349,9 +6349,9 @@ def write(self, oprot): oprot.writeFieldBegin('statement', TType.STRING, 2) oprot.writeString(self.statement.encode('utf-8') if sys.version_info[0] == 2 else self.statement) oprot.writeFieldEnd() - if self.csvFile is not None: - oprot.writeFieldBegin('csvFile', TType.STRING, 3) - oprot.writeBinary(self.csvFile) + if self.csvFileName is not None: + oprot.writeFieldBegin('csvFileName', TType.STRING, 3) + oprot.writeString(self.csvFileName.encode('utf-8') if sys.version_info[0] == 2 else self.csvFileName) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() @@ -6361,8 +6361,8 @@ def validate(self): raise TProtocolException(message='Required field sessionId is unset!') if self.statement is None: raise TProtocolException(message='Required field statement is unset!') - if self.csvFile is None: - raise TProtocolException(message='Required field csvFile is unset!') + if self.csvFileName is None: + raise 
TProtocolException(message='Required field csvFileName is unset!') return def __repr__(self): @@ -9464,6 +9464,237 @@ def __eq__(self, other): def __ne__(self, other): return not (self == other) + + +class FileChunk(object): + """ + Attributes: + - fileName + - offset + - data + - chunkSize + + """ + + + def __init__(self, fileName=None, offset=None, data=None, chunkSize=None,): + self.fileName = fileName + self.offset = offset + self.data = data + self.chunkSize = chunkSize + + def read(self, iprot): + if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: + iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) + return + iprot.readStructBegin() + while True: + (fname, ftype, fid) = iprot.readFieldBegin() + if ftype == TType.STOP: + break + if fid == 1: + if ftype == TType.STRING: + self.fileName = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() + else: + iprot.skip(ftype) + elif fid == 2: + if ftype == TType.I64: + self.offset = iprot.readI64() + else: + iprot.skip(ftype) + elif fid == 3: + if ftype == TType.STRING: + self.data = iprot.readBinary() + else: + iprot.skip(ftype) + elif fid == 4: + if ftype == TType.I64: + self.chunkSize = iprot.readI64() + else: + iprot.skip(ftype) + else: + iprot.skip(ftype) + iprot.readFieldEnd() + iprot.readStructEnd() + + def write(self, oprot): + if oprot._fast_encode is not None and self.thrift_spec is not None: + oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) + return + oprot.writeStructBegin('FileChunk') + if self.fileName is not None: + oprot.writeFieldBegin('fileName', TType.STRING, 1) + oprot.writeString(self.fileName.encode('utf-8') if sys.version_info[0] == 2 else self.fileName) + oprot.writeFieldEnd() + if self.offset is not None: + oprot.writeFieldBegin('offset', TType.I64, 2) + oprot.writeI64(self.offset) + oprot.writeFieldEnd() + if self.data 
is not None: + oprot.writeFieldBegin('data', TType.STRING, 3) + oprot.writeBinary(self.data) + oprot.writeFieldEnd() + if self.chunkSize is not None: + oprot.writeFieldBegin('chunkSize', TType.I64, 4) + oprot.writeI64(self.chunkSize) + oprot.writeFieldEnd() + oprot.writeFieldStop() + oprot.writeStructEnd() + + def validate(self): + if self.fileName is None: + raise TProtocolException(message='Required field fileName is unset!') + if self.offset is None: + raise TProtocolException(message='Required field offset is unset!') + if self.data is None: + raise TProtocolException(message='Required field data is unset!') + if self.chunkSize is None: + raise TProtocolException(message='Required field chunkSize is unset!') + return + + def __repr__(self): + L = ['%s=%r' % (key, value) + for key, value in self.__dict__.items()] + return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) + + def __eq__(self, other): + return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ + + def __ne__(self, other): + return not (self == other) + + +class UploadFileReq(object): + """ + Attributes: + - sessionId + - fileChunk + + """ + + + def __init__(self, sessionId=None, fileChunk=None,): + self.sessionId = sessionId + self.fileChunk = fileChunk + + def read(self, iprot): + if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: + iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) + return + iprot.readStructBegin() + while True: + (fname, ftype, fid) = iprot.readFieldBegin() + if ftype == TType.STOP: + break + if fid == 1: + if ftype == TType.I64: + self.sessionId = iprot.readI64() + else: + iprot.skip(ftype) + elif fid == 2: + if ftype == TType.STRUCT: + self.fileChunk = FileChunk() + self.fileChunk.read(iprot) + else: + iprot.skip(ftype) + else: + iprot.skip(ftype) + iprot.readFieldEnd() + iprot.readStructEnd() + + def write(self, oprot): + if oprot._fast_encode is not None and 
self.thrift_spec is not None: + oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) + return + oprot.writeStructBegin('UploadFileReq') + if self.sessionId is not None: + oprot.writeFieldBegin('sessionId', TType.I64, 1) + oprot.writeI64(self.sessionId) + oprot.writeFieldEnd() + if self.fileChunk is not None: + oprot.writeFieldBegin('fileChunk', TType.STRUCT, 2) + self.fileChunk.write(oprot) + oprot.writeFieldEnd() + oprot.writeFieldStop() + oprot.writeStructEnd() + + def validate(self): + if self.sessionId is None: + raise TProtocolException(message='Required field sessionId is unset!') + if self.fileChunk is None: + raise TProtocolException(message='Required field fileChunk is unset!') + return + + def __repr__(self): + L = ['%s=%r' % (key, value) + for key, value in self.__dict__.items()] + return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) + + def __eq__(self, other): + return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ + + def __ne__(self, other): + return not (self == other) + + +class UploadFileResp(object): + """ + Attributes: + - status + + """ + + + def __init__(self, status=None,): + self.status = status + + def read(self, iprot): + if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: + iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) + return + iprot.readStructBegin() + while True: + (fname, ftype, fid) = iprot.readFieldBegin() + if ftype == TType.STOP: + break + if fid == 1: + if ftype == TType.STRUCT: + self.status = Status() + self.status.read(iprot) + else: + iprot.skip(ftype) + else: + iprot.skip(ftype) + iprot.readFieldEnd() + iprot.readStructEnd() + + def write(self, oprot): + if oprot._fast_encode is not None and self.thrift_spec is not None: + oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) + return + oprot.writeStructBegin('UploadFileResp') + if self.status is 
not None: + oprot.writeFieldBegin('status', TType.STRUCT, 1) + self.status.write(oprot) + oprot.writeFieldEnd() + oprot.writeFieldStop() + oprot.writeStructEnd() + + def validate(self): + if self.status is None: + raise TProtocolException(message='Required field status is unset!') + return + + def __repr__(self): + L = ['%s=%r' % (key, value) + for key, value in self.__dict__.items()] + return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) + + def __eq__(self, other): + return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ + + def __ne__(self, other): + return not (self == other) all_structs.append(Status) Status.thrift_spec = ( None, # 0 @@ -9884,7 +10115,7 @@ def __ne__(self, other): None, # 0 (1, TType.I64, 'sessionId', None, None, ), # 1 (2, TType.STRING, 'statement', 'UTF8', None, ), # 2 - (3, TType.STRING, 'csvFile', 'BINARY', None, ), # 3 + (3, TType.STRING, 'csvFileName', 'UTF8', None, ), # 3 ) all_structs.append(LoadCSVResp) LoadCSVResp.thrift_spec = ( @@ -10129,5 +10360,24 @@ def __ne__(self, other): (1, TType.I64, 'sessionId', None, None, ), # 1 (2, TType.MAP, 'rulesChange', (TType.STRING, 'UTF8', TType.BOOL, None, False), None, ), # 2 ) +all_structs.append(FileChunk) +FileChunk.thrift_spec = ( + None, # 0 + (1, TType.STRING, 'fileName', 'UTF8', None, ), # 1 + (2, TType.I64, 'offset', None, None, ), # 2 + (3, TType.STRING, 'data', 'BINARY', None, ), # 3 + (4, TType.I64, 'chunkSize', None, None, ), # 4 +) +all_structs.append(UploadFileReq) +UploadFileReq.thrift_spec = ( + None, # 0 + (1, TType.I64, 'sessionId', None, None, ), # 1 + (2, TType.STRUCT, 'fileChunk', [FileChunk, None], None, ), # 2 +) +all_structs.append(UploadFileResp) +UploadFileResp.thrift_spec = ( + None, # 0 + (1, TType.STRUCT, 'status', [Status, None], None, ), # 1 +) fix_spec(all_structs) del all_structs diff --git a/test/src/test/java/cn/edu/tsinghua/iginx/integration/client/ImportFileIT.java 
b/test/src/test/java/cn/edu/tsinghua/iginx/integration/client/ImportFileIT.java index 99391e3b07..dc7b8e3e83 100644 --- a/test/src/test/java/cn/edu/tsinghua/iginx/integration/client/ImportFileIT.java +++ b/test/src/test/java/cn/edu/tsinghua/iginx/integration/client/ImportFileIT.java @@ -19,6 +19,8 @@ */ package cn.edu.tsinghua.iginx.integration.client; +import static org.junit.Assert.assertTrue; + import cn.edu.tsinghua.iginx.exception.SessionException; import cn.edu.tsinghua.iginx.integration.tool.MultiConnection; import cn.edu.tsinghua.iginx.integration.tool.SQLExecutor; @@ -27,9 +29,13 @@ import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; public class ImportFileIT { + private static final Logger LOGGER = LoggerFactory.getLogger(ImportFileIT.class); + protected static SQLExecutor executor; @BeforeClass @@ -81,5 +87,40 @@ public void testLoadData() { + "+---+------+----+----+------+\n" + "Total line number = 5\n"; executor.executeAndCompare(query, expected); + + testLoadBigCsv(); + } + + private void testLoadBigCsv() { + String statement = "SHOW COLUMNS bigcsv.*;"; + String ret = executor.execute(statement); + if (!ret.contains("Total line number = 100")) { + LOGGER.info(ret); + assertTrue(ret.contains("Total line number = 100")); + } + + statement = "SELECT COUNT(test_c33) FROM bigcsv;"; + String expected = + "ResultSets:\n" + + "+----------------------+\n" + + "|count(bigcsv.test_c33)|\n" + + "+----------------------+\n" + + "| 120000|\n" + + "+----------------------+\n" + + "Total line number = 1\n"; + executor.executeAndCompare(statement, expected); + + statement = "SELECT test_c0, test_c99 FROM bigcsv WHERE key > 119996;"; + expected = + "ResultSets:\n" + + "+------+--------------+---------------+\n" + + "| key|bigcsv.test_c0|bigcsv.test_c99|\n" + + "+------+--------------+---------------+\n" + + "|119997| gHH3VRCeqV| JwBz3cs51P|\n" + + "|119998| 9kKtsslw5L| 
ja5wByfKIu|\n" + + "|119999| m9DGS5q36W| UY5geS31Nu|\n" + + "+------+--------------+---------------+\n" + + "Total line number = 3\n"; + executor.executeAndCompare(statement, expected); } } diff --git a/test/src/test/java/cn/edu/tsinghua/iginx/integration/expansion/BaseCapacityExpansionIT.java b/test/src/test/java/cn/edu/tsinghua/iginx/integration/expansion/BaseCapacityExpansionIT.java index 2b4c14e292..3cc34716ba 100644 --- a/test/src/test/java/cn/edu/tsinghua/iginx/integration/expansion/BaseCapacityExpansionIT.java +++ b/test/src/test/java/cn/edu/tsinghua/iginx/integration/expansion/BaseCapacityExpansionIT.java @@ -111,33 +111,33 @@ protected String addStorageEngine( statement.append(", \""); statement.append(type.name()); statement.append("\", \""); - statement.append("has_data:"); + statement.append("has_data="); statement.append(hasData); - statement.append(", is_read_only:"); + statement.append(", is_read_only="); statement.append(isReadOnly); if (this instanceof InfluxDBCapacityExpansionIT) { - statement.append(", url:http://localhost:"); + statement.append(", url=http://localhost:"); statement.append(port); statement.append("/"); } if (IS_EMBEDDED) { - statement.append(String.format(", dummy_dir:%s/", DBCE_PARQUET_FS_TEST_DIR)); + statement.append(String.format(", dummy_dir=%s/", DBCE_PARQUET_FS_TEST_DIR)); statement.append(PORT_TO_ROOT.get(port)); statement.append( - String.format(", dir:%s/" + IGINX_DATA_PATH_PREFIX_NAME, DBCE_PARQUET_FS_TEST_DIR)); + String.format(", dir=%s/" + IGINX_DATA_PATH_PREFIX_NAME, DBCE_PARQUET_FS_TEST_DIR)); statement.append(PORT_TO_ROOT.get(port)); - statement.append(", iginx_port:").append(oriPortIginx); + statement.append(", iginx_port=").append(oriPortIginx); } if (extraParams != null) { statement.append(", "); statement.append(extraParams); } if (dataPrefix != null) { - statement.append(", data_prefix:"); + statement.append(", data_prefix="); statement.append(dataPrefix); } if (schemaPrefix != null) { - statement.append(", 
schema_prefix:"); + statement.append(", schema_prefix="); statement.append(schemaPrefix); } statement.append("\");"); @@ -395,7 +395,7 @@ protected void testUpdateEngineParams() throws SessionException { String newParams = updatedParams.entrySet().stream() - .map(entry -> entry.getKey() + ":" + entry.getValue()) + .map(entry -> entry.getKey() + "=" + entry.getValue()) .collect(Collectors.joining(", ")); session.executeSql(String.format(ALTER_ENGINE_STRING, id, newParams)); diff --git a/test/src/test/java/cn/edu/tsinghua/iginx/integration/expansion/filesystem/FileSystemCapacityExpansionIT.java b/test/src/test/java/cn/edu/tsinghua/iginx/integration/expansion/filesystem/FileSystemCapacityExpansionIT.java index 2932f15356..48f3be98dc 100644 --- a/test/src/test/java/cn/edu/tsinghua/iginx/integration/expansion/filesystem/FileSystemCapacityExpansionIT.java +++ b/test/src/test/java/cn/edu/tsinghua/iginx/integration/expansion/filesystem/FileSystemCapacityExpansionIT.java @@ -54,7 +54,7 @@ private static String getAddStorageParams() { public static String getAddStorageParams(Map params) { StringJoiner joiner = new StringJoiner(","); for (Map.Entry entry : params.entrySet()) { - joiner.add(entry.getKey() + ":" + entry.getValue()); + joiner.add(entry.getKey() + "=" + entry.getValue()); } return joiner.toString(); } diff --git a/test/src/test/java/cn/edu/tsinghua/iginx/integration/expansion/influxdb/InfluxDBCapacityExpansionIT.java b/test/src/test/java/cn/edu/tsinghua/iginx/integration/expansion/influxdb/InfluxDBCapacityExpansionIT.java index 77c752305e..d1e826abec 100644 --- a/test/src/test/java/cn/edu/tsinghua/iginx/integration/expansion/influxdb/InfluxDBCapacityExpansionIT.java +++ b/test/src/test/java/cn/edu/tsinghua/iginx/integration/expansion/influxdb/InfluxDBCapacityExpansionIT.java @@ -38,7 +38,7 @@ public class InfluxDBCapacityExpansionIT extends BaseCapacityExpansionIT { public InfluxDBCapacityExpansionIT() { super( influxdb, - "username:user, password:12345678, 
token:testToken, organization:testOrg", + "username=user, password=12345678, token=testToken, organization=testOrg", new InfluxDBHistoryDataGenerator()); ConfLoader conf = new ConfLoader(Controller.CONFIG_FILE); DBConf dbConf = conf.loadDBConf(conf.getStorageType()); @@ -46,7 +46,7 @@ public InfluxDBCapacityExpansionIT() { Constant.expPort = dbConf.getDBCEPortMap().get(Constant.EXP_PORT_NAME); Constant.readOnlyPort = dbConf.getDBCEPortMap().get(Constant.READ_ONLY_PORT_NAME); wrongExtraParams.add( - "username:user, password:12345678, token:testToken, organization:wrongOrg"); + "username=user, password=12345678, token=testToken, organization=wrongOrg"); updatedParams.put("organization", "newOrg"); } diff --git a/test/src/test/java/cn/edu/tsinghua/iginx/integration/expansion/iotdb/IoTDB12CapacityExpansionIT.java b/test/src/test/java/cn/edu/tsinghua/iginx/integration/expansion/iotdb/IoTDB12CapacityExpansionIT.java index 192d42d987..24bfccd591 100644 --- a/test/src/test/java/cn/edu/tsinghua/iginx/integration/expansion/iotdb/IoTDB12CapacityExpansionIT.java +++ b/test/src/test/java/cn/edu/tsinghua/iginx/integration/expansion/iotdb/IoTDB12CapacityExpansionIT.java @@ -38,10 +38,10 @@ public class IoTDB12CapacityExpansionIT extends BaseCapacityExpansionIT { public IoTDB12CapacityExpansionIT() { super( iotdb12, - "username:root, password:root, sessionPoolSize:20", + "username=root, password=root, sessionPoolSize=20", new IoTDB12HistoryDataGenerator()); - wrongExtraParams.add("username:root, password:wrong, sessionPoolSize:20"); - wrongExtraParams.add("username:wrong, password:root, sessionPoolSize:20"); + wrongExtraParams.add("username=root, password=wrong, sessionPoolSize=20"); + wrongExtraParams.add("username=wrong, password=root, sessionPoolSize=20"); updatedParams.put("password", "newPassword"); } diff --git a/test/src/test/java/cn/edu/tsinghua/iginx/integration/expansion/mysql/MySQLCapacityExpansionIT.java 
b/test/src/test/java/cn/edu/tsinghua/iginx/integration/expansion/mysql/MySQLCapacityExpansionIT.java index 46ee1f42bc..385829995a 100644 --- a/test/src/test/java/cn/edu/tsinghua/iginx/integration/expansion/mysql/MySQLCapacityExpansionIT.java +++ b/test/src/test/java/cn/edu/tsinghua/iginx/integration/expansion/mysql/MySQLCapacityExpansionIT.java @@ -42,7 +42,7 @@ public class MySQLCapacityExpansionIT extends BaseCapacityExpansionIT { public MySQLCapacityExpansionIT() { super( StorageEngineType.relational, - "engine:mysql, username:root", + "engine=mysql, username=root", new MySQLHistoryDataGenerator()); ConfLoader conf = new ConfLoader(Controller.CONFIG_FILE); DBConf dbConf = conf.loadDBConf(conf.getStorageType()); diff --git a/test/src/test/java/cn/edu/tsinghua/iginx/integration/expansion/postgresql/PostgreSQLCapacityExpansionIT.java b/test/src/test/java/cn/edu/tsinghua/iginx/integration/expansion/postgresql/PostgreSQLCapacityExpansionIT.java index 51809c4892..d3c0f3a270 100644 --- a/test/src/test/java/cn/edu/tsinghua/iginx/integration/expansion/postgresql/PostgreSQLCapacityExpansionIT.java +++ b/test/src/test/java/cn/edu/tsinghua/iginx/integration/expansion/postgresql/PostgreSQLCapacityExpansionIT.java @@ -44,14 +44,14 @@ public class PostgreSQLCapacityExpansionIT extends BaseCapacityExpansionIT { public PostgreSQLCapacityExpansionIT() { super( StorageEngineType.relational, - "engine:postgresql, username:postgres, password:postgres", + "engine=postgresql, username=postgres, password=postgres", new PostgreSQLHistoryDataGenerator()); ConfLoader conf = new ConfLoader(Controller.CONFIG_FILE); DBConf dbConf = conf.loadDBConf(conf.getStorageType()); Constant.oriPort = dbConf.getDBCEPortMap().get(Constant.ORI_PORT_NAME); Constant.expPort = dbConf.getDBCEPortMap().get(Constant.EXP_PORT_NAME); Constant.readOnlyPort = dbConf.getDBCEPortMap().get(Constant.READ_ONLY_PORT_NAME); - wrongExtraParams.add("username:wrong, password:postgres"); + 
wrongExtraParams.add("username=wrong, password=postgres"); // wrong password situation cannot be tested because trust mode is used updatedParams.put("password", "newPassword"); diff --git a/thrift/src/main/thrift/rpc.thrift b/thrift/src/main/thrift/rpc.thrift index 766e089e74..7cf9f04ccb 100644 --- a/thrift/src/main/thrift/rpc.thrift +++ b/thrift/src/main/thrift/rpc.thrift @@ -531,7 +531,7 @@ struct FetchResultsResp { struct LoadCSVReq { 1: required i64 sessionId 2: required string statement - 3: required binary csvFile + 3: required string csvFileName } struct LoadCSVResp { @@ -746,6 +746,22 @@ struct SetRulesReq { 2: required map rulesChange } +struct FileChunk { + 1: required string fileName; + 2: required i64 offset; + 3: required binary data; + 4: required i64 chunkSize; +} + +struct UploadFileReq { + 1: required i64 sessionId + 2: required FileChunk fileChunk +} + +struct UploadFileResp { + 1: required Status status +} + service IService { OpenSessionResp openSession(1: OpenSessionReq req); @@ -827,4 +843,6 @@ service IService { ShowRulesResp showRules(1: ShowRulesReq req); Status setRules(1: SetRulesReq req); + + UploadFileResp uploadFileChunk(1: UploadFileReq req); }