com/starrocks/connector/spark/exception/NotSupportedOperationException.java
@@ -0,0 +1,26 @@
// Modifications Copyright 2021 StarRocks Limited.
//
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.

package com.starrocks.connector.spark.exception;

public class NotSupportedOperationException extends StarRocksException {
public NotSupportedOperationException(String msg) {
super(msg);
}
}
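Note: a quick, hypothetical illustration of where this exception could be thrown during an overwrite; the guard below is not part of this PR.

```java
import com.starrocks.connector.spark.exception.NotSupportedOperationException;
import com.starrocks.connector.spark.rest.models.PartitionType;

public class UnsupportedOpDemo {
    // Hypothetical guard: reject overwrite for table layouts the connector cannot handle.
    static void checkOverwriteSupported(PartitionType type) {
        if (type == PartitionType.EXPRESSION) {
            throw new NotSupportedOperationException(
                    "overwrite is not supported for expression partitioning");
        }
    }
}
```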
com/starrocks/connector/spark/rest/models/PartitionType.java
@@ -0,0 +1,24 @@
// Modifications Copyright 2021 StarRocks Limited.
//
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.

package com.starrocks.connector.spark.rest.models;

public enum PartitionType {
NONE, LIST, RANGE, EXPRESSION
}
@@ -26,11 +26,7 @@
import com.starrocks.connector.spark.sql.conf.WriteStarRocksConfig;
import com.starrocks.connector.spark.sql.schema.StarRocksSchema;
import com.starrocks.connector.spark.sql.write.StarRocksWriteBuilder;
import org.apache.spark.sql.connector.catalog.Identifier;
import org.apache.spark.sql.connector.catalog.SupportsRead;
import org.apache.spark.sql.connector.catalog.SupportsWrite;
import org.apache.spark.sql.connector.catalog.Table;
import org.apache.spark.sql.connector.catalog.TableCapability;
import org.apache.spark.sql.connector.catalog.*;
import org.apache.spark.sql.connector.read.ScanBuilder;
import org.apache.spark.sql.connector.write.LogicalWriteInfo;
import org.apache.spark.sql.connector.write.WriteBuilder;
@@ -105,7 +101,8 @@ private void checkWriteParameter(WriteStarRocksConfig config) {
}

private static final Set<TableCapability> TABLE_CAPABILITY_SET = new HashSet<>(
Arrays.asList(TableCapability.BATCH_READ, TableCapability.BATCH_WRITE, TableCapability.STREAMING_WRITE));
Arrays.asList(TableCapability.BATCH_READ, TableCapability.BATCH_WRITE, TableCapability.OVERWRITE_DYNAMIC,
TableCapability.STREAMING_WRITE, TableCapability.OVERWRITE_BY_FILTER));

@Override
public ScanBuilder newScanBuilder(CaseInsensitiveStringMap options) {
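Note: adding OVERWRITE_DYNAMIC and OVERWRITE_BY_FILTER is what lets Spark plan `SaveMode.Overwrite` (and `INSERT OVERWRITE`) writes against this table instead of rejecting them at analysis time. A minimal usage sketch; the format name and option keys below are assumptions for illustration, not taken from this diff:

```java
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SaveMode;
import org.apache.spark.sql.SparkSession;

public class OverwriteWriteExample {
    public static void main(String[] args) {
        SparkSession spark = SparkSession.builder().appName("sr-overwrite").getOrCreate();
        Dataset<Row> df = spark.read().parquet("/tmp/source");
        df.write()
                .format("starrocks")                                          // assumed short name
                .option("starrocks.fe.http.url", "fe_host:8030")              // assumed key
                .option("starrocks.fe.jdbc.url", "jdbc:mysql://fe_host:9030") // assumed key
                .option("starrocks.table.identifier", "db.tbl")               // assumed key
                .mode(SaveMode.Overwrite) // planned via the new overwrite capabilities
                .save();
        spark.stop();
    }
}
```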
com/starrocks/connector/spark/sql/conf/WriteStarRocksConfig.java
@@ -26,26 +26,19 @@
import com.starrocks.data.load.stream.StreamLoadUtils;
import com.starrocks.data.load.stream.properties.StreamLoadProperties;
import com.starrocks.data.load.stream.properties.StreamLoadTableProperties;
import org.apache.spark.sql.types.ArrayType;
import org.apache.spark.sql.types.ByteType;
import org.apache.spark.sql.types.DataType;
import org.apache.spark.sql.types.IntegerType;
import org.apache.spark.sql.types.LongType;
import org.apache.spark.sql.types.ShortType;
import org.apache.spark.sql.types.StructField;
import org.apache.spark.sql.types.StructType;
import org.apache.commons.lang3.StringUtils;
import org.apache.spark.sql.sources.Filter;
import org.apache.spark.sql.types.*;
import org.apache.spark.util.Utils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.*;
import java.util.stream.Collectors;

public class WriteStarRocksConfig extends StarRocksConfigBase {

private static final Logger LOG = LoggerFactory.getLogger(WriteStarRocksConfig.class);
private static final long serialVersionUID = 1L;

public static final String WRITE_PREFIX = PREFIX + "write.";
@@ -77,6 +70,8 @@ public class WriteStarRocksConfig extends StarRocksConfigBase {
private static final String KEY_NUM_PARTITIONS = WRITE_PREFIX + "num.partitions";
private static final String KEY_PARTITION_COLUMNS = WRITE_PREFIX + "partition.columns";

private static final String KEY_OVERWRITE_PARTITION_PREFIX = WRITE_PREFIX + "overwrite.partitions.";

private String labelPrefix = "spark";
private int socketTimeoutMs = -1;
private int waitForContinueTimeoutMs = 30000;
@@ -106,6 +101,56 @@ public class WriteStarRocksConfig extends StarRocksConfigBase {
private String[] streamLoadColumnNames;
private final Set<String> starRocksJsonColumnNames;

private boolean overwrite;
private Filter[] filters;
private String tempTableName;

// <partition_name, partition_value>
private Map<String, String> overwritePartitions;

// <temp_partition_name, partition_name>
private Map<String, String> overwriteTempPartitionMappings;
// <temp_partition_name, partition_value>
private Map<String, String> overwriteTempPartitions;

public static final String TEMPORARY_PARTITION_SUFFIX = "_created_by_sr_spark_connector_";

public Map<String, String> getOverwritePartitions() {
return overwritePartitions;
}

public Map<String, String> getOverwriteTempPartitionMappings() {
return overwriteTempPartitionMappings;
}

public Map<String, String> getOverwriteTempPartitions() {
return overwriteTempPartitions;
}

public void setTempTableName(String tempTableName) {
this.tempTableName = tempTableName;
}

public String getTempTableName() {
return tempTableName;
}

public void setOverwrite(boolean overwrite) {
this.overwrite = overwrite;
}

public void setFilters(Filter[] filters) {
this.filters = filters;
}

public Filter[] getFilters() {
return filters;
}

public boolean isOverwrite() {
return overwrite;
}

public WriteStarRocksConfig(Map<String, String> originOptions, StructType sparkSchema, StarRocksSchema starRocksSchema) {
super(originOptions);
load(sparkSchema);
@@ -139,6 +184,40 @@ private void load(StructType sparkSchema) {
Map.Entry::getValue
)
);
overwritePartitions = originOptions.entrySet().stream()
.filter(entry -> entry.getKey().startsWith(KEY_OVERWRITE_PARTITION_PREFIX))
.peek(entry -> {
if (StringUtils.isEmpty(entry.getValue())) {
                    throw new IllegalArgumentException("value of `" + entry.getKey() + "` cannot be empty");
}
})
.collect(
Collectors.toMap(
entry -> entry.getKey().replaceFirst(KEY_OVERWRITE_PARTITION_PREFIX, ""),
Map.Entry::getValue
)
);
overwriteTempPartitionMappings = overwritePartitions.entrySet().stream()
.collect(
Collectors.toMap(
entry -> entry.getKey() + TEMPORARY_PARTITION_SUFFIX + System.currentTimeMillis(),
Map.Entry::getKey
)
);
overwriteTempPartitions = overwriteTempPartitionMappings.entrySet().stream()
.collect(Collectors.toMap(
Map.Entry::getKey,
entry -> overwritePartitions.get(entry.getValue())
));
if (!overwritePartitions.isEmpty()) {
String temporaryPartitionsList = String.join(",", overwriteTempPartitionMappings.keySet());
String temporaryPartitions = "temporary_partitions";
String oldSetting = properties.get(temporaryPartitions);
if (StringUtils.isNotEmpty(oldSetting)) {
                LOG.warn("Replacing existing temporary_partitions value ({}) with {}", oldSetting, temporaryPartitionsList);
}
properties.put(temporaryPartitions, temporaryPartitionsList);
}
format = originOptions.getOrDefault(KEY_PROPS_FORMAT, "CSV");
rowDelimiter = DelimiterParser.convertDelimiter(
originOptions.getOrDefault(KEY_PROPS_ROW_DELIMITER, "\n"));
@@ -255,10 +334,15 @@ public boolean isPartialUpdate() {
public StreamLoadProperties toStreamLoadProperties() {
StreamLoadDataFormat dataFormat = "json".equalsIgnoreCase(format) ?
StreamLoadDataFormat.JSON : new StreamLoadDataFormat.CSVFormat(rowDelimiter);

String table;
if (isOverwrite() && getTempTableName() != null) {
table = getTempTableName();
} else {
table = getTable();
}
StreamLoadTableProperties tableProperties = StreamLoadTableProperties.builder()
.database(getDatabase())
.table(getTable())
.table(table)
.columns(streamLoadColumnProperty)
.streamLoadDataFormat(dataFormat)
.chunkLimit(chunkLimit)
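Note on the options introduced above: each `<WRITE_PREFIX>overwrite.partitions.<partition_name> = <partition_value>` entry names a target partition to overwrite, and is expanded into a uniquely suffixed temporary partition name that is joined into the stream load `temporary_partitions` property, so rows land in temporary partitions (or in the temporary table when the whole table is overwritten, as `toStreamLoadProperties` shows). A self-contained sketch of that mapping; the `starrocks.write.` prefix is an assumption about what `PREFIX` resolves to:

```java
import java.util.HashMap;
import java.util.Map;

public class OverwritePartitionMappingDemo {
    public static void main(String[] args) {
        String prefix = "starrocks.write.overwrite.partitions."; // assumed full prefix
        Map<String, String> options = new HashMap<>();
        options.put(prefix + "p20240101", "2024-01-01");

        long ts = System.currentTimeMillis();
        Map<String, String> overwritePartitions = new HashMap<>();   // p20240101 -> 2024-01-01
        Map<String, String> tempPartitionMappings = new HashMap<>(); // temp name -> p20240101
        options.forEach((key, value) -> {
            if (key.startsWith(prefix)) {
                String name = key.substring(prefix.length());
                overwritePartitions.put(name, value);
                tempPartitionMappings.put(name + "_created_by_sr_spark_connector_" + ts, name);
            }
        });
        // The joined temp names become the stream load "temporary_partitions" property.
        System.out.println(String.join(",", tempPartitionMappings.keySet()));
    }
}
```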
com/starrocks/connector/spark/sql/connect/StarRocksConnector.java
@@ -19,26 +19,19 @@

package com.starrocks.connector.spark.sql.connect;

import com.google.common.annotations.VisibleForTesting;
import com.starrocks.connector.spark.exception.StarRocksException;
import com.starrocks.connector.spark.rest.models.PartitionType;
import com.starrocks.connector.spark.sql.conf.StarRocksConfig;
import com.starrocks.connector.spark.sql.conf.WriteStarRocksConfig;
import com.starrocks.connector.spark.sql.schema.StarRocksField;
import com.starrocks.connector.spark.sql.schema.StarRocksSchema;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.sql.*;
import java.util.*;

public class StarRocksConnector {
private static Logger logger = LoggerFactory.getLogger(StarRocksConnector.class);
@@ -81,6 +74,43 @@ public static StarRocksSchema getSchema(StarRocksConfig config) {
return new StarRocksSchema(columns, pks);
}

    public static PartitionType getPartitionType(StarRocksConfig config) {
        String createTableDDL = getCreateTableDDL(config);
        return createTableDDL.contains("PARTITION BY RANGE(") ? PartitionType.RANGE
                : createTableDDL.contains("PARTITION BY LIST(") ? PartitionType.LIST
                : createTableDDL.contains("PARTITION BY") ? PartitionType.EXPRESSION
                : PartitionType.NONE;
    }

    public static boolean isDynamicPartitionTable(StarRocksConfig config) {
        return getCreateTableDDL(config).contains("\"dynamic_partition.enable\" = \"true\"");
    }

    // SHOW CREATE TABLE returns the table name in column 1 and the CREATE TABLE
    // statement in column 2; only the statement text is needed by the callers above.
    private static String getCreateTableDDL(StarRocksConfig config) {
        String showCreateTableDDL = String.format("SHOW CREATE TABLE `%s`.`%s`", config.getDatabase(), config.getTable());
        String createTableDDL = "";
        try (Connection conn = createJdbcConnection(config.getFeJdbcUrl(), config.getUsername(), config.getPassword());
                PreparedStatement ps = conn.prepareStatement(showCreateTableDDL);
                ResultSet rs = ps.executeQuery()) {
            if (rs.next()) {
                createTableDDL = rs.getString(2);
            }
        } catch (Exception e) {
            throw new IllegalStateException("show create table ddl by sql error, " + e.getMessage(), e);
        }
        return createTableDDL;
    }

public static List<String> getDatabases(StarRocksConfig config) {
List<Map<String, String>> dbs = extractColumnValuesBySql(config, ALL_DBS_QUERY, Arrays.asList());
List<String> dbNames = new ArrayList<>();
@@ -128,7 +158,8 @@ public static Map<String, String> getTables(StarRocksConfig config, List<String>
return table2Db;
}

private static Connection createJdbcConnection(String jdbcUrl, String username, String password) throws Exception {
@VisibleForTesting
public static Connection createJdbcConnection(String jdbcUrl, String username, String password) throws Exception {
try {
Class.forName(MYSQL_80_DRIVER_NAME);
} catch (ClassNotFoundException e) {
@@ -182,4 +213,57 @@ private static List<Map<String, String>> extractColumnValuesBySql(StarRocksConfig
return columnValues;
}

private static boolean executeSql(StarRocksConfig config, String sql, String errorMsg) {
try (Connection conn = createJdbcConnection(config.getFeJdbcUrl(), config.getUsername(), config.getPassword());
Statement statement = conn.createStatement()) {
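            // Statement.execute returns true only if the statement produced a
            // ResultSet; the DDL issued through this helper returns false on success.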
return statement.execute(sql);
} catch (Exception e) {
            throw new IllegalStateException(errorMsg + ", sql: " + sql + ", " + e.getMessage(), e);
}
}

public static boolean createTableBySql(StarRocksConfig config, String sql) {
return executeSql(config, sql, "create table by sql error");
}

public static boolean createTemporaryPartitionBySql(StarRocksConfig config, String sql) {
return executeSql(config, sql, "create temporary partition by sql error");
}

public static boolean createPartitionBySql(StarRocksConfig config, String sql) {
return executeSql(config, sql, "create partition by sql error");
}

public static boolean dropAndCreatePartitionBySql(StarRocksConfig config, String sql, String overwritePartition) {
String queryPartitionDDL = "SELECT DB_NAME, TABLE_NAME, PARTITION_NAME, PARTITION_KEY, PARTITION_VALUE FROM `information_schema`.`partitions_meta` WHERE IS_TEMP = 1 AND "
+ "DB_NAME = ? AND TABLE_NAME = ? AND PARTITION_NAME LIKE ?";
List<Map<String, String>> existsPartitions = extractColumnValuesBySql(config, queryPartitionDDL,
Arrays.asList(config.getDatabase(), config.getTable(), overwritePartition + WriteStarRocksConfig.TEMPORARY_PARTITION_SUFFIX + "%"));
existsPartitions.forEach(partition -> {
String partitionName = partition.get("PARTITION_NAME");
String partitionValue = partition.get("PARTITION_VALUE");
            logger.info("Temporary partition {} with value {} already exists, dropping it", partitionName, partitionValue);
String dropTempPartitionDDL = String.format("ALTER TABLE `%s`.`%s` DROP TEMPORARY PARTITION IF EXISTS %s",
config.getDatabase(), config.getTable(), partitionName);
            dropTempPartitionBySql(config, dropTempPartitionDDL);
        });
        return executeSql(config, sql, "drop and create partition by sql error");
    }

public static boolean swapTableBySql(StarRocksConfig config, String sql) {
return executeSql(config, sql, "swap table by sql error");
}

public static boolean replacePartitionBySql(StarRocksConfig config, String sql) {
return executeSql(config, sql, "replace partition by sql error");
}

public static boolean dropTempPartitionBySql(StarRocksConfig config, String sql) {
return executeSql(config, sql, "drop temporary partition by sql error");
}

public static boolean dropTableBySql(StarRocksConfig config, String sql) {
return executeSql(config, sql, "drop table by sql error");
}
}
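Note: `getPartitionType` and `isDynamicPartitionTable` classify tables by sniffing substrings of the SHOW CREATE TABLE output. An illustrative run with hand-written DDL fragments (real output formatting may differ, which is why the checks are ordered from most to least specific):

```java
public class PartitionTypeSniffDemo {
    // Mirrors the ordering of the checks in getPartitionType.
    static String classify(String ddl) {
        return ddl.contains("PARTITION BY RANGE(") ? "RANGE"
                : ddl.contains("PARTITION BY LIST(") ? "LIST"
                : ddl.contains("PARTITION BY") ? "EXPRESSION"
                : "NONE";
    }

    public static void main(String[] args) {
        System.out.println(classify("... PARTITION BY RANGE(`dt`) ..."));           // RANGE
        System.out.println(classify("... PARTITION BY LIST(`region`) ..."));        // LIST
        System.out.println(classify("... PARTITION BY date_trunc('day', dt) ...")); // EXPRESSION
        System.out.println(classify("... DISTRIBUTED BY HASH(`id`) ..."));          // NONE
    }
}
```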
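Note: taken together, these helpers imply the overwrite flow: create a temporary partition (or temporary table), stream the data into it, then publish atomically. A hedged end-to-end sketch under that reading; the orchestration below is illustrative, not the connector's actual driver code, though the REPLACE/SWAP statements follow standard StarRocks DDL:

```java
import com.starrocks.connector.spark.sql.conf.StarRocksConfig;
import com.starrocks.connector.spark.sql.connect.StarRocksConnector;

public class OverwritePublishSketch {
    // Partition overwrite: recreate the temp partition, load into it (not shown),
    // then promote it over the target partition.
    static void overwritePartition(StarRocksConfig config, String partition, String tempPartition) {
        String addDDL = String.format(
                "ALTER TABLE `%s`.`%s` ADD TEMPORARY PARTITION %s VALUES [(\"2024-01-01\"), (\"2024-01-02\"))",
                config.getDatabase(), config.getTable(), tempPartition); // range bounds are illustrative
        StarRocksConnector.dropAndCreatePartitionBySql(config, addDDL, partition);
        // ... stream load into the temporary partition here ...
        String replaceDDL = String.format(
                "ALTER TABLE `%s`.`%s` REPLACE PARTITION (%s) WITH TEMPORARY PARTITION (%s)",
                config.getDatabase(), config.getTable(), partition, tempPartition);
        StarRocksConnector.replacePartitionBySql(config, replaceDDL);
    }

    // Full-table overwrite: swap the freshly loaded temp table with the target,
    // then drop the now-stale table left behind by the swap.
    static void overwriteTable(StarRocksConfig config, String tempTable) {
        String swapDDL = String.format("ALTER TABLE `%s`.`%s` SWAP WITH `%s`",
                config.getDatabase(), config.getTable(), tempTable);
        StarRocksConnector.swapTableBySql(config, swapDDL);
        StarRocksConnector.dropTableBySql(config, String.format(
                "DROP TABLE IF EXISTS `%s`.`%s`", config.getDatabase(), tempTable));
    }
}
```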
@@ -127,6 +127,7 @@ static DataType inferDataType(StarRocksField field) {
return DataTypes.createDecimalType(field.getPrecision(), field.getScale());
case "char":
case "varchar":
case "varbinary":
case "string":
case "json":
return DataTypes.StringType;