Merge pull request #55 from arenadata/6.8.0-sync
ADBDEV-3954 6.8.0 sync
xardazzzzzz authored Sep 29, 2023
2 parents 561c3e1 + ad82eda commit f6ee1ce
Showing 118 changed files with 6,815 additions and 1,703 deletions.
21 changes: 5 additions & 16 deletions .github/workflows/create-release-on-tag.yml
@@ -22,24 +22,13 @@ jobs:
       tag_name: ${{ github.ref }}
       release_name: PXF Version ${{ github.ref }}
       body: |
-        ## 6.7.0 (07/13/2023)
+        ## 6.8.0 (09/25/2023)
         ### Enhancements:
-        - [#956](https://github.com/greenplum-db/pxf/pull/956) Add pxfdelimited_import formatter to support multibyte delimiters for TEXT and CSV profiles
-        - [#960](https://github.com/greenplum-db/pxf/pull/960) Add support year with more than 4 digits in 'date' or 'timestamp'
-        - [#973](https://github.com/greenplum-db/pxf/pull/973) Enable write flow for FDW for non-text/csv formats
-        - [#976](https://github.com/greenplum-db/pxf/pull/976) Restrict PXF to listen to local requests only
-        - [#979](https://github.com/greenplum-db/pxf/pull/979) Add logging to the LineBreakAccessor for the write
-        - [#983](https://github.com/greenplum-db/pxf/pull/983) Bump Springboot to 2.7.12
-        - [#984](https://github.com/greenplum-db/pxf/pull/984) Enable writing data in JSON format using *:json profiles
-        - [#989](https://github.com/greenplum-db/pxf/pull/989) Bump snappy to 1.1.10.1
-        ### Bug Fixes:
-        - [#967](https://github.com/greenplum-db/pxf/pull/967) FDW: Fix for skipping the dropped and correctly counting Projection Index
-        - [#978](https://github.com/greenplum-db/pxf/pull/978) Added erroring out logic for decimal overflow for ORC
+        - [#1013](https://github.com/greenplum-db/pxf/pull/1013) Bumped Azure Storage dependency to 5.5.0
+        - [#1018](https://github.com/greenplum-db/pxf/pull/1018) Add pxf.service.kerberos.ticket-renew-window option to pxf-site.xml
+        - [#1019](https://github.com/greenplum-db/pxf/pull/1019) Add pushdown of NUMERIC and handling of CHAR and VARCHAR predicates for JDBC profile
       draft: false
       prerelease: false

8 changes: 8 additions & 0 deletions CHANGELOG.md
@@ -1,5 +1,13 @@
 # Changelog
 
+## 6.8.0 (09/25/2023)
+
+### Enhancements:
+
+- [#1013](https://github.com/greenplum-db/pxf/pull/1013) Bumped Azure Storage dependency to 5.5.0
+- [#1018](https://github.com/greenplum-db/pxf/pull/1018) Add pxf.service.kerberos.ticket-renew-window option to pxf-site.xml
+- [#1019](https://github.com/greenplum-db/pxf/pull/1019) Add pushdown of NUMERIC and handling of CHAR and VARCHAR predicates for JDBC profile
+
 ## 6.7.0 (07/13/2023)
 
 ### Enhancements:
2 changes: 1 addition & 1 deletion CONTRIBUTING.md
@@ -4,7 +4,7 @@ We warmly welcome and appreciate contributions from the community!
 By participating you agree to the [code of conduct](https://github.com/greenplum-db/pxf/blob/main/CODE-OF-CONDUCT.md).
 To contribute:
 
-- Sign our [Contributor License Agreement](https://cla.pivotal.io/sign/greenplum).
+- Sign our [Contributor License Agreement](https://cla.vmware.com/cla/1/preview).
 
 - Fork the PXF repository on GitHub.
 
9 changes: 6 additions & 3 deletions automation/Makefile
@@ -166,16 +166,19 @@ sync_jdbc_config:
 sync_cloud_configs:
 ifneq "$(PROTOCOL)" ""
 	@mkdir -p $(PROTOCOL_HOME)
-	@if [ ! -f "$(PROTOCOL_HOME)/$(PROTOCOL)-site.xml" ]; then \
-		cp $(TEMPLATES_DIR)/$(PROTOCOL)-site.xml $(PROTOCOL_HOME)/; \
+	@ set -e; \
+	if [ ! -f "$(PROTOCOL_HOME)/$(PROTOCOL)-site.xml" ]; then \
+		if [ $(PROTOCOL) != file ]; then \
+			cp $(TEMPLATES_DIR)/$(PROTOCOL)-site.xml $(PROTOCOL_HOME)/; \
+		fi; \
 		cp $(TEMPLATES_DIR)/mapred-site.xml $(PROTOCOL_HOME)/; \
 		if [ $(PROTOCOL) = file ]; then \
 			if [ ! -d "$(BASE_PATH)" ]; then \
 				echo "The NFS directory '$(BASE_PATH)' does not exist. Please configure it and try again"; \
 				rm -rf $(PROTOCOL_HOME); \
 				exit 1; \
 			fi; \
-			echo "Make sure your $PXF_BASE/conf/pxf-profiles.xml file configures the file:AvroSequenceFile and file:SequenceFile profiles"; \
+			echo "Make sure your $$PXF_BASE/conf/pxf-profiles.xml file configures the file:AvroSequenceFile and file:SequenceFile profiles"; \
 			cp $(TEMPLATES_DIR)/pxf-site.xml $(PROTOCOL_HOME)/; \
 			sed $(SED_OPTS) 's|</configuration>|<property><name>pxf.fs.basePath</name><value>$(BASE_PATH)</value></property></configuration>|g' $(PROTOCOL_HOME)/pxf-site.xml; \
 		fi; \
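A note on the echo fix in this hunk: make expands `$`-references in recipe lines before the shell sees them, so the old `$PXF_BASE` was read as the make variable `$P` followed by the literal text `XF_BASE`; the doubled `$$PXF_BASE` passes a literal `$PXF_BASE` through to the shell, so the environment variable appears in the message as intended.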
2 changes: 1 addition & 1 deletion automation/pom.xml
@@ -287,7 +287,7 @@
 		<dependency>
 			<groupId>com.microsoft.azure</groupId>
 			<artifactId>azure-storage</artifactId>
-			<version>5.4.0</version>
+			<version>5.5.0</version>
 		</dependency>
 
 		<!-- HADOOP Dependencies -->
@@ -12,9 +12,9 @@
 import org.apache.commons.io.FileUtils;
 import org.apache.commons.lang.StringUtils;
 
-import org.greenplum.pxf.automation.components.cluster.installer.nodes.MasterNode;
+import org.greenplum.pxf.automation.components.cluster.installer.nodes.CoordinatorNode;
 import org.greenplum.pxf.automation.components.cluster.installer.nodes.Node;
-import org.greenplum.pxf.automation.components.cluster.installer.nodes.SlaveNode;
+import org.greenplum.pxf.automation.components.cluster.installer.nodes.SegmentNode;
 import org.greenplum.pxf.automation.components.common.cli.ParallelShellActions;
 import org.greenplum.pxf.automation.utils.jsystem.report.ReportUtils;
 
@@ -113,11 +113,11 @@ private void handleOperation(String operation, EnumClusterServices service) throws Exception {
         List<Node> nodesListByService;
         switch (service) {
             case hive:
-                nodesListByService = getNode(MasterNode.class, service);
+                nodesListByService = getNode(CoordinatorNode.class, service);
                 break;
             case pxf:
                 nodesListByService = getNode(service).stream()
-                        .filter(n -> n instanceof SlaveNode)
+                        .filter(n -> n instanceof SegmentNode)
                         .collect(Collectors.toList());
                 break;
             default:
@@ -150,7 +150,7 @@ public void fetchConfiguration(String targetDirectory) throws Exception {
         // currently copy only the pxf-conf to tempClusterConfDirectory
         FileUtils.copyDirectory(new File(getPxfConfLocation()), new File(tempClusterConfDirectory + "/" + getPathToPxfConfInGeneralConf()));
         // if current node is not pxf node, it requires copying pxf/conf directory from the pxf node
-        Node pxfNode = getNode(MasterNode.class, EnumClusterServices.pxf).get(0);
+        Node pxfNode = getNode(CoordinatorNode.class, EnumClusterServices.pxf).get(0);
         // if pxf node is same as local node, then pxf conf is already there, skip pxf conf copying
         String localHostName = Inet4Address.getLocalHost().getHostName();
         if (!localHostName.equals(pxfNode.getHost())) {
@@ -223,7 +223,7 @@ public void runCommandOnNodes(List<Node> nodes, String command) throws Exception
     /**
      * Gets node List from nodes array according to {@link Node} type and serviceType
      *
-     * @param nodeType {@link MasterNode} or {@link SlaveNode}
+     * @param nodeType {@link CoordinatorNode} or {@link SegmentNode}
      * @param serviceType required service type to locate in nodes
      * @return list of nodes of given nodeType and serviceType
      */
@@ -0,0 +1,8 @@
+package org.greenplum.pxf.automation.components.cluster.installer.nodes;
+
+/**
+ * Represents Coordinator Node for a cluster
+ */
+public class CoordinatorNode extends Node {
+
+}

This file was deleted (MasterNode.java, superseded by CoordinatorNode).

@@ -0,0 +1,8 @@
+package org.greenplum.pxf.automation.components.cluster.installer.nodes;
+
+/**
+ * Represents a single Segment Node in a cluster
+ */
+public class SegmentNode extends Node {
+
+}

This file was deleted (SlaveNode.java, superseded by SegmentNode).

@@ -78,6 +78,7 @@ public void init() throws Exception {

         if (FDWUtils.useFDW) {
             createTestFDW(true);
+            createSystemFDW(true);
             createForeignServers(true);
         }
 
@@ -248,11 +249,18 @@ private void createTestFDW(boolean ignoreFail) throws Exception {
                 ignoreFail, false);
     }
 
+    private void createSystemFDW(boolean ignoreFail) throws Exception {
+        runQuery("DROP FOREIGN DATA WRAPPER IF EXISTS system_pxf_fdw CASCADE", ignoreFail, false);
+        runQuery("CREATE FOREIGN DATA WRAPPER system_pxf_fdw HANDLER pxf_fdw_handler " +
+                "VALIDATOR pxf_fdw_validator OPTIONS (protocol 'system', mpp_execute 'all segments')",
+                ignoreFail, false);
+    }
+
     private void createForeignServers(boolean ignoreFail) throws Exception {
         List<String> servers = Lists.newArrayList(
                 "default_hdfs",
                 "default_hive",
-                "db_hive_jdbc", // Needed for JdbcHiveTest
+                "db-hive_jdbc", // Needed for JdbcHiveTest
                 "default_hbase",
                 "default_jdbc", // Needed for JdbcHiveTest and other JdbcTest which refers to the default server.
                 "database_jdbc",
@@ -268,7 +276,8 @@ private void createForeignServers(boolean ignoreFail) throws Exception {
"hdfs-non-secure_hdfs",
"hdfs-secure_hdfs",
"hdfs-ipa_hdfs",
"default_test");
"default_test",
"default_system");

// version below GP7 do not have IF EXISTS / IF NOT EXISTS command options for foreign SERVER creation
String option = (version < 7) ? "" : IF_NOT_EXISTS_OPTION;
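Together, the new system_pxf_fdw wrapper and the default_system server entry above give FDW-mode tests a 'system' protocol to target. A minimal sketch of how a test could build on them, following the runQuery pattern in this file — note that IF NOT EXISTS on CREATE SERVER is GP7+ only (per the comment above), and the foreign table name, column, and resource option below are invented for illustration:

    // Hedged sketch, not part of this change: the wrapper and server names come
    // from the diff above; the probe table and its resource are hypothetical.
    private void createSystemForeignTable(boolean ignoreFail) throws Exception {
        runQuery("CREATE SERVER IF NOT EXISTS default_system FOREIGN DATA WRAPPER system_pxf_fdw",
                ignoreFail, false);
        runQuery("CREATE FOREIGN TABLE pxf_system_probe (line text) SERVER default_system " +
                "OPTIONS (resource 'probe')", ignoreFail, false);
    }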
@@ -209,6 +209,26 @@ public static ReadableExternalTable getPxfReadableTestTextTable(String name,
         return exTable;
     }
 
+    /**
+     * Prepares PXF Readable External or Foreign Table for test data, using CSV format and "test:csv" profile.
+     * Since "test:*" profiles are ephemeral, it should be used when testing with custom Fragmenter, Accessor or Resolver.
+     *
+     * @param name name of the table
+     * @param fields fields of the table
+     * @param path for external table path
+     * @param delimiter delimiter used in the external data
+     * @return PXF Readable External or Foreign table
+     */
+    public static ReadableExternalTable getPxfReadableTestCSVTable(String name,
+                                                                   String[] fields,
+                                                                   String path,
+                                                                   String delimiter) {
+        ReadableExternalTable exTable = getReadableExternalOrForeignTable(name, fields, path, "CSV");
+        exTable.setProfile("test:csv");
+        exTable.setDelimiter(delimiter);
+        return exTable;
+    }
+
     /**
      * Prepares PXF Readable External or Foreign Table for TEXT data, using TEXT format and "<protocol>:text" profile.
      *
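A sketch of how a test might call the new helper; the enclosing factory class name, table name, columns, path, and delimiter below are assumptions for illustration:

    // Hedged sketch: the helper returns a table wired to FORMAT 'CSV', the
    // ephemeral "test:csv" profile, and the given delimiter.
    String[] fields = {"name text", "num integer"};
    ReadableExternalTable exTable = TableFactory.getPxfReadableTestCSVTable(
            "pxf_test_csv", fields, "tmp/pxf/csv_data", ",");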
@@ -228,6 +248,27 @@ public static ReadableExternalTable getPxfReadableTextTable(String name,
         return exTable;
     }
 
+    /**
+     * Prepares PXF Readable External or Foreign Table for Json data, using custom format or CSV format and "<protocol>:json" profile.
+     *
+     * @param name name of the table
+     * @param fields fields of the table
+     * @param path for external table path
+     * @param format format used in the external data
+     * @return PXF Readable External or Foreign table
+     */
+    public static ReadableExternalTable getPxfReadableJsonTable(String name,
+                                                                String[] fields,
+                                                                String path,
+                                                                String format) {
+        ReadableExternalTable exTable = getReadableExternalOrForeignTable(name, fields, path, format);
+        if (StringUtils.equals(format, "custom")) {
+            exTable.setFormatter("pxfwritable_import");
+        }
+        exTable.setProfile(ProtocolUtils.getProtocol().value() + ":json");
+        return exTable;
+    }
+
     /**
      * Prepares PXF Readable External or Foreign Table for CSV data, using CSV format and "<protocol>:csv" profile.
      *
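Similarly for the JSON helper: with format "custom" it also sets the pxfwritable_import formatter, while any other format value (e.g. "CSV") is passed through unchanged. The names below are again illustrative:

    // Hedged sketch: the profile resolves to "<protocol>:json", e.g. "hdfs:json"
    // when the automation run targets HDFS.
    ReadableExternalTable jsonTable = TableFactory.getPxfReadableJsonTable(
            "pxf_json_probe", new String[]{"data text"}, "tmp/pxf/json_data", "custom");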

This file was deleted.

This file was deleted.

@@ -246,7 +246,7 @@ private void addColumns() {
      */
     private void addFilters() throws Exception {
 
-        // TODO whitelist option
+        // TODO allowlist option
         String filterStr = context.getOption("TEST-HBASE-FILTER");
         LOG.debug("user defined filter: {}", filterStr);
         if ((filterStr == null) || filterStr.isEmpty() || "null".equals(filterStr))
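For orientation: TEST-HBASE-FILTER is a custom option on the test table's pxf:// LOCATION URI, which PXF surfaces to plugins via context.getOption. A hedged sketch of such a DDL follows; the table, column, and plugin class placeholders are invented:

    // Hypothetical DDL fragment; the <...> placeholders stand for the automation
    // test plugin classes this fragmenter belongs with.
    String ddl = "CREATE EXTERNAL TABLE hbase_filter_probe (recordkey text) "
            + "LOCATION ('pxf://my_hbase_table?FRAGMENTER=<test fragmenter>"
            + "&ACCESSOR=<test accessor>&RESOLVER=<test resolver>&TEST-HBASE-FILTER=null') "
            + "FORMAT 'CUSTOM' (FORMATTER='pxfwritable_import')";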
@@ -20,7 +20,7 @@ public void afterPropertiesSet() {
      */
     private void addFilters() {
 
-        //TODO whitelist the option
+        //TODO allowlist option
         String filterStr = context.getOption("TEST-HIVE-FILTER");
         LOG.debug("user defined filter: " + filterStr);
         if ((filterStr == null) || filterStr.isEmpty() || "null".equals(filterStr))
@@ -20,7 +20,7 @@ public void afterPropertiesSet() {
      */
     private void addFilters() {
 
-        // TODO: whitelist the option
+        // TODO: allowlist the option
         String filterStr = context.getOption("TEST-HIVE-FILTER");
         LOG.debug("user defined filter: " + filterStr);
         if ((filterStr == null) || filterStr.isEmpty() || "null".equals(filterStr))
@@ -65,7 +65,7 @@ public void afterPropertiesSet() {

     @Override
     public List<Fragment> getFragments() throws Exception {
-        // TODO whitelist property
+        // TODO allowlist property
         int fragmentsNum = Integer.parseInt(context.getOption("TEST-FRAGMENTS-NUM"));
         Metadata.Item tblDesc = hiveClientWrapper.extractTableFromName(context.getDataSource());
         Table tbl;
