Skip to content

Commit 7911458

Browse files
committed
ci: Add test case for hdfs over gcs bucket (#3504)
1 parent d324fb2 commit 7911458

File tree

3 files changed

+116
-0
lines changed

3 files changed

+116
-0
lines changed
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,67 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.

name: hdfs_default_gcs
description: 'Behavior test for hdfs default over gcs'

runs:
  using: "composite"
  steps:
    # HDFS client libraries require a JVM on the runner.
    - name: Setup java env
      uses: actions/setup-java@v4
      with:
        distribution: temurin
        java-version: "11"
    # Pull the GCS bucket name and service-account credential from 1Password
    # and export them into the step environment.
    - name: Load secrets
      uses: 1password/load-secrets-action@v1
      with:
        export-env: true
      env:
        OPENDAL_GCS_BUCKET: op://services/gcs/bucket
        OPENDAL_GCS_CREDENTIAL: op://services/gcs/credential
    - name: Setup
      shell: bash
      run: |
        # Install Hadoop and build the classpath the libhdfs JNI layer needs.
        curl -LsSf https://dlcdn.apache.org/hadoop/common/hadoop-3.3.5/hadoop-3.3.5.tar.gz | tar zxf - -C /home/runner

        export HADOOP_HOME="/home/runner/hadoop-3.3.5"
        export CLASSPATH=$(${HADOOP_HOME}/bin/hadoop classpath --glob)

        # Add the GCS connector jar so Hadoop can resolve gs:// URIs.
        curl -LsSf -o ${HADOOP_HOME}/share/hadoop/common/lib/gcs-connector-hadoop3-2.2.19-shaded.jar https://github.com/GoogleCloudDataproc/hadoop-connectors/releases/download/v2.2.19/gcs-connector-hadoop3-2.2.19-shaded.jar

        # Point Hadoop at the fixture configs (core-site wires in the GCS filesystem).
        cp ./fixtures/hdfs/hdfs-site.xml ${HADOOP_HOME}/etc/hadoop/hdfs-site.xml
        cp ./fixtures/hdfs/gcs-core-site.xml ${HADOOP_HOME}/etc/hadoop/core-site.xml

        # Export the environment the behavior tests read. The name node is the
        # gs:// bucket URI, so HDFS operations run against GCS.
        cat << EOF >> $GITHUB_ENV
        HADOOP_HOME=${HADOOP_HOME}
        CLASSPATH=${CLASSPATH}
        LD_LIBRARY_PATH=${JAVA_HOME}/lib/server:${HADOOP_HOME}/lib/native
        OPENDAL_HDFS_ROOT=/tmp/opendal/
        OPENDAL_HDFS_NAME_NODE=${OPENDAL_GCS_BUCKET}
        OPENDAL_HDFS_ENABLE_APPEND=false
        EOF

        # Symlink the keyfile path referenced by gcs-core-site.xml to the JSON
        # file created by the create-json step below (the link may dangle until
        # that step runs; it resolves at first use).
        mkdir -p /tmp/hdfs
        rm -f /tmp/hdfs/gcs-credentials.json
        ln -s $(pwd)/gcs-credentials.json /tmp/hdfs/gcs-credentials.json

    # Materialize the service-account credential as a JSON file in the workspace.
    # NOTE(review): the action ref was mangled to "jsdaniell/[email protected]" by the
    # page scraper's email obfuscation — restored as create-json@v1.2.2; confirm
    # the exact pinned version against the upstream repository.
    - name: create-json
      id: create-json
      uses: jsdaniell/create-json@v1.2.2
      with:
        name: "gcs-credentials.json"
        json: ${{ env.OPENDAL_GCS_CREDENTIAL }}

core/src/services/hdfs/docs.md

+1
Original file line numberDiff line numberDiff line change
@@ -121,6 +121,7 @@ async fn main() -> Result<()> {
121121
// Create fs backend builder.
122122
let mut builder = Hdfs::default();
123123
// Set the name node for hdfs.
124+
// If the name node string starts with a URI scheme such as file://, hdfs://, or gs://, that scheme determines the filesystem used.
124125
builder.name_node("hdfs://127.0.0.1:9000");
125126
// Set the root for hdfs, all operations will happen under this root.
126127
//

fixtures/hdfs/gcs-core-site.xml

+48
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,48 @@
<?xml version="1.0" encoding="UTF-8"?>
<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
<!--
  Licensed under the Apache License, Version 2.0 (the "License");
  you may not use this file except in compliance with the License.
  You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

  Unless required by applicable law or agreed to in writing, software
  distributed under the License is distributed on an "AS IS" BASIS,
  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  See the License for the specific language governing permissions and
  limitations under the License. See accompanying LICENSE file.
-->

<!-- Put site-specific property overrides in this file. -->

<configuration>
  <!-- Route gs:// URIs through the Google Cloud Storage Hadoop connector. -->
  <property>
    <name>fs.AbstractFileSystem.gs.impl</name>
    <value>com.google.cloud.hadoop.fs.gcs.GoogleHadoopFS</value>
    <description>The AbstractFileSystem for 'gs:' URIs.</description>
  </property>
  <property>
    <name>fs.gs.project.id</name>
    <value></value>
    <description>
      Optional. Google Cloud Project ID with access to GCS buckets.
      Required only for list buckets and create bucket operations.
    </description>
  </property>
  <!-- Authenticate with the service-account JSON keyfile symlinked into
       /tmp/hdfs by the CI setup step. -->
  <property>
    <name>google.cloud.auth.type</name>
    <value>SERVICE_ACCOUNT_JSON_KEYFILE</value>
    <description>
      Authentication type to use for GCS access.
    </description>
  </property>
  <property>
    <name>google.cloud.auth.service.account.json.keyfile</name>
    <value>/tmp/hdfs/gcs-credentials.json</value>
    <description>
      The JSON keyfile of the service account used for GCS
      access when google.cloud.auth.type is SERVICE_ACCOUNT_JSON_KEYFILE.
    </description>
  </property>
</configuration>

0 commit comments

Comments
 (0)