Commit 413069e

HIVE-26473: Hive JDK-17 support for compile and runtime (#5404)

Authored by: akshat0395
Co-authored-by: kokila-19 <[email protected]>
Co-authored-by: Tanishq Chugh <[email protected]>

1 parent: 3d7d82a

133 files changed: +2658 −2435 lines


.github/workflows/build.yml

Lines changed: 4 additions & 4 deletions

@@ -28,15 +28,15 @@ env:
   DEVELOCITY_ACCESS_KEY: ${{ secrets.DEVELOCITY_ACCESS_KEY }}

 jobs:
-  macos-jdk8:
-    name: 'macOS (JDK 8)'
+  macos-jdk17:
+    name: 'macOS (JDK 17)'
     runs-on: macos-latest
     steps:
     - uses: actions/checkout@v2
-    - name: 'Set up JDK 8'
+    - name: 'Set up JDK 17'
       uses: actions/setup-java@v1
       with:
-        java-version: 8
+        java-version: 17
     - name: 'Build project'
       run: |
         mvn clean install -DskipTests -Pitests

.github/workflows/docker-GA-images.yml

Lines changed: 5 additions & 5 deletions

@@ -49,10 +49,10 @@ jobs:
       uses: actions/checkout@v3

     -
-      name: 'Set up JDK 8'
+      name: 'Set up JDK 17'
       uses: actions/setup-java@v1
       with:
-        java-version: 8
+        java-version: 17

     -
       name: Hive version

@@ -101,10 +101,10 @@ jobs:
     - name: Checkout
       uses: actions/checkout@v3

-    - name: 'Set up JDK 8'
+    - name: 'Set up JDK 17'
       uses: actions/setup-java@v1
       with:
-        java-version: 8
+        java-version: 17

     - name: Hive version
       run: echo "HIVE_VERSION=$(mvn -f "pom.xml" -q help:evaluate -Dexpression=project.version -DforceStdout)" >> $GITHUB_ENV

@@ -146,7 +146,7 @@ jobs:
       file: ./packaging/src/docker/Dockerfile
       platforms: linux/amd64,linux/arm64
       push: true
-      tags: apache/hive:${{ env.tag }}
+      tags: apache/hive-17:${{ env.tag }}
       build-args:
         |
         HIVE_VERSION=${{ env.HIVE_VERSION }}

Jenkinsfile

Lines changed: 5 additions & 1 deletion

@@ -91,8 +91,9 @@ def buildHive(args) {
 set -x
 . /etc/profile.d/confs.sh
 export USER="`whoami`"
-export MAVEN_OPTS="-Xmx2g"
+export MAVEN_OPTS="-Xmx4G"
 export -n HIVE_CONF_DIR
+sw java 17 && . /etc/profile.d/java.sh
 mkdir -p .m2/repository
 cp $SETTINGS .m2/settings.xml
 OPTS=" -s $PWD/.m2/settings.xml -B -Dtest.groups= "

@@ -295,12 +296,14 @@ fi
 stage('init-metastore') {
   withEnv(["dbType=$dbType"]) {
     sh '''#!/bin/bash -e
+sw java 17 && . /etc/profile.d/java.sh
 set -x
 echo 127.0.0.1 dev_$dbType | sudo tee -a /etc/hosts
 . /etc/profile.d/confs.sh
 sw hive-dev $PWD
 export DOCKER_NETWORK=host
 export DBNAME=metastore
+export HADOOP_CLIENT_OPTS="--add-opens java.base/java.net=ALL-UNNAMED"
 reinit_metastore $dbType
 time docker rm -f dev_$dbType || true
 '''

@@ -403,6 +406,7 @@ tar -xzf packaging/target/apache-hive-*-nightly-*-src.tar.gz
 }
 stage('Generate javadoc') {
   sh """#!/bin/bash -e
+sw java 17 && . /etc/profile.d/java.sh
 mvn install javadoc:javadoc javadoc:aggregate -DskipTests -pl '!itests/hive-jmh,!itests/util'
 """
 }

bin/ext/beeline.sh

Lines changed: 1 addition & 1 deletion

@@ -29,7 +29,7 @@ beeline () {
     hadoopClasspath="${HADOOP_CLASSPATH}:"
   fi
   export HADOOP_CLASSPATH="${hadoopClasspath}${HIVE_CONF_DIR}:${beelineJarPath}:${superCsvJarPath}:${jlineJarPath}"
-  export HADOOP_CLIENT_OPTS="$HADOOP_CLIENT_OPTS -Dlog4j.configurationFile=beeline-log4j2.properties "
+  export HADOOP_CLIENT_OPTS="$HADOOP_CLIENT_OPTS -Dlog4j.configurationFile=beeline-log4j2.properties --add-opens java.base/java.nio=ALL-UNNAMED --add-opens java.base/java.net=ALL-UNNAMED --add-opens java.base/java.lang=ALL-UNNAMED --add-opens java.base/java.util=ALL-UNNAMED --add-opens java.base/java.util.concurrent=ALL-UNNAMED --add-opens java.base/java.util.concurrent.atomic=ALL-UNNAMED --add-opens java.base/java.util.regex=ALL-UNNAMED --add-opens java.base/java.lang.reflect=ALL-UNNAMED --add-opens java.base/java.io=ALL-UNNAMED "

   if [ "$EXECUTE_WITH_JAVA" != "true" ] ; then
     # if CLIUSER is not empty, then pass it as user id / password during beeline redirect
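
Why the long --add-opens list: since JEP 403 (JDK 17), java.base no longer permits deep reflective access from code on the class path by default, and Hive and Hadoop client code still reflects into packages such as java.nio, java.net, and java.util. The sketch below is not part of this commit; it only reproduces the failure mode the flags work around.

    import java.lang.reflect.Field;

    public class AddOpensDemo {
      public static void main(String[] args) throws Exception {
        // Deep reflection into java.base: on JDK 17 this throws
        // InaccessibleObjectException unless the JVM is launched with
        //   --add-opens java.base/java.util=ALL-UNNAMED
        Field data = java.util.ArrayList.class.getDeclaredField("elementData");
        data.setAccessible(true);
        System.out.println("deep reflection allowed: " + data);
      }
    }

Run it plain to see the exception, then with `java --add-opens java.base/java.util=ALL-UNNAMED AddOpensDemo` to see it pass; the HADOOP_CLIENT_OPTS entries above do the same for every JVM the launcher scripts start.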

bin/ext/hiveserver2.sh

Lines changed: 1 addition & 1 deletion

@@ -50,7 +50,7 @@ hiveserver2() {
     timeout=$(exec $HADOOP jar $JAR $CLASS $HIVE_OPTS --getHiveConf $TIMEOUT_KEY | grep $TIMEOUT_KEY'=' | awk -F'=' '{print $2}')
     killAndWait $pid $timeout
   else
-    export HADOOP_CLIENT_OPTS=" -Dproc_hiveserver2 $HADOOP_CLIENT_OPTS "
+    export HADOOP_CLIENT_OPTS=" -Dproc_hiveserver2 --add-opens java.base/java.nio=ALL-UNNAMED --add-opens java.base/java.net=ALL-UNNAMED --add-opens java.base/java.lang=ALL-UNNAMED --add-opens java.base/java.util=ALL-UNNAMED --add-opens java.base/java.util.concurrent=ALL-UNNAMED --add-opens java.base/java.util.concurrent.atomic=ALL-UNNAMED --add-opens java.base/java.util.regex=ALL-UNNAMED --add-opens java.base/java.lang.reflect=ALL-UNNAMED --add-opens java.base/java.io=ALL-UNNAMED $HADOOP_CLIENT_OPTS "
     export HADOOP_OPTS="$HIVESERVER2_HADOOP_OPTS $HADOOP_OPTS"
     commands=$(exec $HADOOP jar $JAR $CLASS -H | grep -v '-hiveconf' | awk '{print $1}')
     start_hiveserver2='Y'

bin/ext/llap.sh

Lines changed: 1 addition & 1 deletion

@@ -30,7 +30,7 @@ llap () {

   set -e;

-  export HADOOP_CLIENT_OPTS=" -Dproc_llapcli $HADOOP_CLIENT_OPTS -Dlog4j.configurationFile=llap-cli-log4j2.properties "
+  export HADOOP_CLIENT_OPTS=" -Dproc_llapcli $HADOOP_CLIENT_OPTS -Dlog4j.configurationFile=llap-cli-log4j2.properties --add-opens java.base/java.nio=ALL-UNNAMED --add-opens java.base/java.net=ALL-UNNAMED --add-opens java.base/java.lang=ALL-UNNAMED --add-opens java.base/java.util=ALL-UNNAMED --add-opens java.base/java.util.concurrent=ALL-UNNAMED --add-opens java.base/java.util.concurrent.atomic=ALL-UNNAMED --add-opens java.base/java.util.regex=ALL-UNNAMED --add-opens java.base/java.lang.reflect=ALL-UNNAMED --add-opens java.base/java.io=ALL-UNNAMED "
   # hadoop 20 or newer - skip the aux_jars option. picked up from hiveconf
   $HADOOP $CLASS $HIVE_OPTS -directory $TMPDIR "$@"

bin/ext/llapstatus.sh

Lines changed: 1 addition & 1 deletion

@@ -29,7 +29,7 @@ llapstatus () {

   set -e;

-  export HADOOP_CLIENT_OPTS=" -Dproc_llapstatuscli $HADOOP_CLIENT_OPTS -Dlog4j.configurationFile=llap-cli-log4j2.properties "
+  export HADOOP_CLIENT_OPTS=" -Dproc_llapstatuscli $HADOOP_CLIENT_OPTS -Dlog4j.configurationFile=llap-cli-log4j2.properties --add-opens java.base/java.nio=ALL-UNNAMED --add-opens java.base/java.net=ALL-UNNAMED --add-opens java.base/java.lang=ALL-UNNAMED --add-opens java.base/java.util=ALL-UNNAMED --add-opens java.base/java.util.concurrent=ALL-UNNAMED --add-opens java.base/java.util.concurrent.atomic=ALL-UNNAMED --add-opens=java.base/java.util.regex=ALL-UNNAMED --add-opens=java.base/java.lang.reflect=ALL-UNNAMED --add-opens=java.base/java.io=ALL-UNNAMED "
   # hadoop 20 or newer - skip the aux_jars option. picked up from hiveconf
   $HADOOP $CLASS $HIVE_OPTS "$@"

bin/ext/metastore.sh

Lines changed: 2 additions & 1 deletion

@@ -25,8 +25,9 @@ metastore() {
   JAR=${HIVE_LIB}/hive-metastore-*.jar

   # hadoop 20 or newer - skip the aux_jars option and hiveconf
+  # Append --add-opens args that is required for JDK-17
+  export HADOOP_CLIENT_OPTS=" -Dproc_metastore --add-opens java.base/java.nio=ALL-UNNAMED --add-opens java.base/java.net=ALL-UNNAMED --add-opens java.base/java.lang=ALL-UNNAMED --add-opens java.base/java.util=ALL-UNNAMED --add-opens java.base/java.util.concurrent=ALL-UNNAMED --add-opens java.base/java.util.concurrent.atomic=ALL-UNNAMED --add-opens=java.base/java.util.regex=ALL-UNNAMED --add-opens=java.base/java.lang.reflect=ALL-UNNAMED --add-opens=java.base/java.io=ALL-UNNAMED $HADOOP_CLIENT_OPTS "

-  export HADOOP_CLIENT_OPTS=" -Dproc_metastore $HADOOP_CLIENT_OPTS "
   export HADOOP_OPTS="$HIVE_METASTORE_HADOOP_OPTS $HADOOP_OPTS"
   exec $HADOOP jar $JAR $CLASS "$@"
 }

common/pom.xml

Lines changed: 28 additions & 0 deletions

@@ -34,6 +34,11 @@
     <artifactId>hive-classification</artifactId>
     <version>${project.version}</version>
   </dependency>
+  <dependency>
+    <groupId>org.apache.avro</groupId>
+    <artifactId>avro</artifactId>
+    <version>${avro.version}</version>
+  </dependency>
   <dependency>
     <groupId>org.apache.hive</groupId>
     <artifactId>hive-shims</artifactId>

@@ -69,6 +74,12 @@
   <dependency>
     <groupId>org.apache.orc</groupId>
     <artifactId>orc-core</artifactId>
+    <exclusions>
+      <exclusion>
+        <groupId>org.apache.hadoop</groupId>
+        <artifactId>hadoop-client-api</artifactId>
+      </exclusion>
+    </exclusions>
   </dependency>
   <dependency>
     <groupId>jline</groupId>

@@ -346,6 +357,11 @@
     <artifactId>opentelemetry-sdk</artifactId>
     <version>${otel.version}</version>
   </dependency>
+  <dependency>
+    <groupId>org.mockito</groupId>
+    <artifactId>mockito-core</artifactId>
+    <version>${mockito-core.version}</version>
+  </dependency>
 </dependencies>
 <profiles>
   <profile>

@@ -405,6 +421,18 @@
       </testResource>
     </testResources>
     <plugins>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-compiler-plugin</artifactId>
+        <configuration>
+          <compilerArgs>
+            <arg>--add-opens</arg>
+            <arg>org.apache.hadoop/org.apache.hadoop.fs=ALL-UNNAMED</arg>
+            <arg>--add-opens</arg>
+            <arg>java.base/java.net=ALL-UNNAMED</arg>
+          </compilerArgs>
+        </configuration>
+      </plugin>
       <plugin>
         <groupId>org.apache.maven.plugins</groupId>
         <artifactId>maven-antrun-plugin</artifactId>

common/src/java/org/apache/hadoop/hive/ant/GenHiveTemplate.java

Lines changed: 17 additions & 15 deletions

@@ -73,21 +73,23 @@ private Document generateTemplate() throws Exception {
     doc.appendChild(doc.createProcessingInstruction(
         "xml-stylesheet", "type=\"text/xsl\" href=\"configuration.xsl\""));

-    doc.appendChild(doc.createComment("\n" +
-        " Licensed to the Apache Software Foundation (ASF) under one or more\n" +
-        " contributor license agreements. See the NOTICE file distributed with\n" +
-        " this work for additional information regarding copyright ownership.\n" +
-        " The ASF licenses this file to You under the Apache License, Version 2.0\n" +
-        " (the \"License\"); you may not use this file except in compliance with\n" +
-        " the License. You may obtain a copy of the License at\n" +
-        "\n" +
-        " http://www.apache.org/licenses/LICENSE-2.0\n" +
-        "\n" +
-        " Unless required by applicable law or agreed to in writing, software\n" +
-        " distributed under the License is distributed on an \"AS IS\" BASIS,\n" +
-        " WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n" +
-        " See the License for the specific language governing permissions and\n" +
-        " limitations under the License.\n"));
+    doc.appendChild(doc.createComment("""
+
+        Licensed to the Apache Software Foundation (ASF) under one or more
+        contributor license agreements. See the NOTICE file distributed with
+        this work for additional information regarding copyright ownership.
+        The ASF licenses this file to You under the Apache License, Version 2.0
+        (the "License"); you may not use this file except in compliance with
+        the License. You may obtain a copy of the License at
+
+        http://www.apache.org/licenses/LICENSE-2.0
+
+        Unless required by applicable law or agreed to in writing, software
+        distributed under the License is distributed on an "AS IS" BASIS,
+        WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+        See the License for the specific language governing permissions and
+        limitations under the License.
+        """));

     Element root = doc.createElement("configuration");
     doc.appendChild(root);
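
The deleted concatenation chain becomes a Java text block (standardized in JDK 15). Indentation to the left of the closing delimiter is stripped at compile time, and each source line contributes one newline, so the license comment keeps its layout without "\n" literals or + operators. A minimal sketch of the feature, separate from the commit:

    public class TextBlockDemo {
      public static void main(String[] args) {
        String concatenated = "line one\n" + "line two\n";
        // Incidental indentation (left of the closing delimiter) is stripped;
        // every line break in the source becomes a \n in the value.
        String block = """
            line one
            line two
            """;
        System.out.println(block.equals(concatenated)); // true
      }
    }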

common/src/java/org/apache/hadoop/hive/common/CompressionUtils.java

Lines changed: 9 additions & 12 deletions

@@ -169,30 +169,27 @@ public static List<File> unTar(final String inputFileName, final String outputDi
         // no sub-directories
         continue;
       }
-      LOG.debug(String.format("Attempting to write output directory %s.",
-          outputFile.getAbsolutePath()));
+      LOG.debug("Attempting to write output directory {}.", outputFile.getAbsolutePath());
       if (!outputFile.exists()) {
-        LOG.debug(String.format("Attempting to create output directory %s.",
-            outputFile.getAbsolutePath()));
+        LOG.debug("Attempting to create output directory {}.", outputFile.getAbsolutePath());
         if (!outputFile.mkdirs()) {
-          throw new IllegalStateException(String.format("Couldn't create directory %s.",
-              outputFile.getAbsolutePath()));
+          throw new IllegalStateException("Couldn't create directory %s.".formatted(
+              outputFile.getAbsolutePath()));
         }
       }
     } else {
       final OutputStream outputFileStream;
       if (flatten) {
         File flatOutputFile = new File(outputDir, outputFile.getName());
-        LOG.debug(String.format("Creating flat output file %s.", flatOutputFile.getAbsolutePath()));
+        LOG.debug("Creating flat output file {}.", flatOutputFile.getAbsolutePath());
         outputFileStream = new FileOutputStream(flatOutputFile);
       } else if (!outputFile.getParentFile().exists()) {
-        LOG.debug(String.format("Attempting to create output directory %s.",
-            outputFile.getParentFile().getAbsoluteFile()));
+        LOG.debug("Attempting to create output directory {}.", outputFile.getParentFile().getAbsoluteFile());
         if (!outputFile.getParentFile().getAbsoluteFile().mkdirs()) {
-          throw new IllegalStateException(String.format("Couldn't create directory %s.",
-              outputFile.getParentFile().getAbsolutePath()));
+          throw new IllegalStateException("Couldn't create directory %s.".formatted(
+              outputFile.getParentFile().getAbsolutePath()));
         }
-        LOG.debug(String.format("Creating output file %s.", outputFile.getAbsolutePath()));
+        LOG.debug("Creating output file {}.", outputFile.getAbsolutePath());
         outputFileStream = new FileOutputStream(outputFile);
       } else {
         outputFileStream = new FileOutputStream(outputFile);
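
Two idioms are adopted here: SLF4J {} placeholders, which skip message construction entirely when DEBUG is disabled, and String.formatted() (JDK 15+), an instance-method spelling of String.format() for the exception paths that always need the text. A hedged sketch with illustrative names, not code from the commit:

    import java.io.File;
    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class LoggingIdioms {
      private static final Logger LOG = LoggerFactory.getLogger(LoggingIdioms.class);

      static void ensureDir(File dir) {
        // Placeholder form: no String building unless DEBUG is enabled.
        LOG.debug("Attempting to create output directory {}.", dir.getAbsolutePath());
        if (!dir.exists() && !dir.mkdirs()) {
          // The exception message is always needed, so eager formatting is fine.
          throw new IllegalStateException(
              "Couldn't create directory %s.".formatted(dir.getAbsolutePath()));
        }
      }
    }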

common/src/java/org/apache/hadoop/hive/common/FileUtils.java

Lines changed: 5 additions & 3 deletions

@@ -1008,7 +1008,7 @@ public static boolean rename(FileSystem fs, Path sourcePath,
     // into destPath without failing. So check it before renaming.
     if (fs.exists(destPath)) {
       throw new IOException("Cannot rename the source path. The destination "
-          + "path already exists.");
+        + "path already exists.");
     }
     return fs.rename(sourcePath, destPath);
   }

@@ -1094,8 +1094,10 @@ public static void checkDeletePermission(Path path, Configuration conf, String u
     if (childStatus.getOwner().equals(user)) {
       return;
     }
-    String msg = String.format("Permission Denied: User %s can't delete %s because sticky bit is"
-        + " set on the parent dir and user does not own this file or its parent", user, path);
+    String msg = ("""
+        Permission Denied: User %s can't delete %s because sticky bit is\
+         set on the parent dir and user does not own this file or its parent\
+        """).formatted(user, path);
     throw new IOException(msg);

   }
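
The text blocks in this and the following files lean on a subtlety: a backslash at the end of a text-block line suppresses that line's newline, joining it with the next, and a backslash before the closing delimiter drops the trailing newline. Spacing matters, because a space immediately before the backslash is preserved (the backslash, not the space, is the last character of the line), while unprotected trailing spaces are stripped. A small standalone sketch:

    public class ContinuationDemo {
      public static void main(String[] args) {
        // The first backslash joins the two lines (the space before it is
        // preserved); the last one removes the final newline.
        String msg = """
            sticky bit is \
            set on the parent dir\
            """;
        System.out.println(msg); // sticky bit is set on the parent dir
      }
    }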

common/src/java/org/apache/hadoop/hive/common/HeapMemoryMonitor.java

Lines changed: 4 additions & 2 deletions

@@ -100,8 +100,10 @@ private static MemoryPoolMXBean getTenuredGenPool() {
       if (isUsageThresholdSupported) {
         return pool;
       } else {
-        LOG.error("{} vendor does not support isCollectionUsageThresholdSupported() and isUsageThresholdSupported()" +
-            " for tenured memory pool '{}'.", vendor, pool.getName());
+        LOG.error("""
+            {} vendor does not support isCollectionUsageThresholdSupported() and isUsageThresholdSupported()\
+             for tenured memory pool '{}'.\
+            """, vendor, pool.getName());
       }
     }
   }

common/src/java/org/apache/hadoop/hive/common/JavaUtils.java

Lines changed: 4 additions & 4 deletions

@@ -70,8 +70,8 @@ public static boolean closeClassLoadersTo(ClassLoader current, ClassLoader stop)
     try {
       closeClassLoader(current);
     } catch (IOException e) {
-      String detailedMessage = current instanceof URLClassLoader ?
-          Arrays.toString(((URLClassLoader) current).getURLs()) :
+      String detailedMessage = (current instanceof URLClassLoader urlcl) ?
+          Arrays.toString(urlcl.getURLs()) :
           "";
       LOG.info("Failed to close class loader " + current + " " + detailedMessage, e);
     }

@@ -90,8 +90,8 @@ private static boolean isValidHierarchy(ClassLoader current, ClassLoader stop) {
   }

   public static void closeClassLoader(ClassLoader loader) throws IOException {
-    if (loader instanceof Closeable) {
-      ((Closeable) loader).close();
+    if (loader instanceof Closeable closeable) {
+      closeable.close();
     } else {
       LOG.warn("Ignoring attempt to close class loader ({}) -- not instance of UDFClassLoader.",
           loader == null ? "mull" : loader.getClass().getSimpleName());
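
Both hunks use pattern matching for instanceof (standardized in JDK 16): the type test and cast fuse into a single binding variable that is in scope only where the test has succeeded. A minimal sketch, independent of the Hive classes:

    import java.io.Closeable;
    import java.io.IOException;

    public class PatternMatchDemo {
      static void closeQuietly(Object resource) {
        // One construct replaces instanceof + explicit cast.
        if (resource instanceof Closeable closeable) {
          try {
            closeable.close();
          } catch (IOException ignored) {
            // best effort
          }
        }
      }

      public static void main(String[] args) {
        closeQuietly(new java.io.StringReader("x")); // Reader implements Closeable
        closeQuietly("not closeable");               // no-op
      }
    }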

common/src/java/org/apache/hadoop/hive/common/LogUtils.java

Lines changed: 7 additions & 9 deletions

@@ -245,16 +245,14 @@ public static void unregisterLoggingContext() {
   public static String getLogFilePath() {
     String logFilePath = null;
     org.apache.logging.log4j.Logger rootLogger = LogManager.getRootLogger();
-    if (rootLogger instanceof org.apache.logging.log4j.core.Logger) {
-      org.apache.logging.log4j.core.Logger coreLogger =
-          (org.apache.logging.log4j.core.Logger)rootLogger;
+    if (rootLogger instanceof org.apache.logging.log4j.core.Logger coreLogger) {
       for (Appender appender : coreLogger.getAppenders().values()) {
-        if (appender instanceof FileAppender) {
-          logFilePath = ((FileAppender) appender).getFileName();
-        } else if (appender instanceof RollingFileAppender) {
-          logFilePath = ((RollingFileAppender) appender).getFileName();
-        } else if (appender instanceof RollingRandomAccessFileAppender) {
-          logFilePath = ((RollingRandomAccessFileAppender) appender).getFileName();
+        if (appender instanceof FileAppender fileAppender) {
+          logFilePath = fileAppender.getFileName();
+        } else if (appender instanceof RollingFileAppender fileAppender) {
+          logFilePath = fileAppender.getFileName();
+        } else if (appender instanceof RollingRandomAccessFileAppender fileAppender) {
+          logFilePath = fileAppender.getFileName();
         }
       }
     }

common/src/java/org/apache/hadoop/hive/common/format/datetime/HiveSqlDateTimeFormatter.java

Lines changed: 12 additions & 6 deletions

@@ -847,17 +847,21 @@ private void verifyForParse() {
         !(temporalFields.contains(ChronoField.MONTH_OF_YEAR) &&
             temporalFields.contains(ChronoField.DAY_OF_MONTH) ||
             temporalFields.contains(ChronoField.DAY_OF_YEAR))) {
-      throw new IllegalArgumentException("Missing day of year or (month of year + day of month)"
-          + " tokens.");
+      throw new IllegalArgumentException("""
+          Missing day of year or (month of year + day of month)\
+           tokens.\
+          """);
     }
     if (containsIsoFields &&
         !(temporalFields.contains(IsoFields.WEEK_OF_WEEK_BASED_YEAR) &&
            temporalFields.contains(ChronoField.DAY_OF_WEEK))) {
       throw new IllegalArgumentException("Missing week of year (iw) or day of week (id) tokens.");
     }
     if (roundYearCount > 0 && yearCount > 0) {
-      throw new IllegalArgumentException("Invalid duplication of format element: Both year and"
-          + "round year are provided");
+      throw new IllegalArgumentException("""
+          Invalid duplication of format element: Both year and\
+          round year are provided\
+          """);
     }
     for (TemporalField tokenType : temporalFields) {
       if (Collections.frequency(temporalFields, tokenType) > 1) {

@@ -1281,8 +1285,10 @@ private int parseNumericTemporal(String substring, Token token) {
       return 0;
     }
     if ("0".equals(substring)) {
-      throw new IllegalArgumentException("Value of hour of day (hh/hh12) in input is 0. "
-          + "The value should be between 1 and 12.");
+      throw new IllegalArgumentException("""
+          Value of hour of day (hh/hh12) in input is 0. \
+          The value should be between 1 and 12.\
+          """);
     }
   }
   if (token.temporalField == ChronoField.YEAR
