
Commit 954c090

MINOR: apply Utils.isBlank to code base (apache#10124)
Reviewers: Chia-Ping Tsai <[email protected]>
1 parent 236ddda commit 954c090

File tree: 23 files changed, +53 −45 lines changed
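Every change in this commit swaps a hand-rolled null-or-whitespace check for the shared helper org.apache.kafka.common.utils.Utils.isBlank. For context, here is a minimal sketch of what that helper looks like, inferred from the checks it replaces below (the exact code in the Kafka tree may differ in details):

    package org.apache.kafka.common.utils;

    public final class Utils {

        /**
         * Returns true if the string is null, empty, or contains only whitespace,
         * i.e. the same condition the replaced
         * "str == null || str.trim().isEmpty()" checks expressed inline.
         */
        public static boolean isBlank(String str) {
            return str == null || str.trim().isEmpty();
        }
    }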

Diff for: clients/src/main/java/org/apache/kafka/clients/admin/KafkaAdminClient.java

+1 −1

@@ -4540,7 +4540,7 @@ public UpdateFeaturesResult updateFeatures(final Map<String, FeatureUpdate> feat
         final Map<String, KafkaFutureImpl<Void>> updateFutures = new HashMap<>();
         for (final Map.Entry<String, FeatureUpdate> entry : featureUpdates.entrySet()) {
             final String feature = entry.getKey();
-            if (feature.trim().isEmpty()) {
+            if (Utils.isBlank(feature)) {
                 throw new IllegalArgumentException("Provided feature can not be empty.");
             }
             updateFutures.put(entry.getKey(), new KafkaFutureImpl<>());

Diff for: clients/src/main/java/org/apache/kafka/clients/consumer/KafkaConsumer.java

+2 −2

@@ -957,7 +957,7 @@ public void subscribe(Collection<String> topics, ConsumerRebalanceListener liste
             this.unsubscribe();
         } else {
             for (String topic : topics) {
-                if (topic == null || topic.trim().isEmpty())
+                if (Utils.isBlank(topic))
                     throw new IllegalArgumentException("Topic collection to subscribe to cannot contain null or empty topic");
             }

@@ -1108,7 +1108,7 @@ public void assign(Collection<TopicPartition> partitions) {
         } else {
             for (TopicPartition tp : partitions) {
                 String topic = (tp != null) ? tp.topic() : null;
-                if (topic == null || topic.trim().isEmpty())
+                if (Utils.isBlank(topic))
                     throw new IllegalArgumentException("Topic partitions to assign to cannot have null or empty topic");
             }
             fetcher.clearBufferedDataForUnassignedPartitions(partitions);

Diff for: clients/src/main/java/org/apache/kafka/common/security/oauthbearer/internals/unsecured/OAuthBearerUnsecuredJws.java

+4 −3

@@ -31,6 +31,7 @@
 import java.util.Set;
 
 import org.apache.kafka.common.security.oauthbearer.OAuthBearerToken;
+import org.apache.kafka.common.utils.Utils;
 
 import com.fasterxml.jackson.databind.JsonNode;
 import com.fasterxml.jackson.databind.ObjectMapper;

@@ -103,7 +104,7 @@ public OAuthBearerUnsecuredJws(String compactSerialization, String principalClai
                 OAuthBearerValidationResult.newFailure("No expiration time in JWT"));
         lifetime = convertClaimTimeInSecondsToMs(expirationTimeSeconds);
         String principalName = claim(this.principalClaimName, String.class);
-        if (principalName == null || principalName.trim().isEmpty())
+        if (Utils.isBlank(principalName))
             throw new OAuthBearerIllegalTokenException(OAuthBearerValidationResult
                     .newFailure("No principal name in JWT claim: " + this.principalClaimName));
         this.principalName = principalName;

@@ -345,7 +346,7 @@ private Set<String> calculateScope() {
         String scopeClaimName = scopeClaimName();
         if (isClaimType(scopeClaimName, String.class)) {
             String scopeClaimValue = claim(scopeClaimName, String.class);
-            if (scopeClaimValue.trim().isEmpty())
+            if (Utils.isBlank(scopeClaimValue))
                 return Collections.emptySet();
             else {
                 Set<String> retval = new HashSet<>();

@@ -360,7 +361,7 @@ private Set<String> calculateScope() {
             List<String> stringList = (List<String>) scopeClaimValue;
             Set<String> retval = new HashSet<>();
             for (String scope : stringList) {
-                if (scope != null && !scope.trim().isEmpty()) {
+                if (!Utils.isBlank(scope)) {
                     retval.add(scope.trim());
                 }
             }

Diff for: clients/src/main/java/org/apache/kafka/common/security/oauthbearer/internals/unsecured/OAuthBearerUnsecuredLoginCallbackHandler.java

+3 −6

@@ -45,6 +45,7 @@
 import org.apache.kafka.common.security.oauthbearer.OAuthBearerTokenCallback;
 import org.apache.kafka.common.security.oauthbearer.internals.OAuthBearerClientInitialResponse;
 import org.apache.kafka.common.utils.Time;
+import org.apache.kafka.common.utils.Utils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 

@@ -191,13 +192,9 @@ private void handleTokenCallback(OAuthBearerTokenCallback callback) {
             throw new OAuthBearerConfigException("Extensions provided in login context without a token");
         }
         String principalClaimNameValue = optionValue(PRINCIPAL_CLAIM_NAME_OPTION);
-        String principalClaimName = principalClaimNameValue != null && !principalClaimNameValue.trim().isEmpty()
-                ? principalClaimNameValue.trim()
-                : DEFAULT_PRINCIPAL_CLAIM_NAME;
+        String principalClaimName = Utils.isBlank(principalClaimNameValue) ? DEFAULT_PRINCIPAL_CLAIM_NAME : principalClaimNameValue.trim();
         String scopeClaimNameValue = optionValue(SCOPE_CLAIM_NAME_OPTION);
-        String scopeClaimName = scopeClaimNameValue != null && !scopeClaimNameValue.trim().isEmpty()
-                ? scopeClaimNameValue.trim()
-                : DEFAULT_SCOPE_CLAIM_NAME;
+        String scopeClaimName = Utils.isBlank(scopeClaimNameValue) ? DEFAULT_SCOPE_CLAIM_NAME : scopeClaimNameValue.trim();
         String headerJson = "{" + claimOrHeaderJsonText("alg", "none") + "}";
         String lifetimeSecondsValueToUse = optionValue(LIFETIME_SECONDS_OPTION, DEFAULT_LIFETIME_SECONDS_ONE_HOUR);
         String claimsJson;

Diff for: clients/src/main/java/org/apache/kafka/common/security/oauthbearer/internals/unsecured/OAuthBearerUnsecuredValidatorCallbackHandler.java

+1 −2

@@ -195,8 +195,7 @@ private int allowableClockSkewMs() {
         String allowableClockSkewMsValue = option(ALLOWABLE_CLOCK_SKEW_MILLIS_OPTION);
         int allowableClockSkewMs = 0;
         try {
-            allowableClockSkewMs = allowableClockSkewMsValue == null || allowableClockSkewMsValue.trim().isEmpty() ? 0
-                    : Integer.parseInt(allowableClockSkewMsValue.trim());
+            allowableClockSkewMs = Utils.isBlank(allowableClockSkewMsValue) ? 0 : Integer.parseInt(allowableClockSkewMsValue.trim());
         } catch (NumberFormatException e) {
             throw new OAuthBearerConfigException(e.getMessage(), e);
         }

Diff for: clients/src/test/java/org/apache/kafka/common/security/oauthbearer/internals/unsecured/OAuthBearerScopeUtilsTest.java

+2 −1

@@ -21,14 +21,15 @@
 
 import java.util.List;
 
+import org.apache.kafka.common.utils.Utils;
 import org.junit.jupiter.api.Test;
 
 public class OAuthBearerScopeUtilsTest {
     @Test
     public void validScope() {
         for (String validScope : new String[] {"", " ", "scope1", " scope1 ", "scope1 Scope2", "scope1 Scope2"}) {
             List<String> parsedScope = OAuthBearerScopeUtils.parseScope(validScope);
-            if (validScope.trim().isEmpty()) {
+            if (Utils.isBlank(validScope)) {
                 assertTrue(parsedScope.isEmpty());
             } else if (validScope.contains("Scope2")) {
                 assertTrue(parsedScope.size() == 2 && parsedScope.get(0).equals("scope1")

Diff for: connect/api/src/main/java/org/apache/kafka/connect/data/Values.java

+2 −2

@@ -891,7 +891,7 @@ protected static SchemaAndValue parse(Parser parser, boolean embedded) throws No
         }
 
         String token = parser.next();
-        if (token.trim().isEmpty()) {
+        if (Utils.isBlank(token)) {
             return new SchemaAndValue(Schema.STRING_SCHEMA, token);
         }
         token = token.trim();

@@ -1253,7 +1253,7 @@ protected boolean isNext(String expected, boolean ignoreLeadingAndTrailingWhites
             nextToken = consumeNextToken();
         }
         if (ignoreLeadingAndTrailingWhitespace) {
-            while (nextToken.trim().isEmpty() && canConsumeNextToken()) {
+            while (Utils.isBlank(nextToken) && canConsumeNextToken()) {
                 nextToken = consumeNextToken();
             }
         }

Diff for: connect/api/src/main/java/org/apache/kafka/connect/health/AbstractState.java

+4 −2

@@ -19,6 +19,8 @@
 
 import java.util.Objects;
 
+import org.apache.kafka.common.utils.Utils;
+
 /**
  * Provides the current status along with identifier for Connect worker and tasks.
  */

@@ -36,10 +38,10 @@ public abstract class AbstractState {
      * @param traceMessage any error trace message associated with the connector or the task; may be null or empty
      */
     public AbstractState(String state, String workerId, String traceMessage) {
-        if (state == null || state.trim().isEmpty()) {
+        if (Utils.isBlank(state)) {
             throw new IllegalArgumentException("State must not be null or empty");
         }
-        if (workerId == null || workerId.trim().isEmpty()) {
+        if (Utils.isBlank(workerId)) {
             throw new IllegalArgumentException("Worker ID must not be null or empty");
         }
         this.state = state;

Diff for: connect/api/src/main/java/org/apache/kafka/connect/health/ConnectorHealth.java

+3 −2

@@ -16,10 +16,11 @@
  */
 package org.apache.kafka.connect.health;
 
-
 import java.util.Map;
 import java.util.Objects;
 
+import org.apache.kafka.common.utils.Utils;
+
 /**
  * Provides basic health information about the connector and its tasks.
  */

@@ -35,7 +36,7 @@ public ConnectorHealth(String name,
                            ConnectorState connectorState,
                            Map<Integer, TaskState> tasks,
                            ConnectorType type) {
-        if (name == null || name.trim().isEmpty()) {
+        if (Utils.isBlank(name)) {
             throw new IllegalArgumentException("Connector name is required");
         }
         Objects.requireNonNull(connectorState, "connectorState can't be null");

Diff for: connect/api/src/test/java/org/apache/kafka/connect/data/ValuesTest.java

+2 −1

@@ -16,6 +16,7 @@
  */
 package org.apache.kafka.connect.data;
 
+import org.apache.kafka.common.utils.Utils;
 import org.apache.kafka.connect.data.Schema.Type;
 import org.apache.kafka.connect.data.Values.Parser;
 import org.apache.kafka.connect.errors.DataException;

@@ -911,7 +912,7 @@ protected void assertParsed(String input, String... expectedTokens) {
 
     protected void assertConsumable(Parser parser, String... expectedTokens) {
         for (String expectedToken : expectedTokens) {
-            if (!expectedToken.trim().isEmpty()) {
+            if (!Utils.isBlank(expectedToken)) {
                 int position = parser.mark();
                 assertTrue(parser.canConsume(expectedToken.trim()));
                 parser.rewindTo(position);

Diff for: connect/basic-auth-extension/src/main/java/org/apache/kafka/connect/rest/basic/auth/extension/PropertyFileLoginModule.java

+2 −1

@@ -18,6 +18,7 @@
 package org.apache.kafka.connect.rest.basic.auth.extension;
 
 import org.apache.kafka.common.config.ConfigException;
+import org.apache.kafka.common.utils.Utils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 

@@ -59,7 +60,7 @@ public class PropertyFileLoginModule implements LoginModule {
     public void initialize(Subject subject, CallbackHandler callbackHandler, Map<String, ?> sharedState, Map<String, ?> options) {
         this.callbackHandler = callbackHandler;
         fileName = (String) options.get(FILE_OPTIONS);
-        if (fileName == null || fileName.trim().isEmpty()) {
+        if (Utils.isBlank(fileName)) {
             throw new ConfigException("Property Credentials file must be specified");
         }

Diff for: connect/runtime/src/main/java/org/apache/kafka/connect/runtime/ConnectorConfig.java

+1 −1

@@ -494,7 +494,7 @@ ConfigDef getConfigDefFromConfigProvidingClass(String key, Class<?> cls) {
                     .filter(c -> Modifier.isPublic(c.getModifiers()))
                     .map(Class::getName)
                     .collect(Collectors.joining(", "));
-            String message = childClassNames.trim().isEmpty() ?
+            String message = Utils.isBlank(childClassNames) ?
                     aliasKind + " is abstract and cannot be created." :
                     aliasKind + " is abstract and cannot be created. Did you mean " + childClassNames + "?";
             throw new ConfigException(key, String.valueOf(cls), message);

Diff for: connect/runtime/src/main/java/org/apache/kafka/connect/runtime/SinkConnectorConfig.java

+4 −3

@@ -19,6 +19,7 @@
 import org.apache.kafka.common.config.ConfigDef;
 import org.apache.kafka.common.config.ConfigDef.Importance;
 import org.apache.kafka.common.config.ConfigDef.Type;
+import org.apache.kafka.common.utils.Utils;
 import org.apache.kafka.common.config.ConfigException;
 import org.apache.kafka.connect.runtime.isolation.Plugins;
 import org.apache.kafka.connect.sink.SinkTask;

@@ -125,17 +126,17 @@ public static void validate(Map<String, String> props) {
 
     public static boolean hasTopicsConfig(Map<String, String> props) {
         String topicsStr = props.get(TOPICS_CONFIG);
-        return topicsStr != null && !topicsStr.trim().isEmpty();
+        return !Utils.isBlank(topicsStr);
     }
 
     public static boolean hasTopicsRegexConfig(Map<String, String> props) {
         String topicsRegexStr = props.get(TOPICS_REGEX_CONFIG);
-        return topicsRegexStr != null && !topicsRegexStr.trim().isEmpty();
+        return !Utils.isBlank(topicsRegexStr);
     }
 
     public static boolean hasDlqTopicConfig(Map<String, String> props) {
         String dqlTopicStr = props.get(DLQ_TOPIC_NAME_CONFIG);
-        return dqlTopicStr != null && !dqlTopicStr.trim().isEmpty();
+        return !Utils.isBlank(dqlTopicStr);
     }
 
     @SuppressWarnings("unchecked")

Diff for: connect/runtime/src/main/java/org/apache/kafka/connect/runtime/WorkerConfig.java

+3 −2

@@ -25,6 +25,7 @@
 import org.apache.kafka.common.config.ConfigException;
 import org.apache.kafka.common.config.internals.BrokerSecurityConfigs;
 import org.apache.kafka.common.metrics.Sensor;
+import org.apache.kafka.common.utils.Utils;
 import org.apache.kafka.connect.json.JsonConverter;
 import org.apache.kafka.connect.json.JsonConverterConfig;
 import org.apache.kafka.connect.storage.Converter;

@@ -516,7 +517,7 @@ public void ensureValid(String name, Object value) {
                 if (!(item instanceof String)) {
                     throw new ConfigException("Invalid type for admin listener (expected String).");
                 }
-                if (((String) item).trim().isEmpty()) {
+                if (Utils.isBlank((String) item)) {
                     throw new ConfigException("Empty listener found when parsing list.");
                 }
             }

@@ -527,7 +528,7 @@ private static class ResponseHttpHeadersValidator implements ConfigDef.Validator
         @Override
         public void ensureValid(String name, Object value) {
             String strValue = (String) value;
-            if (strValue == null || strValue.trim().isEmpty()) {
+            if (Utils.isBlank(strValue)) {
                 return;
             }

Diff for: connect/runtime/src/main/java/org/apache/kafka/connect/runtime/distributed/DistributedHerder.java

+1 −1

@@ -1447,7 +1447,7 @@ private void reconfigureConnector(final String connName, final Callback<Void> cb
             forwardRequestExecutor.submit(() -> {
                 try {
                     String leaderUrl = leaderUrl();
-                    if (leaderUrl == null || leaderUrl.trim().isEmpty()) {
+                    if (Utils.isBlank(leaderUrl)) {
                         cb.onCompletion(new ConnectException("Request to leader to " +
                                 "reconfigure connector tasks failed " +
                                 "because the URL of the leader's REST interface is empty!"), null);

Diff for: connect/runtime/src/main/java/org/apache/kafka/connect/runtime/isolation/Plugins.java

+1 −2

@@ -445,8 +445,7 @@ public <T> T newPlugin(String klassName, AbstractConfig config, Class<T> pluginK
             plugin = newPlugin(klass);
             if (plugin instanceof Versioned) {
                 Versioned versionedPlugin = (Versioned) plugin;
-                if (versionedPlugin.version() == null || versionedPlugin.version().trim()
-                        .isEmpty()) {
+                if (Utils.isBlank(versionedPlugin.version())) {
                     throw new ConnectException("Version not defined for '" + klassName + "'");
                 }
             }

Diff for: connect/runtime/src/main/java/org/apache/kafka/connect/runtime/rest/RestServer.java

+4 −3

@@ -18,6 +18,7 @@
 
 import com.fasterxml.jackson.jaxrs.json.JacksonJsonProvider;
 import org.apache.kafka.common.config.ConfigException;
+import org.apache.kafka.common.utils.Utils;
 import org.apache.kafka.connect.errors.ConnectException;
 import org.apache.kafka.connect.health.ConnectClusterDetails;
 import org.apache.kafka.connect.rest.ConnectRestExtension;

@@ -275,19 +276,19 @@ public void initializeResources(Herder herder) {
         }
 
         String allowedOrigins = config.getString(WorkerConfig.ACCESS_CONTROL_ALLOW_ORIGIN_CONFIG);
-        if (allowedOrigins != null && !allowedOrigins.trim().isEmpty()) {
+        if (!Utils.isBlank(allowedOrigins)) {
             FilterHolder filterHolder = new FilterHolder(new CrossOriginFilter());
             filterHolder.setName("cross-origin");
             filterHolder.setInitParameter(CrossOriginFilter.ALLOWED_ORIGINS_PARAM, allowedOrigins);
             String allowedMethods = config.getString(WorkerConfig.ACCESS_CONTROL_ALLOW_METHODS_CONFIG);
-            if (allowedMethods != null && !allowedOrigins.trim().isEmpty()) {
+            if (!Utils.isBlank(allowedMethods)) {
                 filterHolder.setInitParameter(CrossOriginFilter.ALLOWED_METHODS_PARAM, allowedMethods);
             }
             context.addFilter(filterHolder, "/*", EnumSet.of(DispatcherType.REQUEST));
         }
 
         String headerConfig = config.getString(WorkerConfig.RESPONSE_HTTP_HEADERS_CONFIG);
-        if (headerConfig != null && !headerConfig.trim().isEmpty()) {
+        if (!Utils.isBlank(headerConfig)) {
             configureHttpResponsHeaderFilter(context);
         }

Diff for: connect/transforms/src/main/java/org/apache/kafka/connect/transforms/TimestampConverter.java

+2 −2

@@ -255,11 +255,11 @@ public void configure(Map<String, ?> configs) {
             throw new ConfigException("Unknown timestamp type in TimestampConverter: " + type + ". Valid values are "
                     + Utils.join(VALID_TYPES, ", ") + ".");
         }
-        if (type.equals(TYPE_STRING) && formatPattern.trim().isEmpty()) {
+        if (type.equals(TYPE_STRING) && Utils.isBlank(formatPattern)) {
             throw new ConfigException("TimestampConverter requires format option to be specified when using string timestamps");
         }
         SimpleDateFormat format = null;
-        if (formatPattern != null && !formatPattern.trim().isEmpty()) {
+        if (!Utils.isBlank(formatPattern)) {
             try {
                 format = new SimpleDateFormat(formatPattern);
                 format.setTimeZone(UTC);

Diff for: core/src/main/scala/kafka/network/SocketServer.scala

+2 −2

@@ -47,7 +47,7 @@ import org.apache.kafka.common.network.{ChannelBuilder, ChannelBuilders, ClientI
 import org.apache.kafka.common.protocol.ApiKeys
 import org.apache.kafka.common.requests.{ApiVersionsRequest, RequestContext, RequestHeader}
 import org.apache.kafka.common.security.auth.SecurityProtocol
-import org.apache.kafka.common.utils.{KafkaThread, LogContext, Time}
+import org.apache.kafka.common.utils.{KafkaThread, LogContext, Time, Utils}
 import org.apache.kafka.common.{Endpoint, KafkaException, MetricName, Reconfigurable}
 import org.slf4j.event.Level
 

@@ -650,7 +650,7 @@ private[kafka] class Acceptor(val endPoint: EndPoint,
    */
   private def openServerSocket(host: String, port: Int): ServerSocketChannel = {
     val socketAddress =
-      if (host == null || host.trim.isEmpty)
+      if (Utils.isBlank(host))
         new InetSocketAddress(port)
       else
         new InetSocketAddress(host, port)

Diff for: core/src/main/scala/kafka/server/KafkaServer.scala

+2 −2

@@ -46,7 +46,7 @@ import org.apache.kafka.common.requests.{ControlledShutdownRequest, ControlledSh
 import org.apache.kafka.common.security.scram.internals.ScramMechanism
 import org.apache.kafka.common.security.token.delegation.internals.DelegationTokenCache
 import org.apache.kafka.common.security.{JaasContext, JaasUtils}
-import org.apache.kafka.common.utils.{AppInfoParser, LogContext, Time}
+import org.apache.kafka.common.utils.{AppInfoParser, LogContext, Time, Utils}
 import org.apache.kafka.common.{Endpoint, Node}
 import org.apache.kafka.metadata.BrokerState
 import org.apache.kafka.server.authorizer.Authorizer

@@ -477,7 +477,7 @@ class KafkaServer(
         }
 
         val updatedEndpoints = listeners.map(endpoint =>
-          if (endpoint.host == null || endpoint.host.trim.isEmpty)
+          if (Utils.isBlank(endpoint.host))
             endpoint.copy(host = InetAddress.getLocalHost.getCanonicalHostName)
           else
             endpoint

Diff for: core/src/main/scala/kafka/utils/Log4jController.scala

+3 −2

@@ -20,6 +20,7 @@ package kafka.utils
 import java.util
 import java.util.Locale
 
+import org.apache.kafka.common.utils.Utils
 import org.apache.log4j.{Level, LogManager, Logger}
 
 import scala.collection.mutable

@@ -71,7 +72,7 @@ object Log4jController {
    */
   def logLevel(loggerName: String, logLevel: String): Boolean = {
     val log = existingLogger(loggerName)
-    if (!loggerName.trim.isEmpty && !logLevel.trim.isEmpty && log != null) {
+    if (!Utils.isBlank(loggerName) && !Utils.isBlank(logLevel) && log != null) {
       log.setLevel(Level.toLevel(logLevel.toUpperCase(Locale.ROOT)))
       true
     }

@@ -80,7 +81,7 @@ object Log4jController {
 
   def unsetLogLevel(loggerName: String): Boolean = {
     val log = existingLogger(loggerName)
-    if (!loggerName.trim.isEmpty && log != null) {
+    if (!Utils.isBlank(loggerName) && log != null) {
       log.setLevel(null)
       true
     }
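All of the rewrites above assume that Utils.isBlank treats null, the empty string, and whitespace-only strings as blank, and everything else as non-blank. A hypothetical JUnit 5 test capturing that expectation (the class name is invented for illustration; no such test is added by this commit):

    import static org.junit.jupiter.api.Assertions.assertFalse;
    import static org.junit.jupiter.api.Assertions.assertTrue;

    import org.apache.kafka.common.utils.Utils;
    import org.junit.jupiter.api.Test;

    public class UtilsIsBlankTest {

        @Test
        public void treatsNullEmptyAndWhitespaceOnlyAsBlank() {
            // Null and whitespace-only inputs collapse to "blank".
            assertTrue(Utils.isBlank(null));
            assertTrue(Utils.isBlank(""));
            assertTrue(Utils.isBlank("   "));

            // Anything with non-whitespace content is not blank, even when padded.
            assertFalse(Utils.isBlank("scope1"));
            assertFalse(Utils.isBlank(" scope1 "));
        }
    }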
