This is an automated email from the ASF dual-hosted git repository.
adoroszlai pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/ozone.git
The following commit(s) were added to refs/heads/master by this push:
new f7e04eaf3fe HDDS-13719. Replace HadoopIllegalArgumentException (#9077)
f7e04eaf3fe is described below
commit f7e04eaf3fe38864435b40ae4ff5efae7319f084
Author: Doroszlai, Attila <[email protected]>
AuthorDate: Tue Sep 30 09:28:57 2025 +0200
HDDS-13719. Replace HadoopIllegalArgumentException (#9077)
---
.../org/apache/hadoop/hdds/scm/net/NodeSchema.java | 3 +-
.../rawcoder/ByteArrayEncodingState.java | 5 ++-
.../rawcoder/ByteBufferEncodingState.java | 7 ++--
.../ozone/erasurecode/rawcoder/EncodingState.java | 5 ++-
.../ozone/erasurecode/rawcoder/RSRawDecoder.java | 3 +-
.../apache/hadoop/hdds/server/http/HttpConfig.java | 3 +-
.../hadoop/hdds/server/http/HttpServer2.java | 3 +-
.../org/apache/hadoop/hdds/utils/LogLevel.java | 41 +++++++++++-----------
pom.xml | 11 ++++++
9 files changed, 42 insertions(+), 39 deletions(-)
diff --git
a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/net/NodeSchema.java
b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/net/NodeSchema.java
index c1cf4cb7e19..f0e09be760b 100644
---
a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/net/NodeSchema.java
+++
b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/net/NodeSchema.java
@@ -18,7 +18,6 @@
package org.apache.hadoop.hdds.scm.net;
import java.util.List;
-import org.apache.hadoop.HadoopIllegalArgumentException;
/**
* Network topology schema to housekeeper relevant information.
@@ -66,7 +65,7 @@ public Builder setDefaultName(String nodeDefaultName) {
public NodeSchema build() {
if (type == null) {
- throw new HadoopIllegalArgumentException("Type is mandatory for a " +
+ throw new IllegalArgumentException("Type is mandatory for a " +
"network topology node layer definition");
}
if (cost == -1) {
diff --git
a/hadoop-hdds/erasurecode/src/main/java/org/apache/ozone/erasurecode/rawcoder/ByteArrayEncodingState.java
b/hadoop-hdds/erasurecode/src/main/java/org/apache/ozone/erasurecode/rawcoder/ByteArrayEncodingState.java
index a56ec9720f0..5e9b64b3513 100644
---
a/hadoop-hdds/erasurecode/src/main/java/org/apache/ozone/erasurecode/rawcoder/ByteArrayEncodingState.java
+++
b/hadoop-hdds/erasurecode/src/main/java/org/apache/ozone/erasurecode/rawcoder/ByteArrayEncodingState.java
@@ -18,7 +18,6 @@
package org.apache.ozone.erasurecode.rawcoder;
import java.nio.ByteBuffer;
-import org.apache.hadoop.HadoopIllegalArgumentException;
import org.apache.hadoop.hdds.annotation.InterfaceAudience;
/**
@@ -91,12 +90,12 @@ ByteBufferEncodingState convertToByteBufferState() {
void checkBuffers(byte[][] buffers) {
for (byte[] buffer : buffers) {
if (buffer == null) {
- throw new HadoopIllegalArgumentException(
+ throw new IllegalArgumentException(
"Invalid buffer found, not allowing null");
}
if (buffer.length != encodeLength) {
- throw new HadoopIllegalArgumentException(
+ throw new IllegalArgumentException(
"Invalid buffer not of length " + encodeLength);
}
}
diff --git
a/hadoop-hdds/erasurecode/src/main/java/org/apache/ozone/erasurecode/rawcoder/ByteBufferEncodingState.java
b/hadoop-hdds/erasurecode/src/main/java/org/apache/ozone/erasurecode/rawcoder/ByteBufferEncodingState.java
index 5e7512f8143..14766a3c0ee 100644
---
a/hadoop-hdds/erasurecode/src/main/java/org/apache/ozone/erasurecode/rawcoder/ByteBufferEncodingState.java
+++
b/hadoop-hdds/erasurecode/src/main/java/org/apache/ozone/erasurecode/rawcoder/ByteBufferEncodingState.java
@@ -18,7 +18,6 @@
package org.apache.ozone.erasurecode.rawcoder;
import java.nio.ByteBuffer;
-import org.apache.hadoop.HadoopIllegalArgumentException;
import org.apache.hadoop.hdds.annotation.InterfaceAudience;
/**
@@ -91,17 +90,17 @@ ByteArrayEncodingState convertToByteArrayState() {
void checkBuffers(ByteBuffer[] buffers) {
for (ByteBuffer buffer : buffers) {
if (buffer == null) {
- throw new HadoopIllegalArgumentException(
+ throw new IllegalArgumentException(
"Invalid buffer found, not allowing null");
}
if (buffer.remaining() != encodeLength) {
- throw new HadoopIllegalArgumentException(
+ throw new IllegalArgumentException(
"Invalid buffer remaining " + buffer.remaining()
+ ", not of length " + encodeLength);
}
if (buffer.isDirect() != usingDirectBuffer) {
- throw new HadoopIllegalArgumentException(
+ throw new IllegalArgumentException(
"Invalid buffer, isDirect should be " + usingDirectBuffer);
}
}
diff --git
a/hadoop-hdds/erasurecode/src/main/java/org/apache/ozone/erasurecode/rawcoder/EncodingState.java
b/hadoop-hdds/erasurecode/src/main/java/org/apache/ozone/erasurecode/rawcoder/EncodingState.java
index 6acf4b01df6..8819d6894c1 100644
---
a/hadoop-hdds/erasurecode/src/main/java/org/apache/ozone/erasurecode/rawcoder/EncodingState.java
+++
b/hadoop-hdds/erasurecode/src/main/java/org/apache/ozone/erasurecode/rawcoder/EncodingState.java
@@ -17,7 +17,6 @@
package org.apache.ozone.erasurecode.rawcoder;
-import org.apache.hadoop.HadoopIllegalArgumentException;
import org.apache.hadoop.hdds.annotation.InterfaceAudience;
/**
@@ -36,11 +35,11 @@ abstract class EncodingState {
*/
<T> void checkParameters(T[] inputs, T[] outputs) {
if (inputs.length != encoder.getNumDataUnits()) {
- throw new HadoopIllegalArgumentException("Invalid inputs length "
+ throw new IllegalArgumentException("Invalid inputs length "
+ inputs.length + " !=" + encoder.getNumDataUnits());
}
if (outputs.length != encoder.getNumParityUnits()) {
- throw new HadoopIllegalArgumentException("Invalid outputs length "
+ throw new IllegalArgumentException("Invalid outputs length "
+ outputs.length + " !=" + encoder.getNumParityUnits());
}
}
diff --git
a/hadoop-hdds/erasurecode/src/main/java/org/apache/ozone/erasurecode/rawcoder/RSRawDecoder.java
b/hadoop-hdds/erasurecode/src/main/java/org/apache/ozone/erasurecode/rawcoder/RSRawDecoder.java
index 72a5506f6ab..62a233194a5 100644
---
a/hadoop-hdds/erasurecode/src/main/java/org/apache/ozone/erasurecode/rawcoder/RSRawDecoder.java
+++
b/hadoop-hdds/erasurecode/src/main/java/org/apache/ozone/erasurecode/rawcoder/RSRawDecoder.java
@@ -19,7 +19,6 @@
import java.nio.ByteBuffer;
import java.util.Arrays;
-import org.apache.hadoop.HadoopIllegalArgumentException;
import org.apache.hadoop.hdds.annotation.InterfaceAudience;
import org.apache.hadoop.hdds.client.ECReplicationConfig;
import org.apache.ozone.erasurecode.rawcoder.util.DumpUtil;
@@ -60,7 +59,7 @@ public RSRawDecoder(ECReplicationConfig ecReplicationConfig) {
int numAllUnits = getNumAllUnits();
if (getNumAllUnits() >= RSUtil.GF.getFieldSize()) {
- throw new HadoopIllegalArgumentException(
+ throw new IllegalArgumentException(
"Invalid getNumDataUnits() and numParityUnits");
}
diff --git
a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/http/HttpConfig.java
b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/http/HttpConfig.java
index caa34f13fcd..aacdf08c3c7 100644
---
a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/http/HttpConfig.java
+++
b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/http/HttpConfig.java
@@ -17,7 +17,6 @@
package org.apache.hadoop.hdds.server.http;
-import org.apache.hadoop.HadoopIllegalArgumentException;
import org.apache.hadoop.hdds.annotation.InterfaceAudience;
import org.apache.hadoop.hdds.annotation.InterfaceStability;
import org.apache.hadoop.hdds.conf.MutableConfigurationSource;
@@ -66,7 +65,7 @@ public static Policy getHttpPolicy(MutableConfigurationSource conf) {
OzoneConfigKeys.OZONE_HTTP_POLICY_DEFAULT);
HttpConfig.Policy policy = HttpConfig.Policy.fromString(policyStr);
if (policy == null) {
- throw new HadoopIllegalArgumentException("Unrecognized value '"
+ throw new IllegalArgumentException("Unrecognized value '"
+ policyStr + "' for " + OzoneConfigKeys.OZONE_HTTP_POLICY_KEY);
}
conf.set(OzoneConfigKeys.OZONE_HTTP_POLICY_KEY, policy.name());
diff --git
a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/http/HttpServer2.java
b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/http/HttpServer2.java
index 151af357da1..44afe93fb43 100644
---
a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/http/HttpServer2.java
+++
b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/http/HttpServer2.java
@@ -59,7 +59,6 @@
import javax.servlet.http.HttpServletRequestWrapper;
import javax.servlet.http.HttpServletResponse;
import org.apache.commons.io.FileUtils;
-import org.apache.hadoop.HadoopIllegalArgumentException;
import org.apache.hadoop.conf.ConfServlet;
import org.apache.hadoop.conf.Configuration.IntegerRanges;
import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
@@ -509,7 +508,7 @@ public HttpServer2 build() throws IOException {
connector = createHttpsChannelConnector(server.webServer,
httpConfig);
} else {
- throw new HadoopIllegalArgumentException(
+ throw new IllegalArgumentException(
"unknown scheme for endpoint:" + ep);
}
connector.setHost(ep.getHost());
diff --git
a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/LogLevel.java
b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/LogLevel.java
index 0ed1682dccc..de228d8dcc2 100644
---
a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/LogLevel.java
+++
b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/LogLevel.java
@@ -33,7 +33,6 @@
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
-import org.apache.hadoop.HadoopIllegalArgumentException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.hdds.annotation.InterfaceAudience;
@@ -144,7 +143,7 @@ public int run(String[] args) throws Exception {
try {
parseArguments(args);
sendLogLevelRequest();
- } catch (HadoopIllegalArgumentException e) {
+ } catch (IllegalArgumentException e) {
printUsage();
return -1;
}
@@ -153,11 +152,11 @@ public int run(String[] args) throws Exception {
/**
* Send HTTP/HTTPS request to the daemon.
- * @throws HadoopIllegalArgumentException if arguments are invalid.
+ * @throws IllegalArgumentException if arguments are invalid.
* @throws Exception if unable to connect
*/
private void sendLogLevelRequest()
- throws HadoopIllegalArgumentException, Exception {
+ throws IllegalArgumentException, Exception {
switch (operation) {
case GETLEVEL:
doGetLevel();
@@ -166,15 +165,15 @@ private void sendLogLevelRequest()
doSetLevel();
break;
default:
- throw new HadoopIllegalArgumentException(
+ throw new IllegalArgumentException(
"Expect either -getlevel or -setlevel");
}
}
public void parseArguments(String[] args) throws
- HadoopIllegalArgumentException {
+ IllegalArgumentException {
if (args.length == 0) {
- throw new HadoopIllegalArgumentException("No arguments specified");
+ throw new IllegalArgumentException("No arguments specified");
}
int nextArgIndex = 0;
while (nextArgIndex < args.length) {
@@ -185,14 +184,14 @@ public void parseArguments(String[] args) throws
} else if (args[nextArgIndex].equals("-protocol")) {
nextArgIndex = parseProtocolArgs(args, nextArgIndex);
} else {
- throw new HadoopIllegalArgumentException(
+ throw new IllegalArgumentException(
"Unexpected argument " + args[nextArgIndex]);
}
}
// if operation is never specified in the arguments
if (operation == Operations.UNKNOWN) {
- throw new HadoopIllegalArgumentException(
+ throw new IllegalArgumentException(
"Must specify either -getlevel or -setlevel");
}
@@ -203,15 +202,15 @@ public void parseArguments(String[] args) throws
}
private int parseGetLevelArgs(String[] args, int index) throws
- HadoopIllegalArgumentException {
+ IllegalArgumentException {
// fail if multiple operations are specified in the arguments
if (operation != Operations.UNKNOWN) {
- throw new HadoopIllegalArgumentException(
+ throw new IllegalArgumentException(
"Redundant -getlevel command");
}
// check number of arguments is sufficient
if (index + 2 >= args.length) {
- throw new HadoopIllegalArgumentException(
+ throw new IllegalArgumentException(
"-getlevel needs two parameters");
}
operation = Operations.GETLEVEL;
@@ -221,15 +220,15 @@ private int parseGetLevelArgs(String[] args, int index) throws
}
private int parseSetLevelArgs(String[] args, int index) throws
- HadoopIllegalArgumentException {
+ IllegalArgumentException {
// fail if multiple operations are specified in the arguments
if (operation != Operations.UNKNOWN) {
- throw new HadoopIllegalArgumentException(
+ throw new IllegalArgumentException(
"Redundant -setlevel command");
}
// check number of arguments is sufficient
if (index + 3 >= args.length) {
- throw new HadoopIllegalArgumentException(
+ throw new IllegalArgumentException(
"-setlevel needs three parameters");
}
operation = Operations.SETLEVEL;
@@ -240,21 +239,21 @@ private int parseSetLevelArgs(String[] args, int index) throws
}
private int parseProtocolArgs(String[] args, int index) throws
- HadoopIllegalArgumentException {
+ IllegalArgumentException {
// make sure only -protocol is specified
if (protocol != null) {
- throw new HadoopIllegalArgumentException(
+ throw new IllegalArgumentException(
"Redundant -protocol command");
}
// check number of arguments is sufficient
if (index + 1 >= args.length) {
- throw new HadoopIllegalArgumentException(
+ throw new IllegalArgumentException(
"-protocol needs one parameter");
}
// check protocol is valid
protocol = args[index + 1];
if (!isValidProtocol(protocol)) {
- throw new HadoopIllegalArgumentException(
+ throw new IllegalArgumentException(
"Invalid protocol: " + protocol);
}
return index + 2;
@@ -263,7 +262,7 @@ private int parseProtocolArgs(String[] args, int index) throws
/**
* Send HTTP/HTTPS request to get log level.
*
- * @throws HadoopIllegalArgumentException if arguments are invalid.
+ * @throws IllegalArgumentException if arguments are invalid.
* @throws Exception if unable to connect
*/
private void doGetLevel() throws Exception {
@@ -273,7 +272,7 @@ private void doGetLevel() throws Exception {
/**
* Send HTTP/HTTPS request to set log level.
*
- * @throws HadoopIllegalArgumentException if arguments are invalid.
+ * @throws IllegalArgumentException if arguments are invalid.
* @throws Exception if unable to connect
*/
private void doSetLevel() throws Exception {
diff --git a/pom.xml b/pom.xml
index ac28e69125a..e6436fead2c 100644
--- a/pom.xml
+++ b/pom.xml
@@ -2019,6 +2019,17 @@
<excludedSourceRoot>${project.build.directory}/generated-sources/protobuf/java</excludedSourceRoot>
</excludedSourceRoots>
</restrictImports>
+          <restrictImports>
+            <includeTestCode>true</includeTestCode>
+            <reason>Prefer JDK built-in</reason>
+            <bannedImports>
+              <bannedImport>org.apache.hadoop.HadoopIllegalArgumentException</bannedImport>
+            </bannedImports>
+            <excludedSourceRoots>
+              <excludedSourceRoot>${project.build.directory}/generated-sources/java</excludedSourceRoot>
+              <excludedSourceRoot>${project.build.directory}/generated-sources/protobuf/java</excludedSourceRoot>
+            </excludedSourceRoots>
+          </restrictImports>
<restrictImports>
<includeTestCode>true</includeTestCode>
<reason>Use Ozone's version of the same class</reason>
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]