This is an automated email from the ASF dual-hosted git repository.

szetszwo pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/ozone.git


The following commit(s) were added to refs/heads/master by this push:
     new 810e148ea9 HDDS-12939. Remove UnknownPipelineStateException. (#8372)
810e148ea9 is described below

commit 810e148ea90293f5c4ca2904f3b5f15b66e5f84d
Author: Tsz-Wo Nicholas Sze <[email protected]>
AuthorDate: Thu May 1 08:12:53 2025 -0700

    HDDS-12939. Remove UnknownPipelineStateException. (#8372)
---
 .../common/helpers/ContainerWithPipeline.java      | 18 +++-----
 .../apache/hadoop/hdds/scm/pipeline/Pipeline.java  | 46 ++++++++-----------
 .../pipeline/UnknownPipelineStateException.java    | 45 -------------------
 .../hadoop/hdds/scm/pipeline/TestPipeline.java     |  2 +-
 ...inerLocationProtocolClientSideTranslatorPB.java | 14 +-----
 .../hadoop/ozone/om/helpers/OmKeyLocationInfo.java | 52 +++++++++-------------
 6 files changed, 48 insertions(+), 129 deletions(-)

diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/container/common/helpers/ContainerWithPipeline.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/container/common/helpers/ContainerWithPipeline.java
index cfd2d93a5d..8db875c181 100644
--- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/container/common/helpers/ContainerWithPipeline.java
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/container/common/helpers/ContainerWithPipeline.java
@@ -24,7 +24,6 @@
 import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
 import org.apache.hadoop.hdds.scm.container.ContainerInfo;
 import org.apache.hadoop.hdds.scm.pipeline.Pipeline;
-import org.apache.hadoop.hdds.scm.pipeline.UnknownPipelineStateException;
 
 /**
  * Class wraps ozone container info.
@@ -48,22 +47,17 @@ public Pipeline getPipeline() {
     return pipeline;
   }
 
-  public static ContainerWithPipeline fromProtobuf(
-      HddsProtos.ContainerWithPipeline allocatedContainer)
-      throws UnknownPipelineStateException {
+  public static ContainerWithPipeline fromProtobuf(HddsProtos.ContainerWithPipeline allocatedContainer) {
     return new ContainerWithPipeline(
         ContainerInfo.fromProtobuf(allocatedContainer.getContainerInfo()),
         Pipeline.getFromProtobuf(allocatedContainer.getPipeline()));
   }
 
-  public HddsProtos.ContainerWithPipeline getProtobuf(int clientVersion)
-      throws UnknownPipelineStateException {
-    HddsProtos.ContainerWithPipeline.Builder builder =
-        HddsProtos.ContainerWithPipeline.newBuilder();
-    builder.setContainerInfo(getContainerInfo().getProtobuf())
-        .setPipeline(getPipeline().getProtobufMessage(clientVersion, Name.IO_PORTS));
-
-    return builder.build();
+  public HddsProtos.ContainerWithPipeline getProtobuf(int clientVersion) {
+    return HddsProtos.ContainerWithPipeline.newBuilder()
+        .setContainerInfo(getContainerInfo().getProtobuf())
+        .setPipeline(getPipeline().getProtobufMessage(clientVersion, Name.IO_PORTS))
+        .build();
   }
 
   @Override
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/pipeline/Pipeline.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/pipeline/Pipeline.java
index 1549f4f782..ccf7b308d4 100644
--- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/pipeline/Pipeline.java
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/pipeline/Pipeline.java
@@ -18,7 +18,6 @@
 package org.apache.hadoop.hdds.scm.pipeline;
 
 import com.fasterxml.jackson.annotation.JsonIgnore;
-import com.google.common.base.Preconditions;
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableMap;
 import java.io.IOException;
@@ -31,6 +30,7 @@
 import java.util.LinkedList;
 import java.util.List;
 import java.util.Map;
+import java.util.Objects;
 import java.util.Optional;
 import java.util.Set;
 import java.util.UUID;
@@ -360,14 +360,11 @@ public ReplicationConfig getReplicationConfig() {
     return replicationConfig;
   }
 
-  public HddsProtos.Pipeline getProtobufMessage(int clientVersion)
-      throws UnknownPipelineStateException {
+  public HddsProtos.Pipeline getProtobufMessage(int clientVersion) {
     return getProtobufMessage(clientVersion, Collections.emptySet());
   }
 
-  public HddsProtos.Pipeline getProtobufMessage(int clientVersion, Set<DatanodeDetails.Port.Name> filterPorts)
-      throws UnknownPipelineStateException {
-
+  public HddsProtos.Pipeline getProtobufMessage(int clientVersion, Set<DatanodeDetails.Port.Name> filterPorts) {
     List<HddsProtos.DatanodeDetailsProto> members = new ArrayList<>();
     List<Integer> memberReplicaIndexes = new ArrayList<>();
 
@@ -426,8 +423,7 @@ public HddsProtos.Pipeline getProtobufMessage(int clientVersion, Set<DatanodeDet
     return builder.build();
   }
 
-  private static Pipeline getFromProtobufSetCreationTimestamp(
-      HddsProtos.Pipeline proto) throws UnknownPipelineStateException {
+  private static Pipeline getFromProtobufSetCreationTimestamp(HddsProtos.Pipeline proto) {
     return toBuilder(proto)
         .setCreateTimestamp(Instant.now())
         .build();
@@ -441,9 +437,8 @@ public Builder toBuilder() {
     return newBuilder(this);
   }
 
-  public static Builder toBuilder(HddsProtos.Pipeline pipeline)
-      throws UnknownPipelineStateException {
-    Preconditions.checkNotNull(pipeline, "Pipeline is null");
+  public static Builder toBuilder(HddsProtos.Pipeline pipeline) {
+    Objects.requireNonNull(pipeline, "pipeline == null");
 
     Map<DatanodeDetails, Integer> nodes = new LinkedHashMap<>();
     int index = 0;
@@ -486,8 +481,7 @@ public static Builder toBuilder(HddsProtos.Pipeline pipeline)
         .setCreateTimestamp(pipeline.getCreationTimeStamp());
   }
 
-  public static Pipeline getFromProtobuf(HddsProtos.Pipeline pipeline)
-      throws UnknownPipelineStateException {
+  public static Pipeline getFromProtobuf(HddsProtos.Pipeline pipeline) {
     return toBuilder(pipeline).build();
   }
 
@@ -648,10 +642,10 @@ public Builder setReplicaIndexes(Map<DatanodeDetails, Integer> indexes) {
     }
 
     public Pipeline build() {
-      Preconditions.checkNotNull(id);
-      Preconditions.checkNotNull(replicationConfig);
-      Preconditions.checkNotNull(state);
-      Preconditions.checkNotNull(nodeStatus);
+      Objects.requireNonNull(id, "id == null");
+      Objects.requireNonNull(replicationConfig, "replicationConfig == null");
+      Objects.requireNonNull(state, "state == null");
+      Objects.requireNonNull(nodeStatus, "nodeStatus == null");
 
       if (nodeOrder != null && !nodeOrder.isEmpty()) {
         List<DatanodeDetails> nodesWithOrder = new ArrayList<>();
@@ -683,31 +677,29 @@ public Pipeline build() {
   public enum PipelineState {
     ALLOCATED, OPEN, DORMANT, CLOSED;
 
-    public static PipelineState fromProtobuf(HddsProtos.PipelineState state)
-        throws UnknownPipelineStateException {
-      Preconditions.checkNotNull(state, "Pipeline state is null");
+    public static PipelineState fromProtobuf(HddsProtos.PipelineState state) {
+      Objects.requireNonNull(state, "state == null");
       switch (state) {
       case PIPELINE_ALLOCATED: return ALLOCATED;
       case PIPELINE_OPEN: return OPEN;
       case PIPELINE_DORMANT: return DORMANT;
       case PIPELINE_CLOSED: return CLOSED;
       default:
-        throw new UnknownPipelineStateException(
-            "Pipeline state: " + state + " is not recognized.");
+        throw new IllegalArgumentException("Unexpected value " + state
+            + " from " + state.getClass());
       }
     }
 
-    public static HddsProtos.PipelineState getProtobuf(PipelineState state)
-        throws UnknownPipelineStateException {
-      Preconditions.checkNotNull(state, "Pipeline state is null");
+    public static HddsProtos.PipelineState getProtobuf(PipelineState state) {
+      Objects.requireNonNull(state, "state == null");
       switch (state) {
       case ALLOCATED: return HddsProtos.PipelineState.PIPELINE_ALLOCATED;
       case OPEN: return HddsProtos.PipelineState.PIPELINE_OPEN;
       case DORMANT: return HddsProtos.PipelineState.PIPELINE_DORMANT;
       case CLOSED: return HddsProtos.PipelineState.PIPELINE_CLOSED;
       default:
-        throw new UnknownPipelineStateException(
-            "Pipeline state: " + state + " is not recognized.");
+        throw new IllegalArgumentException("Unexpected value " + state
+            + " from " + state.getClass());
       }
     }
   }
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/pipeline/UnknownPipelineStateException.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/pipeline/UnknownPipelineStateException.java
deleted file mode 100644
index bcbca39b9b..0000000000
--- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/pipeline/UnknownPipelineStateException.java
+++ /dev/null
@@ -1,45 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hdds.scm.pipeline;
-
-import org.apache.hadoop.hdds.scm.exceptions.SCMException;
-
-/**
- * Signals that a pipeline state is not recognized.
- */
-public class UnknownPipelineStateException extends SCMException {
-  /**
-   * Constructs an {@code UnknownPipelineStateException} with {@code null}
-   * as its error detail message.
-   */
-  public UnknownPipelineStateException() {
-    super(ResultCodes.UNKNOWN_PIPELINE_STATE);
-  }
-
-  /**
-   * Constructs an {@code UnknownPipelineStateException} with the specified
-   * detail message.
-   *
-   * @param message
-   *        The detail message (which is saved for later retrieval
-   *        by the {@link #getMessage()} method)
-   */
-  public UnknownPipelineStateException(String message) {
-    super(message, ResultCodes.UNKNOWN_PIPELINE_STATE);
-  }
-}
diff --git a/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/scm/pipeline/TestPipeline.java b/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/scm/pipeline/TestPipeline.java
index 500f9c947a..4697e1f241 100644
--- a/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/scm/pipeline/TestPipeline.java
+++ b/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/scm/pipeline/TestPipeline.java
@@ -71,7 +71,7 @@ public void getProtobufMessageEC() throws IOException {
   }
 
   @Test
-  public void testReplicaIndexesSerialisedCorrectly() throws IOException {
+  public void testReplicaIndexesSerialisedCorrectly() {
     Pipeline pipeline = MockPipeline.createEcPipeline();
     HddsProtos.Pipeline protobufMessage = pipeline.getProtobufMessage(1);
     Pipeline reloadedPipeline = Pipeline.getFromProtobuf(protobufMessage);
diff --git a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/scm/protocolPB/StorageContainerLocationProtocolClientSideTranslatorPB.java b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/scm/protocolPB/StorageContainerLocationProtocolClientSideTranslatorPB.java
index 8d9e5c36cf..0f38716afb 100644
--- a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/scm/protocolPB/StorageContainerLocationProtocolClientSideTranslatorPB.java
+++ b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/scm/protocolPB/StorageContainerLocationProtocolClientSideTranslatorPB.java
@@ -27,7 +27,6 @@
 import java.io.Closeable;
 import java.io.IOException;
 import java.util.ArrayList;
-import java.util.Arrays;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
@@ -159,12 +158,6 @@ public final class StorageContainerLocationProtocolClientSideTranslatorPB
   private final StorageContainerLocationProtocolPB rpcProxy;
   private final SCMContainerLocationFailoverProxyProvider fpp;
 
-  /**
-   * This is used to check if 'leader' or 'follower' exists,
-   * in order to confirm whether we have enabled Ratis.
-   */
-  private final List<String> scmRatisRolesToCheck = Arrays.asList("leader", "follower");
-
   /**
    * Creates a new StorageContainerLocationProtocolClientSideTranslatorPB.
    *
@@ -376,12 +369,7 @@ public List<ContainerWithPipeline> getExistContainerWithPipelinesInBatch(
         .getContainerWithPipelinesList();
 
     for (HddsProtos.ContainerWithPipeline cp : protoCps) {
-      try {
-        cps.add(ContainerWithPipeline.fromProtobuf(cp));
-      } catch (IOException uex) {
-          // "fromProtobuf" may throw an exception
-          // do nothing , just go ahead
-      }
+      cps.add(ContainerWithPipeline.fromProtobuf(cp));
     }
     return cps;
   }
diff --git a/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/helpers/OmKeyLocationInfo.java b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/helpers/OmKeyLocationInfo.java
index aba9e09071..d3fea73b21 100644
--- a/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/helpers/OmKeyLocationInfo.java
+++ b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/helpers/OmKeyLocationInfo.java
@@ -19,7 +19,6 @@
 
 import org.apache.hadoop.hdds.client.BlockID;
 import org.apache.hadoop.hdds.scm.pipeline.Pipeline;
-import org.apache.hadoop.hdds.scm.pipeline.UnknownPipelineStateException;
 import org.apache.hadoop.hdds.scm.storage.BlockLocationInfo;
 import org.apache.hadoop.hdds.security.token.OzoneBlockTokenIdentifier;
 import org.apache.hadoop.ozone.protocol.proto.OzoneManagerProtocolProtos.KeyLocation;
@@ -101,42 +100,33 @@ public KeyLocation getProtobuf(boolean ignorePipeline, int clientVersion) {
         .setCreateVersion(getCreateVersion())
         .setPartNumber(getPartNumber());
     if (!ignorePipeline) {
-      try {
-        Token<OzoneBlockTokenIdentifier> token = getToken();
-        if (token != null) {
-          builder.setToken(OMPBHelper.protoFromToken(token));
-        }
-
-        // Pipeline can be null when key create with override and
-        // on a versioning enabled bucket. for older versions of blocks
-        // We do not need to return pipeline as part of createKey,
-        // so we do not refresh pipeline in createKey, because of this reason
-        // for older version of blocks pipeline can be null.
-        // And also for key create we never need to return pipeline info
-        // for older version of blocks irrespective of versioning.
-
-        // Currently, we do not completely support bucket versioning.
-        // TODO: this needs to be revisited when bucket versioning
-        //  implementation is handled.
-
-        Pipeline pipeline = getPipeline();
-        if (pipeline != null) {
-          builder.setPipeline(pipeline.getProtobufMessage(clientVersion));
-        }
-      } catch (UnknownPipelineStateException e) {
-        //TODO: fix me: we should not return KeyLocation without pipeline.
+      Token<OzoneBlockTokenIdentifier> token = getToken();
+      if (token != null) {
+        builder.setToken(OMPBHelper.protoFromToken(token));
+      }
+
+      // Pipeline can be null when key create with override and
+      // on a versioning enabled bucket. for older versions of blocks
+      // We do not need to return pipeline as part of createKey,
+      // so we do not refresh pipeline in createKey, because of this reason
+      // for older version of blocks pipeline can be null.
+      // And also for key create we never need to return pipeline info
+      // for older version of blocks irrespective of versioning.
+
+      // Currently, we do not completely support bucket versioning.
+      // TODO: this needs to be revisited when bucket versioning
+      //  implementation is handled.
+
+      Pipeline pipeline = getPipeline();
+      if (pipeline != null) {
+        builder.setPipeline(pipeline.getProtobufMessage(clientVersion));
       }
     }
     return builder.build();
   }
 
   private static Pipeline getPipeline(KeyLocation keyLocation) {
-    try {
-      return keyLocation.hasPipeline() ?
-          Pipeline.getFromProtobuf(keyLocation.getPipeline()) : null;
-    } catch (UnknownPipelineStateException e) {
-      return null;
-    }
+    return keyLocation.hasPipeline() ? Pipeline.getFromProtobuf(keyLocation.getPipeline()) : null;
   }
 
   public static OmKeyLocationInfo getFromProtobuf(KeyLocation keyLocation) {
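
For readers following the change: the practical effect is that pipeline proto conversion no longer declares a checked exception, and an unrecognized HddsProtos.PipelineState now surfaces as an unchecked IllegalArgumentException. A minimal caller-side sketch of the difference (the pipelineProto variable name is illustrative, not part of the patch):

    // Before this change: the checked exception had to be caught or declared.
    // try {
    //   Pipeline pipeline = Pipeline.getFromProtobuf(pipelineProto);
    // } catch (UnknownPipelineStateException e) {
    //   // handle or rethrow
    // }

    // After this change: the call is unchecked; an unknown proto state
    // fails fast with IllegalArgumentException.
    Pipeline pipeline = Pipeline.getFromProtobuf(pipelineProto);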


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]
