Jackie-Jiang commented on a change in pull request #8335:
URL: https://github.com/apache/pinot/pull/8335#discussion_r825080526



##########
File path: 
pinot-core/src/main/java/org/apache/pinot/core/data/manager/realtime/TransformPipeline.java
##########
@@ -0,0 +1,121 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.pinot.core.data.manager.realtime;
+
+import com.google.common.collect.ImmutableList;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.List;
+import org.apache.pinot.segment.local.recordtransformer.ComplexTypeTransformer;
+import org.apache.pinot.segment.local.recordtransformer.CompositeTransformer;
+import org.apache.pinot.segment.local.recordtransformer.RecordTransformer;
+import org.apache.pinot.segment.local.utils.IngestionUtils;
+import org.apache.pinot.spi.config.table.TableConfig;
+import org.apache.pinot.spi.data.Schema;
+import org.apache.pinot.spi.data.readers.GenericRow;
+
+
+public class TransformPipeline {
+  private final RecordTransformer _recordTransformer;
+  private final ComplexTypeTransformer _complexTypeTransformer;
+  public TransformPipeline(TableConfig tableConfig, Schema schema) {

Review comment:
       (minor) add an empty line before the constructor

##########
File path: 
pinot-core/src/main/java/org/apache/pinot/core/data/manager/realtime/TransformPipeline.java
##########
@@ -0,0 +1,121 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.pinot.core.data.manager.realtime;
+
+import com.google.common.collect.ImmutableList;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.List;
+import org.apache.pinot.segment.local.recordtransformer.ComplexTypeTransformer;
+import org.apache.pinot.segment.local.recordtransformer.CompositeTransformer;
+import org.apache.pinot.segment.local.recordtransformer.RecordTransformer;
+import org.apache.pinot.segment.local.utils.IngestionUtils;
+import org.apache.pinot.spi.config.table.TableConfig;
+import org.apache.pinot.spi.data.Schema;
+import org.apache.pinot.spi.data.readers.GenericRow;
+
+
+public class TransformPipeline {
+  private final RecordTransformer _recordTransformer;
+  private final ComplexTypeTransformer _complexTypeTransformer;
+  public TransformPipeline(TableConfig tableConfig, Schema schema) {
+    // Create record transformer
+    _recordTransformer = 
CompositeTransformer.getDefaultTransformer(tableConfig, schema);
+
+    // Create complex type transformer
+    _complexTypeTransformer = 
ComplexTypeTransformer.getComplexTypeTransformer(tableConfig);
+  }
+
+  /**
+   * Process and validate the decoded row against schema.
+   * @param decodedRow the row data to pass in
+   * @return both processed rows and failed rows in a struct.
+   * @throws TransformException when data has issues like schema validation. 
Fetch the partialResult from Exception
+   */
+  public Result processRow(GenericRow decodedRow) throws TransformException {

Review comment:
       Consider passing the `Result` in so that it can be reused to reduce 
garbage

##########
File path: 
pinot-core/src/main/java/org/apache/pinot/core/data/manager/realtime/TransformPipeline.java
##########
@@ -0,0 +1,121 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.pinot.core.data.manager.realtime;
+
+import com.google.common.collect.ImmutableList;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.List;
+import org.apache.pinot.segment.local.recordtransformer.ComplexTypeTransformer;
+import org.apache.pinot.segment.local.recordtransformer.CompositeTransformer;
+import org.apache.pinot.segment.local.recordtransformer.RecordTransformer;
+import org.apache.pinot.segment.local.utils.IngestionUtils;
+import org.apache.pinot.spi.config.table.TableConfig;
+import org.apache.pinot.spi.data.Schema;
+import org.apache.pinot.spi.data.readers.GenericRow;
+
+
+public class TransformPipeline {
+  private final RecordTransformer _recordTransformer;
+  private final ComplexTypeTransformer _complexTypeTransformer;
+  public TransformPipeline(TableConfig tableConfig, Schema schema) {
+    // Create record transformer
+    _recordTransformer = 
CompositeTransformer.getDefaultTransformer(tableConfig, schema);
+
+    // Create complex type transformer
+    _complexTypeTransformer = 
ComplexTypeTransformer.getComplexTypeTransformer(tableConfig);
+  }
+
+  /**
+   * Process and validate the decoded row against schema.
+   * @param decodedRow the row data to pass in
+   * @return both processed rows and failed rows in a struct.
+   * @throws TransformException when data has issues like schema validation. 
Fetch the partialResult from Exception
+   */
+  public Result processRow(GenericRow decodedRow) throws TransformException {
+    Result res = new Result();
+    // to keep track and add to "failedRows" when exception happens
+    GenericRow currentRow = null;
+    try {
+      if (_complexTypeTransformer != null) {
+        // TODO: consolidate complex type transformer into composite type 
transformer
+        decodedRow = _complexTypeTransformer.transform(decodedRow);
+      }
+      Collection<GenericRow> rows = (Collection<GenericRow>) 
decodedRow.getValue(GenericRow.MULTIPLE_RECORDS_KEY);
+      if (rows == null) {
+        rows = ImmutableList.of(decodedRow);
+      }
+      for (GenericRow row : rows) {
+        currentRow = row;
+        GenericRow transformedRow = _recordTransformer.transform(row);
+        if (transformedRow != null && IngestionUtils.shouldIngestRow(row)) {
+          res.addTransformedRows(transformedRow);
+        } else {
+          res.addFailedRows(row);
+        }
+      }
+      return res;
+    } catch (Exception ex) {
+      // when exception happens, the current processing row needs to be added 
to failed list
+      res.addFailedRows(currentRow);
+      throw new TransformException("Encountered error while processing row", 
res, ex);
+    }
+  }
+
+  public static class Result {
+    private final List<GenericRow> _transformedRows = new ArrayList<>();
+    private final List<GenericRow> _failedRows = new ArrayList<>();

Review comment:
       Just keep a count for `numInvalidRows`

##########
File path: 
pinot-core/src/main/java/org/apache/pinot/core/data/manager/realtime/TransformPipeline.java
##########
@@ -0,0 +1,121 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.pinot.core.data.manager.realtime;
+
+import com.google.common.collect.ImmutableList;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.List;
+import org.apache.pinot.segment.local.recordtransformer.ComplexTypeTransformer;
+import org.apache.pinot.segment.local.recordtransformer.CompositeTransformer;
+import org.apache.pinot.segment.local.recordtransformer.RecordTransformer;
+import org.apache.pinot.segment.local.utils.IngestionUtils;
+import org.apache.pinot.spi.config.table.TableConfig;
+import org.apache.pinot.spi.data.Schema;
+import org.apache.pinot.spi.data.readers.GenericRow;
+
+
+public class TransformPipeline {

Review comment:
       Add some javadoc

##########
File path: 
pinot-core/src/main/java/org/apache/pinot/core/data/manager/realtime/TransformPipeline.java
##########
@@ -0,0 +1,121 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.pinot.core.data.manager.realtime;
+
+import com.google.common.collect.ImmutableList;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.List;
+import org.apache.pinot.segment.local.recordtransformer.ComplexTypeTransformer;
+import org.apache.pinot.segment.local.recordtransformer.CompositeTransformer;
+import org.apache.pinot.segment.local.recordtransformer.RecordTransformer;
+import org.apache.pinot.segment.local.utils.IngestionUtils;
+import org.apache.pinot.spi.config.table.TableConfig;
+import org.apache.pinot.spi.data.Schema;
+import org.apache.pinot.spi.data.readers.GenericRow;
+
+
+public class TransformPipeline {
+  private final RecordTransformer _recordTransformer;
+  private final ComplexTypeTransformer _complexTypeTransformer;
+  public TransformPipeline(TableConfig tableConfig, Schema schema) {
+    // Create record transformer
+    _recordTransformer = 
CompositeTransformer.getDefaultTransformer(tableConfig, schema);
+
+    // Create complex type transformer
+    _complexTypeTransformer = 
ComplexTypeTransformer.getComplexTypeTransformer(tableConfig);
+  }
+
+  /**
+   * Process and validate the decoded row against schema.
+   * @param decodedRow the row data to pass in
+   * @return both processed rows and failed rows in a struct.
+   * @throws TransformException when data has issues like schema validation. 
Fetch the partialResult from Exception
+   */
+  public Result processRow(GenericRow decodedRow) throws TransformException {
+    Result res = new Result();
+    // to keep track and add to "failedRows" when exception happens
+    GenericRow currentRow = null;
+    try {
+      if (_complexTypeTransformer != null) {
+        // TODO: consolidate complex type transformer into composite type 
transformer
+        decodedRow = _complexTypeTransformer.transform(decodedRow);
+      }
+      Collection<GenericRow> rows = (Collection<GenericRow>) 
decodedRow.getValue(GenericRow.MULTIPLE_RECORDS_KEY);
+      if (rows == null) {
+        rows = ImmutableList.of(decodedRow);

Review comment:
       Suggest keeping the original logic to avoid creating this extra list

##########
File path: 
pinot-core/src/main/java/org/apache/pinot/core/data/manager/realtime/LLRealtimeSegmentDataManager.java
##########
@@ -510,42 +505,25 @@ private void processStreamEvents(MessageBatch 
messagesAndOffsets, long idlePipeS
           .decode(messagesAndOffsets.getMessageAtIndex(index), 
messagesAndOffsets.getMessageOffsetAtIndex(index),
               messagesAndOffsets.getMessageLengthAtIndex(index), reuse);
       if (decodedRow != null) {
-        List<GenericRow> transformedRows = new ArrayList<>();
+        TransformPipeline.Result result = new TransformPipeline.Result();
         try {
-          if (_complexTypeTransformer != null) {
-            // TODO: consolidate complex type transformer into composite type 
transformer
-            decodedRow = _complexTypeTransformer.transform(decodedRow);
-          }
-          Collection<GenericRow> rows = (Collection<GenericRow>) 
decodedRow.getValue(GenericRow.MULTIPLE_RECORDS_KEY);
-          if (rows != null) {
-            for (GenericRow row : rows) {
-              GenericRow transformedRow = _recordTransformer.transform(row);
-              if (transformedRow != null && 
IngestionUtils.shouldIngestRow(row)) {
-                transformedRows.add(transformedRow);
-              } else {
-                realtimeRowsDroppedMeter =
-                    _serverMetrics.addMeteredTableValue(_metricKeyName, 
ServerMeter.INVALID_REALTIME_ROWS_DROPPED, 1,
-                        realtimeRowsDroppedMeter);
-              }
-            }
-          } else {
-            GenericRow transformedRow = 
_recordTransformer.transform(decodedRow);
-            if (transformedRow != null && 
IngestionUtils.shouldIngestRow(transformedRow)) {
-              transformedRows.add(transformedRow);
-            } else {
-              realtimeRowsDroppedMeter =
-                  _serverMetrics.addMeteredTableValue(_metricKeyName, 
ServerMeter.INVALID_REALTIME_ROWS_DROPPED, 1,
-                      realtimeRowsDroppedMeter);
-            }
-          }
-        } catch (Exception e) {
+          result = _transformPipeline.processRow(decodedRow);
+        } catch (TransformPipeline.TransformException e) {
           _numRowsErrored++;
           String errorMessage = String.format("Caught exception while 
transforming the record: %s", decodedRow);
           _segmentLogger.error(errorMessage, e);
           _realtimeTableDataManager.addSegmentError(_segmentNameStr,
               new SegmentErrorInfo(System.currentTimeMillis(), errorMessage, 
e));
+          // for a row with multiple records (multi rows), if we encounter 
exception in the middle,
+          // there could be some rows that are processed successfully. We 
still wish to process them.
+          result = e.getPartialResult();

Review comment:
       We may simplify the handling by skipping the whole result when it 
encounters an exception

##########
File path: 
pinot-core/src/main/java/org/apache/pinot/core/data/manager/realtime/TransformPipeline.java
##########
@@ -0,0 +1,121 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.pinot.core.data.manager.realtime;
+
+import com.google.common.collect.ImmutableList;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.List;
+import org.apache.pinot.segment.local.recordtransformer.ComplexTypeTransformer;
+import org.apache.pinot.segment.local.recordtransformer.CompositeTransformer;
+import org.apache.pinot.segment.local.recordtransformer.RecordTransformer;
+import org.apache.pinot.segment.local.utils.IngestionUtils;
+import org.apache.pinot.spi.config.table.TableConfig;
+import org.apache.pinot.spi.data.Schema;
+import org.apache.pinot.spi.data.readers.GenericRow;
+
+
+public class TransformPipeline {
+  private final RecordTransformer _recordTransformer;
+  private final ComplexTypeTransformer _complexTypeTransformer;
+  public TransformPipeline(TableConfig tableConfig, Schema schema) {
+    // Create record transformer
+    _recordTransformer = 
CompositeTransformer.getDefaultTransformer(tableConfig, schema);
+
+    // Create complex type transformer
+    _complexTypeTransformer = 
ComplexTypeTransformer.getComplexTypeTransformer(tableConfig);
+  }
+
+  /**
+   * Process and validate the decoded row against schema.
+   * @param decodedRow the row data to pass in
+   * @return both processed rows and failed rows in a struct.
+   * @throws TransformException when data has issues like schema validation. 
Fetch the partialResult from Exception
+   */
+  public Result processRow(GenericRow decodedRow) throws TransformException {
+    Result res = new Result();
+    // to keep track and add to "failedRows" when exception happens
+    GenericRow currentRow = null;
+    try {
+      if (_complexTypeTransformer != null) {
+        // TODO: consolidate complex type transformer into composite type 
transformer
+        decodedRow = _complexTypeTransformer.transform(decodedRow);
+      }
+      Collection<GenericRow> rows = (Collection<GenericRow>) 
decodedRow.getValue(GenericRow.MULTIPLE_RECORDS_KEY);
+      if (rows == null) {
+        rows = ImmutableList.of(decodedRow);
+      }
+      for (GenericRow row : rows) {
+        currentRow = row;
+        GenericRow transformedRow = _recordTransformer.transform(row);
+        if (transformedRow != null && IngestionUtils.shouldIngestRow(row)) {
+          res.addTransformedRows(transformedRow);
+        } else {
+          res.addFailedRows(row);
+        }
+      }
+      return res;
+    } catch (Exception ex) {
+      // when exception happens, the current processing row needs to be added 
to failed list
+      res.addFailedRows(currentRow);
+      throw new TransformException("Encountered error while processing row", 
res, ex);
+    }
+  }
+
+  public static class Result {
+    private final List<GenericRow> _transformedRows = new ArrayList<>();
+    private final List<GenericRow> _failedRows = new ArrayList<>();
+
+    public List<GenericRow> getTransformedRows() {
+      return ImmutableList.copyOf(_transformedRows);

Review comment:
       Don't make a copy here; it will create unnecessary garbage

##########
File path: 
pinot-core/src/test/java/org/apache/pinot/core/data/manager/realtime/Fixtures.java
##########
@@ -0,0 +1,156 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.pinot.core.data.manager.realtime;
+
+import com.google.common.collect.ImmutableList;
+import java.util.Random;
+import 
org.apache.pinot.core.realtime.impl.fakestream.FakeStreamConsumerFactory;
+import org.apache.pinot.core.realtime.impl.fakestream.FakeStreamMessageDecoder;
+import org.apache.pinot.spi.config.table.TableConfig;
+import org.apache.pinot.spi.data.Schema;
+import org.apache.pinot.spi.data.readers.GenericRow;
+import org.apache.pinot.spi.utils.JsonUtils;
+
+
+public class Fixtures {
+  private Fixtures() {
+  }
+  public static final int MAX_ROWS_IN_SEGMENT = 250000;
+  public static final long MAX_TIME_FOR_SEGMENT_CLOSE_MS = 64368000L;
+  public static final String TOPIC_NAME = "someTopic";
+  public static final String CONSUMER_FACTORY_CLASS = 
FakeStreamConsumerFactory.class.getName();
+  public static final String MESSAGE_DECODER_CLASS = 
FakeStreamMessageDecoder.class.getName();
+
+  //@formatter:off
+  public static final String TABLE_CONFIG_JSON =
+      "{"
+          + "  \"metadata\":{},"

Review comment:
       (minor) Let's keep the original indentation for readability




-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: commits-unsubscr...@pinot.apache.org

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org



---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@pinot.apache.org
For additional commands, e-mail: commits-h...@pinot.apache.org

Reply via email to