pedro93 opened a new issue #6850:
URL: https://github.com/apache/incubator-pinot/issues/6850


   Bug found in Pinot v0.7.0
   
   When creating a table definition in the UI, it rejects definitions that contain `upsertConfig`, reporting that such configurations are invalid.
   
   A workaround is to submit the configuration directly through the Swagger API (a sketch of this follows the schema below). It seems the UI does not use the same backend service for configuration validation.
   
   An example configuration could be:
   ```json
   {
     "REALTIME": {
       "tableName": "Table_REALTIME",
       "tableType": "REALTIME",
       "segmentsConfig": {
         "timeColumnName": "timestamp",
         "replication": "1",
         "replicasPerPartition": "1",
         "schemaName": "Table"
       },
       "tenants": {
         "broker": "DefaultTenant",
         "server": "DefaultTenant",
         "tagOverrideConfig": {}
       },
       "tableIndexConfig": {
         "invertedIndexColumns": [],
         "rangeIndexColumns": [],
         "varLengthDictionaryColumns": [],
         "enableDefaultStarTree": false,
         "enableDynamicStarTreeCreation": false,
         "aggregateMetrics": false,
         "nullHandlingEnabled": false,
         "autoGeneratedInvertedIndex": false,
         "createInvertedIndexDuringSegmentGeneration": false,
         "bloomFilterColumns": [],
         "loadMode": "MMAP",
         "streamConfigs": {
           "streamType": "kafka",
           "stream.kafka.topic.name": "data.Table",
           "stream.kafka.broker.list": "kafka.dc-kafka.svc.cluster.local:9092",
           "stream.kafka.consumer.type": "lowlevel",
           "stream.kafka.consumer.prop.auto.offset.reset": "smallest",
           "stream.kafka.consumer.factory.class.name": 
"org.apache.pinot.plugin.stream.kafka20.KafkaConsumerFactory",
           "stream.kafka.decoder.class.name": 
"org.apache.pinot.plugin.stream.kafka.KafkaJSONMessageDecoder",
           "realtime.segment.flush.threshold.rows": "0",
           "realtime.segment.flush.threshold.time": "24h",
           "realtime.segment.flush.segment.size": "100M"
         },
         "noDictionaryColumns": [],
         "onHeapDictionaryColumns": [],
         "sortedColumn": []
       },
       "metadata": {},
       "quota": {},
       "routing": {},
       "query": {},
       "upsertConfig": {
         "mode": "FULL"
       },
       "ingestionConfig": {},
       "isDimTable": false
     }
   }
   ```
   
   With a schema definition:
   ```json
   {
     "schemaName": "Table",
     "dimensionFieldSpecs": [
       {
         "name": "executionId",
         "dataType": "STRING"
       },
       {
         "name": "jobId",
         "dataType": "STRING"
       },
       {
         "name": "jobMemberId",
         "dataType": "STRING"
       },
       {
         "name": "projectId",
         "dataType": "STRING"
       }
     ],
     "dateTimeFieldSpecs": [
       {
         "name": "timestamp",
         "dataType": "LONG",
         "format": "1:MILLISECONDS:EPOCH",
         "granularity": "1:MILLISECONDS"
       }
     ],
     "primaryKeyColumns": [
       "executionId"
     ]
   }
   ```
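   
   For reference, the workaround can be scripted against the controller's REST API (the same endpoints Swagger exposes). The sketch below is a minimal, untested example: it assumes a controller at `localhost:9000` and that the two JSON documents above are saved as `schema.json` and `table_config.json`. Note that `POST /tables` expects the bare table config, not the `{"REALTIME": {...}}` wrapper that `GET /tables/{name}` returns.
   
   ```python
   import json
   
   import requests  # assumption: the `requests` package is installed
   
   CONTROLLER = "http://localhost:9000"  # assumption: default controller address
   
   # POST /tables expects the bare table config, so unwrap the
   # {"REALTIME": {...}} envelope shown above.
   with open("table_config.json") as f:
       table_config = json.load(f)["REALTIME"]
   
   with open("schema.json") as f:
       schema = json.load(f)
   
   # The schema must be registered before the table that references it.
   resp = requests.post(f"{CONTROLLER}/schemas", json=schema)
   resp.raise_for_status()
   
   resp = requests.post(f"{CONTROLLER}/tables", json=table_config)
   resp.raise_for_status()
   print(resp.json())
   ```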

