xiaokang commented on code in PR #44999:
URL: https://github.com/apache/doris/pull/44999#discussion_r1899213576
########## extension/logstash/lib/logstash/outputs/doris.rb: ##########

@@ -157,50 +173,59 @@ def send_events(events)
+  private
+  def handle_request(documents, http_headers, event_num, req_count)
+    response = make_request(documents, http_headers, @http_query, @http_hosts.sample)
+    response_json = {}
+    begin
+      response_json = JSON.parse(response.body)
+    rescue => _
+      @logger.warn("doris stream load response is not a valid JSON:\n#{response}")

Review Comment:
   do more exception handling instead of just log warning.


########## extension/logstash/lib/logstash/outputs/doris.rb: ##########

@@ -72,6 +73,7 @@ class LogStash::Outputs::Doris < LogStash::Outputs::Base
   config :log_progress_interval, :validate => :number, :default => 10
+  config :retry_queue_size, :validate => :number, :default => 128

Review Comment:
   128 may be too large if the request batch size is large, eg 100MB. So you should limit on queued bytes instead of items.
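For illustration, a retry queue bounded by total queued bytes rather than by item count, as suggested above, could look roughly like the sketch below. The BoundedBytesQueue class, its method names, and the assumption that each queued batch is a String are hypothetical and not taken from the PR:

    # Hypothetical sketch: bound the retry queue by queued bytes, not items,
    # so that many pending batches of e.g. 100MB each cannot exhaust memory.
    class BoundedBytesQueue
      def initialize(max_bytes)
        @max_bytes = max_bytes
        @bytes = 0
        @items = []
        @mutex = Mutex.new
        @not_full = ConditionVariable.new
        @not_empty = ConditionVariable.new
      end

      # Blocks the producer while the byte budget is exhausted; a single
      # oversized batch is still admitted when the queue is empty.
      def push(documents, meta)
        @mutex.synchronize do
          while @bytes + documents.bytesize > @max_bytes && !@items.empty?
            @not_full.wait(@mutex)
          end
          @bytes += documents.bytesize
          @items << [documents, meta]
          @not_empty.signal
        end
      end

      def pop
        @mutex.synchronize do
          @not_empty.wait(@mutex) while @items.empty?
          documents, meta = @items.shift
          @bytes -= documents.bytesize
          @not_full.signal
          [documents, meta]
        end
      end
    end

A config option such as retry_queue_bytes (name made up here) could then replace retry_queue_size, or the two limits could be enforced together.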
+ @logger.info("doris stream load response:\n#{response}") end + return + end - if status == "Success" || status == "Publish Timeout" - @total_bytes.addAndGet(documents.size) - @total_rows.addAndGet(event_num) - break - else - @logger.warn("FAILED doris stream load response:\n#{response}") - - if @max_retries >= 0 && req_count > @max_retries - @logger.warn("DROP this batch after failed #{req_count} times.") - if @save_on_failure - @logger.warn("Try save to disk.Disk file path : #{@save_dir}/#{@table}_#{@save_file}") - save_to_disk(documents) - end - break - end - - # sleep and then retry - sleep_for = sleep_for * 2 - sleep_for = sleep_for <= 60 ? sleep_for : 60 - sleep_rand = (sleep_for / 2) + (rand(0..sleep_for) / 2) - @logger.warn("Will do retry #{req_count} after sleep #{sleep_rand} secs.") - sleep(sleep_rand) + @logger.warn("FAILED doris stream load response:\n#{response}") + # if there are data quality issues, we do not retry + if (status == 'Fail' && response_json['Message'].start_with?("[DATA_QUALITY_ERROR]")) || (@max_retries >= 0 && req_count-1 > @max_retries) Review Comment: DATA_QUALITY_ERROR should be processed by setting max_filter_ratio instead of hard code here. ########## extension/logstash/lib/logstash/outputs/doris.rb: ########## @@ -157,50 +173,59 @@ def send_events(events) http_headers["label"] = @label_prefix + "_" + @db + "_" + @table + "_" + Time.now.strftime('%Y%m%d_%H%M%S_%L_' + SecureRandom.uuid) end - req_count = 0 - sleep_for = 1 - while true - response = make_request(documents, http_headers, @http_query, @http_hosts.sample) - - req_count += 1 - response_json = {} - begin - response_json = JSON.parse(response.body) - rescue => e - @logger.warn("doris stream load response: #{response} is not a valid JSON") - end + handle_request(documents, http_headers, event_num, 1) + end + + def sleep_for_attempt(attempt) + sleep_for = attempt**2 + sleep_for = sleep_for <= 60 ? sleep_for : 60 + (sleep_for/2) + (rand(0..sleep_for)/2) + end + + private + def handle_request(documents, http_headers, event_num, req_count) + response = make_request(documents, http_headers, @http_query, @http_hosts.sample) + response_json = {} + begin + response_json = JSON.parse(response.body) + rescue => _ + @logger.warn("doris stream load response is not a valid JSON:\n#{response}") + end + + status = response_json["Status"] - status = response_json["Status"] + if status == 'Label Already Exists' + @logger.warn("Label already exists: #{response_json['Label']}, skip #{event_num} records:\n#{response}") + return + end - if status == 'Label Already Exists' - @logger.warn("Label already exists: #{response_json['Label']}, skip #{event_num} records.") - break + if status == "Success" || status == "Publish Timeout" + @total_bytes.addAndGet(documents.size) + @total_rows.addAndGet(event_num) + if @log_request or @logger.debug? + @logger.info("doris stream load response:\n#{response}") end + return + end - if status == "Success" || status == "Publish Timeout" - @total_bytes.addAndGet(documents.size) - @total_rows.addAndGet(event_num) - break - else - @logger.warn("FAILED doris stream load response:\n#{response}") - - if @max_retries >= 0 && req_count > @max_retries - @logger.warn("DROP this batch after failed #{req_count} times.") - if @save_on_failure - @logger.warn("Try save to disk.Disk file path : #{@save_dir}/#{@table}_#{@save_file}") - save_to_disk(documents) - end - break - end - - # sleep and then retry - sleep_for = sleep_for * 2 - sleep_for = sleep_for <= 60 ? 
########## extension/logstash/lib/logstash/outputs/doris.rb: ##########

@@ -157,50 +173,59 @@ def send_events(events)
       http_headers["label"] = @label_prefix + "_" + @db + "_" + @table + "_" + Time.now.strftime('%Y%m%d_%H%M%S_%L_' + SecureRandom.uuid)
     end
-    req_count = 0
-    sleep_for = 1
-    while true
-      response = make_request(documents, http_headers, @http_query, @http_hosts.sample)
-
-      req_count += 1
-      response_json = {}
-      begin
-        response_json = JSON.parse(response.body)
-      rescue => e
-        @logger.warn("doris stream load response: #{response} is not a valid JSON")
-      end
+    handle_request(documents, http_headers, event_num, 1)
+  end
+
+  def sleep_for_attempt(attempt)
+    sleep_for = attempt**2
+    sleep_for = sleep_for <= 60 ? sleep_for : 60
+    (sleep_for/2) + (rand(0..sleep_for)/2)
+  end
+
+  private
+  def handle_request(documents, http_headers, event_num, req_count)
+    response = make_request(documents, http_headers, @http_query, @http_hosts.sample)
+    response_json = {}
+    begin
+      response_json = JSON.parse(response.body)
+    rescue => _
+      @logger.warn("doris stream load response is not a valid JSON:\n#{response}")
+    end
+
+    status = response_json["Status"]
-      status = response_json["Status"]
+    if status == 'Label Already Exists'
+      @logger.warn("Label already exists: #{response_json['Label']}, skip #{event_num} records:\n#{response}")
+      return
+    end
-      if status == 'Label Already Exists'
-        @logger.warn("Label already exists: #{response_json['Label']}, skip #{event_num} records.")
-        break
+    if status == "Success" || status == "Publish Timeout"
+      @total_bytes.addAndGet(documents.size)
+      @total_rows.addAndGet(event_num)
+      if @log_request or @logger.debug?
+        @logger.info("doris stream load response:\n#{response}")
       end
+      return
+    end
-      if status == "Success" || status == "Publish Timeout"
-        @total_bytes.addAndGet(documents.size)
-        @total_rows.addAndGet(event_num)
-        break
-      else
-        @logger.warn("FAILED doris stream load response:\n#{response}")
-
-        if @max_retries >= 0 && req_count > @max_retries
-          @logger.warn("DROP this batch after failed #{req_count} times.")
-          if @save_on_failure
-            @logger.warn("Try save to disk.Disk file path : #{@save_dir}/#{@table}_#{@save_file}")
-            save_to_disk(documents)
-          end
-          break
-        end
-
-        # sleep and then retry
-        sleep_for = sleep_for * 2
-        sleep_for = sleep_for <= 60 ? sleep_for : 60
-        sleep_rand = (sleep_for / 2) + (rand(0..sleep_for) / 2)
-        @logger.warn("Will do retry #{req_count} after sleep #{sleep_rand} secs.")
-        sleep(sleep_rand)
+    @logger.warn("FAILED doris stream load response:\n#{response}")
+    # if there are data quality issues, we do not retry
+    if (status == 'Fail' && response_json['Message'].start_with?("[DATA_QUALITY_ERROR]")) || (@max_retries >= 0 && req_count-1 > @max_retries)
+      # if @max_retries >= 0 && req_count-1 > @max_retries
+      @logger.warn("DROP this batch after failed #{req_count} times.")
+      if @save_on_failure
+        @logger.warn("Try save to disk.Disk file path : #{@save_dir}/#{@table}_#{@save_file}")
+        save_to_disk(documents)
       end
+      return
     end
+
+    # add to retry_queue
+    sleep_for = sleep_for_attempt(req_count)
+    req_count += 1
+    @logger.warn("Will do the #{req_count-1}th retry after #{sleep_for} secs.")
+    delay_event = DelayEvent.new(sleep_for, [documents, http_headers, event_num, req_count])
+    add_event_to_retry_queue(delay_event, req_count <= 1)

Review Comment:
   Why block just for `req_count <= 1`?
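Aside from the blocking question, the retry delay in this hunk comes from the sleep_for_attempt method added by the PR; it is reproduced below with a small driver loop (the loop and the printed output are only for illustration):

    # sleep_for_attempt as added by the PR: quadratic backoff capped at 60s,
    # then jittered to between half and the full capped value.
    def sleep_for_attempt(attempt)
      sleep_for = attempt**2
      sleep_for = sleep_for <= 60 ? sleep_for : 60
      (sleep_for/2) + (rand(0..sleep_for)/2)
    end

    # Illustration only: print the delay chosen for the first few retries.
    (1..9).each do |attempt|
      puts "retry #{attempt}: sleep #{sleep_for_attempt(attempt)}s"
    end
    # Integer division means retry 1 sleeps 0s, retry 3 sleeps 4..8s,
    # and retries 8 and beyond are capped to the 30..60s range.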