xiaokang commented on code in PR #44999:
URL: https://github.com/apache/doris/pull/44999#discussion_r1897568480

##########
extension/logstash/lib/logstash/outputs/doris.rb:
##########
@@ -131,22 +132,35 @@ def register
            end
        end

+        # Run named Timer as daemon thread
+        @timer = java.util.Timer.new("Doris Output #{self.params['id']}", true)
+        # The queue in Timer is unbounded and uncontrollable, so use a new queue to control the amount
+        @count_block_queue = java.util.concurrent.ArrayBlockingQueue.new(128)

Review Comment:
   I think you should use LinkedBlockingQueue here, since inserts and deletes happen very often.
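
For illustration, a minimal JRuby sketch of the suggested swap (hypothetical, not code from the PR; the capacity of 128 is carried over from the diff above). One common rationale: `LinkedBlockingQueue` uses separate locks for `put` and `take`, so frequent concurrent inserts and removals contend less than on `ArrayBlockingQueue`, which guards both ends with a single lock.

```ruby
require 'java'

# Same capacity the diff above gives @count_block_queue
count_block_queue = java.util.concurrent.LinkedBlockingQueue.new(128)

count_block_queue.put(0)            # blocks once 128 entries are pending
permit = count_block_queue.take     # blocks until an entry is available
```
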
##########
extension/logstash/lib/logstash/outputs/doris.rb:
##########
@@ -131,22 +132,35 @@ def register
            end
        end

+        # Run named Timer as daemon thread
+        @timer = java.util.Timer.new("Doris Output #{self.params['id']}", true)
+        # The queue in Timer is unbounded and uncontrollable, so use a new queue to control the amount
+        @count_block_queue = java.util.concurrent.ArrayBlockingQueue.new(128)
+
+        @retry_queue = Queue.new
+        retry_thread = Thread.new do
+            while popped = @retry_queue.pop
+                documents, http_headers, event_num, req_count = popped
+                handle_request(documents, http_headers, event_num, req_count)
+            end
+        end
+
        print_plugin_info()
    end # def register

+    def close
+        @timer.cancel
+    end
+
    def multi_receive(events)
        return if events.empty?
        send_events(events)
    end

    private

    def send_events(events)
-        documents = ""
-        event_num = 0
-        events.each do |event|
-            documents << event_body(event) << "\n"
-            event_num += 1
-        end
+        documents = events.map { |event| event_body(event) }.join("\n")

Review Comment:
   Is this just a refactor? Note that the old code also appended a trailing "\n" after the last event, which `join("\n")` drops.

##########
extension/logstash/lib/logstash/outputs/doris.rb:
##########
@@ -157,50 +171,56 @@ def send_events(events)
            http_headers["label"] = @label_prefix + "_" + @db + "_" + @table + "_" + Time.now.strftime('%Y%m%d_%H%M%S_%L_' + SecureRandom.uuid)
        end

-        req_count = 0
-        sleep_for = 1
-        while true
-            response = make_request(documents, http_headers, @http_query, @http_hosts.sample)
-
-            req_count += 1
-            response_json = {}
-            begin
-                response_json = JSON.parse(response.body)
-            rescue => e
-                @logger.warn("doris stream load response: #{response} is not a valid JSON")
-            end
+        handle_request(documents, http_headers, event_num, 1)
+    end
+
+    def sleep_for_attempt(attempt)
+        sleep_for = attempt**2
+        sleep_for = sleep_for <= 60 ? sleep_for : 60
+        (sleep_for/2) + (rand(0..sleep_for)/2)
+    end

-            status = response_json["Status"]
+    private
+    def handle_request(documents, http_headers, event_num, req_count)
+        response = make_request(documents, http_headers, @http_query, @http_hosts.sample)
+        response_json = {}
+        begin
+            response_json = JSON.parse(response.body)
+        rescue => _
+            @logger.warn("doris stream load response: #{response} is not a valid JSON")
+        end

-            if status == 'Label Already Exists'
-                @logger.warn("Label already exists: #{response_json['Label']}, skip #{event_num} records.")
-                break
-            end
+        status = response_json["Status"]

-            if status == "Success" || status == "Publish Timeout"
-                @total_bytes.addAndGet(documents.size)
-                @total_rows.addAndGet(event_num)
-                break
-            else
-                @logger.warn("FAILED doris stream load response:\n#{response}")
-
-                if @max_retries >= 0 && req_count > @max_retries
-                    @logger.warn("DROP this batch after failed #{req_count} times.")
-                    if @save_on_failure
-                        @logger.warn("Try save to disk.Disk file path : #{@save_dir}/#{@table}_#{@save_file}")
-                        save_to_disk(documents)
-                    end
-                    break
-                end
-
-                # sleep and then retry
-                sleep_for = sleep_for * 2
-                sleep_for = sleep_for <= 60 ? sleep_for : 60
-                sleep_rand = (sleep_for / 2) + (rand(0..sleep_for) / 2)
-                @logger.warn("Will do retry #{req_count} after sleep #{sleep_rand} secs.")
-                sleep(sleep_rand)
+        if status == 'Label Already Exists'
+            @logger.warn("Label already exists: #{response_json['Label']}, skip #{event_num} records.")
+            return
+        end
+
+        if status == "Success" || status == "Publish Timeout"
+            @total_bytes.addAndGet(documents.size)
+            @total_rows.addAndGet(event_num)
+            return
+        end
+
+        @logger.warn("FAILED doris stream load response:\n#{response}")
+        # if there are data quality issues, we do not retry
+        if (status == 'Fail' && response_json['Message'].start_with?("[DATA_QUALITY_ERROR]")) || (@max_retries >= 0 && req_count > @max_retries)
+            @logger.warn("DROP this batch after failed #{req_count} times.")
+            if @save_on_failure
+                @logger.warn("Try save to disk.Disk file path : #{@save_dir}/#{@table}_#{@save_file}")
+                save_to_disk(documents)
            end
+            return
        end
+
+        # add to retry_queue
+        sleep_for = sleep_for_attempt(req_count)
+        req_count += 1
+        @logger.warn("Will do retry #{req_count} after #{sleep_for} secs.")
+        timer_task = RetryTimerTask.new(@retry_queue, @count_block_queue, [documents, http_headers, event_num, req_count])
+        @count_block_queue.put(0)

Review Comment:
   Why put 0 into count_block_queue?
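
One reading of this pattern (an assumption, not confirmed by the PR): the element value is irrelevant, so a dummy `0` is inserted purely to occupy a slot; `put` then blocks once 128 retries are pending, backpressuring `send_events`. If that is the intent, a counting semaphore states it more directly. A hypothetical sketch, with `retry_permits` as an invented name:

```ruby
require 'java'

# 128 permits, mirroring the queue capacity in the diff above
retry_permits = java.util.concurrent.Semaphore.new(128)

retry_permits.acquire    # blocks when 128 retries are already in flight
# ... schedule the RetryTimerTask here ...
retry_permits.release    # called by the retry thread once it takes over the task
```
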
##########
extension/logstash/lib/logstash/outputs/doris.rb:
##########
@@ -157,50 +171,56 @@ def send_events(events)
            http_headers["label"] = @label_prefix + "_" + @db + "_" + @table + "_" + Time.now.strftime('%Y%m%d_%H%M%S_%L_' + SecureRandom.uuid)
        end

-        req_count = 0
-        sleep_for = 1
-        while true
-            response = make_request(documents, http_headers, @http_query, @http_hosts.sample)
-
-            req_count += 1
-            response_json = {}
-            begin
-                response_json = JSON.parse(response.body)
-            rescue => e
-                @logger.warn("doris stream load response: #{response} is not a valid JSON")
-            end
+        handle_request(documents, http_headers, event_num, 1)
+    end
+
+    def sleep_for_attempt(attempt)
+        sleep_for = attempt**2
+        sleep_for = sleep_for <= 60 ? sleep_for : 60
+        (sleep_for/2) + (rand(0..sleep_for)/2)
+    end

-            status = response_json["Status"]
+    private
+    def handle_request(documents, http_headers, event_num, req_count)
+        response = make_request(documents, http_headers, @http_query, @http_hosts.sample)
+        response_json = {}
+        begin
+            response_json = JSON.parse(response.body)
+        rescue => _
+            @logger.warn("doris stream load response: #{response} is not a valid JSON")
+        end

-            if status == 'Label Already Exists'
-                @logger.warn("Label already exists: #{response_json['Label']}, skip #{event_num} records.")
-                break
-            end
+        status = response_json["Status"]

-            if status == "Success" || status == "Publish Timeout"
-                @total_bytes.addAndGet(documents.size)
-                @total_rows.addAndGet(event_num)
-                break
-            else
-                @logger.warn("FAILED doris stream load response:\n#{response}")
-
-                if @max_retries >= 0 && req_count > @max_retries
-                    @logger.warn("DROP this batch after failed #{req_count} times.")
-                    if @save_on_failure
-                        @logger.warn("Try save to disk.Disk file path : #{@save_dir}/#{@table}_#{@save_file}")
-                        save_to_disk(documents)
-                    end
-                    break
-                end
-
-                # sleep and then retry
-                sleep_for = sleep_for * 2
-                sleep_for = sleep_for <= 60 ? sleep_for : 60
-                sleep_rand = (sleep_for / 2) + (rand(0..sleep_for) / 2)
-                @logger.warn("Will do retry #{req_count} after sleep #{sleep_rand} secs.")
-                sleep(sleep_rand)
+        if status == 'Label Already Exists'
+            @logger.warn("Label already exists: #{response_json['Label']}, skip #{event_num} records.")
+            return
+        end
+
+        if status == "Success" || status == "Publish Timeout"
+            @total_bytes.addAndGet(documents.size)
+            @total_rows.addAndGet(event_num)
+            return
+        end
+
+        @logger.warn("FAILED doris stream load response:\n#{response}")
+        # if there are data quality issues, we do not retry
+        if (status == 'Fail' && response_json['Message'].start_with?("[DATA_QUALITY_ERROR]")) || (@max_retries >= 0 && req_count > @max_retries)

Review Comment:
   It's not necessary to do a special check for DATA_QUALITY_ERROR, since that case should be handled by the max_filter_ratio config.

##########
extension/logstash/lib/logstash/outputs/doris.rb:
##########
@@ -157,50 +171,56 @@ def send_events(events)
            http_headers["label"] = @label_prefix + "_" + @db + "_" + @table + "_" + Time.now.strftime('%Y%m%d_%H%M%S_%L_' + SecureRandom.uuid)
        end

-        req_count = 0
-        sleep_for = 1
-        while true
-            response = make_request(documents, http_headers, @http_query, @http_hosts.sample)
-
-            req_count += 1
-            response_json = {}
-            begin
-                response_json = JSON.parse(response.body)
-            rescue => e
-                @logger.warn("doris stream load response: #{response} is not a valid JSON")
-            end
+        handle_request(documents, http_headers, event_num, 1)
+    end
+
+    def sleep_for_attempt(attempt)
+        sleep_for = attempt**2
+        sleep_for = sleep_for <= 60 ? sleep_for : 60
+        (sleep_for/2) + (rand(0..sleep_for)/2)
+    end

-            status = response_json["Status"]
+    private
+    def handle_request(documents, http_headers, event_num, req_count)
+        response = make_request(documents, http_headers, @http_query, @http_hosts.sample)
+        response_json = {}
+        begin
+            response_json = JSON.parse(response.body)
+        rescue => _
+            @logger.warn("doris stream load response: #{response} is not a valid JSON")

Review Comment:
   This should return or do something else here instead of just continuing.
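
A minimal sketch of the suggested guard (hypothetical, not code from the PR): bail out of `handle_request` when the body fails to parse, rather than falling through to the status checks with an empty hash:

```ruby
begin
    response_json = JSON.parse(response.body)
rescue => e
    @logger.warn("doris stream load response: #{response} is not a valid JSON")
    return   # or schedule a retry here, mirroring the non-Success branch below
end
```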