I'm new to Heka. I built a config that should read our combined SSL log
and put it in Elasticsearch.  It generates no errors; the dashboard
identifies the file and its length and can read the file.  But only the
Heka metrics (all-report, memstat, and statmetric) go into ES.  The
log_format in the config is copied directly from the nginx.conf.

The config is in a gist at
https://gist.github.com/wwalker/faeb56fe131b20a29d25e542796e914f

and is here.  What is wrong with my config?

Thank you for your time!

[hekad]
# Global daemon settings: cap hekad at 8 OS threads.
maxprocs = 8

[TcpInput]
# Accept Heka-framed protobuf messages from other Heka agents on port 5565.
splitter = "HekaFramingSplitter"
decoder = "ProtobufDecoder"
address = ":5565"

# [StatAccumInput]

[Dashboard]
# Built-in web dashboard on port 4352.
type = "DashboardOutput"
address = ":4352"
# Refresh interval in seconds.
ticker_interval = 15

[TestWebserver]
# Tail the nginx access log and feed each line to CombinedLogDecoder.
type = "LogstreamerInput"
log_directory = "/var/log/nginx"
# Literal string ('...') so the regex backslash needs no escaping.
file_match = 'access\.log'
decoder = "CombinedLogDecoder"

[CombinedLogDecoder]
# Lua sandbox decoder that parses nginx access-log lines; configured
# via [CombinedLogDecoder.config] below.
type = "SandboxDecoder"
filename = "lua_decoders/nginx_access.lua"

[CombinedLogDecoder.config]
type = "combined"
user_agent_transform = true
# Must mirror the log_format directive in nginx.conf. NOTE: the original
# value was a single-quoted literal string broken across three lines —
# TOML literal strings ('...') cannot contain newlines, so the config did
# not parse as written (which is why no nginx messages reached ES). An
# access-log line has no embedded newlines, so the format belongs on one
# line; use ''' ... ''' only if a real newline is part of the value.
log_format = '$remote_addr - $remote_user [$time_local] $server_port $ssl_protocol/$ssl_cipher $upstream_addr "$request" $status $body_bytes_sent "$http_referer" "$http_user_agent" -- [$request_time]'

[ESJsonEncoder]
# One daily index per message Type (index name interpolated from the
# message's Type field and its timestamp).
index = "%{Type}-%{%Y.%m.%d}"
es_index_from_timestamp = true
type_name = "%{Type}"

# Rename Heka message fields on the way into ES documents.
[ESJsonEncoder.field_mappings]
Timestamp = "@timestamp"
Severity = "level"

[ElasticSearchOutput]
# Fixed: the original line ended with a stray ";" after the closing
# quote, which is a TOML syntax error (only whitespace or a comment may
# follow a value).
server = "http://10.90.17.241:9200"
# Flush a batch after 5000 ms or 10 messages, whichever comes first.
flush_interval = 5000
flush_count = 10
encoder = "ESJsonEncoder"
message_matcher = "Type != 'sync.log'"


-- 
Wayne Walker

“For me, I am driven by two main philosophies: know more today about
the world than I knew yesterday and lessen the suffering of others. You'd
be surprised how far that gets you.”
― Neil deGrasse Tyson
_______________________________________________
Heka mailing list
[email protected]
https://mail.mozilla.org/listinfo/heka

Reply via email to