1 year ago
#351933
Jose Roberto Nava Morales
error_class=Fluent::Plugin::Buffer::BufferChunkOverflowError error="a xxxx bytes record (nth: 0) is larger than buffer chunk limit size (10240)"
I have the following problem. I am setting up a Fluentd (td-agent) server, but I get the following error:
2022-03-29 23:35:44 +0000 [error]: #0 unexpected error on reading data host="192.190.204.60" port=12770 error_class=Fluent::Plugin::Buffer::BufferChunkOverflowError error="a 28833 bytes record (nth: 0) is larger than buffer chunk limit size (10240)"
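For context, the only chunk limit I set explicitly is chunk_limit_size 300K on the opensearch buffer (shown below), so I do not know which buffer the 10240-byte limit in the message belongs to. As far as I understand, chunk_limit_size is set per output inside its <buffer> section, roughly like this (just a sketch, not my real config; the 1M value is an arbitrary example):

<buffer tag, time>
  # example only: chunk_limit_size must be larger than the biggest single record
  chunk_limit_size 1M
  overflow_action throw_exception
</buffer>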
My configuration file is:
[root@ip-172-32-5-193 bin]# cat /etc/td-agent/td-agent.conf
<match fluent.**>
  @type null
</match>
<source>
  @type tcp
  port 10514
  bind "0.0.0.0"
  protocol_type tcp
  tag "syslog"
  <parse>
    @type none
  </parse>
</source>
<filter syslog.**>
  @type parser
  key_name message
  hash_value_field messages
  <parse>
    @type json
  </parse>
</filter>
<source>
  @type syslog
  port 24224
  bind "0.0.0.0"
  protocol_type tcp
  tag "syslog"
  <parse>
    @type "syslog"
    keep_time_key true
    with_priority true
    message_format rfc3164
    time_format "%Y-%m-%dT%H:%M:%SZ"
  </parse>
</source>
<match **>
  @type stdout
  <buffer>
    @type "memory"
    flush_interval 1s
  </buffer>
</match>
<match syslog.**>
  @type copy
  <store>
    @type opensearch
    host 192.168.1.3
    port 443
    user logstash
    password logstash
    scheme https
    index_name qafluentd.stg.${tag}.%Y%m%d%H
    <buffer tag, time>
      timekey 1h # chunks per hours ("3600" also available)
      flush_mode interval
      flush_interval 30s
      flush_thread_count 2
      retry_max_interval 180s
      retry_wait 2s
      retry_timeout 96h
      chunk_limit_size 300K
      chunk_limit_records 1000000 # A large number
      delayed_commit_timeout 150s
      overflow_action throw_exception
    </buffer>
    include_tag_key true
    tag_key @log_name
    include_timestamp true # defaults to false
    logstash_format true
  </store>
</match>
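In case it is useful, this is how I push a quick test record into the tcp input on port 10514 from the same host (just a sanity check, not part of the setup; it assumes some netcat variant is installed, and flag behaviour differs between nc versions):

# one-off test against the tcp input on port 10514
echo '{"message":"test record"}' | nc 127.0.0.1 10514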
This is what I get when I run td-agent in the foreground:
[root@ip-172-32-5-193 bin]# td-agent
2022-03-30 00:06:30 +0000 [info]: parsing config file is succeeded path="/etc/td-agent/td-agent.conf"
2022-03-30 00:06:30 +0000 [info]: gem 'fluent-plugin-elasticsearch' version '5.1.4'
2022-03-30 00:06:30 +0000 [info]: gem 'fluent-plugin-flowcounter-simple' version '0.1.0'
2022-03-30 00:06:30 +0000 [info]: gem 'fluent-plugin-json' version '0.2.0'
2022-03-30 00:06:30 +0000 [info]: gem 'fluent-plugin-kafka' version '0.17.3'
2022-03-30 00:06:30 +0000 [info]: gem 'fluent-plugin-opensearch' version '1.0.2'
2022-03-30 00:06:30 +0000 [info]: gem 'fluent-plugin-prometheus' version '2.0.2'
2022-03-30 00:06:30 +0000 [info]: gem 'fluent-plugin-prometheus_pushgateway' version '0.1.0'
2022-03-30 00:06:30 +0000 [info]: gem 'fluent-plugin-record-modifier' version '2.1.0'
2022-03-30 00:06:30 +0000 [info]: gem 'fluent-plugin-rewrite-tag-filter' version '2.4.0'
2022-03-30 00:06:30 +0000 [info]: gem 'fluent-plugin-s3' version '1.6.1'
2022-03-30 00:06:30 +0000 [info]: gem 'fluent-plugin-sd-dns' version '0.1.0'
2022-03-30 00:06:30 +0000 [info]: gem 'fluent-plugin-systemd' version '1.0.5'
2022-03-30 00:06:30 +0000 [info]: gem 'fluent-plugin-td' version '1.1.0'
2022-03-30 00:06:30 +0000 [info]: gem 'fluent-plugin-utmpx' version '0.5.0'
2022-03-30 00:06:30 +0000 [info]: gem 'fluent-plugin-webhdfs' version '1.5.0'
2022-03-30 00:06:30 +0000 [info]: gem 'fluentd' version '1.14.3'
2022-03-30 00:06:31 +0000 [warn]: 'protocol_type' parameter is deprecated: use transport directive
2022-03-30 00:06:31 +0000 [warn]: define <match fluent.**> to capture fluentd logs in top level is deprecated. Use <label @FLUENT_LOG> instead
2022-03-30 00:06:31 +0000 [info]: using configuration file: <ROOT>
<match fluent.**>
@type null
</match>
<source>
@type tcp
port 10514
bind "0.0.0.0"
protocol_type tcp
tag "syslog"
<parse>
@type "none"
</parse>
</source>
<filter syslog.**>
@type parser
key_name "message"
hash_value_field "messages"
<parse>
@type "json"
</parse>
</filter>
<source>
@type syslog
port 24224
bind "0.0.0.0"
protocol_type tcp
tag "syslog"
<parse>
@type "syslog"
keep_time_key true
with_priority true
message_format rfc3164
time_format "%Y-%m-%dT%H:%M:%SZ"
</parse>
</source>
<match **>
@type stdout
<buffer>
@type "memory"
flush_interval 1s
</buffer>
</match>
<match syslog.**>
@type copy
<store>
@type "opensearch"
host "192.168.1.3"
port 443
user "logstash"
password xxxxxx
scheme https
index_name "qafluentd.stg.${tag}.%Y%m%d%H"
include_tag_key true
tag_key "@log_name"
include_timestamp true
logstash_format true
<buffer tag, time>
timekey 1h
flush_mode interval
flush_interval 30s
flush_thread_count 2
retry_max_interval 180s
retry_wait 2s
retry_timeout 96h
chunk_limit_size 300K
chunk_limit_records 1000000
delayed_commit_timeout 150s
overflow_action throw_exception
</buffer>
</store>
</match>
</ROOT>
2022-03-30 00:06:31 +0000 [info]: starting fluentd-1.14.3 pid=23346 ruby="2.7.5"
2022-03-30 00:06:31 +0000 [info]: spawn command to main: cmdline=["/opt/td-agent/bin/ruby", "-Eascii-8bit:ascii-8bit", "/sbin/td-agent", "--under-supervisor"]
2022-03-30 00:06:31 +0000 [info]: adding match pattern="fluent.**" type="null"
2022-03-30 00:06:31 +0000 [info]: adding filter pattern="syslog.**" type="parser"
2022-03-30 00:06:31 +0000 [info]: adding match pattern="**" type="stdout"
2022-03-30 00:06:31 +0000 [info]: adding match pattern="syslog.**" type="copy"
^C2022-03-30 00:06:32 +0000 [info]: Received graceful stop
2022-03-30 00:06:32 +0000 [info]: adding source type="tcp"
2022-03-30 00:06:32 +0000 [info]: adding source type="syslog"
2022-03-30 00:06:32 +0000 [warn]: #0 'protocol_type' parameter is deprecated: use transport directive
2022-03-30 00:06:32 +0000 [warn]: #0 define <match fluent.**> to capture fluentd logs in top level is deprecated. Use <label @FLUENT_LOG> instead
2022-03-30 00:06:32 +0000 [warn]: parameter 'protocol_type' in <source>
@type tcp
port 10514
bind "0.0.0.0"
protocol_type tcp
tag "syslog"
<parse>
@type "none"
</parse>
</source> is not used.
2022-03-30 00:06:32 +0000 [info]: #0 starting fluentd worker pid=23352 ppid=23346 worker=0
2022-03-30 00:06:32 +0000 [info]: #0 listening syslog socket on 0.0.0.0:24224 with tcp
2022-03-30 00:06:32 +0000 [info]: Worker 0 finished with status 1
And the td-agent log shows the following:
2022-03-30 00:02:07 +0000 [info]: parsing config file is succeeded path="/etc/td-agent/td-agent.conf"
2022-03-30 00:02:07 +0000 [info]: gem 'fluent-plugin-elasticsearch' version '5.1.4'
2022-03-30 00:02:07 +0000 [info]: gem 'fluent-plugin-flowcounter-simple' version '0.1.0'
2022-03-30 00:02:07 +0000 [info]: gem 'fluent-plugin-json' version '0.2.0'
2022-03-30 00:02:07 +0000 [info]: gem 'fluent-plugin-kafka' version '0.17.3'
2022-03-30 00:02:07 +0000 [info]: gem 'fluent-plugin-opensearch' version '1.0.2'
2022-03-30 00:02:07 +0000 [info]: gem 'fluent-plugin-prometheus' version '2.0.2'
2022-03-30 00:02:07 +0000 [info]: gem 'fluent-plugin-prometheus_pushgateway' version '0.1.0'
2022-03-30 00:02:07 +0000 [info]: gem 'fluent-plugin-record-modifier' version '2.1.0'
2022-03-30 00:02:07 +0000 [info]: gem 'fluent-plugin-rewrite-tag-filter' version '2.4.0'
2022-03-30 00:02:07 +0000 [info]: gem 'fluent-plugin-s3' version '1.6.1'
2022-03-30 00:02:07 +0000 [info]: gem 'fluent-plugin-sd-dns' version '0.1.0'
2022-03-30 00:02:07 +0000 [info]: gem 'fluent-plugin-systemd' version '1.0.5'
2022-03-30 00:02:07 +0000 [info]: gem 'fluent-plugin-td' version '1.1.0'
2022-03-30 00:02:07 +0000 [info]: gem 'fluent-plugin-utmpx' version '0.5.0'
2022-03-30 00:02:07 +0000 [info]: gem 'fluent-plugin-webhdfs' version '1.5.0'
2022-03-30 00:02:07 +0000 [info]: gem 'fluentd' version '1.14.3'
2022-03-30 00:02:08 +0000 [warn]: 'protocol_type' parameter is deprecated: use transport directive
2022-03-30 00:02:08 +0000 [warn]: define <match fluent.**> to capture fluentd logs in top level is deprecated. Use <label @FLUENT_LOG> instead
2022-03-30 00:02:08 +0000 [info]: using configuration file: <ROOT>
<match fluent.**>
@type null
</match>
<source>
@type tcp
port 10514
bind "0.0.0.0"
protocol_type tcp
tag "syslog"
<parse>
@type "none"
</parse>
</source>
<filter syslog.**>
@type parser
key_name "message"
hash_value_field "messages"
<parse>
@type "json"
</parse>
</filter>
<source>
@type syslog
port 24224
bind "0.0.0.0"
protocol_type tcp
tag "syslog"
<parse>
@type "syslog"
keep_time_key true
with_priority true
message_format rfc3164
time_format "%Y-%m-%dT%H:%M:%SZ"
</parse>
</source>
<match **>
@type stdout
<buffer>
@type "memory"
flush_interval 1s
</buffer>
</match>
<match syslog.**>
@type copy
<store>
@type "opensearch"
host "192.168,.1.3"
port 443
user "logstash"
password xxxxxx
scheme https
index_name "qafluentd.stg.${tag}.%Y%m%d%H"
include_tag_key true
tag_key "@log_name"
include_timestamp true
logstash_format true
<buffer tag, time>
timekey 1h
flush_mode interval
flush_interval 30s
flush_thread_count 2
retry_max_interval 180s
retry_wait 2s
retry_timeout 96h
chunk_limit_size 300K
chunk_limit_records 1000000
delayed_commit_timeout 150s
overflow_action throw_exception
</buffer>
</store>
</match>
</ROOT>
2022-03-30 00:02:08 +0000 [info]: starting fluentd-1.14.3 pid=23030 ruby="2.7.5"
2022-03-30 00:02:08 +0000 [info]: spawn command to main: cmdline=["/opt/td-agent/bin/ruby", "-Eascii-8bit:ascii-8bit", "/opt/td-agent/bin/fluentd", "--log", "/var/log/td-agent/td-agent.log", "--daemon", "/var/run/td-agent/td-agent.pid", "--under-supervisor"]
2022-03-30 00:02:08 +0000 [info]: adding match pattern="fluent.**" type="null"
2022-03-30 00:02:08 +0000 [info]: adding filter pattern="syslog.**" type="parser"
2022-03-30 00:02:08 +0000 [info]: adding match pattern="**" type="stdout"
2022-03-30 00:02:08 +0000 [info]: adding match pattern="syslog.**" type="copy"
2022-03-30 00:02:09 +0000 [info]: adding source type="tcp"
2022-03-30 00:02:09 +0000 [info]: adding source type="syslog"
2022-03-30 00:02:09 +0000 [warn]: #0 'protocol_type' parameter is deprecated: use transport directive
2022-03-30 00:02:09 +0000 [warn]: #0 define <match fluent.**> to capture fluentd logs in top level is deprecated. Use <label @FLUENT_LOG> instead
2022-03-30 00:02:09 +0000 [warn]: parameter 'protocol_type' in <source>
@type tcp
port 10514
bind "0.0.0.0"
protocol_type tcp
tag "syslog"
<parse>
@type "none"
</parse>
</source> is not used.
2022-03-30 00:02:09 +0000 [info]: #0 starting fluentd worker pid=23039 ppid=23036 worker=0
2022-03-30 00:02:09 +0000 [info]: #0 listening syslog socket on 0.0.0.0:24224 with tcp
2022-03-30 00:02:09 +0000 [info]: #0 listening tcp socket bind="0.0.0.0" port=10514
2022-03-30 00:02:09 +0000 [info]: #0 fluentd worker is now running worker=0
2022-03-30 00:02:12 +0000 [warn]: #0 emit transaction failed: error_class=Fluent::Plugin::Buffer::BufferChunkOverflowError error="a 28833 bytes record (nth: 0) is larger than buffer chunk limit size (10240)" location="/opt/td-agent/lib/ruby/gems/2.7.0/gems/fluentd-1.14.3/lib/fluent/plugin/buffer.rb:454:in `write'" tag="syslog"
2022-03-30 00:02:12 +0000 [warn]: #0 /opt/td-agent/lib/ruby/gems/2.7.0/gems/fluentd-1.14.3/lib/fluent/plugin/buffer.rb:454:in `write'
2022-03-30 00:02:12 +0000 [warn]: #0 /opt/td-agent/lib/ruby/gems/2.7.0/gems/fluentd-1.14.3/lib/fluent/plugin/output.rb:1049:in `block in handle_stream_simple'
2022-03-30 00:02:12 +0000 [warn]: #0 /opt/td-agent/lib/ruby/gems/2.7.0/gems/fluentd-1.14.3/lib/fluent/plugin/output.rb:931:in `write_guard'
2022-03-30 00:02:12 +0000 [warn]: #0 /opt/td-agent/lib/ruby/gems/2.7.0/gems/fluentd-1.14.3/lib/fluent/plugin/output.rb:1048:in `handle_stream_simple'
2022-03-30 00:02:12 +0000 [warn]: #0 /opt/td-agent/lib/ruby/gems/2.7.0/gems/fluentd-1.14.3/lib/fluent/plugin/output.rb:921:in `execute_chunking'
2022-03-30 00:02:12 +0000 [warn]: #0 /opt/td-agent/lib/ruby/gems/2.7.0/gems/fluentd-1.14.3/lib/fluent/plugin/output.rb:851:in `emit_buffered'
2022-03-30 00:02:12 +0000 [warn]: #0 /opt/td-agent/lib/ruby/gems/2.7.0/gems/fluentd-1.14.3/lib/fluent/event_router.rb:181:in `emit_events'
2022-03-30 00:02:12 +0000 [warn]: #0 /opt/td-agent/lib/ruby/gems/2.7.0/gems/fluentd-1.14.3/lib/fluent/event_router.rb:115:in `emit_stream'
2022-03-30 00:02:12 +0000 [warn]: #0 /opt/td-agent/lib/ruby/gems/2.7.0/gems/fluentd-1.14.3/lib/fluent/plugin/in_tcp.rb:160:in `block in start'
2022-03-30 00:02:12 +0000 [warn]: #0 /opt/td-agent/lib/ruby/gems/2.7.0/gems/fluentd-1.14.3/lib/fluent/plugin_helper/server.rb:622:in `on_read_with_connection'
2022-03-30 00:02:12 +0000 [warn]: #0 /opt/td-agent/lib/ruby/gems/2.7.0/gems/cool.io-1.7.1/lib/cool.io/io.rb:123:in `on_readable'
2022-03-30 00:02:12 +0000 [warn]: #0 /opt/td-agent/lib/ruby/gems/2.7.0/gems/cool.io-1.7.1/lib/cool.io/io.rb:186:in `on_readable'
2022-03-30 00:02:12 +0000 [warn]: #0 /opt/td-agent/lib/ruby/gems/2.7.0/gems/cool.io-1.7.1/lib/cool.io/loop.rb:88:in `run_once'
2022-03-30 00:02:12 +0000 [warn]: #0 /opt/td-agent/lib/ruby/gems/2.7.0/gems/cool.io-1.7.1/lib/cool.io/loop.rb:88:in `run'
2022-03-30 00:02:12 +0000 [warn]: #0 /opt/td-agent/lib/ruby/gems/2.7.0/gems/fluentd-1.14.3/lib/fluent/plugin_helper/event_loop.rb:93:in `block in start'
2022-03-30 00:02:12 +0000 [warn]: #0 /opt/td-agent/lib/ruby/gems/2.7.0/gems/fluentd-1.14.3/lib/fluent/plugin_helper/thread.rb:78:in `block in thread_create'
2022-03-30 00:02:12 +0000 [error]: #0 unexpected error on reading data host="187.190.204.60" port=12597 error_class=Fluent::Plugin::Buffer::BufferChunkOverflowError error="a 28833 bytes record (nth: 0) is larger than buffer chunk limit size (10240)"
2022-03-30 00:02:12 +0000 [error]: #0 suppressed same stacktrace
2022-03-30 00:02:15 +0000 [warn]: #0 emit transaction failed: error_class=Fluent::Plugin::Buffer::BufferChunkOverflowError error="a 28833 bytes record (nth: 0) is larger than buffer chunk limit size (10240)" location="/opt/td-agent/lib/ruby/gems/2.7.0/gems/fluentd-1.14.3/lib/fluent/plugin/buffer.rb:454:in `write'" tag="syslog"
2022-03-30 00:02:15 +0000 [warn]: #0 suppressed same stacktrace
2022-03-30 00:02:15 +0000 [error]: #0 unexpected error on reading data host="187.190.204.60" port=12603 error_class=Fluent::Plugin::Buffer::BufferChunkOverflowError error="a 28833 bytes record (nth: 0) is larger than buffer chunk limit size (10240)"
2022-03-30 00:02:15 +0000 [error]: #0 suppressed same stacktrace
[root@ip-172-32-5-193 bin]#
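The only idea I have so far is that the 10240-byte limit might belong to a buffer I never configured explicitly, perhaps the buffered <match **> stdout section, since my opensearch buffer already uses chunk_limit_size 300K. Something like the following is what I would try, but I am not sure it is the right fix (sketch only; the 1M value is a guess that is comfortably above the 28833-byte record in the error):

<match **>
  @type stdout
  <buffer>
    @type memory
    flush_interval 1s
    # guess: only needs to exceed the largest single record (28833 bytes here)
    chunk_limit_size 1M
  </buffer>
</match>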
Any ideas?
Tags: linux, fluentd, opensearch