
Newbie asking for help: error when using the logstash-input-jdbc plugin to import data from MySQL into ES

Logstash | Author: lz8086 | Posted on 2017-05-11 | Views: 9484

 
input {
  jdbc {
    jdbc_driver_library => "/usr/local/logstash-5.3.0/mysql-connector-java-6.0.6/mysql-connector-java-6.0.6-bin.jar"
    jdbc_driver_class => "com.mysql.cj.jdbc.Driver"
    jdbc_connection_string => "jdbc:mysql://ip:3306/test?characterEncoding=utf8&useSSL=false"
    jdbc_user => "root"
    jdbc_password => "123"
    jdbc_paging_enabled => "true"
    jdbc_page_size => "50000"
    statement => "select * from estest"
    # run the query once every minute
    schedule => "* * * * *"
    type => "nametype"
  }
}

output {
  elasticsearch {
    hosts => "ip:9200"
    index => "esinput"
    # use the MySQL id column as the document id so re-runs update instead of duplicating
    document_id => "%{id}"
    user => "elastic"
    password => "changeme"
  }
  stdout {
    codec => json_lines
  }
}
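Once the pipeline is running, a quick way to check whether rows from estest are actually arriving in the esinput index is to query the _count API with the same credentials the output uses (the host is left as the ip placeholder used in the config above):

curl -u elastic:changeme "http://ip:9200/esinput/_count?pretty"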
[root@vdljavacoe36 logstash_jdbc_test]# ../logstash -f ./ownjdbc.conf 
Sending Logstash's logs to /usr/local/logstash-5.3.0/logs which is now configured via log4j2.properties
[2017-05-11T16:13:40,089][INFO ][logstash.outputs.elasticsearch] Elasticsearch pool URLs updated {:changes=>{:removed=>[], :added=>[http://elastic:xxxxxx@172.16.130.91:9200/]}}
[2017-05-11T16:13:40,096][INFO ][logstash.outputs.elasticsearch] Running health check to see if an Elasticsearch connection is working {:healthcheck_url=>http://elastic:xxxxxx@172.16.130.91:9200/, :path=>"/"}
[2017-05-11T16:13:40,345][WARN ][logstash.outputs.elasticsearch] Restored connection to ES instance {:url=>#<URI::HTTP:0x4758b342 URL:http://elastic:xxxxxx@172.16.130.91:9200/>}
[2017-05-11T16:13:40,347][INFO ][logstash.outputs.elasticsearch] Using mapping template from {:path=>nil}
[2017-05-11T16:13:40,414][INFO ][logstash.outputs.elasticsearch] Attempting to install template {:manage_template=>{"template"=>"logstash-*", "version"=>50001, "settings"=>{"index.refresh_interval"=>"5s"}, "mappings"=>{"_default_"=>{"_all"=>{"enabled"=>true, "norms"=>false}, "dynamic_templates"=>[{"message_field"=>{"path_match"=>"message", "match_mapping_type"=>"string", "mapping"=>{"type"=>"text", "norms"=>false}}}, {"string_fields"=>{"match"=>"*", "match_mapping_type"=>"string", "mapping"=>{"type"=>"text", "norms"=>false, "fields"=>{"keyword"=>{"type"=>"keyword"}}}}}], "properties"=>{"@timestamp"=>{"type"=>"date", "include_in_all"=>false}, "@version"=>{"type"=>"keyword", "include_in_all"=>false}, "geoip"=>{"dynamic"=>true, "properties"=>{"ip"=>{"type"=>"ip"}, "location"=>{"type"=>"geo_point"}, "latitude"=>{"type"=>"half_float"}, "longitude"=>{"type"=>"half_float"}}}}}}}}
[2017-05-11T16:13:40,437][INFO ][logstash.outputs.elasticsearch] New Elasticsearch output {:class=>"LogStash::Outputs::ElasticSearch", :hosts=>[#<URI::Generic:0xd24b58f URL://172.16.130.91:9200>]}
[2017-05-11T16:13:40,440][INFO ][logstash.pipeline ] Starting pipeline {"id"=>"main", "pipeline.workers"=>4, "pipeline.batch.size"=>125, "pipeline.batch.delay"=>5, "pipeline.max_inflight"=>500}
[2017-05-11T16:13:40,613][INFO ][logstash.pipeline ] Pipeline main started
[2017-05-11T16:13:40,800][INFO ][logstash.agent ] Successfully started Logstash API endpoint {:port=>9600}
Loading class `com.mysql.jdbc.Driver'. This is deprecated. The new driver class is `com.mysql.cj.jdbc.Driver'. The driver is automatically registered via the SPI and manual loading of the driver class is generally unnecessary.
[2017-05-11T16:14:01,376][WARN ][logstash.inputs.jdbc ] Failed test_connection.
{ 235556 rufus-scheduler intercepted an error:
235556 job:
235556 Rufus::Scheduler::CronJob "* * * * *" {}
235556 error:
235556 235556
235556 NoMethodError
235556 undefined method `close_jdbc_connection' for #<Sequel::JDBC::Database:0x4e67d8dc>
235556 /usr/local/logstash-5.3.0/vendor/local_gems/621ff018/logstash-input-jdbc-4.2.0/lib/logstash/plugin_mixins/jdbc.rb:174:in `open_jdbc_connection'
235556 /usr/local/logstash-5.3.0/vendor/local_gems/621ff018/logstash-input-jdbc-4.2.0/lib/logstash/plugin_mixins/jdbc.rb:217:in `execute_statement'
235556 /usr/local/logstash-5.3.0/vendor/local_gems/621ff018/logstash-input-jdbc-4.2.0/lib/logstash/inputs/jdbc.rb:272:in `execute_query'
235556 /usr/local/logstash-5.3.0/vendor/local_gems/621ff018/logstash-input-jdbc-4.2.0/lib/logstash/inputs/jdbc.rb:250:in `run'
235556 org/jruby/RubyProc.java:281:in `call'
235556 /usr/local/logstash-5.3.0/vendor/bundle/jruby/1.9/gems/rufus-scheduler-3.0.9/lib/rufus/scheduler/jobs.rb:234:in `do_call'
235556 /usr/local/logstash-5.3.0/vendor/bundle/jruby/1.9/gems/rufus-scheduler-3.0.9/lib/rufus/scheduler/jobs.rb:258:in `do_trigger'
235556 /usr/local/logstash-5.3.0/vendor/bundle/jruby/1.9/gems/rufus-scheduler-3.0.9/lib/rufus/scheduler/jobs.rb:300:in `start_work_thread'
235556 org/jruby/RubyProc.java:281:in `call'
235556 /usr/local/logstash-5.3.0/vendor/bundle/jruby/1.9/gems/rufus-scheduler-3.0.9/lib/rufus/scheduler/jobs.rb:299:in `start_work_thread'
235556 org/jruby/RubyKernel.java:1479:in `loop'
235556 /usr/local/logstash-5.3.0/vendor/bundle/jruby/1.9/gems/rufus-scheduler-3.0.9/lib/rufus/scheduler/jobs.rb:289:in `start_work_thread'
235556 tz:
235556 ENV['TZ']:
235556 Time.now: 2017-05-11 16:14:01 +0800
235556 scheduler:
235556 object_id: 205642
235556 opts:
235556 {:max_work_threads=>1}
235556 frequency: 0.3
235556 scheduler_lock: #<Rufus::Scheduler::NullLock:0xa32e517>
235556 trigger_lock: #<Rufus::Scheduler::NullLock:0x19193761>
235556 uptime: 20.763 (20s763)
235556 down?: false
235556 threads: 2
235556 thread: #<Thread:0x4a4d34de>
235556 thread_key: rufus_scheduler_205642
235556 work_threads: 1
235556 active: 1
235556 vacant: 0
235556 max_work_threads: 1
235556 mutexes: {}
235556 jobs: 1
235556 at_jobs: 0
235556 in_jobs: 0
235556 every_jobs: 0
235556 interval_jobs: 0
235556 cron_jobs: 1
235556 running_jobs: 1
235556 work_queue: 0
} 235556 .
Attachment 1 is my config file and attachment 2 is the error message (both pasted above). Thanks in advance.

lz8086 - ES rookie


Found the cause: the MySQL IP address in the connection string was wrong. Silly mistake on my part.
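Since the root cause was an unreachable MySQL host (which is also what the earlier "Failed test_connection" warning was pointing at), a quick sanity check before digging into the plugin is to verify the host, port and credentials from the shell first; the host below is a placeholder for whatever is in jdbc_connection_string:

# is the MySQL port reachable at all?
nc -vz <mysql-host> 3306
# can we log in with the same credentials the jdbc input uses?
mysql -h <mysql-host> -P 3306 -u root -p123 -e "select 1"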
