
Logstash configuration for importing data from MySQL into Elasticsearch

The complete pipeline below uses the logstash-input-jdbc plugin to pull rows from a MySQL table on a schedule and write them into an Elasticsearch index; it is split into the usual input, filter and output sections.
input {
    stdin {
    }
    jdbc {
      # Database connection string
      jdbc_connection_string => "jdbc:mysql://172.0.0.1:3306/inssa?characterEncoding=UTF-8&useSSL=false"
      # Username and password
      jdbc_user => "root"
      jdbc_password => "root"
      jdbc_default_timezone => "UTC"
      # Path to the MySQL JDBC driver jar
      jdbc_driver_library => "/usr/local/logstash-5.1.1/mysql-connector-java-5.5.19.jar"
      # MySQL driver class
      jdbc_driver_class => "com.mysql.jdbc.Driver"
      jdbc_paging_enabled => "true"
      lowercase_column_names => "false"
      jdbc_page_size => "50000"
      # statement_filepath => "config-mysql/jdbc2logstash_history.sql"
      statement => "select * from operationlogs"
      schedule => "* * * * *"
      # Event type (used as the document type in the index)
      type => "operationlogs"
    }


}
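
With statement => "select * from operationlogs", the whole table is re-read every minute (schedule => "* * * * *"). If each run should only pick up new rows, the jdbc input plugin can track a column through its built-in :sql_last_value parameter. A minimal sketch, assuming operationLogId is the table's auto-increment primary key and that the bookmark-file path below is acceptable:

    # Incremental variant of the jdbc block above (goes inside input { })
    jdbc {
      # ... same connection, driver and paging settings as above ...
      use_column_value => true
      # Assumed auto-increment primary key of operationlogs
      tracking_column => "operationLogId"
      tracking_column_type => "numeric"
      record_last_run => true
      # Assumed location for the bookmark file
      last_run_metadata_path => "/usr/local/logstash-5.1.1/.logstash_jdbc_last_run"
      statement => "select * from operationlogs where operationLogId > :sql_last_value"
      schedule => "* * * * *"
      type => "operationlogs"
    }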

filter {

    mutate {
          convert => [ "publish_time", "string" ]
    }
    json {
        source => "message"
        remove_field => ["message"]
    }
    date {
        timezone => "Europe/Berlin"
        match => ["publish_time" , "ISO8601", "yyyy-MM-dd HH:mm:ss"]
    }
}
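
Two details of this filter block are worth noting. The json filter only acts on the message field, which rows coming from the jdbc input normally do not carry (each column arrives as its own field), so in practice it mainly affects events typed in through the stdin input used for testing. And because the date filter has no target, the parsed publish_time is written into @timestamp. If the parsed value should instead replace publish_time itself, a target can be added, as in this variation of the block above:

    date {
        timezone => "Europe/Berlin"
        match => ["publish_time" , "ISO8601", "yyyy-MM-dd HH:mm:ss"]
        # Write the parsed date back into publish_time instead of @timestamp
        target => "publish_time"
    }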

output {
    #if [type]=="operationlogs" {
        elasticsearch {
            hosts => "172.16.2.50:9201"
            # Index name
            index => "inssa"
            # The source table has an id column; use it as the document _id in the index
            document_id => "%{operationLogId}"
        }
        stdout {
            codec => json_lines
        }
    #}
}
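
Once the pipeline is saved to a file (the file name below is only an example), Logstash can be started against it, and a plain query confirms that documents reach the inssa index:

# Run the pipeline (config file name is an assumed example)
/usr/local/logstash-5.1.1/bin/logstash -f config-mysql/mysql-to-es.conf

# Fetch one document from the target index to verify the import
curl "http://172.16.2.50:9201/inssa/_search?size=1&pretty"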