一坠七、安裝ElasticSearch(下面統(tǒng)稱es,版本6.0.0,環(huán)境windows10)
直接上下載地址:https://artifacts.elastic.co/downloads/elasticsearch/elasticsearch-6.0.0.zip
解壓后目錄如下:
啟動es灿渴,./bin/elasticsearch.bat
孽鸡;啟動成功如圖
默認cluster_name是elasticsearch和端口9200可以修改笼痛,需要修改在config/elasticsearch.yml;上圖
二再膳、安裝logstash
下載地址:https://artifacts.elastic.co/downloads/logstash/logstash-6.0.0.zip
解壓目錄
先安裝logstash-input-jdbc插件
./bin/logstash-plugin.bat install logstash-input-jdbc
在logstash目錄下創(chuàng)建config-mysql啄清,見圖4
創(chuàng)建配置文件load_data.conf藏古,配置文件隨便取名晨继,可以創(chuàng)建sql文件烟阐,也可以在conf配置文件中定義,具體下面有說明
先上配置文件內(nèi)容
# load_data.conf — Logstash pipeline that syncs two MySQL queries into Elasticsearch.
# Each jdbc input tags its events with a distinct `type` so the output section can
# route them to different indices.
input {
  stdin {
  }
  # Input 1: joined store-list view; the SQL lives in an external file
  # (statement_filepath). Runs every minute (cron-style schedule), paged
  # 50000 rows at a time. The :sql_last_value checkpoint is persisted in
  # last_run_metadata_path between runs.
  jdbc {
    jdbc_connection_string => "jdbc:mysql://127.0.0.1:3306/jfinal_club?characterEncoding=utf8&useSSL=false"
    jdbc_user => "root"
    jdbc_password => "root"
    jdbc_driver_library => "D:/ELK/6.0.0/logstash-6.0.0/config-mysql/mysql-connector-java-5.1.43.jar"
    jdbc_driver_class => "com.mysql.jdbc.Driver"
    jdbc_paging_enabled => "true"
    jdbc_page_size => "50000"
    statement_filepath => "D:/ELK/6.0.0/logstash-6.0.0/config-mysql/store_list.sql"
    schedule => "* * * * *"
    # use_column_value=false means :sql_last_value is the last RUN time,
    # not a tracked column value.
    use_column_value => false
    record_last_run => true
    last_run_metadata_path => "D:/ELK/6.0.0/logstash-6.0.0/config-mysql/run/store_list"
    type => "sl"
  }
  # Input 2: incremental sync of the `store` table; SQL is defined inline.
  # The "interval 8 hour" offsets the checkpoint (UTC) to the DB's local
  # timezone — NOTE(review): verify this matches your server timezone.
  jdbc {
    jdbc_connection_string => "jdbc:mysql://127.0.0.1:3306/jfinal_club?characterEncoding=utf8&useSSL=false"
    jdbc_user => "root"
    jdbc_password => "root"
    jdbc_driver_library => "D:/ELK/6.0.0/logstash-6.0.0/config-mysql/mysql-connector-java-5.1.43.jar"
    jdbc_driver_class => "com.mysql.jdbc.Driver"
    jdbc_paging_enabled => "true"
    jdbc_page_size => "50000"
    statement => "select * from store where updated > date_add(:sql_last_value, interval 8 hour)"
    schedule => "* * * * *"
    use_column_value => false
    record_last_run => true
    last_run_metadata_path => "D:/ELK/6.0.0/logstash-6.0.0/config-mysql/run/store_s"
    type => "st"
  }
}
filter {
  # Parse the raw `message` field as JSON, then drop the original text.
  json {
    source => "message"
    remove_field => ["message"]
  }
}
output {
  # Route by the `type` set on each jdbc input.
  if [type] == "sl" {
    elasticsearch {
      hosts => ["127.0.0.1:9200"]
      index => "store_list"
      document_type => "jdbc"
      # FIX: was "%{store_id}}" — the stray "}" was appended to every
      # document id (e.g. "123}"), breaking upsert-by-id deduplication.
      document_id => "%{store_id}"
    }
  }
  if [type] == "st" {
    elasticsearch {
      hosts => ["127.0.0.1:9200"]
      index => "store_st"
      document_type => "jdbc"
      # FIX: was "%{id}}" — same stray "}" defect as above.
      document_id => "%{id}"
    }
  }
  # Echo every event to the console for debugging.
  stdout {
    codec => json_lines
  }
}
字段解釋;具體的見:https://www.elastic.co/guide/en/logstash/current/plugins-inputs-jdbc.html
圖6中有個run目錄踱稍,在這里是用來存放:sql_last_value的時間值的
store_list.sql
先在es中生成index
PUT /store_list
{
  "settings": {
    "number_of_shards": 3,
    "number_of_replicas": 1
  },
  "mappings": {
    "jdbc": {
      "properties": {
        "@timestamp":        { "type": "date" },
        "@version":          { "type": "keyword" },
        "store_id":          { "type": "long" },
        "store_name":        { "type": "keyword" },
        "uid":               { "type": "text" },
        "telephone":         { "type": "text" },
        "street_id":         { "type": "text" },
        "detail":            { "type": "keyword" },
        "address":           { "type": "keyword" },
        "store_created":     { "type": "date" },
        "store_updated":     { "type": "date" },
        "detail_id":         { "type": "long" },
        "type_name":         { "type": "text" },
        "tag":               { "type": "keyword" },
        "overall_rating":    { "type": "text" },
        "navi_location_lng": { "type": "double" },
        "navi_location_lat": { "type": "double" },
        "detail_url":        { "type": "text" },
        "comment_num":       { "type": "integer" },
        "detail_created":    { "type": "date" },
        "detail_updated":    { "type": "date" },
        "location_id":       { "type": "long" },
        "lng":               { "type": "double" },
        "lat":               { "type": "double" }
      }
    }
  }
}
上面這種方式可以通過es管理工具執(zhí)行曲饱,比如kibana->dev tools
;或者使用curl
的方式也可以
# Create the "store_list" index with explicit settings and a "jdbc" mapping type
# via the Elasticsearch REST API (same payload as the Kibana Dev Tools request).
# The JSON document body is passed inline with -d as a single-quoted string.
curl -XPUT "http://localhost:9200/store_list" -H 'Content-Type: application/json' -d'
{
"settings": {
"number_of_shards": 3,
"number_of_replicas": 1
},
"mappings": {
"jdbc": {
"properties": {
"@timestamp": {
"type": "date"
},
"@version": {
"type": "keyword"
},
"store_id": {
"type": "long"
},
"store_name": {
"type": "keyword"
},
"uid": {
"type": "text"
},
"telephone": {
"type": "text"
},
"street_id": {
"type": "text"
},
"detail": {
"type": "keyword"
},
"address": {
"type": "keyword"
},
"store_created": {
"type": "date"
},
"store_updated": {
"type": "date"
},
"detail_id": {
"type": "long"
},
"type_name": {
"type": "text"
},
"tag": {
"type": "keyword"
},
"overall_rating": {
"type": "text"
},
"navi_location_lng": {
"type": "double"
},
"navi_location_lat": {
"type": "double"
},
"detail_url": {
"type": "text"
},
"comment_num": {
"type": "integer"
},
"detail_created": {
"type": "date"
},
"detail_updated": {
"type": "date"
},
"location_id": {
"type": "long"
},
"lng": {
"type": "double"
},
"lat": {
"type": "double"
}
}
}
}
}'
然后通過http://localhost:9200/store_list/查看字段生成情況
store_list就是index,相當于數(shù)據(jù)庫的database
然后回到logstash目錄下
執(zhí)行 nohup.exe ./bin/logstash.bat -f config-mysql/load_data.conf &
最好加上& 結(jié)尾,后臺運行
然后看數(shù)據(jù)庫同步情況
可能有些細節(jié)沒能寫全珠月,如果在集成中遇到什么情況扩淀,可以評論指出