es、logstash安装
================
* 安装包准备(下载好之后上传至linux服务器)

  * elasticsearch https://repo.huaweicloud.com/elasticsearch/
  * ik分词器 https://github.com/medcl/elasticsearch-analysis-ik/releases
  * logstash https://repo.huaweicloud.com/logstash/
  * kibana https://repo.huaweicloud.com/kibana/
  * elasticsearch-head https://github.com/mobz/elasticsearch-head
  * node.js https://nodejs.org/en/download/
* elasticsearch

  * 解压安装elasticsearch

```bash
tar -zxvf <elasticsearch-name>
```
  * 配置elasticsearch.yml文件

    * 打开配置文件

```bash
vim config/elasticsearch.yml
```

    * 在配置文件最下面输入以下内容后 :wq保存退出

```yaml
node.name: node-1
cluster.initial_master_nodes: "node-1"
network.host: 0.0.0.0
http.port: 9200 # 端口
http.cors.enabled: true # 开启跨域
http.cors.allow-origin: "*"
```
* 启动elasticsearch

  * 直接启动-执行bin目录下的elasticsearch

```bash
./bin/elasticsearch
```

  * 后台启动-执行bin目录下的elasticsearch 加上-d

```bash
./bin/elasticsearch -d
```

  * 使用ps、kill命令杀掉当前elasticsearch后台进程

```bash
ps -ef | grep elasticsearch
kill -9 <PID>
```
* 启动出现 max virtual memory areas vm.max\_map\_count \[65530\] is too low, increase to at least \[262144\] 的解决办法

  * 打开/etc/sysctl.conf

```bash
sudo vim /etc/sysctl.conf
```

  * 在最下面添加以下内容 然后:wq保存退出

```bash
vm.max_map_count=262144
```

  * 执行/sbin/sysctl -p立即生效

```bash
sudo /sbin/sysctl -p
```
* ik 分词器安装(版本号要与elasticsearch一致)

```bash
./bin/elasticsearch-plugin install https://github.com/medcl/elasticsearch-analysis-ik/releases/download/v6.3.0/elasticsearch-analysis-ik-6.3.0.zip
```
* logstash安装

  * 解压安装logstash

```bash
tar -zxvf <logstash-name>
```
  * 配置启动文件

    * 在config文件目录下创建mysql.conf启动文件

```conf
input {
  jdbc {
    jdbc_connection_string => "jdbc:mysql://192.168.3.51:3306/kf?serverTimezone=Asia/Shanghai&useSSL=false&allowPublicKeyRetrieval=true&allowMultiQueries=true"
    jdbc_user => "lduan"
    jdbc_password => "359"
    jdbc_driver_library => "/home/gd-service/software/mysql-connector-java-8.0.23/mysql-connector-java-8.0.23.jar"
    jdbc_driver_class => "com.mysql.cj.jdbc.Driver"
    # 定时器 多久执行一次SQL,默认是一分钟
    schedule => "* * * * *"
    # 是否清除 last_run_metadata_path 的记录,如果为真那么每次都相当于从头开始查询所有的数据库记录
    clean_run => "false"
    # 记录字段保存的位置
    last_run_metadata_path => "/home/gd-service/software/elk/logstash-7.8.0/station_parameter.txt"
    # statement => "SELECT * FROM purchase_history"
    # 执行的sql 文件路径+名称
    statement_filepath => "/home/gd-service/software/elk/logstash-7.8.0/mysql/jdbc.sql"
    # 是否分页
    jdbc_paging_enabled => "true"
    jdbc_page_size => "5000"
    # 索引类型
    type => "_doc"
    # 是否需要记录某列的值,用于实时同步更新
    use_column_value => true
    # 需要记录的字段,一般用于记录主键Id或者更新时间,用于sql查询最新数据
    tracking_column => "index"
    # 写入es数据的key默认会被转成小写,该字段用于控制是否小写
    lowercase_column_names => false
  }
  stdin {
  }
}

# ElasticSearch中默认使用UTC时间,和中国时间相差8小时,加入以下配置
filter {
  ruby {
    code => "event.set('timestamp',event.get('@timestamp').time.localtime+8*60*60)"
  }
  ruby {
    code => "event.set('@timestamp',event.get('timestamp'))"
  }
  mutate {
    remove_field => ["timestamp"]
  }
  # ruby {
  #   code => "event.set('myTimeField',event.get('myTimeField').time.localtime+8*60*60)"
  # }
}

# filter { json { source => "message" remove_field => ["message"] } }

output {
  elasticsearch {
    hosts => ["localhost:9200"]
    index => "product"
    document_type => "_doc"
    document_id => "%{index}"
  }
  stdout {
    # JSON格式输出
    codec => "json_lines"
  }
}
```
* 启动logstash

  * 直接启动-在bin目录下执行logstash并用-f指定配置文件

```bash
./logstash -f ../config/mysql.conf
```