Storing Zabbix 4.4 history data in Elasticsearch and cleaning it up periodically

Posted by 拥有回忆 on 2020-08-05 00:02:18

1. Without further ado, let's get straight to it

Elasticsearch installation and configuration. For this test I installed ES 6.8 on two machines:

rpm -ivh elasticsearch-6.8.10.rpm
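ES 6.x does not bundle a JDK, so a Java runtime (Java 8 or 11) needs to be present before Elasticsearch will start. A minimal sketch for CentOS/RHEL (the package name is an assumption; adjust for your distribution):

yum install -y java-1.8.0-openjdk
java -version    # should report a 1.8.x runtime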

Cluster configuration

Node 1
root@[ 8.1.10.146 @elastic1:/root ]# cat /etc/elasticsearch/elasticsearch.yml |grep -v "^#"
cluster.name: Zabbix-history
node.name: 8.1.10.146
path.data: /data/ESdata
path.logs: /var/log/elasticsearch
bootstrap.memory_lock: true
network.host: 0.0.0.0
http.max_initial_line_length: 128k
http.max_header_size: 64kb
http.max_content_length: 500mb
discovery.zen.ping.unicast.hosts: ["8.1.10.146", "8.1.10.147"]
discovery.zen.minimum_master_nodes: 2

Node 2
root@[ 8.1.10.147 @elastic2:/root ]# cat /etc/elasticsearch/elasticsearch.yml |grep -v "^#"
cluster.name: Zabbix-history
node.name: 8.1.10.147
path.data: /data/ESdata
path.logs: /var/log/elasticsearch
bootstrap.memory_lock: true
network.host: 8.1.10.147
http.max_initial_line_length: 128k
http.max_header_size: 64kb
http.max_content_length: 500mb
discovery.zen.ping.unicast.hosts: ["8.1.10.146", "8.1.10.147"]
discovery.zen.minimum_master_nodes: 2
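Two prerequisites that are easy to miss (they are not shown above, so treat this as an assumption about your setup): the path.data directory must exist and be owned by the elasticsearch user, and bootstrap.memory_lock: true only takes effect if the service is allowed to lock memory. A minimal sketch for a systemd-managed install, run on each node:

mkdir -p /data/ESdata
chown -R elasticsearch:elasticsearch /data/ESdata

mkdir -p /etc/systemd/system/elasticsearch.service.d
cat > /etc/systemd/system/elasticsearch.service.d/override.conf <<'EOF'
[Service]
LimitMEMLOCK=infinity
EOF
systemctl daemon-reload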

JVM configuration

Node 1
root@[ 8.1.10.146 @elastic1:/etc/elasticsearch ]# cat jvm.options |grep -v "^#" |grep -v "^$"
-Xms31g
-Xmx31g
-Xmn16g
8-13:-XX:+UseConcMarkSweepGC
8-13:-XX:CMSInitiatingOccupancyFraction=75
8-13:-XX:+UseCMSInitiatingOccupancyOnly
14-:-XX:+UseG1GC
14-:-XX:G1ReservePercent=25
14-:-XX:InitiatingHeapOccupancyPercent=30
-Des.networkaddress.cache.ttl=60
-Des.networkaddress.cache.negative.ttl=10
-XX:+AlwaysPreTouch
-Xss5m
-Djava.awt.headless=true
-Dfile.encoding=UTF-8
-Djna.nosys=true
-XX:-OmitStackTraceInFastThrow
14-:-XX:+ShowCodeDetailsInExceptionMessages
-Dio.netty.noUnsafe=true
-Dio.netty.noKeySetOptimization=true
-Dio.netty.recycler.maxCapacityPerThread=0
-Dlog4j.shutdownHookEnabled=false
-Dlog4j2.disable.jmx=true
-Djava.io.tmpdir=${ES_TMPDIR}
-XX:+HeapDumpOnOutOfMemoryError
-XX:HeapDumpPath=/var/lib/elasticsearch
-XX:ErrorFile=/var/log/elasticsearch/hs_err_pid%p.log
8:-XX:+PrintGCDetails
8:-XX:+PrintGCDateStamps
8:-XX:+PrintTenuringDistribution
8:-XX:+PrintGCApplicationStoppedTime
8:-Xloggc:/var/log/elasticsearch/gc.log
8:-XX:+UseGCLogFileRotation
8:-XX:NumberOfGCLogFiles=32
8:-XX:GCLogFileSize=64m
9-:-Xlog:gc*,gc+age=trace,safepoint:file=/var/log/elasticsearch/gc.log:utctime,pid,tags:filecount=32,filesize=64m
9-:-Djava.locale.providers=COMPAT
10-:-XX:UseAVX=2
root@[ 8.1.10.146 @elastic1:/etc/elasticsearch ]# 


Node 2

The jvm.options on node 2 is identical to node 1's, so it is not repeated here.

Start the cluster

systemctl start elasticsearch

Check the log output; as long as there are no errors you are fine.

tail -f /var/log/elasticsearch/Zabbix-history.log    # the log file is named after cluster.name

Check the cluster status
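A quick way to do this from the command line (assuming HTTP is reachable on port 9200 of node 1):

curl -s "http://8.1.10.146:9200/_cluster/health?pretty"   # status should be green, number_of_nodes 2
curl -s "http://8.1.10.146:9200/_cat/nodes?v"             # both nodes should be listed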

Optionally install the Elasticsearch Head plugin to browse the cluster; it exists both as a Google Chrome extension and as a standalone install, take your pick.

Create the indices, the index templates, and the ingest pipelines that split data into date-based index names:

#https://blog.csdn.net/peyte1/article/details/84941560 
 
#uint mapping
PUT /uint
{
   "settings" : {
      "index" : {
         "number_of_replicas" : 1,
         "number_of_shards" : 5,
		 "refresh_interval": "30s"
      }
   },
   "mappings" : {
      "values" : {
         "properties" : {
            "itemid" : {
               "type" : "long"
            },
            "clock" : {
               "format" : "epoch_second",
               "type" : "date"
            },
            "value" : {
               "type" : "long"
            }
         }
      }
   }
}
 
# dbl mapping
PUT /dbl
{  
   "settings" : {
      "index" : {
         "number_of_replicas" : 1,
         "number_of_shards" : 5,
		 "refresh_interval": "30s"
      }
   },
   "mappings" : {
      "values" : {
         "properties" : {
            "itemid" : {
               "type" : "long"
            },
            "clock" : {
               "format" : "epoch_second",
               "type" : "date"
            },
            "value" : {
               "type" : "double"
            }
         }
      }
   }
}
 
 
# str mapping
PUT /str
{  
   "settings" : {
      "index" : {
         "number_of_replicas" : 1,
         "number_of_shards" : 5,
		 "refresh_interval": "30s"
      }
   },
   "mappings" : {
      "values" : {
         "properties" : {
            "itemid" : {
               "type" : "long"
            },
            "clock" : {
               "format" : "epoch_second",
               "type" : "date"
            },
            "value" : {
               "fields" : {
                  "analyzed" : {
                     "index" : true,
                     "type" : "text",
                     "analyzer" : "standard"
                  }
               },
               "index" : false,
               "type" : "text"
            }
         }
      }
   }
}
 
 
# text mapping
PUT /text
{
   "settings" : {
      "index" : {
         "number_of_replicas" : 1,
         "number_of_shards" : 5,
		 "refresh_interval": "30s"
      }
   },
   "mappings" : {
      "values" : {
         "properties" : {
            "itemid" : {
               "type" : "long"
            },
            "clock" : {
               "format" : "epoch_second",
               "type" : "date"
            },
            "value" : {
               "fields" : {
                  "analyzed" : {
                     "index" : true,
                     "type" : "text",
                     "analyzer" : "standard"
                  }
               },
               "index" : false,
               "type" : "text"
            }
         }
      }
   }
}
 
 
# log mapping
PUT /log
{
   "settings" : {
      "index" : {
         "number_of_replicas" : 1,
         "number_of_shards" : 5,
		 "refresh_interval": "30s"
      }
   },
   "mappings" : {
      "values" : {
         "properties" : {
            "itemid" : {
               "type" : "long"
            },
            "clock" : {
               "format" : "epoch_second",
               "type" : "date"
            },
            "value" : {
               "fields" : {
                  "analyzed" : {
                     "index" : true,
                     "type" : "text",
                     "analyzer" : "standard"
                  }
               },
               "index" : false,
               "type" : "text"
            }
         }
      }
   }
}
 
 
#uint mapping
PUT /_template/uint_template 
{
   "template": "uint*",
   "index_patterns": ["uint*"],
   "settings" : {
      "index" : {
         "number_of_replicas" : 1,
         "number_of_shards" : 5,
		 "refresh_interval": "30s"
      }
   },
   "mappings" : {
      "values" : {
         "properties" : {
            "itemid" : {
               "type" : "long"
            },
            "clock" : {
               "format" : "epoch_second",
               "type" : "date"
            },
            "value" : {
               "type" : "long"
            }
         }
      }
   }
}
 
# dbl mapping
PUT /_template/dbl_template 
{  
   "template": "dbl*",
   "index_patterns": ["dbl*"],
   "settings" : {
      "index" : {
         "number_of_replicas" : 1,
         "number_of_shards" : 5,
		 "refresh_interval": "30s"
      }
   },
   "mappings" : {
      "values" : {
         "properties" : {
            "itemid" : {
               "type" : "long"
            },
            "clock" : {
               "format" : "epoch_second",
               "type" : "date"
            },
            "value" : {
               "type" : "double"
            }
         }
      }
   }
}
 
 
curl -H "Content-Type:application/json" -XPUT http://localhost:9200/_template/str_template '
{  
   "template": "str*",
   "index_patterns": ["str*"],
   "settings" : {
      "index" : {
         "number_of_replicas" : 1,
         "number_of_shards" : 5,
		 "refresh_interval": "30s"
      }
   },
   "mappings" : {
      "values" : {
         "properties" : {
            "itemid" : {
               "type" : "long"
            },
            "clock" : {
               "format" : "epoch_second",
               "type" : "date"
            },
            "value" : {
               "fields" : {
                  "analyzed" : {
                     "index" : true,
                     "type" : "text",
                     "analyzer" : "standard"
                  }
               },
               "index" : false,
               "type" : "text"
            }
         }
      }
   }
}
'
 
 
 
 
# text mapping
PUT /_template/text_template 
{
   "template": "text*",
   "index_patterns": ["text*"],
   "settings" : {
      "index" : {
         "number_of_replicas" : 1,
         "number_of_shards" : 5,
		 "refresh_interval": "30s"
      }
   },
   "mappings" : {
      "values" : {
         "properties" : {
            "itemid" : {
               "type" : "long"
            },
            "clock" : {
               "format" : "epoch_second",
               "type" : "date"
            },
            "value" : {
               "fields" : {
                  "analyzed" : {
                     "index" : true,
                     "type" : "text",
                     "analyzer" : "standard"
                  }
               },
               "index" : false,
               "type" : "text"
            }
         }
      }
   }
}
 
 
# log mapping
PUT /_template/log_template 
{
   "template": "log*",
   "index_patterns": ["log*"],
   "settings" : {
      "index" : {
         "number_of_replicas" : 1,
         "number_of_shards" : 5,
		 "refresh_interval": "30s"
      }
   },
   "mappings" : {
      "values" : {
         "properties" : {
            "itemid" : {
               "type" : "long"
            },
            "clock" : {
               "format" : "epoch_second",
               "type" : "date"
            },
            "value" : {
               "fields" : {
                  "analyzed" : {
                     "index" : true,
                     "type" : "text",
                     "analyzer" : "standard"
                  }
               },
               "index" : false,
               "type" : "text"
            }
         }
      }
   }
}
 
# uint pipeline
PUT _ingest/pipeline/uint-pipeline
{
  "description": "daily uint index naming",
  "processors": [
    {
      "date_index_name": {
        "field": "clock",
        "date_formats": ["UNIX"],
        "index_name_prefix": "uint-",
        "date_rounding": "d"
      }
    }
  ]
}
 
# dbl pipeline
PUT _ingest/pipeline/dbl-pipeline
{
  "description": "daily dbl index naming",
  "processors": [
    {
      "date_index_name": {
        "field": "clock",
        "date_formats": ["UNIX"],
        "index_name_prefix": "dbl-",
        "date_rounding": "d"
      }
    }
  ]
}
 
# log pipeline
PUT _ingest/pipeline/log-pipeline
{
  "description": "daily log index naming",
  "processors": [
    {
      "date_index_name": {
        "field": "clock",
        "date_formats": ["UNIX"],
        "index_name_prefix": "log-",
        "date_rounding": "d"
      }
    }
  ]
}
 
# text pipeline
PUT _ingest/pipeline/text-pipeline
{
  "description": "daily text index naming",
  "processors": [
    {
      "date_index_name": {
        "field": "clock",
        "date_formats": ["UNIX"],
        "index_name_prefix": "text-",
        "date_rounding": "d"
      }
    }
  ]
}
 
# str pipeline
PUT _ingest/pipeline/str-pipeline
{
  "description": "daily str index naming",
  "processors": [
    {
      "date_index_name": {
        "field": "clock",
        "date_formats": ["UNIX"],
        "index_name_prefix": "str-",
        "date_rounding": "d"
      }
    }
  ]
}
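Before pointing Zabbix at the cluster, it is worth confirming that the indices, templates, and pipelines above were actually created. A quick sketch, assuming ES is reachable on localhost:9200 (adjust the host to your cluster):

curl -s "http://localhost:9200/_cat/indices/uint,dbl,str,text,log?v"
curl -s "http://localhost:9200/_cat/templates/*_template?v"
curl -s "http://localhost:9200/_ingest/pipeline/uint-pipeline?pretty"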

The Zabbix server configuration for writing history data to ES is as follows:

[root@harbor ~]# cat /etc/zabbix/zabbix_server.conf |grep -v "^#\|^$"
LogFile=/var/log/zabbix/zabbix_server.log
LogFileSize=0
PidFile=/var/run/zabbix/zabbix_server.pid
SocketDir=/var/run/zabbix
DBHost=9.1.8.244
DBName=zabbix44
DBUser=root
DBPassword=redhat
HistoryStorageURL=http://9.1.13.189:9200    # Elasticsearch address
HistoryStorageTypes=uint,dbl,str,log,text   # history value types stored in ES
HistoryStorageDateIndex=1                   # store history in daily (date-suffixed) indices
SNMPTrapperFile=/var/log/snmptrap/snmptrap.log
Timeout=4
AlertScriptsPath=/usr/lib/zabbix/alertscripts
ExternalScripts=/usr/lib/zabbix/externalscripts
LogSlowQueries=3000
StatsAllowedIP=127.0.0.1

Edit zabbix.conf.php:

[root@harbor ~]# cat /etc/zabbix/web/zabbix.conf.php 
<?php
// Zabbix GUI configuration file.
global $DB,$HISTORY;     // $HISTORY added for Elasticsearch history storage

$DB['TYPE']     = 'MYSQL';
$DB['SERVER']   = '9.1.8.244';
$DB['PORT']     = '3306';
$DB['DATABASE'] = 'zabbix44';
$DB['USER']     = 'root';
$DB['PASSWORD'] = 'redhat';
$HISTORY['url'] = 'http://9.1.13.189:9200';  // Elasticsearch history storage address
$HISTORY['types'] = ['str', 'text', 'log', 'uint', 'dbl'];  // history types stored in ES


// Schema name. Used for IBM DB2 and PostgreSQL.
$DB['SCHEMA'] = '';

$ZBX_SERVER      = 'harbor.musingtec.com';
$ZBX_SERVER_PORT = '10051';
$ZBX_SERVER_NAME = '你真好看';

$IMAGE_FORMAT_DEFAULT = IMAGE_FORMAT_PNG;

Restart Zabbix server and refresh the frontend; you should now be able to see history data.
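A sketch of that step, assuming zabbix-server runs under systemd and using the ES address from zabbix_server.conf above:

systemctl restart zabbix-server
# after a few minutes of collection, daily uint-* / dbl-* indices should start to appear:
curl -s "http://9.1.13.189:9200/_cat/indices/uint-*,dbl-*?v"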

For periodically deleting old Zabbix history data from ES, use elasticsearch-curator, the official index management tool.

Installation

rpm -ivh elasticsearch-curator-5.8.1-1.x86_64.rpm

Pay attention to version compatibility: my ES is 6.8, and the first elasticsearch-curator I installed (5.2) failed with an unsupported-version error, so I ended up downloading 5.8.

After installation you get two tools: curator and curator_cli.
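A quick sanity check that both are on the PATH and report the installed version:

curator --version
curator_cli --version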

Not much to explain here. Two YAML files are needed: one config file (config.yaml below) defining the ES connection, logging, and so on, and one action file (active.yaml below). See the official docs for the full set of options:

https://www.elastic.co/guide/en/elasticsearch/client/curator/current/version-compatibility.html

Here are the configuration files (unit_count in each action is the age in days beyond which indices are deleted; it is set to 1 here):

[root@iimserverBv4 ~]# cat /opt/elasticsearch-curator/config.yaml 
client:
  hosts:
    - 127.0.0.1
  port: 9200
  url_prefix:
  use_ssl: False
  certificate:
  client_cert:
  client_key:
  ssl_no_validate: False
  http_auth:
  timeout: 30
  master_only: False

logging:
  loglevel: INFO
  logfile: /var/log/elasticsearch-curator.log
  logformat: default
  blacklist: ['elasticsearch', 'urllib3']
[root@iimserverBv4 ~]# cat /opt/elasticsearch-curator/active.yaml 
---
# Remember, leave a key empty if there is no value.  None will be a string,
# not a Python "NoneType"
#
# Also remember that all examples have 'disable_action' set to True.  If you
# want to use this action as a template, be sure to set this to False after
# copying it.
actions:
  1:
    action: delete_indices
    description: >-
      Delete dbl- prefixed indices older than 1 day (based on the date in the
      index name). Ignore the error if the filter does not result in an
      actionable list of indices (ignore_empty_list) and exit cleanly.
    options:
      ignore_empty_list: True
      disable_action: False
    filters:
    - filtertype: pattern
      kind: prefix
      value: dbl-
    - filtertype: age
      source: name
      direction: older
      timestring: '%Y-%m-%d'
      unit: days
      unit_count: 1
  2:
    action: delete_indices
    description: >-
      Delete log- prefixed indices older than 1 day (based on the date in the
      index name). Ignore the error if the filter does not result in an
      actionable list of indices (ignore_empty_list) and exit cleanly.
    options:
      ignore_empty_list: True
      disable_action: False
    filters:
    - filtertype: pattern
      kind: prefix
      value: log-
    - filtertype: age
      source: name
      direction: older
      timestring: '%Y-%m-%d'
      unit: days
      unit_count: 1
  3:
    action: delete_indices
    description: >-
      Delete text- prefixed indices older than 1 day (based on the date in the
      index name). Ignore the error if the filter does not result in an
      actionable list of indices (ignore_empty_list) and exit cleanly.
    options:
      ignore_empty_list: True
      disable_action: False
    filters:
    - filtertype: pattern
      kind: prefix
      value: text-
    - filtertype: age
      source: name
      direction: older
      timestring: '%Y-%m-%d'
      unit: days
      unit_count: 1
  4:
    action: delete_indices
    description: >-
      Delete uint- prefixed indices older than 1 day (based on the date in the
      index name). Ignore the error if the filter does not result in an
      actionable list of indices (ignore_empty_list) and exit cleanly.
    options:
      ignore_empty_list: True
      disable_action: False
    filters:
    - filtertype: pattern
      kind: prefix
      value: uint-
    - filtertype: age
      source: name
      direction: older
      timestring: '%Y-%m-%d'
      unit: days
      unit_count: 1
  5:
    action: delete_indices
    description: >-
      Delete str- prefixed indices older than 1 day (based on the date in the
      index name). Ignore the error if the filter does not result in an
      actionable list of indices (ignore_empty_list) and exit cleanly.
    options:
      ignore_empty_list: True
      disable_action: False
    filters:
    - filtertype: pattern
      kind: prefix
      value: str-
    - filtertype: age
      source: name
      direction: older
      timestring: '%Y-%m-%d'
      unit: days
      unit_count: 1

Run it manually once:

# run manually
[root@iimserverBv4 ~]# curator --config /opt/elasticsearch-curator/config.yaml /opt/elasticsearch-curator/active.yaml
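If you want to see what would be deleted without actually removing anything, curator also has a dry-run mode:

curator --dry-run --config /opt/elasticsearch-curator/config.yaml /opt/elasticsearch-curator/active.yaml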

# check the log
[root@iimserverBv4 ~]#  tail -f /var/log/elasticsearch-curator.log 
2020-06-18 13:29:24,381 INFO      Action ID: 4, "delete_indices" completed.
2020-06-18 13:29:24,381 INFO      Preparing Action ID: 5, "delete_indices"
2020-06-18 13:29:24,381 INFO      Creating client object and testing connection
2020-06-18 13:29:24,381 INFO      Instantiating client object
2020-06-18 13:29:24,382 INFO      Testing client connectivity
2020-06-18 13:29:24,385 INFO      Successfully created Elasticsearch client object with provided settings
2020-06-18 13:29:24,387 INFO      Trying Action ID: 5, "delete_indices": Delete indices older than 120 days (based on index name), for logstash- prefixed indices. Ignore the error if the filter does not result in an actionable list of indices (ignore_empty_list) and exit cleanly.
2020-06-18 13:29:24,408 INFO      Skipping action "delete_indices" due to empty list: <class 'curator.exceptions.NoIndices'>
2020-06-18 13:29:24,408 INFO      Action ID: 5, "delete_indices" completed.
2020-06-18 13:29:24,408 INFO      Job completed.

Add it to crontab so it runs every day:

[root@iimserverBv4 ~]# crontab -l
0 2 * * * /usr/bin/curator --config /opt/elasticsearch-curator/config.yaml /opt/elasticsearch-curator/active.yaml