Nginx_Kafka_Module

1. Install git:

    yum install -y git

2. Switch to the /usr/local/src directory, then clone the Kafka C client (librdkafka) source locally:

    cd /usr/local/src
    git clone https://github.com/edenhill/librdkafka

3. Enter the librdkafka directory and build it:

    cd librdkafka
    yum install -y gcc gcc-c++ pcre-devel zlib-devel
    ./configure
    make && make install
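By default librdkafka installs its shared libraries under /usr/local/lib (the stock install prefix; adjust the path if you passed --prefix to ./configure). A quick check that the build landed there:

    ls /usr/local/lib | grep librdkafka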
4. Install the nginx-Kafka integration module. Go back to /usr/local/src and clone the ngx_kafka_module source:

    cd /usr/local/src
    git clone https://github.com/brg-liuwei/ngx_kafka_module

5. Enter the nginx source directory and rebuild nginx with the module compiled in:

    cd /usr/local/src/nginx-1.12.2
    ./configure --add-module=/usr/local/src/ngx_kafka_module/
    make
    make install
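To confirm the module was actually compiled in, the configure arguments recorded in the binary can be inspected; the path below assumes the default source-build prefix /usr/local/nginx:

    /usr/local/nginx/sbin/nginx -V 2>&1 | grep ngx_kafka_module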
6. Edit the nginx configuration file; see the nginx.conf attached at the end of this document for details.

7. Start the ZooKeeper and Kafka cluster (and create the topics):

    /bigdata/zookeeper-3.4.9/bin/zkServer.sh start
    /bigdata/kafka_2.11-0.10.2.1/bin/kafka-server-start.sh -daemon /bigdata/kafka_2.11-0.10.2.1/config/server.properties
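The original notes mention creating the topics but do not show the command. A minimal sketch using kafka-topics.sh, assuming ZooKeeper listens on localhost:2181 and single-broker settings (raise --partitions and --replication-factor for a real cluster):

    /bigdata/kafka_2.11-0.10.2.1/bin/kafka-topics.sh --create --zookeeper localhost:2181 --replication-factor 1 --partitions 1 --topic track
    /bigdata/kafka_2.11-0.10.2.1/bin/kafka-topics.sh --create --zookeeper localhost:2181 --replication-factor 1 --partitions 1 --topic user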
8. Start nginx. It fails with an error: the librdkafka.so.1 shared library cannot be found:

    error while loading shared libraries: librdkafka.so.1: cannot open shared object file: No such file or directory

9. Register the library path with the dynamic linker:

    echo "/usr/local/lib" >> /etc/ld.so.conf
    ldconfig
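With the library path registered, nginx should now start (the path below assumes the default source-build prefix), and the linker cache can be queried to confirm librdkafka is resolvable:

    /usr/local/nginx/sbin/nginx
    ldconfig -p | grep librdkafka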
10. Test: POST data to nginx and check whether a Kafka consumer receives it:

    curl localhost/kafka/track -d "message send to kafka topic"
    curl localhost/kafka/track -d "何洪波666"
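To watch the messages arrive, attach a console consumer to the track topic; the broker address below is taken from the kafka_broker_list in the attached nginx.conf, so adjust it to your cluster:

    /bigdata/kafka_2.11-0.10.2.1/bin/kafka-console-consumer.sh --bootstrap-server node-1.xiaoniu.com:9092 --topic track --from-beginning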

Attachment: nginx.conf

#user nobody;
worker_processes 1;

#error_log logs/error.log;
#error_log logs/error.log notice;
#error_log logs/error.log info;

#pid logs/nginx.pid;


events {
    worker_connections 1024;
}


http {
    include mime.types;
    default_type application/octet-stream;

    #log_format main '$remote_addr - $remote_user [$time_local] "$request" '
    #                '$status $body_bytes_sent "$http_referer" '
    #                '"$http_user_agent" "$http_x_forwarded_for"';

    #access_log logs/access.log main;

    sendfile on;
    #tcp_nopush on;

    #keepalive_timeout 0;
    keepalive_timeout 65;

    #gzip on;

    kafka;
    kafka_broker_list node-1.xiaoniu.com:9092 node-2.xiaoniu.com:9092 node-3.xiaoniu.com:9092;

    server {
        listen 80;
        server_name node-6.xiaoniu.com;

        #charset koi8-r;
        #access_log logs/host.access.log main;

        location = /kafka/track {
            kafka_topic track;
        }

        location = /kafka/user {
            kafka_topic user;
        }

        #error_page 404 /404.html;

        # redirect server error pages to the static page /50x.html
        #
        error_page 500 502 503 504 /50x.html;
        location = /50x.html {
            root html;
        }
    }
}