标签:docker, tomcat, elasticsearch, docker-compose, filebeat

将tomcat日志从tomcat docker容器收集到Filebeat docker容器

摘要:我有一个 Tomcat docker 容器和一个 Filebeat docker 容器,均已启动并运行。我的目标是把正在运行的 Tomcat 容器中的日志收集到 Filebeat 容器,但我不知道如何从 Tomcat 容器获取日志文件。我尝试过创建 docker 卷共享日志,但没有成功。项目结构与配置文件见下文。

我有一个Tomcat docker容器和Filebeat docker容器都已启动并运行

我的目标:我需要把正在运行的 tomcat 容器中的 tomcat 日志收集到 Filebeat 容器

问题:我不知道如何从Tomcat容器获取收集的日志文件

到目前为止我尝试的内容:我尝试创建一个docker卷,将tomcat日志添加到该卷,并从filebeat容器访问该卷,但没有成功

结构:我已经在项目Logstash(项目的根目录)下编写了docker-compose.yml文件,具有以下项目结构(这里我想从一个配置文件中启动并运行Elasticsearch、Logstash、Filebeat和Kibana docker容器)。docker容器(项目的根目录)具有以下结构(这里我想从一个配置文件中启动并运行Tomcat、Nginx和Postgres容器)

  • Logstash:包含4个子目录(Filebeat、Logstash、Elasticsearch和Kibana)、ENV文件和docker-compose.yml文件。每个子目录都包含各自的Dockerfile,用于拉取镜像并构建容器

  • docker容器:包含3个子目录(Tomcat、Nginx和Postgres)、ENV文件和docker-compose.yml文件。每个子目录都包含单独的Dockerfile,用于拉取docker镜像并构建容器

  • 注:我认为这个基本结构有助于理解我的需求

docker-compose.yml文件

Logstash.docker-compose.yml文件

# Logstash/docker-compose.yml — ELK stack (Elasticsearch, Logstash, Kibana)
# plus Filebeat, all attached to a shared bridge network "elk".
version: '2'
services:

  elasticsearch:
    container_name: OTP-Elasticsearch
    build:
      context: ./elasticsearch
      args:
        - ELK_VERSION=${ELK_VERSION}
    volumes:
      # Read-only bind mount of the node configuration.
      - ./elasticsearch/config/elasticsearch.yml:/usr/share/elasticsearch/config/elasticsearch.yml:ro
    ports:
      - "9200:9200"  # HTTP API
      - "9300:9300"  # transport protocol
    environment:
      ES_JAVA_OPTS: "-Xmx256m -Xms256m"
    networks:
      - elk

  filebeat:
    container_name: OTP-Filebeat
    # -e logs to stderr; --strict.perms=false skips the config-ownership
    # check (needed because filebeat.yml is bind-mounted from the host).
    command:
      - "-e"
      - "--strict.perms=false"
    user: root
    build:
      context: ./filebeat
      args:
        - ELK_VERSION=${ELK_VERSION}
    volumes:
      - ./filebeat/config/filebeat.yml:/usr/share/filebeat/filebeat.yml
    # NOTE(review): Filebeat is a Go binary — LS_JAVA_OPTS has no effect on
    # it. Kept for byte-compatibility; confirm nothing reads it, then drop.
    environment:
      LS_JAVA_OPTS: "-Xmx256m -Xms256m"
    networks:
      - elk
    depends_on:
      - elasticsearch
      - logstash

  logstash:
    container_name: OTP-Logstash
    build:
      context: ./logstash
      args:
        - ELK_VERSION=${ELK_VERSION}
    volumes:
      - ./logstash/config/logstash.yml:/usr/share/logstash/config/logstash.yml:ro
      - ./logstash/pipeline:/usr/share/logstash/pipeline:ro
    # A separate "expose" entry for 5044 is redundant: publishing the port
    # under "ports" already exposes it to linked/networked containers.
    ports:
      - "9600:9600"  # Logstash monitoring API
      - "5044:5044"  # Beats input (Filebeat ships here)
    environment:
      LS_JAVA_OPTS: "-Xmx256m -Xms256m"
    networks:
      - elk
    # "links" is deprecated; services on the user-defined "elk" network
    # already resolve each other by service name.
    links:
      - elasticsearch
    depends_on:
      - elasticsearch

  kibana:
    container_name: OTP-Kibana
    build:
      context: ./kibana
      args:
        - ELK_VERSION=${ELK_VERSION}
    volumes:
      - ./kibana/config/:/usr/share/kibana/config:ro
    ports:
      - "5601:5601"
    networks:
      - elk
    links:
      - elasticsearch
    depends_on:
      - elasticsearch
      - logstash
      - filebeat

networks:
  elk:
    driver: bridge
# docker-containers/docker-compose.yml — application stack
# (Nginx reverse proxy, Postgres database, Tomcat application server).
version: '2'
services:

  # Nginx
  nginx:
    container_name: OTP-Nginx
    restart: always
    build:
      context: ./nginx
      args:
        # NOTE(review): "comapanycode" looks like a typo for "companycode";
        # it must match the ARG name in ./nginx/Dockerfile — verify there
        # before renaming either side.
        - comapanycode=${COMPANY_CODE}
        - dbtype=${DB_TYPE}
        - dbip=${DB_IP}
        - dbname=${DB_NAME}
        - dbuser=${DB_USER}
        - dbpassword=${DB_PASSWORD}
        - webdirectory=${WEB_DIRECTORY}
    ports:
      - "80:80"
    links:
      - db:db
    volumes:
      - ./log/nginx:/var/log/nginx
    depends_on:
      - db

  # Postgres
  db:
    container_name: OTP-Postgres
    restart: always
    ports:
      # NOTE(review): Postgres listens on 5432 by default; "5430:5430" only
      # works if the image is configured for port 5430 — confirm.
      - "5430:5430"
    build:
      context: ./postgres
      args:
        - food_db_version=${FOOD_DB_VERSION}
        - dbtype=${DB_TYPE}
        - retail_db_version=${RETAIL_DB_VERSION}
        - dbname=${DB_NAME}
        - dbuser=${DB_USER}
        - dbpassword=${DB_PASSWORD}
    volumes:
      # NOTE(review): ".data/db" resolves to a hidden "<project>/.data/db"
      # directory; if "./data/db" was intended, fix the path.
      - .data/db:/octopus_docker/postgresql/data

  # Tomcat
  tomcat:
    container_name: OTP-Tomcat
    restart: always
    build:
      context: ./tomcat
      args:
        - dbuser=${DB_USER}
        - dbpassword=${DB_PASSWORD}
    links:
      - db:db
    volumes:
      # Deploys the WAR; add a shared log volume here if Filebeat must
      # read /usr/local/tomcat/logs from another container.
      - ./tomcat/${WARNAME}.war:/usr/local/tomcat/webapps/${WARNAME}.war
    ports:
      - "8080:8080"
    depends_on:
      - db
      - nginx
docker-containers.docker-compose.yml文件

# Logstash/docker-compose.yml — ELK stack (Elasticsearch, Logstash, Kibana)
# plus Filebeat, all attached to a shared bridge network "elk".
version: '2'
services:

  elasticsearch:
    container_name: OTP-Elasticsearch
    build:
      context: ./elasticsearch
      args:
        - ELK_VERSION=${ELK_VERSION}
    volumes:
      # Read-only bind mount of the node configuration.
      - ./elasticsearch/config/elasticsearch.yml:/usr/share/elasticsearch/config/elasticsearch.yml:ro
    ports:
      - "9200:9200"  # HTTP API
      - "9300:9300"  # transport protocol
    environment:
      ES_JAVA_OPTS: "-Xmx256m -Xms256m"
    networks:
      - elk

  filebeat:
    container_name: OTP-Filebeat
    # -e logs to stderr; --strict.perms=false skips the config-ownership
    # check (needed because filebeat.yml is bind-mounted from the host).
    command:
      - "-e"
      - "--strict.perms=false"
    user: root
    build:
      context: ./filebeat
      args:
        - ELK_VERSION=${ELK_VERSION}
    volumes:
      - ./filebeat/config/filebeat.yml:/usr/share/filebeat/filebeat.yml
    # NOTE(review): Filebeat is a Go binary — LS_JAVA_OPTS has no effect on
    # it. Kept for byte-compatibility; confirm nothing reads it, then drop.
    environment:
      LS_JAVA_OPTS: "-Xmx256m -Xms256m"
    networks:
      - elk
    depends_on:
      - elasticsearch
      - logstash

  logstash:
    container_name: OTP-Logstash
    build:
      context: ./logstash
      args:
        - ELK_VERSION=${ELK_VERSION}
    volumes:
      - ./logstash/config/logstash.yml:/usr/share/logstash/config/logstash.yml:ro
      - ./logstash/pipeline:/usr/share/logstash/pipeline:ro
    # A separate "expose" entry for 5044 is redundant: publishing the port
    # under "ports" already exposes it to linked/networked containers.
    ports:
      - "9600:9600"  # Logstash monitoring API
      - "5044:5044"  # Beats input (Filebeat ships here)
    environment:
      LS_JAVA_OPTS: "-Xmx256m -Xms256m"
    networks:
      - elk
    # "links" is deprecated; services on the user-defined "elk" network
    # already resolve each other by service name.
    links:
      - elasticsearch
    depends_on:
      - elasticsearch

  kibana:
    container_name: OTP-Kibana
    build:
      context: ./kibana
      args:
        - ELK_VERSION=${ELK_VERSION}
    volumes:
      - ./kibana/config/:/usr/share/kibana/config:ro
    ports:
      - "5601:5601"
    networks:
      - elk
    links:
      - elasticsearch
    depends_on:
      - elasticsearch
      - logstash
      - filebeat

networks:
  elk:
    driver: bridge
# docker-containers/docker-compose.yml — application stack
# (Nginx reverse proxy, Postgres database, Tomcat application server).
version: '2'
services:

  # Nginx
  nginx:
    container_name: OTP-Nginx
    restart: always
    build:
      context: ./nginx
      args:
        # NOTE(review): "comapanycode" looks like a typo for "companycode";
        # it must match the ARG name in ./nginx/Dockerfile — verify there
        # before renaming either side.
        - comapanycode=${COMPANY_CODE}
        - dbtype=${DB_TYPE}
        - dbip=${DB_IP}
        - dbname=${DB_NAME}
        - dbuser=${DB_USER}
        - dbpassword=${DB_PASSWORD}
        - webdirectory=${WEB_DIRECTORY}
    ports:
      - "80:80"
    links:
      - db:db
    volumes:
      - ./log/nginx:/var/log/nginx
    depends_on:
      - db

  # Postgres
  db:
    container_name: OTP-Postgres
    restart: always
    ports:
      # NOTE(review): Postgres listens on 5432 by default; "5430:5430" only
      # works if the image is configured for port 5430 — confirm.
      - "5430:5430"
    build:
      context: ./postgres
      args:
        - food_db_version=${FOOD_DB_VERSION}
        - dbtype=${DB_TYPE}
        - retail_db_version=${RETAIL_DB_VERSION}
        - dbname=${DB_NAME}
        - dbuser=${DB_USER}
        - dbpassword=${DB_PASSWORD}
    volumes:
      # NOTE(review): ".data/db" resolves to a hidden "<project>/.data/db"
      # directory; if "./data/db" was intended, fix the path.
      - .data/db:/octopus_docker/postgresql/data

  # Tomcat
  tomcat:
    container_name: OTP-Tomcat
    restart: always
    build:
      context: ./tomcat
      args:
        - dbuser=${DB_USER}
        - dbpassword=${DB_PASSWORD}
    links:
      - db:db
    volumes:
      # Deploys the WAR; add a shared log volume here if Filebeat must
      # read /usr/local/tomcat/logs from another container.
      - ./tomcat/${WARNAME}.war:/usr/local/tomcat/webapps/${WARNAME}.war
    ports:
      - "8080:8080"
    depends_on:
      - db
      - nginx
其他文件:

filebeat.yml(Logstash/Filbeat/config/中的配置文件)

其他信息:

  • 我使用的系统是Ubuntu 18.04
  • 我的目标是从运行的tomcat容器中收集tomcat日志,并将它们转发到Logstash和filter日志,然后将这些日志转发到Elasticsearch,最后转发到Kibana以实现可视化
  • 目前,我可以收集本地计算机(主机)日志并在Kibana中可视化它们。(/var/log/)
我的问题:

  • 我需要知道从tomcat容器中收集tomcat日志并通过filebeat容器将其转发到logstash容器的正确方法
任何讨论、回答或任何帮助都是高度期待的


谢谢

解决方案:在所有容器之间创建一个共享卷,并将 Tomcat 配置为把日志文件写入该卷挂载的目录。如果可以把所有服务放进同一个
docker-compose.yml
,只需在其中声明该卷:

docker-compose.yml

# Single compose file: declare a named volume "logs" and mount it into every
# service that must share log files (e.g. Tomcat writes, Filebeat reads).
# The "..." lines are placeholders for each service's remaining settings.
version: '3'
services:
  one:
    ...
    volumes:
      # Named volume "logs" mounted at /var/log/shared inside the container.
      - logs:/var/log/shared
  two:
    ...
    volumes:
      - logs:/var/log/shared
# Top-level declaration creates the named volume, managed by Compose.
volumes:
  logs:
如果需要多个
docker-compose.yml
文件,请先用
docker volume create logs
在全局预先创建卷,并在两个 compose 文件中都将其声明为外部卷:

# Multiple compose files: create the volume once on the host with
#   docker volume create logs
# then reference it as "external" in each compose file.
# The "..." lines are placeholders for each service's remaining settings.
version: '3'
services:
  one:
    ...
    volumes:
      - logs:/var/log/shared
  two:
    ...
    volumes:
      - logs:/var/log/shared
volumes:
  logs:
    # external: true tells Compose the volume already exists and must not
    # be created (or removed) by this project.
    external: true

很抱歉一开始描述得不够清楚,我会尽量改进,谢谢。——没关系,完整地描述问题本来就是好习惯。我已按照你的指导修改了代码并添加了共享卷;现在我需要知道如何配置 Filebeat 的输入路径(filebeat.inputs 的 paths),以便从该共享卷收集日志。再次感谢。请参阅此存储库: