Send logs to Graylog server on Docker

Hi guys,
I have a question.
I currently have Graylog version 3.1.0 (1.8.0_222 on Linux 5.10.0-10-amd64, Debian), installed with the Docker Compose file below.

==================================
version: '3.3'

services:
#
# monitoring
#
  prom:
    image: quay.io/prometheus/prometheus:v2.0.0
    restart: always
    volumes:
     - ./monitor/prometheus.yml:/etc/prometheus/prometheus.yml
    command: "--config.file=/etc/prometheus/prometheus.yml --storage.tsdb.path=/prometheus"
    ports:
     - 9090:9090
    depends_on:
     - exporter
    logging:
      driver: gelf
      options:
        gelf-address: udp://192.168.100.100:12201

  exporter:
    image: prom/node-exporter:latest
    restart: always
    ports:
     - "9100:9100"
    # network_mode: host
    logging:
      driver: gelf
      options:
        gelf-address: udp://192.168.100.100:12201

  grafana:
    restart: always
    image: grafana/grafana
    ports:
     - "3000:3000"
    depends_on:
    - prom
    volumes:
    - ./grafana_data:/var/lib/grafana
    logging:
      driver: gelf
      options:
        gelf-address: udp://192.168.100.100:12201


  cadvisor:
    image: google/cadvisor:latest
    container_name: monitoring_cadvisor
    restart: unless-stopped
    volumes:
      - /:/rootfs:ro
      - /var/run:/var/run:rw
      - /sys:/sys:ro
      - /var/lib/docker/:/var/lib/docker:ro
    expose:
      - 8080

#
# logserver
#
  graylog-mongo:
    image: "mongo:3"
    restart: always
    volumes:
    - ./graylog/mongodb:/data/db
    environment:
      - TZ=Asia/Tehran
      - AUTO_UPDATES_ON=true

  graylog-elasticsearch:
    image: docker.elastic.co/elasticsearch/elasticsearch-oss:6.8.2
    restart: always
    #command: "elasticsearch -Des.cluster.name='graylog'"
    volumes:
    - ./graylog/elasticsearchdata:/usr/share/elasticsearch/data
    ports:
    - 9200:9200
    environment:
      - TZ=Asia/Tehran
      - AUTO_UPDATES_ON=true
      - http.host=0.0.0.0
      - transport.host=localhost
      - network.host=0.0.0.0
      - node.max_local_storage_nodes=4
      - "ES_JAVA_OPTS=-Xms512m -Xmx512m"
      # config added by da.na
      - elasticsearch_max_time_per_index=1d # limit 
      - elasticsearch_max_number_of_indices=8
      - indices.fielddata.cache.size=20%
      - elasticsearch_shards=1
      - elasticsearch_replicas=1
    ulimits:
      memlock:
        soft: -1
        hard: -1

  graylog:
    image: graylog/graylog:3.1
    restart: always
    volumes:
       - ./graylog/journal:/usr/share/graylog/data/journal
       - ./graylog/config:/usr/share/graylog/data/config
    environment:
      - TZ=Asia/Tehran
      - AUTO_UPDATES_ON=true
      - GRAYLOG_PASSWORD_SECRET=xxxxxxxxxx
      - GRAYLOG_ROOT_PASSWORD_SHA2=<.....>
      - GRAYLOG_WEB_ENDPOINT_URI=http://192.168.x.x:9000/api/
      - GRAYLOG_HTTP_BIND_ADDRESS=0.0.0.0:9000
      - GRAYLOG_HTTP_EXTERNAL_URI=http://192.168.x.x:9000/
      - GRAYLOG_TRANSPORT_EMAIL_WEB_INTERFACE_URL=http://192.168.x.x:9000
      - GRAYLOG_TRANSPORT_EMAIL_HOSTNAME=mail.eniac-tech.local
      - GRAYLOG_TRANSPORT_EMAIL_ENABLED=true
      - GRAYLOG_TRANSPORT_EMAIL_PORT=25
      - GRAYLOG_TRANSPORT_EMAIL_USE_AUTH=false
      - GRAYLOG_TRANSPORT_EMAIL_USE_TLS=false
      - GRAYLOG_TRANSPORT_EMAIL_USE_SSL=false
      - GRAYLOG_TRANSPORT_FROM_EMAIL=administrator@bahram.com
      - GRAYLOG_TRANSPORT_SUBJECT_PREFIX=[graylog]
    links:
      - graylog-mongo:mongo
      - graylog-elasticsearch:elasticsearch
    depends_on:
      - graylog-mongo
      - graylog-elasticsearch
    ports:
      # Graylog web interface and REST API
      - 9000:9000
      # Syslog TCP
      - 8514:8514
      # Syslog UDP
      - 8514:8514/udp
      # GELF TCP
      - 12201:12201
      # GELF UDP
      - 12201:12201/udp

and everything is OK.
I intend to send logs from a Windows VM to this new server with winlogbeat for testing, but nothing is received by the Graylog server. I used winlogbeat, and GELF on port 514 as well.
I am completely confused and don't know where the problem is.
I appreciate any ideas and suggestions.

Did you open the port for winlogbeat in the last lines of your docker-compose file?

Hi, ihe
Thanks a lot for the reply and guidance.
Yes, you can see it at the end of the docker-compose file.
In the winlogbeat config file I used port 12201.

12201 is the default port for GELF. You'll need to add 5044/tcp as well, which is the default port for Beats.
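For example, something along these lines under the graylog service in your compose file (just a sketch; you also need to create and start a Beats input on that port under System → Inputs in the Graylog web UI):

  graylog:
    ports:
      # Beats (winlogbeat) TCP
      - 5044:5044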


Hey @bahram

That is a privileged port. You can either use a custom port like @drewmiranda-gl suggested or use a port above 1024. Just an idea.
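If the Windows machines should keep sending to port 514, one common workaround (a sketch, not tested against your setup) is to publish the privileged host port onto a higher port inside the container and point the Graylog input at the higher one, for example:

  graylog:
    ports:
      # clients send to host port 514; the Graylog input inside the container listens on 1514
      - "514:1514"
      - "514:1514/udp"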

Yes, that's right.
I used both TCP and UDP on ports 1415 and 5050.
It seems that this problem has nothing to do with the port.

Are you saying you still aren't receiving Beats logs after adding 5044/tcp to your docker-compose file? Can you share your winlogbeat config?

Hi, drewmiranda-gl

Thanks a lot for your help.

This is the docker-compose.yaml file:

version: '3'
services:
  # MongoDB: https://hub.docker.com/_/mongo/
  # Author: mahmoudi = devops523@gmail.com
  mongo:
    image: mongo:4
    container_name: mongodb
    networks:
      - graylog
    ports:
      - 27017:27017
  # Elasticsearch: https://www.elastic.co/guide/en/elasticsearch/reference/6.x/docker.html
  elasticsearch:
    image: docker.elastic.co/elasticsearch/elasticsearch-oss:7.10.2
    container_name: elasticsearch
    environment:
      - http.host=0.0.0.0
      - transport.host=localhost
      - network.host=0.0.0.0
      - "ES_JAVA_OPTS=-Xms1g -Xmx1g"
    ulimits:
      memlock:
        soft: -1
        hard: -1
    ports:
      - 9200:9200
    networks:
      - graylog
  # Graylog: https://hub.docker.com/r/graylog/graylog/
  graylog:
    image: graylog/graylog:3.1.0
    container_name: graylog
    environment:
      - GRAYLOG_PASSWORD_SECRET=t7mrBWcgb2dX4ylxnOEbLy1dUfUZi0PwVrW6dTdMwj78O5gARsIvNTSohptG7zwoEd7O8SU6DnmJOoeqicJBw4PqKjEeQuX8
      - GRAYLOG_ROOT_PASSWORD_SHA2=5fdca860613f2ef9e31aba0e8d4bb45cab6289ae6f6c90f5a6d5a1f9aaf4f0f9
      - GRAYLOG_HTTP_EXTERNAL_URI=http://192.168.11.110:9000/
      - GRAYLOG_ELASTICSEARCH_VERSION=7
      - GRAYLOG_HTTP_ENABLE_CORS=true
    networks:
      - graylog
    depends_on:
      - mongo
      - elasticsearch
    ports:
      # Graylog web interface and REST API
      - 9000:9000
      # Syslog TCP
      - 1514:1514
      # Syslog UDP
      - 1514:1514/udp
      # GELF TCP
      - 12201:12201
      # GELF UDP
      - 12201:12201/udp
      # Prometheus
      - 9091:9091
      # mahmoudi
      - 5050:5050
      # customport
      - 5151:5151
networks:
  graylog:
    driver: bridge

And the winlogbeat file:

###################### Winlogbeat Configuration Example ##########################

# This file is an example configuration file highlighting only the most common
# options. The winlogbeat.reference.yml file from the same directory contains all the
# supported options with more comments. You can use it as a reference.
#
# You can find the full configuration reference here:
# https://www.elastic.co/guide/en/beats/winlogbeat/index.html

#======================= Winlogbeat specific options ==========================

# event_logs specifies a list of event logs to monitor as well as any
# accompanying options. The YAML data type of event_logs is a list of
# dictionaries.
#
# The supported keys are name (required), tags, fields, fields_under_root,
# forwarded, ignore_older, level, event_id, provider, and include_xml. Please
# visit the documentation for the complete details of each option.
# https://go.es.io/WinlogbeatConfig

winlogbeat.event_logs:
- name: Application
ignore_older: 72h
- name: Security
- name: System

#==================== Elasticsearch template setting ==========================

setup.template.settings:
index.number_of_shards: 1
#index.codec: best_compression
#_source.enabled: false

#================================ General =====================================

# The name of the shipper that publishes the network data. It can be used to group
# all the transactions sent by a single shipper in the web interface.
#name:

# The tags of the shipper are included in their own field with each
# transaction published.
#tags: ["service-X", "web-tier"]

# Optional fields that you can specify to add additional information to the
# output.
#fields:
# env: staging

#============================== Dashboards =====================================

# These settings control loading the sample dashboards to the Kibana index. Loading
# the dashboards is disabled by default and can be enabled either by setting the
# options here or by using the `setup` command.
setup.dashboards.enabled: true

# The URL from where to download the dashboards archive. By default this URL
# has a value which is computed based on the Beat name and version. For released
# versions, this URL points to the dashboard archive on the artifacts.elastic.co
# website.
#setup.dashboards.url:

#============================== Kibana =====================================

# Starting with Beats version 6.0.0, the dashboards are loaded via the Kibana API.
# This requires a Kibana endpoint configuration.
setup.kibana:

# Kibana Host
# Scheme and port can be left out and will be set to the default (http and 5601)
# In case you specify and additional path, the scheme is required: http://localhost:5601/path
# IPv6 addresses should always be defined as: https://[2001:db8::1]:5601
#host: "localhost:5601"

# Kibana Space ID
# ID of the Kibana Space into which the dashboards should be loaded. By default,
# the Default Space will be used.
#space.id:

#============================= Elastic Cloud ==================================

# These settings simplify using winlogbeat with the Elastic Cloud (https://cloud.elastic.co/).

# The cloud.id setting overwrites the `output.elasticsearch.hosts` and
# `setup.kibana.host` options.
# You can find the `cloud.id` in the Elastic Cloud web UI.
#cloud.id:

# The cloud.auth setting overwrites the `output.elasticsearch.username` and
# `output.elasticsearch.password` settings. The format is `<user>:<pass>`.
#cloud.auth:

#================================ Outputs =====================================

# Configure what output to use when sending the data collected by the beat.

#-------------------------- Elasticsearch output ------------------------------
#output.elasticsearch:
# Array of hosts to connect to.
hosts: ["192.168.11.110:9200"]

# Optional protocol and basic auth credentials.
#protocol: "https"
#username: "elastic"
#password: "changeme"

#----------------------------- Logstash output --------------------------------
output.logstash:
# The Logstash hosts
hosts: ["192.168.11.110:5050"]

# Optional SSL. By default is off.
# List of root certificates for HTTPS server verifications
#ssl.certificate_authorities: ["/etc/pki/root/ca.pem"]

# Certificate for SSL client authentication
#ssl.certificate: "/etc/pki/client/cert.pem"

# Client Certificate Key
#ssl.key: "/etc/pki/client/cert.key"

#================================ Processors =====================================

# Configure processors to enhance or manipulate events generated by the beat.

processors:
- add_host_metadata: ~
- add_cloud_metadata: ~

#================================ Logging =====================================

# Sets log level. The default log level is info.
# Available log levels are: error, warning, info, debug
#logging.level: debug

# At debug level, you can selectively enable logging only for some components.
# To enable all selectors use ["*"]. Examples of other selectors are "beat",
# "publish", "service".
#logging.selectors: ["*"]

#============================== Xpack Monitoring ===============================

# winlogbeat can export internal metrics to a central Elasticsearch monitoring
# cluster. This requires xpack monitoring to be enabled in Elasticsearch. The
# reporting is disabled by default.

# Set to true to enable the monitoring reporter.
#xpack.monitoring.enabled: false

# Uncomment to send the metrics to Elasticsearch. Most settings from the
# Elasticsearch output are accepted here as well. Any setting that is not set is
# automatically inherited from the Elasticsearch output configuration, so if you
# have the Elasticsearch output configured, you can simply uncomment the
# following line.
#xpack.monitoring.elasticsearch:

#================================= Migration ==================================

# This allows to enable 6.7 migration aliases
#migration.6_to_7.enabled: true

Thanks for sharing, this is helpful.

A couple of things stick out:

I'm not sure if the formatting was messed up by the forum, but in the pasted text of the winlogbeat config, I don't see indentation (spaces) for this section:

#----------------------------- Logstash output --------------------------------
output.logstash:
# The Logstash hosts
hosts: ["192.168.11.110:5050"]

For reference, here is what mine looks like:

output.logstash:
   hosts: ["192.168.1.1:5044"]

Also, can you confirm that you have a Beats input running on port 5050? I do see you have it specified in your docker-compose file, though:

    ports:
      # mahmoudi
      - 5050:5050
