Input - Show received messages not showing logs

Hi GR Gurus,
I set up a Graylog cluster with 3 nodes. Everything is green and there are no error messages, but when I click "Show received messages" on an input it just spins forever and never shows any logs, even though I can see logs coming in. This is a multi-homed network: the 172.x network is our back end and the 10.x network is our front end (both firewalls are disabled).

Here is the server.conf from the master node. The other nodes use the same file, except they are not the master (is_master = false).

[root@graylog1 ~]# vi /etc/elasticsearch/elasticsearch.yml
[root@graylog1 ~]# systemctl restart elasticsearch.service
[root@graylog1 ~]# systemctl status elasticsearch.service
● elasticsearch.service - Elasticsearch
Loaded: loaded (/usr/lib/systemd/system/elasticsearch.service; disabled; vendor preset: disabled)
Active: active (running) since Thu 2018-04-05 08:25:23 EDT; 1min 36s ago
Docs: http://www.elastic.co
Process: 10716 ExecStartPre=/usr/share/elasticsearch/bin/elasticsearch-systemd-pre-exec (code=exited, status=0/SUCCESS)
Main PID: 10717 (java)
CGroup: /system.slice/elasticsearch.service
└─10717 /bin/java -Xms2g -Xmx2g -XX:+UseConcMarkSweepGC -XX:CMSInitiatingOccupancyFraction=75 -XX:+UseCMSInitiatingOccupancyOnly -XX:+AlwaysPreTouch -server -Xss1m -Djava.awt.headless=true -Dfile.encoding=UTF-8 -Djna.nosys=true -Djdk.io.perm…

Apr 05 08:25:23 graylog1 systemd[1]: Starting Elasticsearch…
Apr 05 08:25:23 graylog1 systemd[1]: Started Elasticsearch.
[root@graylog1 ~]# cd /etc/graylog/server/
[root@graylog1 server]# ls
log4j2.xml node-id server.conf
[root@graylog1 server]# cd ~rgizynski
[root@graylog1 rgizynski]# cd GRAYLOG_CLUSTER/
[root@graylog1 GRAYLOG_CLUSTER]# ls
cacerts elasticsearch.yml elasticsearch.yml-fail elasticsearch.yml.glog elasticsearch.yml-orig elasticsearch.yml.rpmsave jvm.options log4j2.properties log4j2.xml node-id server.conf server.conf-graylog1 server.conf.orig
[root@graylog1 GRAYLOG_CLUSTER]# ll
total 140
-r--r--r-- 1 root root 5314 Apr 2 13:32 cacerts
-rw-r----- 1 root root 2846 Mar 29 17:18 elasticsearch.yml
-rwxr-x--- 1 root root 279 Mar 29 17:21 elasticsearch.yml-fail
-rwxr-x--- 1 root root 3208 Mar 29 17:21 elasticsearch.yml.glog
-rw-r----- 1 root root 2847 Mar 29 17:21 elasticsearch.yml-orig
-rwxr-x--- 1 root root 3208 Mar 29 17:21 elasticsearch.yml.rpmsave
-rw-r----- 1 root root 3064 Mar 29 17:21 jvm.options
-rw-r----- 1 root root 4456 Mar 29 17:21 log4j2.properties
-rw-r--r-- 1 root root 2159 Mar 29 17:20 log4j2.xml
-rw-r--r-- 1 root root 37 Mar 29 17:20 node-id
-rw-r--r-- 1 root root 27976 Apr 4 16:33 server.conf
-rw-r--r-- 1 root root 27772 Apr 4 07:40 server.conf-graylog1
-rw-r--r-- 1 root root 27301 Jan 24 17:32 server.conf.orig
[root@graylog1 GRAYLOG_CLUSTER]# grep -v "#" server.conf > server.conf-no-comments
[root@graylog1 GRAYLOG_CLUSTER]# vi server.conf-no-comments
[root@graylog1 GRAYLOG_CLUSTER]# vi server.conf-no-comments
[root@graylog1 GRAYLOG_CLUSTER]# vi server.conf-no-comments

is_master = true
node_id_file = /etc/graylog/server/node-id
password_secret = d2100a1af7dfdfd6e1196ebpassworb17f1c45d333af904c02d00a3883bcbbf
root_username = sdpadmin
root_password_sha2 = d2100a1af7dfdfd6e1196password17f1c45d333af904c02d00a3883bcbbf
root_email = "rgizynski@philasd.org"
root_timezone = America/New_York
plugin_dir = /usr/share/graylog-server/plugin
rest_listen_uri = https://graylog1.philasd.net:9000/api/
rest_transport_uri = https://graylog1.philasd.net:9000/api/
rest_enable_tls = true
rest_tls_cert_file = /usr/ssl/wildcard-all.crt
rest_tls_key_file = /usr/ssl/wildcard-key-pkcs8.pem
rest_tls_key_password = k3yL1m3P13
web_listen_uri = https://graylog1.philasd.net:9000/
web_enable_tls = true
web_tls_cert_file = /usr/ssl/wildcard-all.crt
web_tls_key_file = /usr/ssl/wildcard-key-pkcs8.pem
web_tls_key_password = k3yL1m3P13
elasticsearch_hosts = http://graylog1-mgmt.philasd.net:9200,http://graylog2-mgmt.philasd.net:9200,http://graylog3-mgmt.philasd.net:9200
elasticsearch_discovery_zen_ping_unicast_hosts = graylog1-mgmt.philasd.net,graylog2-mgmt.philasd.net,graylog3-mgmt.philasd.net
elasticsearch_config_file = /etc/elasticsearch/elasticsearch.yml
rotation_strategy = count
elasticsearch_max_docs_per_index = 20000000
elasticsearch_max_number_of_indices = 20
retention_strategy = delete
elasticsearch_shards = 4
elasticsearch_replicas = 2
elasticsearch_index_prefix = graylog
elasticsearch_template_name = graylog-internal
allow_leading_wildcard_searches = false
allow_highlighting = false
elasticsearch_analyzer = standard
output_batch_size = 500
output_flush_interval = 1
output_fault_count_threshold = 5
output_fault_penalty_seconds = 30
processbuffer_processors = 5
outputbuffer_processors = 3
processor_wait_strategy = blocking
ring_size = 65536
inputbuffer_ring_size = 65536
inputbuffer_processors = 2
inputbuffer_wait_strategy = blocking
message_journal_enabled = true
message_journal_dir = /var/lib/graylog-server/journal
lb_recognition_period_seconds = 3
alert_check_interval = 60
mongodb_uri = mongodb://admin:passwd@graylog1-mgmt.philasd.net:27017,graylog2-mgmt.philasd.net:27017,graylog3-mgmt.philasd.net:27017/graylog
mongodb_max_connections = 1000
mongodb_threads_allowed_to_block_multiplier = 5
transport_email_web_interface_url = https://graylog2.philasd.net:9000
content_packs_dir = /usr/share/graylog-server/contentpacks
content_packs_auto_load = grok-patterns.json
proxied_requests_thread_pool_size = 32
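
Given the multi-homed setup, a quick sanity check (a sketch; the hostnames come from elasticsearch_hosts above) is to confirm that every Graylog node can reach each Elasticsearch HTTP endpoint:

# run on each Graylog node; every call should return JSON containing "cluster_name" : "graylog"
for h in graylog1-mgmt graylog2-mgmt graylog3-mgmt; do
  curl -s "http://${h}.philasd.net:9200/"; echo
done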

Here is the server.log on the primary node:

2018-04-05T08:25:23.059-04:00 ERROR [Messages] Caught exception during bulk indexing: io.searchbox.client.config.exception.CouldNotConnectException: Could not connect to http://graylog1-mgmt.philasd.net:9200, retrying (attempt #6).
2018-04-05T08:25:23.127-04:00 ERROR [Messages] Caught exception during bulk indexing: io.searchbox.client.config.exception.CouldNotConnectException: Could not connect to http://graylog1-mgmt.philasd.net:9200, retrying (attempt #7).
2018-04-05T08:25:23.257-04:00 ERROR [Messages] Caught exception during bulk indexing: io.searchbox.client.config.exception.CouldNotConnectException: Could not connect to http://graylog1-mgmt.philasd.net:9200, retrying (attempt #8).
2018-04-05T08:25:23.525-04:00 ERROR [Messages] Caught exception during bulk indexing: io.searchbox.client.config.exception.CouldNotConnectException: Could not connect to http://graylog1-mgmt.philasd.net:9200, retrying (attempt #9).
2018-04-05T08:25:24.046-04:00 ERROR [Messages] Caught exception during bulk indexing: io.searchbox.client.config.exception.CouldNotConnectException: Could not connect to http://graylog1-mgmt.philasd.net:9200, retrying (attempt #10).
2018-04-05T08:25:25.100-04:00 ERROR [Messages] Caught exception during bulk indexing: io.searchbox.client.config.exception.CouldNotConnectException: Could not connect to http://graylog1-mgmt.philasd.net:9200, retrying (attempt #11).
2018-04-05T08:25:27.164-04:00 ERROR [Messages] Caught exception during bulk indexing: io.searchbox.client.config.exception.CouldNotConnectException: Could not connect to http://graylog1-mgmt.philasd.net:9200, retrying (attempt #12).
2018-04-05T08:25:32.698-04:00 INFO [Messages] Bulk indexing finally successful (attempt #13).
2018-04-05T08:25:32.705-04:00 WARN [Messages] Failed to index message: index=<graylog_deflector> id=<5c4d1701-38cc-11e8-b654-005056b77e86> error=<{"type":"invalid_index_name_exception","reason":"Invalid index name [graylog_deflector], already exists as alias","index_uuid":"na","index":"graylog_deflector"}>
2018-04-05T08:25:32.706-04:00 WARN [Messages] Failed to index message: index=<graylog_deflector> id=<5c4d1702-38cc-11e8-b654-005056b77e86> error=<{"type":"invalid_index_name_exception","reason":"Invalid index name [graylog_deflector], already exists as alias","index_uuid":"na","index":"graylog_deflector"}>
2018-04-05T08:25:32.707-04:00 WARN [Messages] Failed to index message: index=<graylog_deflector> id=<5c4d3e10-38cc-11e8-b654-005056b77e86> error=<{"type":"invalid_index_name_exception","reason":"Invalid index name [graylog_deflector], already exists as alias","index_uuid":"na","index":"graylog_deflector"}>
2018-04-05T08:25:32.707-04:00 WARN [Messages] Failed to index message: index=<graylog_deflector> id=<5c4d1700-38cc-11e8-b654-005056b77e86> error=<{"type":"invalid_index_name_exception","reason":"Invalid index name [graylog_deflector], already exists as alias","index_uuid":"na","index":"graylog_deflector"}>
2018-04-05T08:25:32.707-04:00 WARN [Messages] Failed to index message: index=<graylog_deflector> id=<5c4d3e12-38cc-11e8-b654-005056b77e86> error=<{"type":"invalid_index_name_exception","reason":"Invalid index name [graylog_deflector], already exists as alias","index_uuid":"na","index":"graylog_deflector"}>
2018-04-05T08:25:32.707-04:00 WARN [Messages] Failed to index message: index=<graylog_deflector> id=<5c4d3e11-38cc-11e8-b654-005056b77e86> error=<{"type":"invalid_index_name_exception","reason":"Invalid index name [graylog_deflector], already exists as alias","index_uuid":"na","index":"graylog_deflector"}>
2018-04-05T08:25:32.707-04:00 WARN [Messages] Failed to index message: index=<graylog_deflector> id=<5c4d3e14-38cc-11e8-b654-005056b77e86> error=<{"type":"invalid_index_name_exception","reason":"Invalid index name [graylog_deflector], already exists as alias","index_uuid":"na","index":"graylog_deflector"}>
2018-04-05T08:25:32.707-04:00 WARN [Messages] Failed to index message: index=<graylog_deflector> id=<5c4d3e13-38cc-11e8-b654-005056b77e86> error=<{"type":"invalid_index_name_exception","reason":"Invalid index name [graylog_deflector], already exists as alias","index_uuid":"na","index":"graylog_deflector"}>
2018-04-05T08:25:32.707-04:00 ERROR [Messages] Failed to index [8] messages. Please check the index error log in your web interface for the reason. Error: One or more of the items in the Bulk request failed, check BulkResult.getItems() for more information.
2018-04-05T08:26:31.268-04:00 ERROR [IndexRotationThread] Couldn’t point deflector to a new index
org.graylog2.indexer.ElasticsearchException: Couldn’t check existence of alias graylog_deflector
at org.graylog2.indexer.indices.Indices.aliasExists(Indices.java:298) ~[graylog.jar:?]
at org.graylog2.indexer.MongoIndexSet.isUp(MongoIndexSet.java:224) ~[graylog.jar:?]
at org.graylog2.periodical.IndexRotationThread.checkAndRepair(IndexRotationThread.java:127) ~[graylog.jar:?]
at org.graylog2.periodical.IndexRotationThread.lambda$doRun$0(IndexRotationThread.java:76) ~[graylog.jar:?]
at java.lang.Iterable.forEach(Iterable.java:75) [?:1.8.0_161]
at org.graylog2.periodical.IndexRotationThread.doRun(IndexRotationThread.java:73) [graylog.jar:?]
at org.graylog2.plugin.periodical.Periodical.run(Periodical.java:77) [graylog.jar:?]
at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511) [?:1.8.0_161]
at java.util.concurrent.FutureTask.runAndReset(FutureTask.java:308) [?:1.8.0_161]
at java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.access$301(ScheduledThreadPoolExecutor.java:180) [?:1.8.0_161]
at java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:294) [?:1.8.0_161]
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149) [?:1.8.0_161]
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) [?:1.8.0_161]
at java.lang.Thread.run(Thread.java:748) [?:1.8.0_161]
Caused by: java.net.SocketTimeoutException: Read timed out
at java.net.SocketInputStream.socketRead0(Native Method) ~[?:1.8.0_161]
at java.net.SocketInputStream.socketRead(SocketInputStream.java:116) ~[?:1.8.0_161]
at java.net.SocketInputStream.read(SocketInputStream.java:171) ~[?:1.8.0_161]
at java.net.SocketInputStream.read(SocketInputStream.java:141) ~[?:1.8.0_161]
at org.apache.http.impl.io.SessionInputBufferImpl.streamRead(SessionInputBufferImpl.java:137) ~[graylog.jar:?]
at org.apache.http.impl.io.SessionInputBufferImpl.fillBuffer(SessionInputBufferImpl.java:153) ~[graylog.jar:?]
at org.apache.http.impl.io.SessionInputBufferImpl.readLine(SessionInputBufferImpl.java:282) ~[graylog.jar:?]
at org.apache.http.impl.conn.DefaultHttpResponseParser.parseHead(DefaultHttpResponseParser.java:138) ~[graylog.jar:?]
at org.apache.http.impl.conn.DefaultHttpResponseParser.parseHead(DefaultHttpResponseParser.java:56) ~[graylog.jar:?]
at org.apache.http.impl.io.AbstractMessageParser.parse(AbstractMessageParser.java:259) ~[graylog.jar:?]
at org.apache.http.impl.DefaultBHttpClientConnection.receiveResponseHeader(DefaultBHttpClientConnection.java:163) ~[graylog.jar:?]
at org.apache.http.impl.conn.CPoolProxy.receiveResponseHeader(CPoolProxy.java:165) ~[graylog.jar:?]
at org.apache.http.protocol.HttpRequestExecutor.doReceiveResponse(HttpRequestExecutor.java:273) ~[graylog.jar:?]
at org.apache.http.protocol.HttpRequestExecutor.execute(HttpRequestExecutor.java:125) ~[graylog.jar:?]
at org.apache.http.impl.execchain.MainClientExec.execute(MainClientExec.java:272) ~[graylog.jar:?]
at org.apache.http.impl.execchain.ProtocolExec.execute(ProtocolExec.java:185) ~[graylog.jar:?]
at org.apache.http.impl.execchain.RetryExec.execute(RetryExec.java:89) ~[graylog.jar:?]
at org.apache.http.impl.execchain.RedirectExec.execute(RedirectExec.java:111) ~[graylog.jar:?]
at org.apache.http.impl.client.InternalHttpClient.doExecute(InternalHttpClient.java:185) ~[graylog.jar:?]
at org.apache.http.impl.client.CloseableHttpClient.execute(CloseableHttpClient.java:83) ~[graylog.jar:?]
at org.apache.http.impl.client.CloseableHttpClient.execute(CloseableHttpClient.java:108) ~[graylog.jar:?]
at io.searchbox.client.http.JestHttpClient.executeRequest(JestHttpClient.java:151) ~[graylog.jar:?]
at io.searchbox.client.http.JestHttpClient.execute(JestHttpClient.java:77) ~[graylog.jar:?]
at io.searchbox.client.http.JestHttpClient.execute(JestHttpClient.java:64) ~[graylog.jar:?]
at org.graylog2.indexer.indices.Indices.aliasExists(Indices.java:295) ~[graylog.jar:?]
… 13 more
2018-04-05T08:41:51.703-04:00 ERROR [MongoAuditLogPeriodical] Not running cleanup for auditlog entries in MongoDB because there is no valid license.
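
The invalid_index_name_exception entries above say something tried to write to graylog_deflector as a concrete index while it already exists as an alias. A quick way to see what is actually there (a sketch, using the first Elasticsearch host from the config):

# list aliases and indices; graylog_deflector should be an alias pointing at the newest graylog_N index
curl -s "http://graylog1-mgmt.philasd.net:9200/_cat/aliases?v"
curl -s "http://graylog1-mgmt.philasd.net:9200/_cat/indices?v"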

Elasticsearch log:

[2018-04-05T08:25:24,770][WARN ][o.e.b.Natives ] cannot check if running as root because JNA is not available
[2018-04-05T08:25:24,770][WARN ][o.e.b.Natives ] cannot register console handler because JNA is not available
[2018-04-05T08:25:24,771][WARN ][o.e.b.Natives ] cannot getrlimit RLIMIT_NPROC because JNA is not available
[2018-04-05T08:25:24,771][WARN ][o.e.b.Natives ] cannot getrlimit RLIMIT_AS because JNA is not available
[2018-04-05T08:25:24,772][WARN ][o.e.b.Natives ] cannot getrlimit RLIMIT_FSIZE because JNA is not available
[2018-04-05T08:25:24,894][INFO ][o.e.n.Node ] [] initializing …
[2018-04-05T08:25:24,980][INFO ][o.e.e.NodeEnvironment ] [qDUPSsw] using [1] data paths, mounts [[/var (/dev/mapper/centos-var)]], net usable_space [700mb], net total_space [1.4gb], spins? [possibly], types [xfs]
[2018-04-05T08:25:24,981][INFO ][o.e.e.NodeEnvironment ] [qDUPSsw] heap size [1.9gb], compressed ordinary object pointers [true]
[2018-04-05T08:25:25,013][INFO ][o.e.n.Node ] node name [qDUPSsw] derived from node ID [qDUPSswHQVWUbdZLO-knyQ]; set [node.name] to override
[2018-04-05T08:25:25,013][INFO ][o.e.n.Node ] version[5.6.8], pid[10717], build[688ecce/2018-02-16T16:46:30.010Z], OS[Linux/3.10.0-693.17.1.el7.x86_64/amd64], JVM[Oracle Corporation/OpenJDK 64-Bit Server VM/1.8.0_161/25.161-b14]
[2018-04-05T08:25:25,013][INFO ][o.e.n.Node ] JVM arguments [-Xms2g, -Xmx2g, -XX:+UseConcMarkSweepGC, -XX:CMSInitiatingOccupancyFraction=75, -XX:+UseCMSInitiatingOccupancyOnly, -XX:+AlwaysPreTouch, -Xss1m, -Djava.awt.headless=true, -Dfile.encoding=UTF-8, -Djna.nosys=true, -Djdk.io.permissionsUseCanonicalPath=true, -Dio.netty.noUnsafe=true, -Dio.netty.noKeySetOptimization=true, -Dio.netty.recycler.maxCapacityPerThread=0, -Dlog4j.shutdownHookEnabled=false, -Dlog4j2.disable.jmx=true, -Dlog4j.skipJansi=true, -XX:+HeapDumpOnOutOfMemoryError, -Des.path.home=/usr/share/elasticsearch]
[2018-04-05T08:25:26,142][INFO ][o.e.p.PluginsService ] [qDUPSsw] loaded module [aggs-matrix-stats]
[2018-04-05T08:25:26,142][INFO ][o.e.p.PluginsService ] [qDUPSsw] loaded module [ingest-common]
[2018-04-05T08:25:26,142][INFO ][o.e.p.PluginsService ] [qDUPSsw] loaded module [lang-expression]
[2018-04-05T08:25:26,142][INFO ][o.e.p.PluginsService ] [qDUPSsw] loaded module [lang-groovy]
[2018-04-05T08:25:26,142][INFO ][o.e.p.PluginsService ] [qDUPSsw] loaded module [lang-mustache]
[2018-04-05T08:25:26,142][INFO ][o.e.p.PluginsService ] [qDUPSsw] loaded module [lang-painless]
[2018-04-05T08:25:26,142][INFO ][o.e.p.PluginsService ] [qDUPSsw] loaded module [parent-join]
[2018-04-05T08:25:26,142][INFO ][o.e.p.PluginsService ] [qDUPSsw] loaded module [percolator]
[2018-04-05T08:25:26,142][INFO ][o.e.p.PluginsService ] [qDUPSsw] loaded module [reindex]
[2018-04-05T08:25:26,142][INFO ][o.e.p.PluginsService ] [qDUPSsw] loaded module [transport-netty3]
[2018-04-05T08:25:26,142][INFO ][o.e.p.PluginsService ] [qDUPSsw] loaded module [transport-netty4]
[2018-04-05T08:25:26,143][INFO ][o.e.p.PluginsService ] [qDUPSsw] no plugins loaded
[2018-04-05T08:25:28,218][INFO ][o.e.d.DiscoveryModule ] [qDUPSsw] using discovery type [zen]
[2018-04-05T08:25:29,044][INFO ][o.e.n.Node ] initialized
[2018-04-05T08:25:29,044][INFO ][o.e.n.Node ] [qDUPSsw] starting …
[2018-04-05T08:25:29,232][INFO ][o.e.t.TransportService ] [qDUPSsw] publish_address {172.16.5.11:9300}, bound_addresses {172.16.5.11:9300}
[2018-04-05T08:25:29,247][INFO ][o.e.b.BootstrapChecks ] [qDUPSsw] bound or publishing to a non-loopback address, enforcing bootstrap checks
[2018-04-05T08:25:32,381][INFO ][o.e.c.s.ClusterService ] [qDUPSsw] detected_master {SxhDwH3}{SxhDwH3HSg-UBlcxPpDU5Q}{pT1AIm9PQgSrofpDcqDw6A}{graylog3-mgmt.philasd.net}{172.16.5.14:9300}, added {{ueq7c6I}{ueq7c6IVStKlNtZhePNEOg}{QeE-J8nGRnmYKqQ4BaJQ9g}{graylog2-mgmt.philasd.net}{172.16.5.13:9300},{SxhDwH3}{SxhDwH3HSg-UBlcxPpDU5Q}{pT1AIm9PQgSrofpDcqDw6A}{graylog3-mgmt.philasd.net}{172.16.5.14:9300},}, reason: zen-disco-receive(from master [master {SxhDwH3}{SxhDwH3HSg-UBlcxPpDU5Q}{pT1AIm9PQgSrofpDcqDw6A}{graylog3-mgmt.philasd.net}{172.16.5.14:9300} committed version [1]])
[2018-04-05T08:25:32,404][INFO ][o.e.h.n.Netty4HttpServerTransport] [qDUPSsw] publish_address {172.16.5.11:9200}, bound_addresses {172.16.5.11:9200}
[2018-04-05T08:25:32,404][INFO ][o.e.n.Node ] [qDUPSsw] started

elasticsearch.yml (from graylog1):
cluster.name: graylog
discovery.zen.ping.unicast.hosts: ["graylog1-mgmt.philasd.net", "graylog2-mgmt.philasd.net", "graylog3-mgmt.philasd.net"]
http.bind_host: graylog1-mgmt.philasd.net
network.host: graylog1-mgmt.philasd.net
bootstrap.system_call_filter: false
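
On a multi-homed host it can also help to confirm which addresses Elasticsearch actually bound to and which one it publishes to the cluster (a sketch; queried via the back-end hostname):

curl -s "http://graylog1-mgmt.philasd.net:9200/_nodes/_local/http?pretty" | grep -E 'bound_address|publish_address'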

[root@graylog1 ~]# ./elasticsearch_cluster_health_check.sh
{
"cluster_name" : "graylog",
"status" : "green",
"timed_out" : false,
"number_of_nodes" : 3,
"number_of_data_nodes" : 3,
"active_primary_shards" : 4,
"active_shards" : 4,
"relocating_shards" : 0,
"initializing_shards" : 0,
"unassigned_shards" : 0,
"delayed_unassigned_shards" : 0,
"number_of_pending_tasks" : 0,
"number_of_in_flight_fetch" : 0,
"task_max_waiting_in_queue_millis" : 0,
"active_shards_percent_as_number" : 100.0
}
health status index uuid pri rep docs.count docs.deleted store.size pri.store.size
green open graylog_0 CRGhgN5FRTWk3cJv7c1xkQ 4 0 785228 0 216.5mb 216.5mb
{
"cluster_name" : "graylog",
"status" : "green",
"timed_out" : false,
"number_of_nodes" : 3,
"number_of_data_nodes" : 3,
"active_primary_shards" : 4,
"active_shards" : 4,
"relocating_shards" : 0,
"initializing_shards" : 0,
"unassigned_shards" : 0,
"delayed_unassigned_shards" : 0,
"number_of_pending_tasks" : 0,
"number_of_in_flight_fetch" : 0,
"task_max_waiting_in_queue_millis" : 0,
"active_shards_percent_as_number" : 100.0
}
health status index uuid pri rep docs.count docs.deleted store.size pri.store.size
green open graylog_0 CRGhgN5FRTWk3cJv7c1xkQ 4 0 785228 0 216.5mb 216.5mb
{
"cluster_name" : "graylog",
"status" : "green",
"timed_out" : false,
"number_of_nodes" : 3,
"number_of_data_nodes" : 3,
"active_primary_shards" : 4,
"active_shards" : 4,
"relocating_shards" : 0,
"initializing_shards" : 0,
"unassigned_shards" : 0,
"delayed_unassigned_shards" : 0,
"number_of_pending_tasks" : 0,
"number_of_in_flight_fetch" : 0,
"task_max_waiting_in_queue_millis" : 0,
"active_shards_percent_as_number" : 100.0
}
health status index uuid pri rep docs.count docs.deleted store.size pri.store.size
green open graylog_0 CRGhgN5FRTWk3cJv7c1xkQ 4 0 785228 0 216.5mb 216.5mb
[root@graylog1 ~]#

elasticsearch_cluster_health_check.sh:

#!/bin/bash

curl -XGET 'http://graylog1-mgmt.philasd.net:9200/_cluster/health?pretty=true'
curl -XGET 'http://graylog1-mgmt.philasd.net:9200/_cat/indices?v'
curl -XGET 'http://graylog2-mgmt.philasd.net:9200/_cluster/health?pretty=true'
curl -XGET 'http://graylog2-mgmt.philasd.net:9200/_cat/indices?v'
curl -XGET 'http://graylog3-mgmt.philasd.net:9200/_cluster/health?pretty=true'
curl -XGET 'http://graylog3-mgmt.philasd.net:9200/_cat/indices?v'
[root@graylog1 GRAYLOG_CLUSTER]#
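
The Graylog REST API can be checked the same way (a sketch; URL and user taken from the server.conf above, -k because of the wildcard certificate, password prompted interactively):

curl -sk -u sdpadmin "https://graylog1.philasd.net:9000/api/system/cluster/nodes"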

telnet graylog2-mgmt.philasd.net 9200
Trying 172.16.5.13…
Connected to graylog2-mgmt.philasd.net.
Escape character is '^]'.
^]

telnet> quit
Connection closed.

[root@graylog1 ~]# netstat -tulpn | egrep '9000|9200|9300|27017'
tcp 0 0 172.16.5.11:9200 0.0.0.0:* LISTEN 11598/java
tcp 0 0 172.16.5.11:9300 0.0.0.0:* LISTEN 11598/java
tcp 0 0 10.0.103.11:9000 0.0.0.0:* LISTEN 11350/java
tcp 0 0 172.16.5.11:27017 0.0.0.0:* LISTEN 13463/mongod
[root@graylog1 ~]#
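
Since the web/API listener is bound to the 10.x front end while Elasticsearch and MongoDB sit on the 172.x back end, it is also worth confirming that both hostnames resolve to the addresses shown above (a small sketch):

getent hosts graylog1.philasd.net graylog1-mgmt.philasd.net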

http://docs.graylog.org/en/2.4/pages/faq.html#how-do-i-fix-the-deflector-exists-as-an-index-and-is-not-an-alias-error-message

Also make sure to disable automatic index creation in your Elasticsearch cluster:
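
For reference, the steps from that FAQ amount to roughly the following (a sketch, not the exact FAQ text; stop graylog-server on all nodes first and adjust the hostname to your cluster):

# 1. delete the graylog_deflector index that was created by mistake
curl -X DELETE "http://graylog1-mgmt.philasd.net:9200/graylog_deflector"
# 2. disable automatic index creation by adding this to elasticsearch.yml on every Elasticsearch node and restarting:
#      action.auto_create_index: false
# 3. start graylog-server again; it should recreate graylog_deflector as an alias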

That worked. Thanks so much !!

-Bob
