Skip to content

Instantly share code, notes, and snippets.

@perfecto25
Last active February 27, 2026 22:39
Show Gist options
  • Select an option

  • Save perfecto25/c3b219e95338c6da76470dbe9bd3db77 to your computer and use it in GitHub Desktop.

Select an option

Save perfecto25/c3b219e95338c6da76470dbe9bd3db77 to your computer and use it in GitHub Desktop.
Graylog Open with GitHub SSO
---
# Graylog 7 stack: MongoDB (replica set), OpenSearch, Graylog, and
# oauth2-proxy for GitHub Enterprise SSO. Fronted by nginx on the host.
networks:
  graylog:
    driver: bridge

volumes:
  # Named volumes bind-mounted to host paths; the directories under
  # /home/graylog/data must exist before `docker compose up`.
  mongo_data:
    driver: local
    driver_opts:
      type: none
      o: bind
      device: /home/graylog/data/mongo
  opensearch_data:
    driver: local
    driver_opts:
      type: none
      o: bind
      device: /home/graylog/data/opensearch
  graylog_data:
    driver: local
    driver_opts:
      type: none
      o: bind
      device: /home/graylog/data/graylog

services:
  mongodb:
    image: mongo:8.0
    container_name: mongodb
    restart: unless-stopped
    networks:
      - graylog
    volumes:
      - mongo_data:/data/db
      # NOTE(review): the same volume is mounted at both /data/db and
      # /data/configdb — confirm this is intended rather than a
      # separate named volume for configdb.
      - mongo_data:/data/configdb
    command: --replSet rs01 --wiredTigerCacheSizeGB 8
    healthcheck:
      test: ["CMD", "mongosh", "--eval", "db.adminCommand('ping')"]
      interval: 10s
      timeout: 5s
      retries: 10

  # MongoDB replica set init — required for Graylog 7.0
  mongo-init:
    image: mongo:8.0
    networks:
      - graylog
    depends_on:
      mongodb:
        condition: service_healthy
    # "no" must be quoted — bare `no` parses as boolean false in YAML 1.1
    restart: "no"
    entrypoint: >
      mongosh --host mongodb:27017 --eval
      "rs.initiate({_id:'rs01',members:[{_id:0,host:'mongodb:27017'}]})"

  opensearch:
    image: opensearchproject/opensearch:2.19.4
    container_name: opensearch
    restart: unless-stopped
    networks:
      - graylog
    environment:
      - cluster.name=graylog-cluster
      - node.name=opensearch-node1
      - discovery.type=single-node
      - bootstrap.memory_lock=true
      - "OPENSEARCH_JAVA_OPTS=-Xms32g -Xmx32g"
      # Security plugin disabled: OpenSearch is only reachable on the
      # internal compose network, never exposed to the host.
      - DISABLE_SECURITY_PLUGIN=true
      - DISABLE_INSTALL_DEMO_CONFIG=true
      - action.auto_create_index=false
      - indices.query.bool.max_clause_count=32768
    ulimits:
      # memlock unlimited so bootstrap.memory_lock=true can pin the heap
      memlock:
        soft: -1
        hard: -1
      nofile:
        soft: 65536
        hard: 65536
    volumes:
      - opensearch_data:/usr/share/opensearch/data
    healthcheck:
      test:
        [
          "CMD-SHELL",
          'curl -s http://localhost:9200/_cluster/health | grep -qE ''"status":"(green|yellow)"''',
        ]
      interval: 30s
      timeout: 10s
      retries: 10

  graylog:
    image: graylog/graylog:7.0.3-1
    container_name: graylog
    restart: unless-stopped
    networks:
      - graylog
    depends_on:
      mongodb:
        condition: service_healthy
      opensearch:
        condition: service_healthy
    environment:
      - GRAYLOG_PASSWORD_SECRET=${GRAYLOG_PASSWORD_SECRET:?Please configure GRAYLOG_PASSWORD_SECRET in the .env file}
      - GRAYLOG_ROOT_PASSWORD_SHA2=${GRAYLOG_ROOT_PASSWORD_SHA2:?Please configure GRAYLOG_ROOT_PASSWORD_SHA2 in the .env file}
      - GRAYLOG_HTTP_EXTERNAL_URI=http://graylog/
      - GRAYLOG_HTTP_BIND_ADDRESS=0.0.0.0:9000
      - GRAYLOG_TRUSTED_PROXIES=172.18.0.0/16,127.0.0.1/32
      - GRAYLOG_MONGODB_URI=mongodb://mongodb:27017/graylog?replicaSet=rs01
      - GRAYLOG_ELASTICSEARCH_HOSTS=http://opensearch:9200
      # Throughput tuning for 300 servers
      - GRAYLOG_PROCESSBUFFER_PROCESSORS=10
      - GRAYLOG_OUTPUTBUFFER_PROCESSORS=10
      - GRAYLOG_INPUTBUFFER_PROCESSORS=8
      - GRAYLOG_MESSAGE_JOURNAL_MAX_SIZE=10gb
      - GRAYLOG_MESSAGE_JOURNAL_ENABLED=true
      - GRAYLOG_ROOT_TIMEZONE=America/New_York
      - "GRAYLOG_SERVER_JAVA_OPTS=-Xms16g -Xmx16g -XX:+UseG1GC -XX:MaxGCPauseMillis=200 -XX:+UnlockExperimentalVMOptions -XX:+UseStringDeduplication"
    ports:
      - "127.0.0.1:9000:9000" # Web UI (no external access)
      - "5044:5044" # Beats/Filebeat input
      - "1514:1514/udp" # Syslog UDP
      - "1514:1514/tcp" # Syslog TCP
      - "12201:12201/udp" # GELF UDP
      - "12201:12201/tcp" # GELF TCP
    volumes:
      - graylog_data:/usr/share/graylog/data

  oauth2-proxy:
    # NOTE(review): `latest` is unpinned — consider pinning a version tag
    # for reproducible deployments.
    image: quay.io/oauth2-proxy/oauth2-proxy:latest
    container_name: oauth2-proxy
    extra_hosts:
      - "<!!!! YOUR GITHUB HOSTNAME>:<!!!!YOUR GITHUB IP>" # ie - github.corp.com:172.32.33.50
    restart: unless-stopped
    networks:
      - graylog
    command:
      - --provider=github
      - --github-org=<!!!! YOUR GITHUB ORG NAME>
      - --email-domain=*
      - --upstream=http://graylog:9000
      - --http-address=0.0.0.0:4180
      - --cookie-secret=${COOKIE_SECRET:?}
      - --client-id=${GH_CLIENT_ID:?}
      - --client-secret=${GH_CLIENT_SECRET:?}
      - --redirect-url=http://graylog/oauth2/callback # no port, nginx handles it
      - --validate-url=https://<!!!! YOUR GITHUB URL>/api/v3/
      - --login-url=https://<!!!! YOUR GITHUB URL>/login/oauth/authorize
      - --redeem-url=https://<!!!! YOUR GITHUB URL>/login/oauth/access_token
      - --cookie-secure=false
      - --cookie-domain=graylog
      - --cookie-expire=8h
      - --cookie-refresh=1h
      - --whitelist-domain=graylog
      - --pass-access-token=true
      - --set-xauthrequest=true
      - --pass-user-headers=true
      - --user-id-claim=login
      - --custom-sign-in-logo=-
      - --session-store-type=cookie
    ports:
      - "127.0.0.1:4180:4180"
#!/bin/env python3.12
import os
import sys
import json
from dictor import dictor
from dotenv import load_dotenv
import requests
import secrets
import string
# Read GH_ADMIN_TOKEN / GRAYLOG_PASSWORD_SECRET etc. from a local .env file.
load_dotenv()
# Base URL of the GitHub Enterprise REST API (v3).
github_url = 'https://<!!! YOUR GITHUB URL>/api/v3'
# GHE admin token used for all GitHub API calls below.
ghe_token = os.getenv("GH_ADMIN_TOKEN")
gh_headers = {'Authorization': f'token {ghe_token}'}
# Graylog API endpoint and admin credentials for basic auth.
graylog_url = 'http://127.0.0.1:9000'
graylog_admin = 'admin'
# NOTE(review): this reuses GRAYLOG_PASSWORD_SECRET as the admin password —
# confirm the admin password really equals the password secret in this setup.
graylog_pw = os.getenv("GRAYLOG_PASSWORD_SECRET")
# GHE accounts that should never be imported into Graylog.
service_accounts = ["ghost", "github-actions", "actions-admin", "jenkins"]
# only users from these GHE orgs can join Graylog
ghe_orgs = ["MyOrg", "AnotherOrg"]
def _get_org_membership():
    """Return a dict mapping each GHE org login to a list of member logins.

    Exits the process with a nonzero status if the org listing itself fails.
    A failed member listing for a single org is tolerated: that org simply
    ends up with an empty member list.
    """
    orgs = {}
    try:
        # fix: added timeout — requests has no default timeout and would
        # otherwise hang forever on a stalled connection
        response = requests.get(f"{github_url}/organizations",
                                headers=gh_headers, timeout=30)
        response.raise_for_status()
    # fix: RequestException also covers timeouts/connection errors, which
    # HTTPError alone would let propagate as an unhandled traceback
    except requests.RequestException as exception:
        print(exception)
        # fix: exit nonzero on failure (bare sys.exit() exits with 0)
        sys.exit(1)
    for org in response.json():
        login = org['login']
        orgs[login] = []
        # get members of each org
        # NOTE(review): GitHub caps per_page at 100, so per_page=300 is
        # silently truncated — orgs with >100 members need pagination.
        response = requests.get(
            f"{github_url}/orgs/{login}/members?per_page=300",
            headers=gh_headers, timeout=30)
        if response.status_code == 200:
            for member in response.json():
                orgs[login].append(member['login'])
    return orgs
def _get_graylog_users():
    """Return the list of existing Graylog usernames.

    Queries the Graylog REST API with admin basic auth; exits the process
    with a nonzero status on any HTTP or connection failure.
    """
    try:
        # fix: added timeout — requests has no default and can hang forever
        response = requests.get(f"{graylog_url}/api/users",
                                auth=(graylog_admin, graylog_pw), timeout=30)
        response.raise_for_status()
    # fix: RequestException also catches timeouts/connection errors
    except requests.RequestException as exception:
        print(exception)
        # fix: exit nonzero on failure (bare sys.exit() exits with 0)
        sys.exit(1)
    users = [user['username'] for user in response.json()['users']]
    print(f"GL users = {users}")
    return users
def start():
    """Sync GitHub Enterprise users into Graylog.

    Lists every account on the GHE instance, skips service accounts,
    non-User account types, suspended users, users with 2FA disabled, and
    users who belong to orgs but to none of the allowed ``ghe_orgs``; any
    remaining user missing from Graylog is created via the Graylog REST
    API with a random throwaway password (login happens via SSO).
    Exits the process with a nonzero status on any HTTP failure.
    """
    # get all Graylog users as a list
    graylog_users = _get_graylog_users()
    # get all GH orgs
    orgs = _get_org_membership()

    # collect logins whose GHE account has two-factor auth disabled
    disabled2fa = []
    for org in orgs:
        response = requests.get(
            f"{github_url}/orgs/{org}/members?filter=2fa_disabled",
            headers=gh_headers, timeout=30)
        if response.status_code == 200:
            for member in response.json():
                disabled2fa.append(member['login'])

    # list every account on the GHE instance
    try:
        response = requests.get(f'{github_url}/users?per_page=300',
                                headers=gh_headers, timeout=30)
        # fix: the original never called raise_for_status(), so requests
        # never raised HTTPError and the except branch below was dead code
        response.raise_for_status()
        members = response.json()
    except requests.RequestException as exception:
        print(exception)
        # fix: exit nonzero on failure (bare sys.exit() exits with 0)
        sys.exit(1)

    for member in sorted(members, key=lambda d: d["login"]):
        # fix: filter out non-users and service accounts BEFORE the
        # per-user API call below, saving one HTTP round trip each
        if member['type'] != "User" or member['login'] in service_accounts:
            continue
        user = member['login']
        try:
            response = requests.get(f"{github_url}/users/{user}",
                                    headers=gh_headers, timeout=30)
            # fix: same dead-except bug as above — raise on HTTP errors
            response.raise_for_status()
        except requests.RequestException as exception:
            print(f"[red]{exception}[/red]")
            sys.exit(1)
        data = response.json()

        # get users organization membership
        org_membership = [org_name for org_name, users in orgs.items()
                          if user in users]
        if org_membership:
            # user must be in at least one of the allowed GHE orgs
            if not set(ghe_orgs) & set(org_membership):
                print(f"user {user} is not allowed to join graylog, is not part of allowed GHE orgs: {org_membership}")
                continue

        suspended = dictor(data, "suspended_at")
        site_admin = dictor(data, "site_admin")
        if user in disabled2fa:
            print(f"USER {user} has 2FA disabled on GHE side, skipping importing this user to Graylog..")
            continue
        if suspended:
            print(f"USER {user} is suspended, skipping import to Graylog..")
            continue

        # check if user exists on Graylog
        if user not in graylog_users:
            print(f"importing user {user} to graylog..")
            # random throwaway password — real auth happens via SSO
            alphabet = string.ascii_letters + string.digits
            password = ''.join(secrets.choice(alphabet) for _ in range(20))
            roles = ["Reader"]
            # GHE site admins get the Graylog Admin role as well
            if site_admin:
                roles.append("Admin")
            json_data = {
                "username": user,
                "first_name": user,
                "last_name": user,
                "email": f"{user}@company.com",
                "roles": roles,
                "password": password,
                "session_timeout_ms": 7200000,  # 2 hours
                "service_account": False,
                "permissions": [],
                "timezone": "America/New_York",
            }
            try:
                url = f"{graylog_url}/api/users"
                # X-Requested-By header is required by Graylog's CSRF check
                response = requests.post(url, auth=(graylog_admin, graylog_pw),
                                         json=json_data,
                                         headers={"X-Requested-By": "curl"},
                                         timeout=30)
            except requests.RequestException as exception:
                print(exception)
                sys.exit(1)
            if response.status_code != 201:
                print(f"unable to create graylog user {user}")
                sys.exit(1)

    print("\n\nGH > Graylog sync is complete")


if __name__ == "__main__":
    start()
# Reverse proxy in front of Graylog (127.0.0.1:9000) and oauth2-proxy
# (127.0.0.1:4180). All directives preserved; formatting restored
# (indentation was lost in extraction).
server {
    listen 80;
    server_name graylog mars;

    # Explicit logout: clear the oauth2-proxy cookie, then return to sign-in.
    location /logout {
        return 302 /oauth2/sign_out?rd=http%3A%2F%2Fgraylog%2Foauth2%2Fsign_in;
    }

    # Graylog redirects here after logout - send straight to oauth2 sign in
    location = /login {
        return 302 /oauth2/sign_out?rd=http%3A%2F%2Fgraylog%2Foauth2%2Fsign_in;
    }

    # All oauth2-proxy endpoints (start, callback, sign_in, sign_out, ...).
    location /oauth2/ {
        proxy_pass http://127.0.0.1:4180;
        proxy_set_header Host $host;
        proxy_set_header X-Real-IP $remote_addr;
        proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
        proxy_set_header X-Forwarded-Proto $scheme;
    }

    # Subrequest target for auth_request: must not forward a request body.
    location = /oauth2/auth {
        proxy_pass http://127.0.0.1:4180;
        proxy_set_header Host $host;
        proxy_set_header X-Real-IP $remote_addr;
        proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
        proxy_set_header X-Forwarded-Proto $scheme;
        proxy_set_header Content-Length "";
        proxy_pass_request_body off;
    }

    # Static assets served straight from Graylog, no auth subrequest.
    location /assets/ {
        proxy_pass http://127.0.0.1:9000;
        proxy_set_header Host $host;
        proxy_set_header X-Real-IP $remote_addr;
        proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
        proxy_set_header X-Forwarded-Proto $scheme;
    }

    location /config.js {
        proxy_pass http://127.0.0.1:9000;
        proxy_set_header Host $host;
        proxy_set_header X-Real-IP $remote_addr;
        proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
        proxy_set_header X-Forwarded-Proto $scheme;
    }

    # Intercept Graylog session DELETE (logout) and immediately redirect
    # browser to oauth2 sign_out which clears the cookie
    location ~ ^/api/system/sessions {
        if ($request_method = DELETE) {
            return 302 /oauth2/sign_out?rd=http%3A%2F%2Fgraylog%2Foauth2%2Fsign_in;
        }
        auth_request /oauth2/auth;
        auth_request_set $auth_user $upstream_http_x_auth_request_user;
        proxy_set_header X-Forwarded-User $auth_user;
        proxy_pass http://127.0.0.1:9000;
        proxy_set_header Host $host;
        proxy_set_header X-Real-IP $remote_addr;
        proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
        proxy_set_header X-Forwarded-Proto $scheme;
        proxy_http_version 1.1;
        proxy_set_header Upgrade $http_upgrade;
        proxy_set_header Connection "upgrade";
        proxy_read_timeout 90;
    }

    # Graylog REST API: gate behind oauth2-proxy via auth_request and pass
    # the authenticated GitHub login as X-Forwarded-User.
    location /api/ {
        auth_request /oauth2/auth;
        auth_request_set $auth_user $upstream_http_x_auth_request_user;
        proxy_set_header X-Forwarded-User $auth_user;
        proxy_pass http://127.0.0.1:9000;
        proxy_set_header Host $host;
        proxy_set_header X-Real-IP $remote_addr;
        proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
        proxy_set_header X-Forwarded-Proto $scheme;
        proxy_http_version 1.1;
        proxy_set_header Upgrade $http_upgrade;
        proxy_set_header Connection "upgrade";
        proxy_read_timeout 90;
        # API callers get JSON 401 instead of a login redirect.
        error_page 401 = @api_unauthorized;
    }

    location @api_unauthorized {
        default_type application/json;
        return 401 '{"type":"ApiError","message":"Unauthorized"}';
    }

    # Everything else goes through oauth2-proxy (which upstreams to Graylog).
    location / {
        proxy_pass http://127.0.0.1:4180;
        proxy_set_header Host $host;
        proxy_set_header X-Real-IP $remote_addr;
        proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
        proxy_set_header X-Forwarded-Proto $scheme;
        proxy_http_version 1.1;
        proxy_set_header Upgrade $http_upgrade;
        proxy_set_header Connection "upgrade";
        proxy_read_timeout 90;
        proxy_buffer_size 128k;
        proxy_buffers 4 256k;
        proxy_busy_buffers_size 256k;
    }

    # NOTE(review): this named location appears unreferenced in this server
    # block — confirm whether an error_page directive was meant to use it.
    location @oauth2_login {
        return 302 /oauth2/start?rd=$request_uri;
    }
}
# Systemd unit that brings the Graylog docker-compose stack up and down.
[Unit]
Description=Graylog service with docker compose
# Stopped/restarted together with the Docker daemon.
PartOf=docker.service
After=docker.service
[Service]
# oneshot + RemainAfterExit: `compose up -d` returns immediately, but the
# unit stays "active" so ExecStop runs on `systemctl stop`.
Type=oneshot
RemainAfterExit=true
# Directory containing the compose file.
WorkingDirectory=/home/graylog
# NOTE(review): docker is often installed at /usr/bin/docker — confirm
# the /bin/docker path exists on the target host.
ExecStart=/bin/docker compose up -d --remove-orphans
ExecStop=/bin/docker compose down
[Install]
WantedBy=multi-user.target
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment