Atualização - 23/04/2026 - 12:17

This commit is contained in:
2026-04-23 12:17:21 -03:00
parent 82ac465600
commit e412681f96
12 changed files with 307 additions and 0 deletions

View File

@@ -0,0 +1,29 @@
# cat /etc/fluent-bit/fluent-bit.conf
# Fluent Bit agent: tails the generated test log file and ships each record
# to the RK-SIEM OpenSearch core over TLS.
[SERVICE]
    Flush 1
    Log_Level info
    Daemon off

# Collect log lines from the generated test file
[INPUT]
    Name tail
    Path /root/teste.log
    Tag logs_host04

# Ship tagged records to RK-SIEM-CORE (OpenSearch)
[OUTPUT]
    Name opensearch
    Match logs_host04
    Host 172.18.0.1
    Port 9200
    Index teste-logs
    Type _doc
    # NOTE(review): default demo credentials in plain text — fine for a lab,
    # replace before any real deployment.
    HTTP_User admin
    HTTP_Passwd admin
    tls On
    # Certificate verification disabled: lab uses the self-signed demo cert
    tls.verify Off
    # Required by OpenSearch, which rejects mapping types in bulk requests
    Suppress_Type_Name On

# Debug output (disabled): uncomment to echo records to stdout
#[OUTPUT]
#    Name stdout
#    Match *

View File

@@ -0,0 +1,56 @@
# RK-SIEM lab stack: OpenSearch core, Dashboards UI and a log-generating host.
services:
  rk-siem-core:
    image: ricardokleber/rk-siem-core:latest
    container_name: rk-siem-core
    environment:
      - cluster.name=rk-siem-core
      - node.name=rk-siem-node
      - discovery.type=single-node
      - bootstrap.memory_lock=true
      - "OPENSEARCH_JAVA_OPTS=-Xms512m -Xmx512m"  # Adjust to the RAM available
      - DISABLE_INSTALL_DEMO_CONFIG=false
    ulimits:
      memlock:
        soft: -1
        hard: -1
      nofile:
        soft: 65536
        hard: 65536
    volumes:
      - rk-siem-data:/usr/share/opensearch/data
    ports:
      # Port mappings quoted: unquoted digit:digit values can hit YAML 1.1
      # implicit-typing traps (Compose docs recommend always quoting them).
      - "9200:9200"  # REST API
      - "9600:9600"  # Performance Analyzer
    networks:
      - rk-siem-net
  rk-siem-ui:
    image: ricardokleber/rk-siem-ui:latest
    container_name: rk-siem-ui
    ports:
      - "5601:5601"  # Web interface
    expose:
      - "5601"  # quoted so the port stays a string, not an int
    command: ["/bin/bash", "-c", "/etc/init.d/opensearch-dashboards start && tail -f /var/log/opensearch-dashboards/opensearch-dashboards.stdout"]
    restart: always
    environment:
      - 'OPENSEARCH_HOSTS=["https://rk-siem-core:9200"]'
      - "DISABLE_SECURITY_DASHBOARDS_PLUGIN=false"
    networks:
      - rk-siem-net
    depends_on:
      - rk-siem-core
  rk-siem-host04:
    image: ricardokleber/rk-siem-host04:latest
    container_name: rk-siem-host04
    hostname: rk-siem-host04
    tty: true
    stdin_open: true
    restart: always
volumes:
  rk-siem-data:
networks:
  rk-siem-net:

View File

@@ -0,0 +1,4 @@
PUT /teste-logs/_settings
{
"index.default_pipeline": "rk-siem_web_logs"
}

View File

@@ -0,0 +1 @@
DELETE _ingest/pipeline/rk-siem_web_logs

View File

@@ -0,0 +1 @@
GET /_settings?filter_path=*.settings.index.default_pipeline

View File

@@ -0,0 +1 @@
GET _ingest/pipeline

View File

@@ -0,0 +1,6 @@
PUT /teste-logs/_settings
{
"index": {
"default_pipeline": null
}
}

View File

@@ -0,0 +1,61 @@
PUT _ingest/pipeline/rk-siem_ssh_logs
{
"description": "Pipeline do RK-SIEM para normalização de logs SSH",
"processors": [
{
"grok": {
"field": "log",
"patterns": [
"%{SYSLOGTIMESTAMP:timestamp} %{HOSTNAME:host} %{WORD:program}\\[%{NUMBER:pid}\\]: %{WORD:ssh_event} password for %{USER:user} from %{IP:source_ip} port %{NUMBER:source_port} ssh2",
"%{SYSLOGTIMESTAMP:timestamp} %{HOSTNAME:host} %{WORD:program}\\[%{NUMBER:pid}\\]: %{WORD:ssh_event} for invalid user %{USER:user} from %{IP:source_ip} port %{NUMBER:source_port} ssh2",
"%{SYSLOGTIMESTAMP:timestamp} %{HOSTNAME:host} %{WORD:program}\\[%{NUMBER:pid}\\]: Connection closed by (authenticating |)%{IP:source_ip} port %{NUMBER:source_port}"
],
"description": "Extrai eventos de Accepted, Failed e Disconnected do SSH"
}
},
{
"date": {
"field": "timestamp",
"formats": ["MMM d HH:mm:ss", "MMM dd HH:mm:ss"],
"target_field": "@timestamp",
"description": "Padroniza o tempo vindo do syslog (ex: Oct 10 13:55:36)"
}
},
{
"set": {
"if": "ctx.ssh_event == 'Accepted'",
"field": "event.outcome",
"value": "success"
}
},
{
"set": {
"if": "ctx.ssh_event == 'Failed'",
"field": "event.outcome",
"value": "failure"
}
},
{
"geoip": {
"field": "source_ip",
"target_field": "geo",
"ignore_missing": true,
"description": "Enriquecimento: Localiza a origem do acesso"
}
},
{
"remove": {
"field": ["log", "timestamp", "program"],
"ignore_missing": true
}
}
],
"on_failure": [
{
"set": {
"field": "error.message",
"value": "Falha no processamento do pipeline rk_siem_ssh_logs"
}
}
]
}

View File

@@ -0,0 +1,10 @@
POST _ingest/pipeline/rk-siem_ssh_logs/_simulate
{
"docs": [
{
"_source": {
"log": "Oct 23 11:30:05 servidor-prod sshd[1234]: Failed password for root from 192.168.1.50 port 54321 ssh2"
}
}
]
}

View File

@@ -0,0 +1,51 @@
PUT _ingest/pipeline/rk-siem_web_logs
{
"description": "Pipeline do RK-SIEM para normalização de logs HTTP - Ajustado para campo 'log'",
"processors": [
{
"grok": {
"field": "log",
"patterns": [
"%{IPORHOST:source_ip} - %{USER:user_id} \\[%{HTTPDATE:timestamp}\\] \"%{WORD:http_method} %{NOTSPACE:url_path} HTTP/%{NUMBER:http_version}\" %{NUMBER:status_code} %{NUMBER:bytes_sent}"
],
"description": "Extrai dados do campo 'log' enviado pelo Fluent-bit"
}
},
{
"date": {
"field": "timestamp",
"formats": [
"dd/MMM/yyyy:HH:mm:ss Z"
],
"target_field": "@timestamp"
}
},
{
"convert": {
"field": "status_code",
"type": "integer"
}
},
{
"user_agent": {
"field": "user_agent_string",
"target_field": "browser_info",
"ignore_missing": true
}
},
{
"remove": {
"field": ["log", "timestamp"],
"ignore_missing": true
}
}
],
"on_failure": [
{
"set": {
"field": "error.message",
"value": "Falha no processamento do pipeline rk_siem_web_logs"
}
}
]
}

View File

@@ -0,0 +1,10 @@
POST _ingest/pipeline/rk-siem_web_logs/_simulate
{
"docs": [
{
"_source": {
"log": "192.168.1.10 - - [23/Apr/2026:14:00:12 +0000] \"DELETE /index.html HTTP/1.1\" 500 5124"
}
}
]
}

View File

@@ -0,0 +1,77 @@
import random
import time
from datetime import datetime
# Fictitious data pools used to synthesize log lines.
# Mix of private (RFC 1918) and public addresses, so some entries resolve
# under the pipeline's geoip enrichment and some do not.
IPS = ["192.168.1.10", "10.0.0.5", "172.16.0.2", "45.33.11.2", "185.22.14.5", "200.150.10.1", "8.8.8.8"]
# Account names commonly seen in auth logs.
USERS = ["root", "admin", "user1", "guest", "webmaster", "support", "devops", "db_admin"]
# Request paths, including typical scanner targets (wp-login.php, config.php).
PAGES = ["/index.html", "/login", "/admin/dashboard", "/api/v1/user", "/wp-login.php", "/config.php"]
USER_AGENTS = [
    "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36",
    "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36",
    "curl/7.68.0",
    "python-requests/2.25.1"
]
def generate_apache_log():
    """Return one synthetic Apache combined-format access log line.

    Fields are drawn in a fixed order so that seeded ``random`` runs are
    reproducible. The timezone offset is hard-coded to ``+0000`` even though
    the timestamp comes from local time.
    """
    client = random.choice(IPS)
    when = datetime.now().strftime('%d/%b/%Y:%H:%M:%S +0000')
    verb = random.choice(["GET", "POST", "PUT", "DELETE"])
    resource = random.choice(PAGES)
    code = random.choice([200, 201, 404, 500, 403, 301])
    length = random.randint(150, 8000)
    ua = random.choice(USER_AGENTS)
    return '{0} - - [{1}] "{2} {3} HTTP/1.1" {4} {5} "-" "{6}"'.format(
        client, when, verb, resource, code, length, ua)
def generate_ssh_log():
    """Return one synthetic sshd authentication log line (syslog style)."""
    src = random.choice(IPS)
    account = random.choice(USERS)
    stamp = datetime.now().strftime('%b %d %H:%M:%S')
    node = "host04"
    # NOTE(review): random.random() > 0.6 holds ~40% of the time, so this
    # produces ~40% "Failed" and ~60% "Accepted" lines — the original
    # comment claimed the opposite split.
    outcome = 'Failed' if random.random() > 0.6 else 'Accepted'
    pid = random.randint(1000, 9999)
    port = random.randint(30000, 60000)
    return f'{stamp} {node} sshd[{pid}]: {outcome} password for {account} from {src} port {port} ssh2'
def main():
    """Interactive driver: ask for a log type, a line count and an output
    file name, then write that many synthetic log lines to disk.

    Fixes over the original:
    - any menu input other than "1" silently produced SSH logs; invalid
      choices are now rejected with an error message;
    - non-positive line counts are rejected instead of writing an empty file;
    - the progress indicator no longer prints a spurious "0%" at i == 0.
    """
    print("--- Gerador de Logs Customizado ---")

    # 1. Log type selection
    print("\n[1] Apache2 (HTTP)")
    print("[2] SSH (Auth)")
    service_choice = input("Escolha o tipo de log (1 ou 2): ").strip()
    if service_choice not in ("1", "2"):
        print("Erro: Opção inválida. Escolha 1 ou 2.")
        return

    # 2. Number of lines
    try:
        num_lines = int(input("Quantas linhas de log deseja gerar? "))
    except ValueError:
        print("Erro: Por favor, insira um número válido.")
        return
    if num_lines <= 0:
        print("Erro: O número de linhas deve ser maior que zero.")
        return

    # 3. Output file name
    output_file = input("Digite o nome do arquivo de saída (ex: teste.log): ")

    print(f"\nIniciando geração de {num_lines} linhas em '{output_file}'...")
    generator = generate_apache_log if service_choice == "1" else generate_ssh_log
    try:
        with open(output_file, "w") as f:
            # Show progress roughly every 10% to avoid flooding the terminal
            # on large files; step == 0 disables the indicator entirely.
            step = num_lines // 10 if num_lines > 100 else 0
            for i in range(num_lines):
                f.write(generator() + "\n")
                if step and i and i % step == 0:
                    print(f"Progresso: {round((i/num_lines)*100)}%...")
        print(f"\nSucesso! Arquivo '{output_file}' gerado com {num_lines} linhas.")
    except Exception as e:
        print(f"Ocorreu um erro ao gravar o arquivo: {e}")


if __name__ == "__main__":
    main()