committing changes in /etc made by "-bash"
Package changes:
@@ -1,51 +0,0 @@
#Generated acquisition file - wizard.sh (service: apache2) / files : /var/log/httpd/error_log
filenames:
  - /var/log/httpd/error_log
labels:
  type: apache2
---
#Generated acquisition file - wizard.sh (service: nginx) / files : /var/log/nginx/vpn.898.ro.error.log /var/log/nginx/mail.anywhere.ro.error.log /var/log/nginx/zira.898.ro.error.log /var/log/nginx/secure.898.ro.access.log /var/log/nginx/trace.898.ro.access.log /var/log/nginx/trace.898.ro.error.log /var/log/nginx/vpn.898.ro.access.log /var/log/nginx/zira.898.ro.access.log /var/log/nginx/git.898.ro.access.log /var/log/nginx/mail.club3d.ro.access.log /var/log/nginx/mail.anywhere.ro.access.log /var/log/nginx/access.log /var/log/nginx/error.log /var/log/nginx/files.898.ro.error.log /var/log/nginx/mail.club3d.ro.error.log /var/log/nginx/club3d.ro.error.log /var/log/nginx/anywhere.ro.access.log /var/log/nginx/jekyll.club3d.ro.access.log /var/log/nginx/jekyll.club3d.ro.error.log /var/log/nginx/club3d.ro.access.log /var/log/nginx/mail.898.ro.access.log /var/log/nginx/files.898.ro.access.log
filenames:
  - /var/log/nginx/vpn.898.ro.error.log
  - /var/log/nginx/mail.anywhere.ro.error.log
  - /var/log/nginx/zira.898.ro.error.log
  - /var/log/nginx/secure.898.ro.access.log
  - /var/log/nginx/trace.898.ro.access.log
  - /var/log/nginx/trace.898.ro.error.log
  - /var/log/nginx/vpn.898.ro.access.log
  - /var/log/nginx/zira.898.ro.access.log
  - /var/log/nginx/git.898.ro.access.log
  - /var/log/nginx/mail.club3d.ro.access.log
  - /var/log/nginx/mail.anywhere.ro.access.log
  - /var/log/nginx/access.log
  - /var/log/nginx/error.log
  - /var/log/nginx/files.898.ro.error.log
  - /var/log/nginx/mail.club3d.ro.error.log
  - /var/log/nginx/club3d.ro.error.log
  - /var/log/nginx/anywhere.ro.access.log
  - /var/log/nginx/jekyll.club3d.ro.access.log
  - /var/log/nginx/jekyll.club3d.ro.error.log
  - /var/log/nginx/club3d.ro.access.log
  - /var/log/nginx/mail.898.ro.access.log
  - /var/log/nginx/files.898.ro.access.log
labels:
  type: nginx
---
#Generated acquisition file - wizard.sh (service: sshd) / files : /var/log/secure
filenames:
  - /var/log/secure
labels:
  type: syslog
---
#Generated acquisition file - wizard.sh (service: mysql) / files :
journalctl_filter:
  - _SYSTEMD_UNIT=mysql.service
labels:
  type: mysql
---
#Generated acquisition file - wizard.sh (service: linux) / files : /var/log/messages
filenames:
  - /var/log/messages
labels:
  type: syslog
---
@@ -1,63 +0,0 @@
common:
  daemonize: true
  log_media: file
  log_level: info
  log_dir: /var/log/
  log_max_size: 20
  compress_logs: true
  log_max_files: 10
  working_dir: .
config_paths:
  config_dir: /etc/crowdsec/
  data_dir: /var/lib/crowdsec/data/
  simulation_path: /etc/crowdsec/simulation.yaml
  hub_dir: /etc/crowdsec/hub/
  index_path: /etc/crowdsec/hub/.index.json
  notification_dir: /etc/crowdsec/notifications/
  plugin_dir: /usr/lib64/crowdsec/plugins/
crowdsec_service:
  #console_context_path: /etc/crowdsec/console/context.yaml
  acquisition_path: /etc/crowdsec/acquis.yaml
  acquisition_dir: /etc/crowdsec/acquis.d
  parser_routines: 1
cscli:
  output: human
  color: auto
db_config:
  log_level: info
  type: sqlite
  db_path: /var/lib/crowdsec/data/crowdsec.db
  #max_open_conns: 100
  #user:
  #password:
  #db_name:
  #host:
  #port:
  flush:
    max_items: 5000
    max_age: 7d
plugin_config:
  user: nobody # plugin process would be run on behalf of this user
  group: nobody # plugin process would be run on behalf of this group
api:
  client:
    insecure_skip_verify: false
    credentials_path: /etc/crowdsec/local_api_credentials.yaml
  server:
    log_level: info
    listen_uri: 127.0.0.1:8080
    profiles_path: /etc/crowdsec/profiles.yaml
    console_path: /etc/crowdsec/console.yaml
    online_client: # Central API credentials (to push signals and receive bad IPs)
      credentials_path: /etc/crowdsec/online_api_credentials.yaml
    trusted_ips: # IP ranges, or IPs which can have admin API access
      - 127.0.0.1
      - ::1
    # tls:
    #   cert_file: /etc/crowdsec/ssl/cert.pem
    #   key_file: /etc/crowdsec/ssl/key.pem
prometheus:
  enabled: true
  level: full
  listen_addr: 127.0.0.1
  listen_port: 6060
@@ -1,4 +0,0 @@
share_manual_decisions: false
share_custom: true
share_tainted: true
share_context: false
crowdsec/hub/.index.json
File diff suppressed because one or more lines are too long
@@ -1,13 +0,0 @@
parsers:
  #generic post-parsing of http stuff
  - crowdsecurity/apache2-logs
collections:
  - crowdsecurity/base-http-scenarios
description: "apache2 support : parser and generic http scenarios"
author: crowdsecurity
tags:
  - linux
  - apache2
  - crawl
  - scan
@@ -1,25 +0,0 @@
parsers:
  - crowdsecurity/http-logs
scenarios:
  - crowdsecurity/http-crawl-non_statics
  - crowdsecurity/http-probing
  - crowdsecurity/http-bad-user-agent
  - crowdsecurity/http-path-traversal-probing
  - crowdsecurity/http-sensitive-files
  - crowdsecurity/http-sqli-probing
  - crowdsecurity/http-xss-probing
  - crowdsecurity/http-backdoors-attempts
  - ltsich/http-w00tw00t
  - crowdsecurity/http-generic-bf
  - crowdsecurity/http-open-proxy
collections:
  - crowdsecurity/http-cve
description: "http common : scanners detection"
author: crowdsecurity
tags:
  - linux
  - http
  - crawl
  - scan
@@ -1,33 +0,0 @@
scenarios:
  - crowdsecurity/http-cve-2021-41773
  - crowdsecurity/http-cve-2021-42013
  - crowdsecurity/grafana-cve-2021-43798
  - crowdsecurity/vmware-vcenter-vmsa-2021-0027
  - crowdsecurity/fortinet-cve-2018-13379
  - crowdsecurity/pulse-secure-sslvpn-cve-2019-11510
  - crowdsecurity/f5-big-ip-cve-2020-5902
  - crowdsecurity/thinkphp-cve-2018-20062
  - crowdsecurity/apache_log4j2_cve-2021-44228
  - crowdsecurity/jira_cve-2021-26086
  - crowdsecurity/spring4shell_cve-2022-22965
  - crowdsecurity/vmware-cve-2022-22954
  - crowdsecurity/CVE-2022-37042
  - crowdsecurity/CVE-2022-41082
  - crowdsecurity/CVE-2022-35914
  - crowdsecurity/CVE-2022-40684
  - crowdsecurity/CVE-2022-26134
  - crowdsecurity/CVE-2022-42889
  - crowdsecurity/CVE-2022-41697
  - crowdsecurity/CVE-2022-46169
  - crowdsecurity/CVE-2022-44877
  - crowdsecurity/CVE-2019-18935
  - crowdsecurity/netgear_rce
  - crowdsecurity/CVE-2023-22515
  - crowdsecurity/CVE-2023-22518
author: crowdsecurity
description: "Detect CVE exploitation in http logs"
tags:
  - web
  - exploit
  - cve
  - http
@@ -1,11 +0,0 @@
parsers:
  - crowdsecurity/syslog-logs
  - crowdsecurity/geoip-enrich
  - crowdsecurity/dateparse-enrich
collections:
  - crowdsecurity/sshd
description: "core linux support : syslog+geoip+ssh"
author: crowdsecurity
tags:
  - linux
@@ -1,10 +0,0 @@
parsers:
  - crowdsecurity/mysql-logs
scenarios:
  - crowdsecurity/mysql-bf
description: "mysql support : logs and brute-force scenarios"
author: crowdsecurity
tags:
  - linux
  - mysql
  - bruteforce
@@ -1,15 +0,0 @@
parsers:
  #generic post-parsing of http stuff
  - crowdsecurity/nginx-logs
collections:
  - crowdsecurity/base-http-scenarios
scenarios:
  - crowdsecurity/nginx-req-limit-exceeded
description: "nginx support : parser and generic http scenarios"
author: crowdsecurity
tags:
  - linux
  - nginx
  - crawl
  - scan
@@ -1,12 +0,0 @@
parsers:
  - crowdsecurity/sshd-logs
scenarios:
  - crowdsecurity/ssh-bf
  - crowdsecurity/ssh-slow-bf
description: "sshd support : parser and brute-force detection"
author: crowdsecurity
tags:
  - linux
  - ssh
  - bruteforce
@@ -1,48 +0,0 @@
#If it's syslog, we are going to extract progname from it
filter: "evt.Line.Labels.type == 'syslog'"
onsuccess: next_stage
pattern_syntax:
  RAW_SYSLOG_PREFIX: '^<%{NUMBER:stuff1}>%{NUMBER:stuff2} %{SYSLOGBASE2} %{DATA:program} %{NUMBER:pid}'
  RAW_SYSLOG_META: '\[meta sequenceId="%{NOTDQUOTE:seq_id}"\]'
name: crowdsecurity/syslog-logs
nodes:
  - grok:
      #this is a named regular expression. grok patterns can be kept in separate files for readability
      pattern: "^%{SYSLOGLINE}"
      #This is the field of the `Event` to which the regexp should be applied
      apply_on: Line.Raw
  - grok:
      #a second pattern for unparsed syslog lines, as seen in opnsense
      pattern: '%{RAW_SYSLOG_PREFIX} - %{RAW_SYSLOG_META} %{GREEDYDATA:message}'
      apply_on: Line.Raw
#if the node was successful, statics will be applied.
statics:
  - meta: machine
    expression: evt.Parsed.logsource
  - parsed: "logsource"
    value: "syslog"
  # syslog date can be in two different fields (one of the assignments will fail)
  - target: evt.StrTime
    expression: evt.Parsed.timestamp
  - target: evt.StrTime
    expression: evt.Parsed.timestamp8601
  - meta: datasource_path
    expression: evt.Line.Src
  - meta: datasource_type
    expression: evt.Line.Module
---
#if it's not syslog, the type is the progname
filter: "evt.Line.Labels.type != 'syslog'"
onsuccess: next_stage
name: crowdsecurity/non-syslog
#debug: true
statics:
  - parsed: message
    expression: evt.Line.Raw
  - parsed: program
    expression: evt.Line.Labels.type
  - meta: datasource_path
    expression: evt.Line.Src
  - meta: datasource_type
    expression: evt.Line.Module
@@ -1,93 +0,0 @@
#Apache access/error logs
#debug: true
filter: "evt.Parsed.program startsWith 'apache2'"
onsuccess: next_stage
name: crowdsecurity/apache2-logs
description: "Parse Apache2 access and error logs"
#log line can be prefixed by a target_fqdn
nodes:
  - grok:
      pattern: '(%{IPORHOST:target_fqdn}(:%{INT:port})? )?%{COMMONAPACHELOG}( "%{NOTDQUOTE:referrer}" "%{NOTDQUOTE:http_user_agent}")?'
      apply_on: message
    # these ones apply for both grok patterns
    statics:
      - meta: log_type
        value: http_access-log
      - target: evt.StrTime
        expression: evt.Parsed.timestamp
      - meta: service
        value: http
      - meta: source_ip
        expression: evt.Parsed.clientip
      - meta: http_status
        expression: evt.Parsed.response
      - meta: http_path
        expression: evt.Parsed.request
      - meta: http_verb
        expression: "evt.Parsed.verb"
      - meta: http_user_agent
        expression: "evt.Parsed.http_user_agent"
      - meta: target_fqdn
        expression: "evt.Parsed.target_fqdn"
    onsuccess: next_stage
  - grok:
      pattern: '%{HTTPD_ERRORLOG}'
      apply_on: message
    onsuccess: next_stage
    pattern_syntax:
      NOT_DOUBLE_POINT: '[^:]+'
      NOT_DOUBLE_QUOTE: '[^"]+'
    nodes:
      - filter: "evt.Parsed.module == 'auth_basic'"
        onsuccess: next_stage
        pattern_syntax:
          EXTRACT_USER_AND_PATH: 'user %{NOT_DOUBLE_POINT:username}: authentication failure for "%{NOT_DOUBLE_QUOTE:target_uri}": Password Mismatch'
          EXTRACT_USER_AND_PATH2: 'user %{NOT_DOUBLE_POINT:username} not found: "?%{NOT_DOUBLE_QUOTE:target_uri}"?'
        grok:
          pattern: '%{EXTRACT_USER_AND_PATH}|%{EXTRACT_USER_AND_PATH2}'
          apply_on: message
        # these ones apply for both grok patterns
        statics:
          - meta: username
            expression: evt.Parsed.username
          - meta: http_path
            expression: evt.Parsed.target_uri
          - meta: sub_type
            value: "auth_fail"
      - filter: "evt.Parsed.module == 'core' && evt.Parsed.message contains 'Invalid URI'"
        onsuccess: next_stage
        pattern_syntax:
          EXTRACT_URIVERB: 'Invalid URI in request %{WORD:verb} %{NOTSPACE:request}(?: HTTP/%{NUMBER:httpversion})'
        grok:
          pattern: '%{EXTRACT_URIVERB}'
          apply_on: message
        statics:
          - meta: http_path
            expression: evt.Parsed.request
          - meta: sub_type
            value: "invalid_uri"
      - filter: "evt.Parsed.module == 'authz_core' && evt.Parsed.message contains 'client denied'"
        onsuccess: next_stage
        pattern_syntax:
          EXTRACT_PATH: 'client denied by server configuration: %{GREEDYDATA:target_uri}'
        grok:
          pattern: '%{EXTRACT_PATH}'
          apply_on: message
        statics:
          - meta: http_path
            expression: evt.Parsed.target_uri
          - meta: sub_type
            value: "permission_denied"
    statics:
      - meta: log_type
        value: http_error-log
      - target: evt.StrTime
        expression: evt.Parsed.timestamp
      - meta: service
        value: http
      - meta: source_ip
        expression: evt.Parsed.client
      - meta: http_status
        expression: evt.Parsed.response
@@ -1,22 +0,0 @@
onsuccess: next_stage
name: crowdsecurity/mysql-logs
description: "Parse MySQL logs"
filter: "evt.Parsed.program == 'mysql'"
pattern_syntax:
  MYSQL_ACCESS_DENIED: "Access denied for user '%{DATA:user}'@'%{IP:source_ip}' \\(using password: %{WORD:using_password}\\)"
nodes:
  - grok:
      pattern: "%{TIMESTAMP_ISO8601:time} %{NUMBER} \\[Note\\]( \\[%{DATA:err_code}\\] \\[%{DATA:subsystem}\\])? %{MYSQL_ACCESS_DENIED}"
      apply_on: message
  - grok:
      pattern: "%{TIMESTAMP_ISO8601:time}.*%{NUMBER} Connect.*%{MYSQL_ACCESS_DENIED}"
      apply_on: message
statics:
  - meta: log_type
    value: mysql_failed_auth
  - meta: source_ip
    expression: "evt.Parsed.source_ip"
  - target: evt.StrTime
    expression: evt.Parsed.time
  - meta: user
    expression: "evt.Parsed.user"
@@ -1,70 +0,0 @@
filter: "evt.Parsed.program startsWith 'nginx'"
onsuccess: next_stage
name: crowdsecurity/nginx-logs
description: "Parse nginx access and error logs"
pattern_syntax:
  NGCUSTOMUSER: '[a-zA-Z0-9\.\@\-\+_%]+'
nodes:
  - grok:
      pattern: '(%{IPORHOST:target_fqdn} )?%{IPORHOST:remote_addr} - %{NGCUSTOMUSER:remote_user}? \[%{HTTPDATE:time_local}\] "%{WORD:verb} %{DATA:request} HTTP/%{NUMBER:http_version}" %{NUMBER:status} %{NUMBER:body_bytes_sent} "%{NOTDQUOTE:http_referer}" "%{NOTDQUOTE:http_user_agent}"( %{NUMBER:request_length} %{NUMBER:request_time} \[%{DATA:proxy_upstream_name}\] \[%{DATA:proxy_alternative_upstream_name}\])?'
      apply_on: message
    statics:
      - meta: log_type
        value: http_access-log
      - target: evt.StrTime
        expression: evt.Parsed.time_local
  - grok:
      # and this one the error log
      pattern: '(%{IPORHOST:target_fqdn} )?%{NGINXERRTIME:time} \[%{LOGLEVEL:loglevel}\] %{NONNEGINT:pid}#%{NONNEGINT:tid}: (\*%{NONNEGINT:cid} )?%{GREEDYDATA:message}, client: %{IPORHOST:remote_addr}, server: %{DATA:target_fqdn}, request: "%{WORD:verb} ([^/]+)?%{URIPATHPARAM:request}( HTTP/%{NUMBER:http_version})?", host: "%{IPORHOST}(:%{NONNEGINT})?"'
      apply_on: message
    statics:
      - meta: log_type
        value: http_error-log
      - target: evt.StrTime
        expression: evt.Parsed.time
    pattern_syntax:
      NO_DOUBLE_QUOTE: '[^"]+'
    onsuccess: next_stage
    nodes:
      - filter: "evt.Parsed.message contains 'was not found in'"
        pattern_syntax:
          USER_NOT_FOUND: 'user "%{NO_DOUBLE_QUOTE:username}" was not found in "%{NO_DOUBLE_QUOTE}"'
        grok:
          pattern: '%{USER_NOT_FOUND}'
          apply_on: message
        statics:
          - meta: sub_type
            value: "auth_fail"
          - meta: username
            expression: evt.Parsed.username
      - filter: "evt.Parsed.message contains 'password mismatch'"
        pattern_syntax:
          PASSWORD_MISMATCH: 'user "%{NO_DOUBLE_QUOTE:username}": password mismatch'
        grok:
          pattern: '%{PASSWORD_MISMATCH}'
          apply_on: message
        statics:
          - meta: sub_type
            value: "auth_fail"
          - meta: username
            expression: evt.Parsed.username
      - filter: "evt.Parsed.message contains 'limiting requests, excess'"
        statics:
          - meta: sub_type
            value: "req_limit_exceeded"
# these ones apply for both grok patterns
statics:
  - meta: service
    value: http
  - meta: source_ip
    expression: "evt.Parsed.remote_addr"
  - meta: http_status
    expression: "evt.Parsed.status"
  - meta: http_path
    expression: "evt.Parsed.request"
  - meta: http_verb
    expression: "evt.Parsed.verb"
  - meta: http_user_agent
    expression: "evt.Parsed.http_user_agent"
  - meta: target_fqdn
    expression: "evt.Parsed.target_fqdn"
@@ -1,100 +0,0 @@
onsuccess: next_stage
#debug: true
filter: "evt.Parsed.program == 'sshd'"
name: crowdsecurity/sshd-logs
description: "Parse openSSH logs"
pattern_syntax:
  # The IP grok pattern that ships with crowdsec is buggy and does not capture the last digit of an IP if it is the last thing it matches, and the last octet starts with a 2
  # https://github.com/crowdsecurity/crowdsec/issues/938
  IPv4_WORKAROUND: (?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)
  IP_WORKAROUND: (?:%{IPV6}|%{IPv4_WORKAROUND})
  SSHD_AUTH_FAIL: 'pam_%{DATA:pam_type}\(sshd:auth\): authentication failure; logname= uid=%{NUMBER:uid}? euid=%{NUMBER:euid}? tty=ssh ruser= rhost=%{IP_WORKAROUND:sshd_client_ip}( %{SPACE}user=%{USERNAME:sshd_invalid_user})?'
  SSHD_MAGIC_VALUE_FAILED: 'Magic value check failed \(\d+\) on obfuscated handshake from %{IP_WORKAROUND:sshd_client_ip} port \d+'
  SSHD_INVALID_USER: 'Invalid user\s*%{USERNAME:sshd_invalid_user}? from %{IP_WORKAROUND:sshd_client_ip}( port \d+)?'
  SSHD_INVALID_BANNER: 'banner exchange: Connection from %{IP_WORKAROUND:sshd_client_ip} port \d+: invalid format'
  SSHD_PREAUTH_AUTHENTICATING_USER: 'Connection closed by (authenticating|invalid) user %{USERNAME:sshd_invalid_user} %{IP_WORKAROUND:sshd_client_ip} port \d+ \[preauth\]'
  #following: https://github.com/crowdsecurity/crowdsec/issues/1201 - some scanners behave differently and trigger this one
  SSHD_PREAUTH_AUTHENTICATING_USER_ALT: 'Disconnected from (authenticating|invalid) user %{USERNAME:sshd_invalid_user} %{IP_WORKAROUND:sshd_client_ip} port \d+ \[preauth\]'
  SSHD_BAD_KEY_NEGOTIATION: 'Unable to negotiate with %{IP_WORKAROUND:sshd_client_ip} port \d+: no matching (host key type|key exchange method|MAC) found.'
nodes:
  - grok:
      name: "SSHD_FAIL"
      apply_on: message
      statics:
        - meta: log_type
          value: ssh_failed-auth
        - meta: target_user
          expression: "evt.Parsed.sshd_invalid_user"
  - grok:
      name: "SSHD_PREAUTH_AUTHENTICATING_USER_ALT"
      apply_on: message
      statics:
        - meta: log_type
          value: ssh_failed-auth
        - meta: target_user
          expression: "evt.Parsed.sshd_invalid_user"
  - grok:
      name: "SSHD_PREAUTH_AUTHENTICATING_USER"
      apply_on: message
      statics:
        - meta: log_type
          value: ssh_failed-auth
        - meta: target_user
          expression: "evt.Parsed.sshd_invalid_user"
  - grok:
      name: "SSHD_DISC_PREAUTH"
      apply_on: message
  - grok:
      name: "SSHD_BAD_VERSION"
      apply_on: message
  - grok:
      name: "SSHD_INVALID_USER"
      apply_on: message
      statics:
        - meta: log_type
          value: ssh_failed-auth
        - meta: target_user
          expression: "evt.Parsed.sshd_invalid_user"
  - grok:
      name: "SSHD_INVALID_BANNER"
      apply_on: message
      statics:
        - meta: log_type
          value: ssh_failed-auth
        - meta: extra_log_type
          value: ssh_bad_banner
  - grok:
      name: "SSHD_USER_FAIL"
      apply_on: message
      statics:
        - meta: log_type
          value: ssh_failed-auth
        - meta: target_user
          expression: "evt.Parsed.sshd_invalid_user"
  - grok:
      name: "SSHD_AUTH_FAIL"
      apply_on: message
      statics:
        - meta: log_type
          value: ssh_failed-auth
        - meta: target_user
          expression: "evt.Parsed.sshd_invalid_user"
  - grok:
      name: "SSHD_MAGIC_VALUE_FAILED"
      apply_on: message
      statics:
        - meta: log_type
          value: ssh_failed-auth
        - meta: target_user
          expression: "evt.Parsed.sshd_invalid_user"
  - grok:
      name: "SSHD_BAD_KEY_NEGOTIATION"
      apply_on: message
      statics:
        - meta: log_type
          value: ssh_bad_keyexchange
statics:
  - meta: service
    value: ssh
  - meta: source_ip
    expression: "evt.Parsed.sshd_client_ip"
@@ -1,11 +0,0 @@
filter: "evt.StrTime != ''"
name: crowdsecurity/dateparse-enrich
#debug: true
#it's a hack lol
statics:
  - method: ParseDate
    expression: evt.StrTime
  - target: MarshaledTime
    expression: evt.Enriched.MarshaledTime
  - meta: timestamp
    expression: evt.Enriched.MarshaledTime
@@ -1,27 +0,0 @@
filter: "'source_ip' in evt.Meta"
name: crowdsecurity/geoip-enrich
description: "Populate event with geoloc info : as, country, coords, source range."
data:
  - source_url: https://crowdsec-statics-assets.s3-eu-west-1.amazonaws.com/GeoLite2-City.mmdb
    dest_file: GeoLite2-City.mmdb
  - source_url: https://crowdsec-statics-assets.s3-eu-west-1.amazonaws.com/GeoLite2-ASN.mmdb
    dest_file: GeoLite2-ASN.mmdb
statics:
  - method: GeoIpCity
    expression: evt.Meta.source_ip
  - meta: IsoCode
    expression: evt.Enriched.IsoCode
  - meta: IsInEU
    expression: evt.Enriched.IsInEU
  - meta: GeoCoords
    expression: evt.Enriched.GeoCoords
  - method: GeoIpASN
    expression: evt.Meta.source_ip
  - meta: ASNNumber
    expression: evt.Enriched.ASNNumber
  - meta: ASNOrg
    expression: evt.Enriched.ASNOrg
  - method: IpToRange
    expression: evt.Meta.source_ip
  - meta: SourceRange
    expression: evt.Enriched.SourceRange
@@ -1,33 +0,0 @@
filter: "evt.Meta.service == 'http' && evt.Meta.log_type in ['http_access-log', 'http_error-log']"
description: "Parse HTTP logs more specifically: HTTP code, HTTP path, HTTP args and whether it's a static resource"
name: crowdsecurity/http-logs
pattern_syntax:
  DIR: "^.*/"
  FILE: "[^/].*?"
  EXT: "\\.[^.]*$|$"
nodes:
  - statics:
      - parsed: "impact_completion"
        # the value of a field can as well be determined as the result of an expression
        expression: "evt.Meta.http_status in ['404', '403', '502'] ? 'false' : 'true'"
      - target: evt.Parsed.static_ressource
        value: 'false'
  # let's split the path?query if possible
  - grok:
      pattern: "^%{GREEDYDATA:request}\\?%{GREEDYDATA:http_args}$"
      apply_on: request
  # this is another node, with its own pattern_syntax
  - #debug: true
    grok:
      pattern: "%{DIR:file_dir}(%{FILE:file_frag}%{EXT:file_ext})?"
      apply_on: request
statics:
  - meta: http_path
    expression: "evt.Parsed.http_path"
  # meta af
  - meta: http_args_len
    expression: "len(evt.Parsed.http_args)"
  - parsed: file_name
    expression: evt.Parsed.file_frag + evt.Parsed.file_ext
  - parsed: static_ressource
    expression: "Upper(evt.Parsed.file_ext) in ['.JPG', '.CSS', '.JS', '.JPEG', '.PNG', '.SVG', '.MAP', '.ICO', '.OTF', '.GIF', '.MP3', '.MP4', '.WOFF', '.WOFF2', '.TTF', '.OTF', '.EOT', '.WEBP', '.WAV', '.GZ', '.BROTLI', '.BVR', '.TS', '.BMP', '.AVIF'] ? 'true' : 'false'"
@@ -1,14 +0,0 @@
name: crowdsecurity/whitelists
description: "Whitelist events from private ipv4 addresses"
whitelist:
  reason: "private ipv4/ipv6 ip/ranges"
  ip:
    - "127.0.0.1"
    - "::1"
  cidr:
    - "192.168.0.0/16"
    - "10.0.0.0/8"
    - "172.16.0.0/12"
  # expression:
  #   - "'foo.com' in evt.Meta.source_ip.reverse"
@@ -1,20 +0,0 @@
type: trigger
format: 2.0
name: crowdsecurity/CVE-2019-18935
description: "Detect Telerik CVE-2019-18935 exploitation attempts"
filter: |
  evt.Meta.log_type in ['http_access-log', 'http_error-log'] && Upper(QueryUnescape(evt.Meta.http_path)) startsWith Upper('/Telerik.Web.UI.WebResource.axd?type=rau')
groupby: "evt.Meta.source_ip"
blackhole: 2m
labels:
  type: exploit
  remediation: true
  classification:
    - attack.T1595
    - attack.T1190
    - cve.CVE-2019-18935
  spoofable: 0
  confidence: 3
  behavior: "http:exploit"
  label: "Telerik CVE-2019-18935"
  service: telerik
@@ -1,19 +0,0 @@
type: trigger
#debug: true
name: crowdsecurity/CVE-2022-26134
description: "Detect CVE-2022-26134 exploits"
filter: "Upper(PathUnescape(evt.Meta.http_path)) contains Upper('@java.lang.Runtime@getRuntime().exec(')"
blackhole: 1m
groupby: "evt.Meta.source_ip"
labels:
  type: exploit
  remediation: true
  classification:
    - attack.T1595
    - attack.T1190
    - cve.CVE-2022-26134
  spoofable: 0
  confidence: 3
  behavior: "http:exploit"
  service: atlassian-confluence
  label: "Confluence CVE-2022-26134"
@@ -1,19 +0,0 @@
type: trigger
#debug: true
name: crowdsecurity/CVE-2022-35914
description: "Detect CVE-2022-35914 exploits"
filter: "Upper(evt.Meta.http_path) contains Upper('/vendor/htmlawed/htmlawed/htmLawedTest.php')"
blackhole: 1m
groupby: "evt.Meta.source_ip"
labels:
  type: exploit
  remediation: true
  classification:
    - attack.T1595
    - attack.T1190
    - cve.CVE-2022-35914
  spoofable: 0
  confidence: 3
  behavior: "http:exploit"
  service: glpi
  label: "GLPI CVE-2022-35914"
@@ -1,26 +0,0 @@
type: trigger
#debug: true
name: crowdsecurity/CVE-2022-37042
description: "Detect CVE-2022-37042 exploits"
filter: |
  (
  Upper(evt.Meta.http_path) contains Upper('/service/extension/backup/mboximport?account-name=admin&ow=2&no-switch=1&append=1') ||
  Upper(evt.Meta.http_path) contains Upper('/service/extension/backup/mboximport?account-name=admin&account-status=1&ow=cmd')
  )
  and evt.Meta.http_status startsWith ('40') and
  Upper(evt.Meta.http_verb) == 'POST'
blackhole: 2m
groupby: "evt.Meta.source_ip"
labels:
  type: exploit
  remediation: true
  classification:
    - attack.T1595
    - attack.T1190
    - cve.CVE-2022-37042
  spoofable: 0
  confidence: 3
  behavior: "http:exploit"
  label: "ZCS CVE-2022-37042"
  service: zimbra
@@ -1,19 +0,0 @@
type: trigger
name: crowdsecurity/fortinet-cve-2022-40684
description: "Detect cve-2022-40684 exploitation attempts"
filter: |
  evt.Meta.log_type in ["http_access-log", "http_error-log"] and
  Upper(evt.Meta.http_path) startsWith Upper('/api/v2/cmdb/system/admin/') and Lower(evt.Parsed.http_user_agent) == 'report runner'
groupby: "evt.Meta.source_ip"
blackhole: 2m
labels:
  type: exploit
  remediation: true
  classification:
    - attack.T1548
    - cve.CVE-2022-40684
  spoofable: 0
  confidence: 3
  behavior: "http:exploit"
  label: "Fortinet CVE-2022-40684"
  service: fortinet
@@ -1,22 +0,0 @@
type: trigger
#debug: true
name: crowdsecurity/CVE-2022-41082
description: "Detect CVE-2022-41082 exploits"
filter: |
  Upper(evt.Meta.http_path) contains Upper('/autodiscover/autodiscover.json') &&
  Upper(evt.Parsed.http_args) contains Upper('powershell')
blackhole: 1m
groupby: "evt.Meta.source_ip"
labels:
  type: exploit
  remediation: true
  classification:
    - attack.T1595
    - attack.T1190
    - cve.CVE-2022-41082
  spoofable: 0
  confidence: 3
  behavior: "http:exploit"
  service: exchange
  label: "Microsoft Exchange CVE-2022-41082"
@@ -1,22 +0,0 @@
type: leaky
name: crowdsecurity/CVE-2022-41697
description: "Detect CVE-2022-41697 enumeration"
filter: |
  Upper(evt.Meta.http_path) contains Upper('/ghost/api/admin/session') &&
  Upper(evt.Parsed.verb) == 'POST' &&
  evt.Meta.http_status == '404'
leakspeed: "10s"
capacity: 5
blackhole: 1m
groupby: "evt.Meta.source_ip"
labels:
  type: exploit
  remediation: true
  classification:
    - attack.T1589
    - cve.CVE-2022-41697
  spoofable: 0
  confidence: 3
  behavior: "http:exploit"
  label: "Ghost CVE-2022-41697"
  service: ghost
@@ -1,26 +0,0 @@
type: trigger
#debug: true
name: crowdsecurity/CVE-2022-42889
description: "Detect CVE-2022-42889 exploits (Text4Shell)"
filter: |
  Upper(PathUnescape(evt.Meta.http_path)) contains Upper('${script:javascript:java.lang.Runtime.getRuntime().exec(')
  or
  Upper(PathUnescape(evt.Meta.http_path)) contains Upper('${script:js:java.lang.Runtime.getRuntime().exec(')
  or
  Upper(PathUnescape(evt.Meta.http_path)) contains Upper('${url:UTF-8:')
  or
  Upper(PathUnescape(evt.Meta.http_path)) contains Upper('${dns:address|')
blackhole: 1m
groupby: "evt.Meta.source_ip"
labels:
  type: exploit
  remediation: true
  classification:
    - attack.T1595
    - attack.T1190
    - cve.CVE-2022-42889
  spoofable: 0
  confidence: 3
  behavior: "http:exploit"
  label: "Text4Shell CVE-2022-42889"
  service: apache
@@ -1,24 +0,0 @@
type: trigger
#debug: true
name: crowdsecurity/CVE-2022-44877
description: "Detect CVE-2022-44877 exploits"
filter: |
  Lower(evt.Meta.http_path) contains '/index.php' &&
  Upper(evt.Parsed.verb) == 'POST' &&
  evt.Meta.http_status == '302' &&
  Lower(evt.Parsed.http_args) matches 'login=.*[$|%24][\\(|%28].*[\\)|%29]'
blackhole: 1m
groupby: "evt.Meta.source_ip"
labels:
  type: exploit
  remediation: true
  classification:
    - attack.T1595
    - attack.T1190
    - cve.CVE-2022-44877
  spoofable: 0
  confidence: 3
  behavior: "http:exploit"
  label: "Centos Webpanel CVE-2022-44877"
  service: centos
@@ -1,46 +0,0 @@
type: leaky
name: crowdsecurity/CVE-2022-46169-bf
description: "Detect CVE-2022-46169 brute forcing"
filter: |
  Upper(evt.Meta.http_path) contains Upper('/remote_agent.php') &&
  Upper(evt.Parsed.verb) == 'GET' &&
  Lower(evt.Parsed.http_args) contains 'host_id' &&
  Lower(evt.Parsed.http_args) contains 'local_data_ids'
leakspeed: "10s"
capacity: 5
blackhole: 1m
groupby: "evt.Meta.source_ip"
labels:
  type: exploit
  remediation: true
  classification:
    - attack.T1592
    - cve.CVE-2022-46169
  spoofable: 0
  confidence: 3
  behavior: "http:bruteforce"
  label: "Cacti CVE-2022-46169"
  service: cacti
---
type: trigger
name: crowdsecurity/CVE-2022-46169-cmd
description: "Detect CVE-2022-46169 cmd injection"
filter: |
  Upper(evt.Meta.http_path) contains Upper('/remote_agent.php') &&
  Upper(evt.Parsed.verb) == 'GET' &&
  Lower(evt.Parsed.http_args) contains 'action=polldata' &&
  Lower(evt.Parsed.http_args) matches 'poller_id=.*(;|%3b)'
blackhole: 1m
groupby: "evt.Meta.source_ip"
labels:
  type: exploit
  remediation: true
  classification:
    - attack.T1595
    - attack.T1190
    - cve.CVE-2022-46169
  spoofable: 0
  confidence: 3
  behavior: "http:exploit"
  label: "Cacti CVE-2022-46169"
  service: cacti
@@ -1,22 +0,0 @@
## CVE-2023-22515
type: trigger
name: crowdsecurity/CVE-2023-22515
description: "Detect CVE-2023-22515 exploitation"
filter: |
  Lower(evt.Parsed.file_ext) == '.action' &&
  (Lower(evt.Parsed.file_dir) contains '/setup' || Lower(evt.Parsed.file_frag) == 'server-info') &&
  evt.Parsed.file_frag != nil
blackhole: 1m
groupby: "evt.Meta.source_ip"
labels:
  type: exploit
  remediation: true
  classification:
    - attack.T1595
    - attack.T1190
    - cve.CVE-2023-22515
  spoofable: 0
  confidence: 1
  behavior: "http:exploit"
  label: "Confluence CVE-2023-22515"
  service: confluence
@@ -1,21 +0,0 @@
type: trigger
#debug: true
name: crowdsecurity/CVE-2023-22518
description: "Detect CVE-2023-22518 exploits"
filter: |
  Upper(evt.Meta.http_path) contains Upper('/json/setup-restore.action') &&
  Upper(evt.Parsed.verb) == 'POST'
blackhole: 1m
groupby: "evt.Meta.source_ip"
labels:
  type: exploit
  remediation: true
  classification:
    - attack.T1595
    - attack.T1190
    - cve.CVE-2023-22518
  spoofable: 0
  confidence: 1
  behavior: "http:exploit"
  label: "Atlassian Confluence Server CVE-2023-22518"
  service: Atlassian Confluence
@@ -1,31 +0,0 @@
type: trigger
format: 2.0
#debug: true
name: crowdsecurity/apache_log4j2_cve-2021-44228
description: "Detect cve-2021-44228 exploitation attempts"
filter: |
  evt.Meta.log_type in ["http_access-log", "http_error-log"] and
  (
  any(File("log4j2_cve_2021_44228.txt"), { Upper(evt.Meta.http_path) contains Upper(#)})
  or
  any(File("log4j2_cve_2021_44228.txt"), { Upper(evt.Parsed.http_user_agent) contains Upper(#)})
  or
  any(File("log4j2_cve_2021_44228.txt"), { Upper(evt.Parsed.http_referer) contains Upper(#)})
  )
data:
  - source_url: https://raw.githubusercontent.com/crowdsecurity/sec-lists/master/web/log4j2_cve_2021_44228.txt
    dest_file: log4j2_cve_2021_44228.txt
    type: string
groupby: "evt.Meta.source_ip"
blackhole: 2m
labels:
  service: apache
  confidence: 3
  spoofable: 0
  classification:
    - attack.T1595
    - attack.T1190
    - cve.CVE-2021-44228
  behavior: "http:exploit"
  label: "Log4j CVE-2021-44228"
  remediation: true
@@ -1,24 +0,0 @@
type: trigger
format: 2.0
name: crowdsecurity/f5-big-ip-cve-2020-5902
description: "Detect cve-2020-5902 exploitation attempts"
filter: |
  evt.Meta.log_type in ["http_access-log", "http_error-log"] and
  (
  Upper(evt.Meta.http_path) matches Upper('/tmui/login.jsp/..;/tmui/[^.]+.jsp\\?(fileName|command|directoryPath|tabId)=')
  or
  Upper(evt.Meta.http_path) matches Upper('/tmui/login.jsp/%2E%2E;/tmui/[^.]+.jsp\\?(fileName|command|directoryPath|tabId)=')
  )
groupby: "evt.Meta.source_ip"
blackhole: 2m
labels:
  confidence: 3
  spoofable: 0
  classification:
    - attack.T1190
    - attack.T1595
    - cve.CVE-2020-5902
  behavior: "http:exploit"
  label: "CVE-2020-5902"
  remediation: true
  service: f5
@@ -1,20 +0,0 @@
type: trigger
format: 2.0
name: crowdsecurity/fortinet-cve-2018-13379
description: "Detect cve-2018-13379 exploitation attempts"
filter: |
  evt.Meta.log_type in ["http_access-log", "http_error-log"] and
  Upper(evt.Meta.http_path) contains Upper('/remote/fgt_lang?lang=/../../../..//////////dev/cmdb/sslvpn_websession')
groupby: "evt.Meta.source_ip"
blackhole: 2m
labels:
  confidence: 3
  spoofable: 0
  classification:
    - attack.T1190
    - attack.T1595
    - cve.CVE-2018-13379
  behavior: "http:exploit"
  label: "CVE-2018-13379"
  remediation: true
  service: fortinet
@@ -1,22 +0,0 @@
type: trigger
format: 2.0
name: crowdsecurity/grafana-cve-2021-43798
description: "Detect cve-2021-43798 exploitation attempts"
filter: |
  evt.Meta.log_type in ["http_access-log", "http_error-log"] and
  (Upper(evt.Meta.http_path) matches '/PUBLIC/PLUGINS/[^/]+/../[./]+/'
  or
  Upper(evt.Meta.http_path) matches '/PUBLIC/PLUGINS/[^/]+/%2E%2E/[%2E/]+/')
groupby: "evt.Meta.source_ip"
blackhole: 2m
labels:
  service: grafana
  confidence: 3
  spoofable: 0
  classification:
    - attack.T1190
    - attack.T1595
    - cve.CVE-2021-43798
  behavior: "http:exploit"
  label: "CVE-2021-43798"
  remediation: true
@@ -1,23 +0,0 @@
type: leaky
#debug: true
name: crowdsecurity/http-backdoors-attempts
description: "Detect attempts to access common backdoors"
filter: 'evt.Meta.log_type in ["http_access-log", "http_error-log"] and any(File("backdoors.txt"), { evt.Parsed.file_name == #})'
groupby: "evt.Meta.source_ip"
distinct: evt.Parsed.file_name
data:
  - source_url: https://raw.githubusercontent.com/crowdsecurity/sec-lists/master/web/backdoors.txt
    dest_file: backdoors.txt
    type: string
capacity: 1
leakspeed: 5s
blackhole: 5m
labels:
  confidence: 3
  spoofable: 0
  classification:
    - attack.T1595
  behavior: "http:exploit"
  label: "scanning for backdoors"
  service: http
  remediation: true
@@ -1,26 +0,0 @@
type: leaky
format: 2.0
#debug: true
name: crowdsecurity/http-bad-user-agent
description: "Detect bad user-agents"
filter: 'evt.Meta.log_type in ["http_access-log", "http_error-log"] && RegexpInFile(evt.Parsed.http_user_agent, "bad_user_agents.regex.txt")'
data:
  - source_url: https://raw.githubusercontent.com/crowdsecurity/sec-lists/master/web/bad_user_agents.regex.txt
    dest_file: bad_user_agents.regex.txt
    type: regexp
    strategy: LRU
    size: 40
    ttl: 10s
capacity: 1
leakspeed: 1m
groupby: "evt.Meta.source_ip"
blackhole: 2m
labels:
  confidence: 1
  spoofable: 0
  classification:
    - attack.T1595
  behavior: "http:scan"
  label: "detection of bad user-agents"
  service: http
  remediation: true
@@ -1,21 +0,0 @@
type: leaky
name: crowdsecurity/http-crawl-non_statics
description: "Detect aggressive crawl from single ip"
filter: "evt.Meta.log_type in ['http_access-log', 'http_error-log'] && evt.Parsed.static_ressource == 'false' && evt.Parsed.verb in ['GET', 'HEAD']"
distinct: "evt.Parsed.file_name"
leakspeed: 0.5s
capacity: 40
#debug: true
#this limits the memory cache (and event_sequences in output) to five events
cache_size: 5
groupby: "evt.Meta.source_ip + '/' + evt.Parsed.target_fqdn"
blackhole: 1m
labels:
  confidence: 1
  spoofable: 0
  classification:
    - attack.T1595
  behavior: "http:crawl"
  service: http
  label: "detection of aggressive crawl"
  remediation: true
@@ -1,23 +0,0 @@
type: trigger
format: 2.0
#debug: true
name: crowdsecurity/http-cve-2021-41773
description: "cve-2021-41773"
filter: |
  evt.Meta.log_type in ["http_access-log", "http_error-log"] and
  (Upper(evt.Meta.http_path) contains "/.%2E/.%2E/"
  or
  Upper(evt.Meta.http_path) contains "/%2E%2E/%2E%2E")
groupby: "evt.Meta.source_ip"
blackhole: 2m
labels:
  confidence: 3
  spoofable: 0
  classification:
    - attack.T1190
    - attack.T1595
    - cve.CVE-2021-41773
  behavior: "http:exploit"
  label: "CVE-2021-41773"
  service: apache
  remediation: true
@@ -1,22 +0,0 @@
type: trigger
format: 2.0
#debug: true
#this is getting funny, it's the third patch on top of cve-2021-41773
name: crowdsecurity/http-cve-2021-42013
description: "cve-2021-42013"
filter: |
  evt.Meta.log_type in ["http_access-log", "http_error-log"] and
  Upper(evt.Meta.http_path) contains "/%%32%65%%32%65/"
groupby: "evt.Meta.source_ip"
blackhole: 2m
labels:
  service: apache
  confidence: 3
  spoofable: 0
  classification:
    - attack.T1190
    - attack.T1595
    - cve.CVE-2021-42013
  behavior: "http:exploit"
  label: "CVE-2021-42013"
  remediation: true
@@ -1,59 +0,0 @@
# 404 scan
type: leaky
#debug: true
name: crowdsecurity/http-generic-bf
description: "Detect generic http brute force"
filter: "evt.Meta.service == 'http' && evt.Meta.sub_type == 'auth_fail'"
groupby: evt.Meta.source_ip
capacity: 5
leakspeed: "10s"
blackhole: 1m
labels:
  confidence: 3
  spoofable: 0
  classification:
    - attack.T1110
  behavior: "http:bruteforce"
  label: "http bruteforce"
  service: http
  remediation: true
---
# Generic 401 Authorization Errors
type: leaky
#debug: true
name: LePresidente/http-generic-401-bf
description: "Detect generic 401 Authorization error brute force"
filter: "evt.Meta.log_type == 'http_access-log' && evt.Parsed.verb == 'POST' && evt.Meta.http_status == '401'"
groupby: evt.Meta.source_ip
capacity: 5
leakspeed: "10s"
blackhole: 1m
labels:
  confidence: 3
  spoofable: 0
  classification:
    - attack.T1110
  behavior: "http:bruteforce"
  label: "http bruteforce"
  service: http
  remediation: true
---
# Generic 403 Forbidden (Authorization) Errors
type: leaky
#debug: true
name: LePresidente/http-generic-403-bf
description: "Detect generic 403 Forbidden (Authorization) error brute force"
filter: "evt.Meta.log_type == 'http_access-log' && evt.Parsed.verb == 'POST' && evt.Meta.http_status == '403'"
groupby: evt.Meta.source_ip
capacity: 5
leakspeed: "10s"
blackhole: 1m
labels:
  confidence: 3
  spoofable: 0
  classification:
    - attack.T1110
  behavior: "http:bruteforce"
  label: "http bruteforce"
  service: http
  remediation: true
@@ -1,16 +0,0 @@
type: trigger
name: crowdsecurity/http-open-proxy
description: "Detect scan for open proxy"
#apache returns 405, nginx 400
filter: "evt.Meta.log_type == 'http_access-log' && evt.Meta.http_status in ['400','405'] && (evt.Parsed.verb == 'CONNECT' || evt.Parsed.request matches '^http[s]?://')"
blackhole: 2m
labels:
  service: http
  type: scan
  remediation: true
  classification:
    - attack.T1595
  behavior: "http:scan"
  label: "HTTP Open Proxy Probing"
  spoofable: 0
  confidence: 3
@@ -1,25 +0,0 @@
# path traversal probing
type: leaky
#debug: true
name: crowdsecurity/http-path-traversal-probing
description: "Detect path traversal attempt"
filter: "evt.Meta.log_type in ['http_access-log', 'http_error-log'] && any(File('http_path_traversal.txt'),{evt.Meta.http_path contains #})"
data:
  - source_url: https://raw.githubusercontent.com/crowdsecurity/sec-lists/master/web/path_traversal.txt
    dest_file: http_path_traversal.txt
    type: string
groupby: "evt.Meta.source_ip"
distinct: "evt.Meta.http_path"
capacity: 3
reprocess: true
leakspeed: 10s
blackhole: 2m
labels:
  remediation: true
  classification:
    - attack.T1595.002
  behavior: "http:exploit"
  label: "HTTP Path Traversal Exploit"
  service: http
  spoofable: 0
  confidence: 3
@@ -1,21 +0,0 @@
# 404 scan
type: leaky
#debug: true
name: crowdsecurity/http-probing
description: "Detect site scanning/probing from a single ip"
filter: "evt.Meta.service == 'http' && evt.Meta.http_status in ['404', '403', '400'] && evt.Parsed.static_ressource == 'false'"
groupby: "evt.Meta.source_ip + '/' + evt.Parsed.target_fqdn"
distinct: "evt.Meta.http_path"
capacity: 10
reprocess: true
leakspeed: "10s"
blackhole: 5m
labels:
  remediation: true
  classification:
    - attack.T1595.003
  behavior: "http:scan"
  label: "HTTP Probing"
  spoofable: 0
  service: http
  confidence: 1
@@ -1,24 +0,0 @@
type: leaky
format: 2.0
#debug: true
name: crowdsecurity/http-sensitive-files
description: "Detect attempts to access sensitive files (.log, .db, ...) or folders (.git)"
filter: 'evt.Meta.log_type in ["http_access-log", "http_error-log"] and any(File("sensitive_data.txt"), { evt.Parsed.request endsWith #})'
groupby: "evt.Meta.source_ip"
distinct: evt.Parsed.request
data:
  - source_url: https://raw.githubusercontent.com/crowdsecurity/sec-lists/master/web/sensitive_data.txt
    dest_file: sensitive_data.txt
    type: string
capacity: 4
leakspeed: 5s
blackhole: 5m
labels:
  remediation: true
  classification:
    - attack.T1595.003
  behavior: "http:scan"
  label: "Access to sensitive files over HTTP"
  spoofable: 0
  service: http
  confidence: 3
@@ -1,25 +0,0 @@
type: leaky
#requires at least 2.0 because it's using the 'data' section and the 'Upper' expr helper
format: 2.0
name: crowdsecurity/http-sqli-probbing-detection
data:
  - source_url: https://raw.githubusercontent.com/crowdsecurity/sec-lists/master/web/sqli_probe_patterns.txt
    dest_file: sqli_probe_patterns.txt
    type: string
description: "A scenario that detects SQL injection probing with minimal false positives"
filter: "evt.Meta.log_type in ['http_access-log', 'http_error-log'] && any(File('sqli_probe_patterns.txt'), {Upper(evt.Parsed.http_args) contains Upper(#)})"
groupby: evt.Meta.source_ip
capacity: 10
leakspeed: 1s
blackhole: 5m
#low false positives approach : we require distinct payloads to avoid false positives
distinct: evt.Parsed.http_args
labels:
  remediation: true
  classification:
    - attack.T1595.002
  behavior: "http:exploit"
  label: "SQL Injection Attempt"
  spoofable: 0
  service: http
  confidence: 3
@@ -1,25 +0,0 @@
type: leaky
#requires at least 2.0 because it's using the 'data' section and the 'Upper' expr helper
format: 2.0
name: crowdsecurity/http-xss-probbing
data:
  - source_url: https://raw.githubusercontent.com/crowdsecurity/sec-lists/master/web/xss_probe_patterns.txt
    dest_file: xss_probe_patterns.txt
    type: string
description: "A scenario that detects XSS probing with minimal false positives"
filter: "evt.Meta.log_type in ['http_access-log', 'http_error-log'] && any(File('xss_probe_patterns.txt'), {Upper(evt.Parsed.http_args) contains Upper(#)})"
groupby: evt.Meta.source_ip
capacity: 5
leakspeed: 1s
blackhole: 5m
#low false positives approach : we require distinct payloads to avoid false positives
distinct: evt.Parsed.http_args
labels:
  remediation: true
  classification:
    - attack.T1595.002
  behavior: "http:exploit"
  label: "XSS Attempt"
  spoofable: 0
  service: http
  confidence: 3
@@ -1,24 +0,0 @@
type: trigger
format: 2.0
#debug: true
name: crowdsecurity/jira_cve-2021-26086
description: "Detect Atlassian Jira CVE-2021-26086 exploitation attempts"
filter: |
  evt.Meta.log_type in ["http_access-log", "http_error-log"] and any(File("jira_cve_2021-26086.txt"), {Upper(evt.Meta.http_path) contains Upper(#)})
data:
  - source_url: https://raw.githubusercontent.com/crowdsecurity/sec-lists/master/web/jira_cve_2021-26086.txt
    dest_file: jira_cve_2021-26086.txt
    type: string
groupby: "evt.Meta.source_ip"
blackhole: 2m
labels:
  remediation: true
  classification:
    - attack.T1595.001
    - attack.T1190
    - cve.CVE-2021-26086
  behavior: "http:exploit"
  label: "Jira CVE-2021-26086 exploitation"
  spoofable: 0
  service: jira
  confidence: 3
@@ -1,19 +0,0 @@
# mysql bruteforce
type: leaky
#debug: true
name: crowdsecurity/mysql-bf
description: "Detect mysql bruteforce"
filter: evt.Meta.log_type == 'mysql_failed_auth'
leakspeed: "10s"
capacity: 5
groupby: evt.Meta.source_ip
blackhole: 5m
labels:
  remediation: true
  confidence: 3
  spoofable: 0
  classification:
    - attack.T1110
  behavior: "database:bruteforce"
  label: "MySQL Bruteforce"
  service: mysql
@@ -1,20 +0,0 @@
type: trigger
format: 2.0
name: crowdsecurity/netgear_rce
description: "Detect Netgear RCE DGN1000/DGN220 exploitation attempts"
filter: |
  evt.Meta.log_type in ['http_access-log', 'http_error-log'] && Lower(QueryUnescape(evt.Meta.http_path)) startsWith Lower('/setup.cgi?next_file=netgear.cfg&todo=syscmd&cmd=')
groupby: "evt.Meta.source_ip"
blackhole: 2m
references:
  - "https://www.exploit-db.com/exploits/25978"
labels:
  confidence: 3
  spoofable: 0
  classification:
    - attack.T1595
    - attack.T1190
  behavior: "http:exploit"
  label: "Netgear RCE"
  service: netgear
  remediation: true
@@ -1,18 +0,0 @@
type: leaky
#debug: true
name: crowdsecurity/nginx-req-limit-exceeded
description: "Detects IPs which violate nginx's user set request limit."
filter: evt.Meta.sub_type == 'req_limit_exceeded'
leakspeed: "60s"
capacity: 5
groupby: evt.Meta.source_ip
blackhole: 5m
labels:
  remediation: true
  confidence: 2
  spoofable: 2
  classification:
    - attack.T1498
  behavior: "http:dos"
  label: "Nginx request limit exceeded"
  service: http
@@ -1,21 +0,0 @@
type: trigger
format: 2.0
name: crowdsecurity/pulse-secure-sslvpn-cve-2019-11510
description: "Detect cve-2019-11510 exploitation attempts"
filter: |
  evt.Meta.log_type in ["http_access-log", "http_error-log"] and
  (Upper(evt.Meta.http_path) matches Upper('/dana-na/../dana/html5acc/guacamole/../../../../../../../[^?]+\\?/dana/html5acc/guacamole/')
  or
  Upper(evt.Meta.http_path) matches Upper('/dana-na/%2E%2E/dana/html5acc/guacamole/%2E%2E/%2E%2E/%2E%2E/%2E%2E/%2E%2E/%2E%2E/%2E%2E/[^?]+\\?/dana/html5acc/guacamole/'))
groupby: "evt.Meta.source_ip"
blackhole: 2m
labels:
  remediation: true
  confidence: 3
  spoofable: 0
  classification:
    - attack.T1190
    - cve.CVE-2019-11510
  behavior: "http:exploit"
  label: "Pulse Secure CVE-2019-11510"
  service: pulse-secure
@@ -1,19 +0,0 @@
type: trigger
format: 2.0
name: crowdsecurity/spring4shell_cve-2022-22965
description: "Detect cve-2022-22965 probing"
filter: |
  evt.Meta.log_type in ["http_access-log", "http_error-log"] and
  (Upper(evt.Meta.http_path) contains 'CLASS.MODULE.CLASSLOADER.')
groupby: "evt.Meta.source_ip"
blackhole: 2m
labels:
  remediation: true
  confidence: 3
  spoofable: 0
  classification:
    - attack.T1190
    - cve.CVE-2022-22965
  behavior: "http:exploit"
  label: "Spring4shell CVE-2022-22965"
  service: spring
@@ -1,41 +0,0 @@
# ssh bruteforce
type: leaky
name: crowdsecurity/ssh-bf
description: "Detect ssh bruteforce"
filter: "evt.Meta.log_type == 'ssh_failed-auth'"
leakspeed: "10s"
references:
  - http://wikipedia.com/ssh-bf-is-bad
capacity: 5
groupby: evt.Meta.source_ip
blackhole: 1m
reprocess: true
labels:
  service: ssh
  confidence: 3
  spoofable: 0
  classification:
    - attack.T1110
  label: "SSH Bruteforce"
  behavior: "ssh:bruteforce"
  remediation: true
---
# ssh user-enum
type: leaky
name: crowdsecurity/ssh-bf_user-enum
description: "Detect ssh user enum bruteforce"
filter: evt.Meta.log_type == 'ssh_failed-auth'
groupby: evt.Meta.source_ip
distinct: evt.Meta.target_user
leakspeed: 10s
capacity: 5
blackhole: 1m
labels:
  service: ssh
  remediation: true
  confidence: 3
  spoofable: 0
  classification:
    - attack.T1589
  behavior: "ssh:bruteforce"
  label: "SSH User Enumeration"
@@ -1,41 +0,0 @@
# ssh bruteforce
type: leaky
name: crowdsecurity/ssh-slow-bf
description: "Detect slow ssh bruteforce"
filter: "evt.Meta.log_type == 'ssh_failed-auth'"
leakspeed: "60s"
references:
  - http://wikipedia.com/ssh-bf-is-bad
capacity: 10
groupby: evt.Meta.source_ip
blackhole: 1m
reprocess: true
labels:
  service: ssh
  remediation: true
  confidence: 3
  spoofable: 0
  classification:
    - attack.T1110
  behavior: "ssh:bruteforce"
  label: "SSH Slow Bruteforce"
---
# ssh user-enum
type: leaky
name: crowdsecurity/ssh-slow-bf_user-enum
description: "Detect slow ssh user enum bruteforce"
filter: evt.Meta.log_type == 'ssh_failed-auth'
groupby: evt.Meta.source_ip
distinct: evt.Meta.target_user
leakspeed: 60s
capacity: 10
blackhole: 1m
labels:
  service: ssh
  remediation: true
  confidence: 3
  spoofable: 0
  classification:
    - attack.T1110
  behavior: "ssh:bruteforce"
  label: "SSH Slow User Enumeration"
@@ -1,24 +0,0 @@
type: trigger
format: 2.0
#debug: true
name: crowdsecurity/thinkphp-cve-2018-20062
description: "Detect ThinkPHP CVE-2018-20062 exploitation attempts"
filter: |
  evt.Meta.log_type in ["http_access-log", "http_error-log"] and any(File("thinkphp_cve_2018-20062.txt"), {Upper(evt.Meta.http_path) matches Upper(#)})
data:
  - source_url: https://raw.githubusercontent.com/crowdsecurity/sec-lists/master/web/thinkphp_cve_2018-20062.txt
    dest_file: thinkphp_cve_2018-20062.txt
    type: string
groupby: "evt.Meta.source_ip"
blackhole: 2m
labels:
  confidence: 3
  spoofable: 0
  classification:
    - attack.T1190
    - attack.T1595
    - cve.CVE-2018-20062
  behavior: "http:exploit"
  label: "ThinkPHP CVE-2018-20062"
  remediation: true
  service: thinkphp
@@ -1,19 +0,0 @@
type: trigger
format: 2.0
name: crowdsecurity/vmware-cve-2022-22954
description: "Detect Vmware CVE-2022-22954 exploitation attempts"
filter: |
evt.Meta.log_type in ['http_access-log', 'http_error-log'] && Upper(QueryUnescape(evt.Meta.http_path)) startsWith Upper('/catalog-portal/ui/oauth/verify?error=&deviceUdid=${"freemarker.template.utility.Execute"?new()(')
groupby: "evt.Meta.source_ip"
blackhole: 2m
labels:
confidence: 3
spoofable: 0
classification:
- attack.T1190
- attack.T1595
- cve.CVE-2022-22954
behavior: "vm-management:exploit"
label: "VMWARE CVE-2022-22954"
remediation: true
service: vmware
@@ -1,19 +0,0 @@
type: trigger
format: 2.0
name: crowdsecurity/vmware-vcenter-vmsa-2021-0027
description: "Detect VMSA-2021-0027 exploitation attemps"
filter: |
evt.Meta.log_type in ['http_access-log', 'http_error-log'] && evt.Meta.http_path matches '/ui/vcav-bootstrap/rest/vcav-providers/provider-logo\\?url=(file|http)'
groupby: "evt.Meta.source_ip"
blackhole: 2m
labels:
confidence: 3
spoofable: 0
classification:
- attack.T1190
- attack.T1595
- cve.CVE-2021-0027
behavior: "vm-management:exploit"
label: "VMWARE VCenter VMSA CVE-2021-0027"
remediation: true
service: vmware
@@ -1,17 +0,0 @@
#contributed by ltsich
type: trigger
name: ltsich/http-w00tw00t
description: "detect w00tw00t"
debug: false
filter: "evt.Meta.log_type == 'http_access-log' && evt.Parsed.file_name contains 'w00tw00t.at.ISC.SANS.DFind'"
groupby: evt.Meta.source_ip
blackhole: 5m
labels:
service: http
classification:
- attack.T1595
spoofable: 0
confidence: 3
behavior: "http:scan"
label: "w00t w00t Scanner"
remediation: true
@@ -1,3 +0,0 @@
url: http://127.0.0.1:8080
login: 96067cce28e74eaf96e5506a1d6ae631ckyFXsLoytHbzoUU
password: jVJRQEcOCPx899IPygKxGOSY4sau6A6yPfGiABYzp9EWlUuek6c92RnrLMic3Y5B
@@ -1,55 +0,0 @@
type: email # Don't change
name: email_default # Must match the registered plugin in the profile

# One of "trace", "debug", "info", "warn", "error", "off"
log_level: info

# group_wait: # Time to wait collecting alerts before relaying a message to this plugin, eg "30s"
# group_threshold: # Amount of alerts that triggers a message before <group_wait> has expired, eg "10"
# max_retry: # Number of attempts to relay messages to plugins in case of error
timeout: 20s # Time to wait for response from the plugin before considering the attempt a failure, eg "10s"

#-------------------------
# plugin-specific options

# The following template receives a list of models.Alert objects
# The output goes in the email message body
format: |
<html><body>
{{range . -}}
{{$alert := . -}}
{{range .Decisions -}}
<p><a href="https://www.whois.com/whois/{{.Value}}">{{.Value}}</a> will get <b>{{.Type}}</b> for next <b>{{.Duration}}</b> for triggering <b>{{.Scenario}}</b> on machine <b>{{$alert.MachineID}}</b>.</p> <p><a href="https://app.crowdsec.net/cti/{{.Value}}">CrowdSec CTI</a></p>
{{end -}}
{{end -}}
</body></html>

smtp_host: # example: smtp.gmail.com
smtp_username: # Replace with your actual username
smtp_password: # Replace with your actual password
smtp_port: # Common values are any of [25, 465, 587, 2525]
auth_type: # Valid choices are "none", "crammd5", "login", "plain"
sender_name: "CrowdSec"
sender_email: # example: foo@gmail.com
email_subject: "CrowdSec Notification"
receiver_emails:
# - email1@gmail.com
# - email2@gmail.com

# One of "ssltls", "starttls", "none"
encryption_type: "ssltls"

# If you need to set the HELO hostname:
# helo_host: "localhost"

# If the email server is hitting the default timeouts (10 seconds), you can increase them here
#
# connect_timeout: 10s
# send_timeout: 10s

---

# type: email
# name: email_second_notification
# ...
@@ -1,36 +0,0 @@
type: http # Don't change
name: http_default # Must match the registered plugin in the profile

# One of "trace", "debug", "info", "warn", "error", "off"
log_level: info

# group_wait: # Time to wait collecting alerts before relaying a message to this plugin, eg "30s"
# group_threshold: # Amount of alerts that triggers a message before <group_wait> has expired, eg "10"
# max_retry: # Number of attempts to relay messages to plugins in case of error
# timeout: # Time to wait for response from the plugin before considering the attempt a failure, eg "10s"

#-------------------------
# plugin-specific options

# The following template receives a list of models.Alert objects
# The output goes in the http request body
format: |
{{.|toJson}}

# The plugin will make requests to this url, eg: https://www.example.com/
url: <HTTP_url>

# Any of the http verbs: "POST", "GET", "PUT"...
method: POST

# headers:
# Authorization: token 0x64312313

# skip_tls_verification: # true or false. Default is false

---

# type: http
# name: http_second_notification
# ...
@@ -1,21 +0,0 @@
type: sentinel # Don't change
name: sentinel_default # Must match the registered plugin in the profile

# One of "trace", "debug", "info", "warn", "error", "off"
log_level: info
# group_wait: # Time to wait collecting alerts before relaying a message to this plugin, eg "30s"
# group_threshold: # Amount of alerts that triggers a message before <group_wait> has expired, eg "10"
# max_retry: # Number of attempts to relay messages to plugins in case of error
# timeout: # Time to wait for response from the plugin before considering the attempt a failure, eg "10s"

#-------------------------
# plugin-specific options

# The following template receives a list of models.Alert objects
# The output goes in the http request body
format: |
{{.|toJson}}

customer_id: XXX-XXX
shared_key: XXXXXXX
log_type: crowdsec
@@ -1,36 +0,0 @@
type: slack # Don't change
name: slack_default # Must match the registered plugin in the profile

# One of "trace", "debug", "info", "warn", "error", "off"
log_level: info

# group_wait: # Time to wait collecting alerts before relaying a message to this plugin, eg "30s"
# group_threshold: # Amount of alerts that triggers a message before <group_wait> has expired, eg "10"
# max_retry: # Number of attempts to relay messages to plugins in case of error
# timeout: # Time to wait for response from the plugin before considering the attempt a failure, eg "10s"

#-------------------------
# plugin-specific options

# The following template receives a list of models.Alert objects
# The output goes in the slack message
format: |
{{range . -}}
{{$alert := . -}}
{{range .Decisions -}}
{{if $alert.Source.Cn -}}
:flag-{{$alert.Source.Cn}}: <https://www.whois.com/whois/{{.Value}}|{{.Value}}> will get {{.Type}} for next {{.Duration}} for triggering {{.Scenario}} on machine '{{$alert.MachineID}}'. <https://app.crowdsec.net/cti/{{.Value}}|CrowdSec CTI>{{end}}
{{if not $alert.Source.Cn -}}
:pirate_flag: <https://www.whois.com/whois/{{.Value}}|{{.Value}}> will get {{.Type}} for next {{.Duration}} for triggering {{.Scenario}} on machine '{{$alert.MachineID}}'. <https://app.crowdsec.net/cti/{{.Value}}|CrowdSec CTI>{{end}}
{{end -}}
{{end -}}


webhook: <WEBHOOK_URL>

---

# type: slack
# name: slack_second_notification
# ...
@@ -1,28 +0,0 @@
type: splunk # Don't change
name: splunk_default # Must match the registered plugin in the profile

# One of "trace", "debug", "info", "warn", "error", "off"
log_level: info

# group_wait: # Time to wait collecting alerts before relaying a message to this plugin, eg "30s"
# group_threshold: # Amount of alerts that triggers a message before <group_wait> has expired, eg "10"
# max_retry: # Number of attempts to relay messages to plugins in case of error
# timeout: # Time to wait for response from the plugin before considering the attempt a failure, eg "10s"

#-------------------------
# plugin-specific options

# The following template receives a list of models.Alert objects
# The output goes in the splunk notification
format: |
{{.|toJson}}

url: <SPLUNK_HTTP_URL>
token: <SPLUNK_TOKEN>

---

# type: splunk
# name: splunk_second_notification
# ...
@@ -1,3 +0,0 @@
url: https://api.crowdsec.net/
login: 96067cce28e74eaf96e5506a1d6ae631V4zDUMoTQvfH1X7j
password: 1VWwNq23hSSBpaDN8tkbhA30harJ1h9wWUXn5kmwg5ReE42hYHIbNehDk7p4yGnX
@@ -1,11 +0,0 @@
S3_REQUEST_LINE (?:%{WORD:verb} %{NOTSPACE:request}(?: HTTP/%{NUMBER:httpversion})?|%{DATA:rawrequest})

S3_ACCESS_LOG %{WORD:owner} %{NOTSPACE:bucket} \[%{HTTPDATE:timestamp}\] %{IP:clientip} %{NOTSPACE:requester} %{NOTSPACE:request_id} %{NOTSPACE:operation} %{NOTSPACE:key} (?:"%{S3_REQUEST_LINE}"|-) (?:%{INT:response:int}|-) (?:-|%{NOTSPACE:error_code}) (?:%{INT:bytes:int}|-) (?:%{INT:object_size:int}|-) (?:%{INT:request_time_ms:int}|-) (?:%{INT:turnaround_time_ms:int}|-) (?:%{QS:referrer}|-) (?:"?%{QS:agent}"?|-) (?:-|%{NOTSPACE:version_id})

ELB_URIPATHPARAM %{URIPATH:path}(?:%{URIPARAM:params})?

ELB_URI %{URIPROTO:proto}://(?:%{USER}(?::[^@]*)?@)?(?:%{URIHOST:urihost})?(?:%{ELB_URIPATHPARAM})?

ELB_REQUEST_LINE (?:%{WORD:verb} %{ELB_URI:request}(?: HTTP/%{NUMBER:httpversion})?|%{DATA:rawrequest})

ELB_ACCESS_LOG %{TIMESTAMP_ISO8601:timestamp} %{NOTSPACE:elb} %{IP:clientip}:%{INT:clientport:int} (?:(%{IP:backendip}:?:%{INT:backendport:int})|-) %{NUMBER:request_processing_time:float} %{NUMBER:backend_processing_time:float} %{NUMBER:response_processing_time:float} %{INT:response:int} %{INT:backend_response:int} %{INT:received_bytes:int} %{INT:bytes:int} "%{ELB_REQUEST_LINE}"
@@ -1,50 +0,0 @@
BACULA_TIMESTAMP %{MONTHDAY}-%{MONTH} %{HOUR}:%{MINUTE}
BACULA_HOST [a-zA-Z0-9-]+
BACULA_VOLUME %{USER}
BACULA_DEVICE %{USER}
BACULA_DEVICEPATH %{UNIXPATH}
BACULA_CAPACITY %{INT}{1,3}(,%{INT}{3})*
BACULA_VERSION %{USER}
BACULA_JOB %{USER}

BACULA_LOG_MAX_CAPACITY User defined maximum volume capacity %{BACULA_CAPACITY} exceeded on device \"%{BACULA_DEVICE:device}\" \(%{BACULA_DEVICEPATH}\)
BACULA_LOG_END_VOLUME End of medium on Volume \"%{BACULA_VOLUME:volume}\" Bytes=%{BACULA_CAPACITY} Blocks=%{BACULA_CAPACITY} at %{MONTHDAY}-%{MONTH}-%{YEAR} %{HOUR}:%{MINUTE}.
BACULA_LOG_NEW_VOLUME Created new Volume \"%{BACULA_VOLUME:volume}\" in catalog.
BACULA_LOG_NEW_LABEL Labeled new Volume \"%{BACULA_VOLUME:volume}\" on device \"%{BACULA_DEVICE:device}\" \(%{BACULA_DEVICEPATH}\).
BACULA_LOG_WROTE_LABEL Wrote label to prelabeled Volume \"%{BACULA_VOLUME:volume}\" on device \"%{BACULA_DEVICE}\" \(%{BACULA_DEVICEPATH}\)
BACULA_LOG_NEW_MOUNT New volume \"%{BACULA_VOLUME:volume}\" mounted on device \"%{BACULA_DEVICE:device}\" \(%{BACULA_DEVICEPATH}\) at %{MONTHDAY}-%{MONTH}-%{YEAR} %{HOUR}:%{MINUTE}.
BACULA_LOG_NOOPEN \s+Cannot open %{DATA}: ERR=%{GREEDYDATA:berror}
BACULA_LOG_NOOPENDIR \s+Could not open directory %{DATA}: ERR=%{GREEDYDATA:berror}
BACULA_LOG_NOSTAT \s+Could not stat %{DATA}: ERR=%{GREEDYDATA:berror}
BACULA_LOG_NOJOBS There are no more Jobs associated with Volume \"%{BACULA_VOLUME:volume}\". Marking it purged.
BACULA_LOG_ALL_RECORDS_PRUNED All records pruned from Volume \"%{BACULA_VOLUME:volume}\"; marking it \"Purged\"
BACULA_LOG_BEGIN_PRUNE_JOBS Begin pruning Jobs older than %{INT} month %{INT} days .
BACULA_LOG_BEGIN_PRUNE_FILES Begin pruning Files.
BACULA_LOG_PRUNED_JOBS Pruned %{INT} Jobs* for client %{BACULA_HOST:client} from catalog.
BACULA_LOG_PRUNED_FILES Pruned Files from %{INT} Jobs* for client %{BACULA_HOST:client} from catalog.
BACULA_LOG_ENDPRUNE End auto prune.
BACULA_LOG_STARTJOB Start Backup JobId %{INT}, Job=%{BACULA_JOB:job}
BACULA_LOG_STARTRESTORE Start Restore Job %{BACULA_JOB:job}
BACULA_LOG_USEDEVICE Using Device \"%{BACULA_DEVICE:device}\"
BACULA_LOG_DIFF_FS \s+%{UNIXPATH} is a different filesystem. Will not descend from %{UNIXPATH} into it.
BACULA_LOG_JOBEND Job write elapsed time = %{DATA:elapsed}, Transfer rate = %{NUMBER} (K|M|G)? Bytes/second
BACULA_LOG_NOPRUNE_JOBS No Jobs found to prune.
BACULA_LOG_NOPRUNE_FILES No Files found to prune.
BACULA_LOG_VOLUME_PREVWRITTEN Volume \"%{BACULA_VOLUME:volume}\" previously written, moving to end of data.
BACULA_LOG_READYAPPEND Ready to append to end of Volume \"%{BACULA_VOLUME:volume}\" size=%{INT}
BACULA_LOG_CANCELLING Cancelling duplicate JobId=%{INT}.
BACULA_LOG_MARKCANCEL JobId %{INT}, Job %{BACULA_JOB:job} marked to be canceled.
BACULA_LOG_CLIENT_RBJ shell command: run ClientRunBeforeJob \"%{GREEDYDATA:runjob}\"
BACULA_LOG_VSS (Generate )?VSS (Writer)?
BACULA_LOG_MAXSTART Fatal error: Job canceled because max start delay time exceeded.
BACULA_LOG_DUPLICATE Fatal error: JobId %{INT:duplicate} already running. Duplicate job not allowed.
BACULA_LOG_NOJOBSTAT Fatal error: No Job status returned from FD.
BACULA_LOG_FATAL_CONN Fatal error: bsock.c:133 Unable to connect to (Client: %{BACULA_HOST:client}|Storage daemon) on %{HOSTNAME}:%{POSINT}. ERR=%{GREEDYDATA:berror}
BACULA_LOG_NO_CONNECT Warning: bsock.c:127 Could not connect to (Client: %{BACULA_HOST:client}|Storage daemon) on %{HOSTNAME}:%{POSINT}. ERR=%{GREEDYDATA:berror}
BACULA_LOG_NO_AUTH Fatal error: Unable to authenticate with File daemon at %{HOSTNAME}. Possible causes:
BACULA_LOG_NOSUIT No prior or suitable Full backup found in catalog. Doing FULL backup.
BACULA_LOG_NOPRIOR No prior Full backup Job record found.

BACULA_LOG_JOB (Error: )?Bacula %{BACULA_HOST} %{BACULA_VERSION} \(%{BACULA_VERSION}\):

BACULA_LOGLINE %{BACULA_TIMESTAMP:bts} %{BACULA_HOST:hostname} JobId %{INT:jobid}: (%{BACULA_LOG_MAX_CAPACITY}|%{BACULA_LOG_END_VOLUME}|%{BACULA_LOG_NEW_VOLUME}|%{BACULA_LOG_NEW_LABEL}|%{BACULA_LOG_WROTE_LABEL}|%{BACULA_LOG_NEW_MOUNT}|%{BACULA_LOG_NOOPEN}|%{BACULA_LOG_NOOPENDIR}|%{BACULA_LOG_NOSTAT}|%{BACULA_LOG_NOJOBS}|%{BACULA_LOG_ALL_RECORDS_PRUNED}|%{BACULA_LOG_BEGIN_PRUNE_JOBS}|%{BACULA_LOG_BEGIN_PRUNE_FILES}|%{BACULA_LOG_PRUNED_JOBS}|%{BACULA_LOG_PRUNED_FILES}|%{BACULA_LOG_ENDPRUNE}|%{BACULA_LOG_STARTJOB}|%{BACULA_LOG_STARTRESTORE}|%{BACULA_LOG_USEDEVICE}|%{BACULA_LOG_DIFF_FS}|%{BACULA_LOG_JOBEND}|%{BACULA_LOG_NOPRUNE_JOBS}|%{BACULA_LOG_NOPRUNE_FILES}|%{BACULA_LOG_VOLUME_PREVWRITTEN}|%{BACULA_LOG_READYAPPEND}|%{BACULA_LOG_CANCELLING}|%{BACULA_LOG_MARKCANCEL}|%{BACULA_LOG_CLIENT_RBJ}|%{BACULA_LOG_VSS}|%{BACULA_LOG_MAXSTART}|%{BACULA_LOG_DUPLICATE}|%{BACULA_LOG_NOJOBSTAT}|%{BACULA_LOG_FATAL_CONN}|%{BACULA_LOG_NO_CONNECT}|%{BACULA_LOG_NO_AUTH}|%{BACULA_LOG_NOSUIT}|%{BACULA_LOG_JOB}|%{BACULA_LOG_NOPRIOR})
@@ -1,13 +0,0 @@
# https://www.bro.org/sphinx/script-reference/log-files.html

# http.log
BRO_HTTP %{NUMBER:ts}\t%{NOTSPACE:uid}\t%{IP:orig_h}\t%{INT:orig_p}\t%{IP:resp_h}\t%{INT:resp_p}\t%{INT:trans_depth}\t%{GREEDYDATA:method}\t%{GREEDYDATA:domain}\t%{GREEDYDATA:uri}\t%{GREEDYDATA:referrer}\t%{GREEDYDATA:user_agent}\t%{NUMBER:request_body_len}\t%{NUMBER:response_body_len}\t%{GREEDYDATA:status_code}\t%{GREEDYDATA:status_msg}\t%{GREEDYDATA:info_code}\t%{GREEDYDATA:info_msg}\t%{GREEDYDATA:filename}\t%{GREEDYDATA:bro_tags}\t%{GREEDYDATA:username}\t%{GREEDYDATA:password}\t%{GREEDYDATA:proxied}\t%{GREEDYDATA:orig_fuids}\t%{GREEDYDATA:orig_mime_types}\t%{GREEDYDATA:resp_fuids}\t%{GREEDYDATA:resp_mime_types}

# dns.log
BRO_DNS %{NUMBER:ts}\t%{NOTSPACE:uid}\t%{IP:orig_h}\t%{INT:orig_p}\t%{IP:resp_h}\t%{INT:resp_p}\t%{WORD:proto}\t%{INT:trans_id}\t%{GREEDYDATA:query}\t%{GREEDYDATA:qclass}\t%{GREEDYDATA:qclass_name}\t%{GREEDYDATA:qtype}\t%{GREEDYDATA:qtype_name}\t%{GREEDYDATA:rcode}\t%{GREEDYDATA:rcode_name}\t%{GREEDYDATA:AA}\t%{GREEDYDATA:TC}\t%{GREEDYDATA:RD}\t%{GREEDYDATA:RA}\t%{GREEDYDATA:Z}\t%{GREEDYDATA:answers}\t%{GREEDYDATA:TTLs}\t%{GREEDYDATA:rejected}

# conn.log
BRO_CONN %{NUMBER:ts}\t%{NOTSPACE:uid}\t%{IP:orig_h}\t%{INT:orig_p}\t%{IP:resp_h}\t%{INT:resp_p}\t%{WORD:proto}\t%{GREEDYDATA:service}\t%{NUMBER:duration}\t%{NUMBER:orig_bytes}\t%{NUMBER:resp_bytes}\t%{GREEDYDATA:conn_state}\t%{GREEDYDATA:local_orig}\t%{GREEDYDATA:missed_bytes}\t%{GREEDYDATA:history}\t%{GREEDYDATA:orig_pkts}\t%{GREEDYDATA:orig_ip_bytes}\t%{GREEDYDATA:resp_pkts}\t%{GREEDYDATA:resp_ip_bytes}\t%{GREEDYDATA:tunnel_parents}

# files.log
BRO_FILES %{NUMBER:ts}\t%{NOTSPACE:fuid}\t%{IP:tx_hosts}\t%{IP:rx_hosts}\t%{NOTSPACE:conn_uids}\t%{GREEDYDATA:source}\t%{GREEDYDATA:depth}\t%{GREEDYDATA:analyzers}\t%{GREEDYDATA:mime_type}\t%{GREEDYDATA:filename}\t%{GREEDYDATA:duration}\t%{GREEDYDATA:local_orig}\t%{GREEDYDATA:is_orig}\t%{GREEDYDATA:seen_bytes}\t%{GREEDYDATA:total_bytes}\t%{GREEDYDATA:missing_bytes}\t%{GREEDYDATA:overflow_bytes}\t%{GREEDYDATA:timedout}\t%{GREEDYDATA:parent_fuid}\t%{GREEDYDATA:md5}\t%{GREEDYDATA:sha1}\t%{GREEDYDATA:sha256}\t%{GREEDYDATA:extracted}
@@ -1 +0,0 @@
COWRIE_NEW_CO New connection: %{IPV4:source_ip}:[0-9]+ \(%{IPV4:dest_ip}:%{INT:dest_port}\) \[session: %{DATA:telnet_session}\]$
@@ -1,12 +0,0 @@
EXIM_MSGID [0-9A-Za-z]{6}-[0-9A-Za-z]{6}-[0-9A-Za-z]{2}
EXIM_FLAGS (<=|[-=>*]>|[*]{2}|==)
EXIM_DATE %{YEAR:exim_year}-%{MONTHNUM:exim_month}-%{MONTHDAY:exim_day} %{TIME:exim_time}
EXIM_PID \[%{POSINT}\]
EXIM_QT ((\d+y)?(\d+w)?(\d+d)?(\d+h)?(\d+m)?(\d+s)?)
EXIM_EXCLUDE_TERMS (Message is frozen|(Start|End) queue run| Warning: | retry time not reached | no (IP address|host name) found for (IP address|host) | unexpected disconnection while reading SMTP command | no immediate delivery: |another process is handling this message)
EXIM_REMOTE_HOST (H=(%{NOTSPACE:remote_hostname} )?(\(%{NOTSPACE:remote_heloname}\) )?\[%{IP:remote_host}\])
EXIM_INTERFACE (I=\[%{IP:exim_interface}\](:%{NUMBER:exim_interface_port}))
EXIM_PROTOCOL (P=%{NOTSPACE:protocol})
EXIM_MSG_SIZE (S=%{NUMBER:exim_msg_size})
EXIM_HEADER_ID (id=%{NOTSPACE:exim_header_id})
EXIM_SUBJECT (T=%{QS:exim_subject})
@@ -1,86 +0,0 @@
# NetScreen firewall logs
NETSCREENSESSIONLOG %{SYSLOGTIMESTAMP:date} %{IPORHOST:device} %{IPORHOST}: NetScreen device_id=%{WORD:device_id}%{DATA}: start_time=%{QUOTEDSTRING:start_time} duration=%{INT:duration} policy_id=%{INT:policy_id} service=%{DATA:service} proto=%{INT:proto} src zone=%{WORD:src_zone} dst zone=%{WORD:dst_zone} action=%{WORD:action} sent=%{INT:sent} rcvd=%{INT:rcvd} src=%{IPORHOST:src_ip} dst=%{IPORHOST:dst_ip} src_port=%{INT:src_port} dst_port=%{INT:dst_port} src-xlated ip=%{IPORHOST:src_xlated_ip} port=%{INT:src_xlated_port} dst-xlated ip=%{IPORHOST:dst_xlated_ip} port=%{INT:dst_xlated_port} session_id=%{INT:session_id} reason=%{GREEDYDATA:reason}

#== Cisco ASA ==
CISCOTAG [A-Z0-9]+-%{INT}-(?:[A-Z0-9_]+)
CISCOTIMESTAMP %{MONTH} +%{MONTHDAY}(?: %{YEAR})? %{TIME}
CISCO_TAGGED_SYSLOG ^<%{POSINT:syslog_pri}>%{CISCOTIMESTAMP:timestamp}( %{SYSLOGHOST:sysloghost})? ?: %%{CISCOTAG:ciscotag}:
# Common Particles
CISCO_ACTION Built|Teardown|Deny|Denied|denied|requested|permitted|denied by ACL|discarded|est-allowed|Dropping|created|deleted
CISCO_REASON Duplicate TCP SYN|Failed to locate egress interface|Invalid transport field|No matching connection|DNS Response|DNS Query|(?:%{WORD}\s*)*
CISCO_DIRECTION Inbound|inbound|Outbound|outbound
CISCO_INTERVAL first hit|%{INT}-second interval
CISCO_XLATE_TYPE static|dynamic
# ASA-1-104001
CISCOFW104001 \((?:Primary|Secondary)\) Switching to ACTIVE - %{GREEDYDATA:switch_reason}
# ASA-1-104002
CISCOFW104002 \((?:Primary|Secondary)\) Switching to STANDBY - %{GREEDYDATA:switch_reason}
# ASA-1-104003
CISCOFW104003 \((?:Primary|Secondary)\) Switching to FAILED\.
# ASA-1-104004
CISCOFW104004 \((?:Primary|Secondary)\) Switching to OK\.
# ASA-1-105003
CISCOFW105003 \((?:Primary|Secondary)\) Monitoring on [Ii]nterface %{GREEDYDATA:interface_name} waiting
# ASA-1-105004
CISCOFW105004 \((?:Primary|Secondary)\) Monitoring on [Ii]nterface %{GREEDYDATA:interface_name} normal
# ASA-1-105005
CISCOFW105005 \((?:Primary|Secondary)\) Lost Failover communications with mate on [Ii]nterface %{GREEDYDATA:interface_name}
# ASA-1-105008
CISCOFW105008 \((?:Primary|Secondary)\) Testing [Ii]nterface %{GREEDYDATA:interface_name}
# ASA-1-105009
CISCOFW105009 \((?:Primary|Secondary)\) Testing on [Ii]nterface %{GREEDYDATA:interface_name} (?:Passed|Failed)
# ASA-2-106001
CISCOFW106001 %{CISCO_DIRECTION:direction} %{WORD:protocol} connection %{CISCO_ACTION:action} from %{IP:src_ip}/%{INT:src_port} to %{IP:dst_ip}/%{INT:dst_port} flags %{GREEDYDATA:tcp_flags} on interface %{GREEDYDATA:interface}
# ASA-2-106006, ASA-2-106007, ASA-2-106010
CISCOFW106006_106007_106010 %{CISCO_ACTION:action} %{CISCO_DIRECTION:direction} %{WORD:protocol} (?:from|src) %{IP:src_ip}/%{INT:src_port}(\(%{DATA:src_fwuser}\))? (?:to|dst) %{IP:dst_ip}/%{INT:dst_port}(\(%{DATA:dst_fwuser}\))? (?:on interface %{DATA:interface}|due to %{CISCO_REASON:reason})
# ASA-3-106014
CISCOFW106014 %{CISCO_ACTION:action} %{CISCO_DIRECTION:direction} %{WORD:protocol} src %{DATA:src_interface}:%{IP:src_ip}(\(%{DATA:src_fwuser}\))? dst %{DATA:dst_interface}:%{IP:dst_ip}(\(%{DATA:dst_fwuser}\))? \(type %{INT:icmp_type}, code %{INT:icmp_code}\)
# ASA-6-106015
CISCOFW106015 %{CISCO_ACTION:action} %{WORD:protocol} \(%{DATA:policy_id}\) from %{IP:src_ip}/%{INT:src_port} to %{IP:dst_ip}/%{INT:dst_port} flags %{DATA:tcp_flags} on interface %{GREEDYDATA:interface}
# ASA-1-106021
CISCOFW106021 %{CISCO_ACTION:action} %{WORD:protocol} reverse path check from %{IP:src_ip} to %{IP:dst_ip} on interface %{GREEDYDATA:interface}
# ASA-4-106023
CISCOFW106023 %{CISCO_ACTION:action}( protocol)? %{WORD:protocol} src %{DATA:src_interface}:%{DATA:src_ip}(/%{INT:src_port})?(\(%{DATA:src_fwuser}\))? dst %{DATA:dst_interface}:%{DATA:dst_ip}(/%{INT:dst_port})?(\(%{DATA:dst_fwuser}\))?( \(type %{INT:icmp_type}, code %{INT:icmp_code}\))? by access-group "?%{DATA:policy_id}"? \[%{DATA:hashcode1}, %{DATA:hashcode2}\]
# ASA-4-106100, ASA-4-106102, ASA-4-106103
CISCOFW106100_2_3 access-list %{NOTSPACE:policy_id} %{CISCO_ACTION:action} %{WORD:protocol} for user '%{DATA:src_fwuser}' %{DATA:src_interface}/%{IP:src_ip}\(%{INT:src_port}\) -> %{DATA:dst_interface}/%{IP:dst_ip}\(%{INT:dst_port}\) hit-cnt %{INT:hit_count} %{CISCO_INTERVAL:interval} \[%{DATA:hashcode1}, %{DATA:hashcode2}\]
# ASA-5-106100
CISCOFW106100 access-list %{NOTSPACE:policy_id} %{CISCO_ACTION:action} %{WORD:protocol} %{DATA:src_interface}/%{IP:src_ip}\(%{INT:src_port}\)(\(%{DATA:src_fwuser}\))? -> %{DATA:dst_interface}/%{IP:dst_ip}\(%{INT:dst_port}\)(\(%{DATA:src_fwuser}\))? hit-cnt %{INT:hit_count} %{CISCO_INTERVAL:interval} \[%{DATA:hashcode1}, %{DATA:hashcode2}\]
# ASA-6-110002
CISCOFW110002 %{CISCO_REASON:reason} for %{WORD:protocol} from %{DATA:src_interface}:%{IP:src_ip}/%{INT:src_port} to %{IP:dst_ip}/%{INT:dst_port}
# ASA-6-302010
CISCOFW302010 %{INT:connection_count} in use, %{INT:connection_count_max} most used
# ASA-6-302013, ASA-6-302014, ASA-6-302015, ASA-6-302016
CISCOFW302013_302014_302015_302016 %{CISCO_ACTION:action}(?: %{CISCO_DIRECTION:direction})? %{WORD:protocol} connection %{INT:connection_id} for %{DATA:src_interface}:%{IP:src_ip}/%{INT:src_port}( \(%{IP:src_mapped_ip}/%{INT:src_mapped_port}\))?(\(%{DATA:src_fwuser}\))? to %{DATA:dst_interface}:%{IP:dst_ip}/%{INT:dst_port}( \(%{IP:dst_mapped_ip}/%{INT:dst_mapped_port}\))?(\(%{DATA:dst_fwuser}\))?( duration %{TIME:duration} bytes %{INT:bytes})?(?: %{CISCO_REASON:reason})?( \(%{DATA:user}\))?
# ASA-6-302020, ASA-6-302021
CISCOFW302020_302021 %{CISCO_ACTION:action}(?: %{CISCO_DIRECTION:direction})? %{WORD:protocol} connection for faddr %{IP:dst_ip}/%{INT:icmp_seq_num}(?:\(%{DATA:fwuser}\))? gaddr %{IP:src_xlated_ip}/%{INT:icmp_code_xlated} laddr %{IP:src_ip}/%{INT:icmp_code}( \(%{DATA:user}\))?
# ASA-6-305011
CISCOFW305011 %{CISCO_ACTION:action} %{CISCO_XLATE_TYPE:xlate_type} %{WORD:protocol} translation from %{DATA:src_interface}:%{IP:src_ip}(/%{INT:src_port})?(\(%{DATA:src_fwuser}\))? to %{DATA:src_xlated_interface}:%{IP:src_xlated_ip}/%{DATA:src_xlated_port}
# ASA-3-313001, ASA-3-313004, ASA-3-313008
CISCOFW313001_313004_313008 %{CISCO_ACTION:action} %{WORD:protocol} type=%{INT:icmp_type}, code=%{INT:icmp_code} from %{IP:src_ip} on interface %{DATA:interface}( to %{IP:dst_ip})?
# ASA-4-313005
CISCOFW313005 %{CISCO_REASON:reason} for %{WORD:protocol} error message: %{WORD:err_protocol} src %{DATA:err_src_interface}:%{IP:err_src_ip}(\(%{DATA:err_src_fwuser}\))? dst %{DATA:err_dst_interface}:%{IP:err_dst_ip}(\(%{DATA:err_dst_fwuser}\))? \(type %{INT:err_icmp_type}, code %{INT:err_icmp_code}\) on %{DATA:interface} interface\. Original IP payload: %{WORD:protocol} src %{IP:orig_src_ip}/%{INT:orig_src_port}(\(%{DATA:orig_src_fwuser}\))? dst %{IP:orig_dst_ip}/%{INT:orig_dst_port}(\(%{DATA:orig_dst_fwuser}\))?
# ASA-5-321001
CISCOFW321001 Resource '%{WORD:resource_name}' limit of %{POSINT:resource_limit} reached for system
# ASA-4-402117
CISCOFW402117 %{WORD:protocol}: Received a non-IPSec packet \(protocol= %{WORD:orig_protocol}\) from %{IP:src_ip} to %{IP:dst_ip}
# ASA-4-402119
CISCOFW402119 %{WORD:protocol}: Received an %{WORD:orig_protocol} packet \(SPI= %{DATA:spi}, sequence number= %{DATA:seq_num}\) from %{IP:src_ip} \(user= %{DATA:user}\) to %{IP:dst_ip} that failed anti-replay checking
# ASA-4-419001
CISCOFW419001 %{CISCO_ACTION:action} %{WORD:protocol} packet from %{DATA:src_interface}:%{IP:src_ip}/%{INT:src_port} to %{DATA:dst_interface}:%{IP:dst_ip}/%{INT:dst_port}, reason: %{GREEDYDATA:reason}
# ASA-4-419002
CISCOFW419002 %{CISCO_REASON:reason} from %{DATA:src_interface}:%{IP:src_ip}/%{INT:src_port} to %{DATA:dst_interface}:%{IP:dst_ip}/%{INT:dst_port} with different initial sequence number
# ASA-4-500004
CISCOFW500004 %{CISCO_REASON:reason} for protocol=%{WORD:protocol}, from %{IP:src_ip}/%{INT:src_port} to %{IP:dst_ip}/%{INT:dst_port}
# ASA-6-602303, ASA-6-602304
CISCOFW602303_602304 %{WORD:protocol}: An %{CISCO_DIRECTION:direction} %{GREEDYDATA:tunnel_type} SA \(SPI= %{DATA:spi}\) between %{IP:src_ip} and %{IP:dst_ip} \(user= %{DATA:user}\) has been %{CISCO_ACTION:action}
# ASA-7-710001, ASA-7-710002, ASA-7-710003, ASA-7-710005, ASA-7-710006
CISCOFW710001_710002_710003_710005_710006 %{WORD:protocol} (?:request|access) %{CISCO_ACTION:action} from %{IP:src_ip}/%{INT:src_port} to %{DATA:dst_interface}:%{IP:dst_ip}/%{INT:dst_port}
# ASA-6-713172
CISCOFW713172 Group = %{GREEDYDATA:group}, IP = %{IP:src_ip}, Automatic NAT Detection Status:\s+Remote end\s*%{DATA:is_remote_natted}\s*behind a NAT device\s+This\s+end\s*%{DATA:is_local_natted}\s*behind a NAT device
# ASA-4-733100
CISCOFW733100 \[\s*%{DATA:drop_type}\s*\] drop %{DATA:drop_rate_id} exceeded. Current burst rate is %{INT:drop_rate_current_burst} per second, max configured rate is %{INT:drop_rate_max_burst}; Current average rate is %{INT:drop_rate_current_avg} per second, max configured rate is %{INT:drop_rate_max_avg}; Cumulative total count is %{INT:drop_total_count}
#== End Cisco ASA ==

# Shorewall firewall logs
SHOREWALL (%{SYSLOGTIMESTAMP:timestamp}) (%{WORD:nf_host}) kernel:.*Shorewall:(%{WORD:nf_action1})?:(%{WORD:nf_action2})?.*IN=(%{USERNAME:nf_in_interface})?.*(OUT= *MAC=(%{COMMONMAC:nf_dst_mac}):(%{COMMONMAC:nf_src_mac})?|OUT=%{USERNAME:nf_out_interface}).*SRC=(%{IPV4:nf_src_ip}).*DST=(%{IPV4:nf_dst_ip}).*LEN=(%{WORD:nf_len}).*?TOS=(%{WORD:nf_tos}).*?PREC=(%{WORD:nf_prec}).*?TTL=(%{INT:nf_ttl}).*?ID=(%{INT:nf_id}).*?PROTO=(%{WORD:nf_protocol}).*?SPT=(%{INT:nf_src_port}?.*DPT=%{INT:nf_dst_port}?.*)
#== End Shorewall
@@ -1,39 +0,0 @@
## These patterns were tested w/ haproxy-1.4.15

## Documentation of the haproxy log formats can be found at the following links:
## http://code.google.com/p/haproxy-docs/wiki/HTTPLogFormat
## http://code.google.com/p/haproxy-docs/wiki/TCPLogFormat

HAPROXYTIME %{HOUR:haproxy_hour}:%{MINUTE:haproxy_minute}(?::%{SECOND:haproxy_second})
HAPROXYDATE %{MONTHDAY:haproxy_monthday}/%{MONTH:haproxy_month}/%{YEAR:haproxy_year}:%{HAPROXYTIME:haproxy_time}.%{INT:haproxy_milliseconds}

# Override these default patterns to parse out what is captured in your haproxy.cfg
HAPROXYCAPTUREDREQUESTHEADERS %{DATA:captured_request_headers}
HAPROXYCAPTUREDRESPONSEHEADERS %{DATA:captured_response_headers}

# Example:
# These haproxy config lines will add data to the logs that are captured
# by the patterns below. Place them in your custom patterns directory to
# override the defaults.
#
# capture request header Host len 40
# capture request header X-Forwarded-For len 50
# capture request header Accept-Language len 50
# capture request header Referer len 200
# capture request header User-Agent len 200
#
# capture response header Content-Type len 30
# capture response header Content-Encoding len 10
# capture response header Cache-Control len 200
# capture response header Last-Modified len 200
#
# HAPROXYCAPTUREDREQUESTHEADERS %{DATA:request_header_host}\|%{DATA:request_header_x_forwarded_for}\|%{DATA:request_header_accept_language}\|%{DATA:request_header_referer}\|%{DATA:request_header_user_agent}
# HAPROXYCAPTUREDRESPONSEHEADERS %{DATA:response_header_content_type}\|%{DATA:response_header_content_encoding}\|%{DATA:response_header_cache_control}\|%{DATA:response_header_last_modified}

# parse a haproxy 'httplog' line
HAPROXYHTTPBASE %{IP:client_ip}:%{INT:client_port} \[%{HAPROXYDATE:accept_date}\] %{NOTSPACE:frontend_name} %{NOTSPACE:backend_name}/%{NOTSPACE:server_name} %{INT:time_request}/%{INT:time_queue}/%{INT:time_backend_connect}/%{INT:time_backend_response}/%{NOTSPACE:time_duration} %{INT:http_status_code} %{NOTSPACE:bytes_read} %{DATA:captured_request_cookie} %{DATA:captured_response_cookie} %{NOTSPACE:termination_state} %{INT:actconn}/%{INT:feconn}/%{INT:beconn}/%{INT:srvconn}/%{NOTSPACE:retries} %{INT:srv_queue}/%{INT:backend_queue} (\{%{HAPROXYCAPTUREDREQUESTHEADERS}\})?( )?(\{%{HAPROXYCAPTUREDRESPONSEHEADERS}\})?( )?"(<BADREQ>|(%{WORD:http_verb} (%{URIPROTO:http_proto}://)?(?:%{USER:http_user}(?::[^@]*)?@)?(?:%{URIHOST:http_host})?(?:%{URIPATHPARAM:http_request})?( HTTP/%{NUMBER:http_version})?))?"

HAPROXYHTTP (?:%{SYSLOGTIMESTAMP:syslog_timestamp}|%{TIMESTAMP_ISO8601:timestamp8601}) %{IPORHOST:syslog_server} %{SYSLOGPROG}: %{HAPROXYHTTPBASE}

# parse a haproxy 'tcplog' line
HAPROXYTCP (?:%{SYSLOGTIMESTAMP:syslog_timestamp}|%{TIMESTAMP_ISO8601:timestamp8601}) %{IPORHOST:syslog_server} %{SYSLOGPROG}: %{IP:client_ip}:%{INT:client_port} \[%{HAPROXYDATE:accept_date}\] %{NOTSPACE:frontend_name} %{NOTSPACE:backend_name}/%{NOTSPACE:server_name} %{INT:time_queue}/%{INT:time_backend_connect}/%{NOTSPACE:time_duration} %{NOTSPACE:bytes_read} %{NOTSPACE:termination_state} %{INT:actconn}/%{INT:feconn}/%{INT:beconn}/%{INT:srvconn}/%{NOTSPACE:retries} %{INT:srv_queue}/%{INT:backend_queue}
@@ -1,20 +0,0 @@
JAVACLASS (?:[a-zA-Z$_][a-zA-Z$_0-9]*\.)*[a-zA-Z$_][a-zA-Z$_0-9]*
#Space is an allowed character to match special cases like 'Native Method' or 'Unknown Source'
JAVAFILE (?:[A-Za-z0-9_. -]+)
#Allow special <init> method
JAVAMETHOD (?:(<init>)|[a-zA-Z$_][a-zA-Z$_0-9]*)
#Line number is optional in special cases 'Native method' or 'Unknown source'
JAVASTACKTRACEPART %{SPACE}at %{JAVACLASS:class}\.%{JAVAMETHOD:method}\(%{JAVAFILE:file}(?::%{NUMBER:line})?\)
# Java Logs
JAVATHREAD (?:[A-Z]{2}-Processor[\d]+)
##JAVACLASS (?:[a-zA-Z0-9-]+\.)+[A-Za-z0-9$]+
##JAVAFILE (?:[A-Za-z0-9_.-]+)
##JAVASTACKTRACEPART at %{JAVACLASS:class}\.%{WORD:method}\(%{JAVAFILE:file}:%{NUMBER:line}\)
JAVALOGMESSAGE (.*)
# MMM dd, yyyy HH:mm:ss eg: Jan 9, 2014 7:13:13 AM
CATALINA_DATESTAMP %{MONTH} %{MONTHDAY}, 20%{YEAR} %{HOUR}:?%{MINUTE}(?::?%{SECOND}) (?:AM|PM)
# yyyy-MM-dd HH:mm:ss,SSS ZZZ eg: 2014-01-09 17:32:25,527 -0800
TOMCAT_DATESTAMP 20%{YEAR}-%{MONTHNUM}-%{MONTHDAY} %{HOUR}:?%{MINUTE}(?::?%{SECOND}) %{ISO8601_TIMEZONE}
CATALINALOG %{CATALINA_DATESTAMP:timestamp} %{JAVACLASS:class} %{JAVALOGMESSAGE:logmessage}
# 2014-01-09 20:03:28,269 -0800 | ERROR | com.example.service.ExampleService - something compeletely unexpected happened...
TOMCATLOG %{TOMCAT_DATESTAMP:timestamp} \| %{LOGLEVEL:level} \| %{JAVACLASS:class} - %{JAVALOGMESSAGE:logmessage}
@@ -1,8 +0,0 @@
# JUNOS 11.4 RT_FLOW patterns
RT_FLOW_EVENT (RT_FLOW_SESSION_CREATE|RT_FLOW_SESSION_CLOSE|RT_FLOW_SESSION_DENY)

RT_FLOW1 %{RT_FLOW_EVENT:event}: %{GREEDYDATA:close-reason}: %{IP:src-ip}/%{INT:src-port}->%{IP:dst-ip}/%{INT:dst-port} %{DATA:service} %{IP:nat-src-ip}/%{INT:nat-src-port}->%{IP:nat-dst-ip}/%{INT:nat-dst-port} %{DATA:src-nat-rule-name} %{DATA:dst-nat-rule-name} %{INT:protocol-id} %{DATA:policy-name} %{DATA:from-zone} %{DATA:to-zone} %{INT:session-id} \d+\(%{DATA:sent}\) \d+\(%{DATA:received}\) %{INT:elapsed-time} .*

RT_FLOW2 %{RT_FLOW_EVENT:event}: session created %{IP:src-ip}/%{INT:src-port}->%{IP:dst-ip}/%{INT:dst-port} %{DATA:service} %{IP:nat-src-ip}/%{INT:nat-src-port}->%{IP:nat-dst-ip}/%{INT:nat-dst-port} %{DATA:src-nat-rule-name} %{DATA:dst-nat-rule-name} %{INT:protocol-id} %{DATA:policy-name} %{DATA:from-zone} %{DATA:to-zone} %{INT:session-id} .*

RT_FLOW3 %{RT_FLOW_EVENT:event}: session denied %{IP:src-ip}/%{INT:src-port}->%{IP:dst-ip}/%{INT:dst-port} %{DATA:service} %{INT:protocol-id}\(\d\) %{DATA:policy-name} %{DATA:from-zone} %{DATA:to-zone} .*
@@ -1,16 +0,0 @@
SYSLOG5424PRINTASCII [!-~]+

SYSLOGBASE2 (?:%{SYSLOGTIMESTAMP:timestamp}|%{TIMESTAMP_ISO8601:timestamp8601}) (?:%{SYSLOGFACILITY} )?%{SYSLOGHOST:logsource}+(?: %{SYSLOGPROG}:|)
SYSLOGPAMSESSION %{SYSLOGBASE} %{GREEDYDATA:message}%{WORD:pam_module}\(%{DATA:pam_caller}\): session %{WORD:pam_session_state} for user %{USERNAME:username}(?: by %{GREEDYDATA:pam_by})?

CRON_ACTION [A-Z ]+
CRONLOG %{SYSLOGBASE} \(%{USER:user}\) %{CRON_ACTION:action} \(%{DATA:message}\)

SYSLOGLINE %{SYSLOGBASE2} %{GREEDYDATA:message}

# IETF 5424 syslog(8) format (see http://www.rfc-editor.org/info/rfc5424)
SYSLOG5424PRI <%{NONNEGINT:syslog5424_pri}>
SYSLOG5424SD \[%{DATA}\]+
SYSLOG5424BASE %{SYSLOG5424PRI}%{NONNEGINT:syslog5424_ver} +(?:%{TIMESTAMP_ISO8601:syslog5424_ts}|-) +(?:%{HOSTNAME:syslog5424_host}|-) +(-|%{SYSLOG5424PRINTASCII:syslog5424_app}) +(-|%{SYSLOG5424PRINTASCII:syslog5424_proc}) +(-|%{SYSLOG5424PRINTASCII:syslog5424_msgid}) +(?:%{SYSLOG5424SD:syslog5424_sd}|-|)

SYSLOG5424LINE %{SYSLOG5424BASE} +%{GREEDYDATA:syslog5424_msg}
@@ -1,4 +0,0 @@
# Remember, these can be multi-line events.
MCOLLECTIVE ., \[%{TIMESTAMP_ISO8601:timestamp} #%{POSINT:pid}\]%{SPACE}%{LOGLEVEL:event_level}

MCOLLECTIVEAUDIT %{TIMESTAMP_ISO8601:timestamp}:
@@ -1,18 +0,0 @@
APACHEERRORTIME %{DAY} %{MONTH} %{MONTHDAY} %{TIME} %{YEAR}
APACHEERRORPREFIX \[%{APACHEERRORTIME:timestamp}\] \[%{NOTSPACE:apacheseverity}\] (\[pid %{INT}:tid %{INT}\] )?\[client %{IPORHOST:sourcehost}(:%{INT:source_port})?\] (\[client %{IPORHOST}\])?
GENERICAPACHEERROR %{APACHEERRORPREFIX} %{GREEDYDATA:message}
MODSECPREFIX %{APACHEERRORPREFIX} ModSecurity: %{NOTSPACE:modsecseverity}\. %{GREEDYDATA:modsecmessage}
MODSECRULEFILE \[file %{QUOTEDSTRING:rulefile}\]
MODSECRULELINE \[line %{QUOTEDSTRING:ruleline}\]
MODSECMATCHOFFSET \[offset %{QUOTEDSTRING:matchoffset}\]
MODSECRULEID \[id %{QUOTEDSTRING:ruleid}\]
MODSECRULEREV \[rev %{QUOTEDSTRING:rulerev}\]
MODSECRULEMSG \[msg %{QUOTEDSTRING:rulemessage}\]
MODSECRULEDATA \[data %{QUOTEDSTRING:ruledata}\]
MODSECRULESEVERITY \[severity ["']%{WORD:ruleseverity}["']\]
MODSECRULEVERS \[ver "[^"]+"\]
MODSECRULETAGS (?:\[tag %{QUOTEDSTRING:ruletag0}\] )?(?:\[tag %{QUOTEDSTRING:ruletag1}\] )?(?:\[tag %{QUOTEDSTRING:ruletag2}\] )?(?:\[tag %{QUOTEDSTRING:ruletag3}\] )?(?:\[tag %{QUOTEDSTRING:ruletag4}\] )?(?:\[tag %{QUOTEDSTRING:ruletag5}\] )?(?:\[tag %{QUOTEDSTRING:ruletag6}\] )?(?:\[tag %{QUOTEDSTRING:ruletag7}\] )?(?:\[tag %{QUOTEDSTRING:ruletag8}\] )?(?:\[tag %{QUOTEDSTRING:ruletag9}\] )?(?:\[tag %{QUOTEDSTRING}\] )*
MODSECHOSTNAME \[hostname ['"]%{DATA:targethost}["']\]
MODSECURI \[uri ["']%{DATA:targeturi}["']\]
MODSECUID \[unique_id %{QUOTEDSTRING:uniqueid}\]
MODSECAPACHEERROR %{MODSECPREFIX} %{MODSECRULEFILE} %{MODSECRULELINE} (?:%{MODSECMATCHOFFSET} )?(?:%{MODSECRULEID} )?(?:%{MODSECRULEREV} )?(?:%{MODSECRULEMSG} )?(?:%{MODSECRULEDATA} )?(?:%{MODSECRULESEVERITY} )?(?:%{MODSECRULEVERS} )?%{MODSECRULETAGS}%{MODSECHOSTNAME} %{MODSECURI} %{MODSECUID}
@@ -1,7 +0,0 @@
MONGO_LOG %{SYSLOGTIMESTAMP:timestamp} \[%{WORD:component}\] %{GREEDYDATA:message}
MONGO_QUERY \{ \{ .* \} ntoreturn: \}
MONGO_WORDDASH \b[\w-]+\b
MONGO_SLOWQUERY %{WORD} %{MONGO_WORDDASH:database}\.%{MONGO_WORDDASH:collection} %{WORD}: %{MONGO_QUERY:query} %{WORD}:%{NONNEGINT:ntoreturn} %{WORD}:%{NONNEGINT:ntoskip} %{WORD}:%{NONNEGINT:nscanned}.*nreturned:%{NONNEGINT:nreturned}..+ %{POSINT:duration}ms
MONGO3_SEVERITY \w
MONGO3_COMPONENT %{WORD}|-
MONGO3_LOG %{TIMESTAMP_ISO8601:timestamp} %{MONGO3_SEVERITY:severity} %{MONGO3_COMPONENT:component}%{SPACE}(?:\[%{DATA:context}\])? %{GREEDYDATA:message}
@@ -1 +0,0 @@
MYSQL_AUTH_FAIL %{TIMESTAMP_ISO8601:time} %{NUMBER} \[Note\] Access denied for user '%{DATA:user}'@'%{IP:source_ip}' \(using password: %{WORD:using_password}\)
@@ -1,124 +0,0 @@
##################################################################################
##################################################################################
# Chop Nagios log files to smithereens!
#
# A set of GROK filters to process logfiles generated by Nagios.
# While it does not, this set intends to cover all possible Nagios logs.
#
# Some more work needs to be done to cover all External Commands:
# http://old.nagios.org/developerinfo/externalcommands/commandlist.php
#
# If you need some support on these rules please contact:
# Jelle Smet http://smetj.net
#
#################################################################################
#################################################################################

NAGIOSTIME \[%{NUMBER:nagios_epoch}\]

###############################################
######## Begin nagios log types
###############################################
NAGIOS_TYPE_CURRENT_SERVICE_STATE CURRENT SERVICE STATE
NAGIOS_TYPE_CURRENT_HOST_STATE CURRENT HOST STATE

NAGIOS_TYPE_SERVICE_NOTIFICATION SERVICE NOTIFICATION
NAGIOS_TYPE_HOST_NOTIFICATION HOST NOTIFICATION

NAGIOS_TYPE_SERVICE_ALERT SERVICE ALERT
NAGIOS_TYPE_HOST_ALERT HOST ALERT

NAGIOS_TYPE_SERVICE_FLAPPING_ALERT SERVICE FLAPPING ALERT
NAGIOS_TYPE_HOST_FLAPPING_ALERT HOST FLAPPING ALERT

NAGIOS_TYPE_SERVICE_DOWNTIME_ALERT SERVICE DOWNTIME ALERT
NAGIOS_TYPE_HOST_DOWNTIME_ALERT HOST DOWNTIME ALERT

NAGIOS_TYPE_PASSIVE_SERVICE_CHECK PASSIVE SERVICE CHECK
NAGIOS_TYPE_PASSIVE_HOST_CHECK PASSIVE HOST CHECK

NAGIOS_TYPE_SERVICE_EVENT_HANDLER SERVICE EVENT HANDLER
NAGIOS_TYPE_HOST_EVENT_HANDLER HOST EVENT HANDLER

NAGIOS_TYPE_EXTERNAL_COMMAND EXTERNAL COMMAND
NAGIOS_TYPE_TIMEPERIOD_TRANSITION TIMEPERIOD TRANSITION
###############################################
######## End nagios log types
###############################################

###############################################
######## Begin external check types
###############################################
NAGIOS_EC_DISABLE_SVC_CHECK DISABLE_SVC_CHECK
NAGIOS_EC_ENABLE_SVC_CHECK ENABLE_SVC_CHECK
NAGIOS_EC_DISABLE_HOST_CHECK DISABLE_HOST_CHECK
NAGIOS_EC_ENABLE_HOST_CHECK ENABLE_HOST_CHECK
NAGIOS_EC_PROCESS_SERVICE_CHECK_RESULT PROCESS_SERVICE_CHECK_RESULT
NAGIOS_EC_PROCESS_HOST_CHECK_RESULT PROCESS_HOST_CHECK_RESULT
NAGIOS_EC_SCHEDULE_SERVICE_DOWNTIME SCHEDULE_SERVICE_DOWNTIME
NAGIOS_EC_SCHEDULE_HOST_DOWNTIME SCHEDULE_HOST_DOWNTIME
NAGIOS_EC_DISABLE_HOST_SVC_NOTIFICATIONS DISABLE_HOST_SVC_NOTIFICATIONS
NAGIOS_EC_ENABLE_HOST_SVC_NOTIFICATIONS ENABLE_HOST_SVC_NOTIFICATIONS
NAGIOS_EC_DISABLE_HOST_NOTIFICATIONS DISABLE_HOST_NOTIFICATIONS
NAGIOS_EC_ENABLE_HOST_NOTIFICATIONS ENABLE_HOST_NOTIFICATIONS
NAGIOS_EC_DISABLE_SVC_NOTIFICATIONS DISABLE_SVC_NOTIFICATIONS
NAGIOS_EC_ENABLE_SVC_NOTIFICATIONS ENABLE_SVC_NOTIFICATIONS
###############################################
######## End external check types
###############################################
NAGIOS_WARNING Warning:%{SPACE}%{GREEDYDATA:nagios_message}

NAGIOS_CURRENT_SERVICE_STATE %{NAGIOS_TYPE_CURRENT_SERVICE_STATE:nagios_type}: %{DATA:nagios_hostname};%{DATA:nagios_service};%{DATA:nagios_state};%{DATA:nagios_statetype};%{DATA:nagios_statecode};%{GREEDYDATA:nagios_message}
NAGIOS_CURRENT_HOST_STATE %{NAGIOS_TYPE_CURRENT_HOST_STATE:nagios_type}: %{DATA:nagios_hostname};%{DATA:nagios_state};%{DATA:nagios_statetype};%{DATA:nagios_statecode};%{GREEDYDATA:nagios_message}

NAGIOS_SERVICE_NOTIFICATION %{NAGIOS_TYPE_SERVICE_NOTIFICATION:nagios_type}: %{DATA:nagios_notifyname};%{DATA:nagios_hostname};%{DATA:nagios_service};%{DATA:nagios_state};%{DATA:nagios_contact};%{GREEDYDATA:nagios_message}
NAGIOS_HOST_NOTIFICATION %{NAGIOS_TYPE_HOST_NOTIFICATION:nagios_type}: %{DATA:nagios_notifyname};%{DATA:nagios_hostname};%{DATA:nagios_state};%{DATA:nagios_contact};%{GREEDYDATA:nagios_message}

NAGIOS_SERVICE_ALERT %{NAGIOS_TYPE_SERVICE_ALERT:nagios_type}: %{DATA:nagios_hostname};%{DATA:nagios_service};%{DATA:nagios_state};%{DATA:nagios_statelevel};%{NUMBER:nagios_attempt};%{GREEDYDATA:nagios_message}
NAGIOS_HOST_ALERT %{NAGIOS_TYPE_HOST_ALERT:nagios_type}: %{DATA:nagios_hostname};%{DATA:nagios_state};%{DATA:nagios_statelevel};%{NUMBER:nagios_attempt};%{GREEDYDATA:nagios_message}

NAGIOS_SERVICE_FLAPPING_ALERT %{NAGIOS_TYPE_SERVICE_FLAPPING_ALERT:nagios_type}: %{DATA:nagios_hostname};%{DATA:nagios_service};%{DATA:nagios_state};%{GREEDYDATA:nagios_message}
NAGIOS_HOST_FLAPPING_ALERT %{NAGIOS_TYPE_HOST_FLAPPING_ALERT:nagios_type}: %{DATA:nagios_hostname};%{DATA:nagios_state};%{GREEDYDATA:nagios_message}

NAGIOS_SERVICE_DOWNTIME_ALERT %{NAGIOS_TYPE_SERVICE_DOWNTIME_ALERT:nagios_type}: %{DATA:nagios_hostname};%{DATA:nagios_service};%{DATA:nagios_state};%{GREEDYDATA:nagios_comment}
NAGIOS_HOST_DOWNTIME_ALERT %{NAGIOS_TYPE_HOST_DOWNTIME_ALERT:nagios_type}: %{DATA:nagios_hostname};%{DATA:nagios_state};%{GREEDYDATA:nagios_comment}

NAGIOS_PASSIVE_SERVICE_CHECK %{NAGIOS_TYPE_PASSIVE_SERVICE_CHECK:nagios_type}: %{DATA:nagios_hostname};%{DATA:nagios_service};%{DATA:nagios_state};%{GREEDYDATA:nagios_comment}
NAGIOS_PASSIVE_HOST_CHECK %{NAGIOS_TYPE_PASSIVE_HOST_CHECK:nagios_type}: %{DATA:nagios_hostname};%{DATA:nagios_state};%{GREEDYDATA:nagios_comment}

NAGIOS_SERVICE_EVENT_HANDLER %{NAGIOS_TYPE_SERVICE_EVENT_HANDLER:nagios_type}: %{DATA:nagios_hostname};%{DATA:nagios_service};%{DATA:nagios_state};%{DATA:nagios_statelevel};%{DATA:nagios_event_handler_name}
NAGIOS_HOST_EVENT_HANDLER %{NAGIOS_TYPE_HOST_EVENT_HANDLER:nagios_type}: %{DATA:nagios_hostname};%{DATA:nagios_state};%{DATA:nagios_statelevel};%{DATA:nagios_event_handler_name}

NAGIOS_TIMEPERIOD_TRANSITION %{NAGIOS_TYPE_TIMEPERIOD_TRANSITION:nagios_type}: %{DATA:nagios_service};%{DATA:nagios_unknown1};%{DATA:nagios_unknown2}

####################
#### External checks
####################

#Disable host & service check
NAGIOS_EC_LINE_DISABLE_SVC_CHECK %{NAGIOS_TYPE_EXTERNAL_COMMAND:nagios_type}: %{NAGIOS_EC_DISABLE_SVC_CHECK:nagios_command};%{DATA:nagios_hostname};%{DATA:nagios_service}
NAGIOS_EC_LINE_DISABLE_HOST_CHECK %{NAGIOS_TYPE_EXTERNAL_COMMAND:nagios_type}: %{NAGIOS_EC_DISABLE_HOST_CHECK:nagios_command};%{DATA:nagios_hostname}

#Enable host & service check
NAGIOS_EC_LINE_ENABLE_SVC_CHECK %{NAGIOS_TYPE_EXTERNAL_COMMAND:nagios_type}: %{NAGIOS_EC_ENABLE_SVC_CHECK:nagios_command};%{DATA:nagios_hostname};%{DATA:nagios_service}
NAGIOS_EC_LINE_ENABLE_HOST_CHECK %{NAGIOS_TYPE_EXTERNAL_COMMAND:nagios_type}: %{NAGIOS_EC_ENABLE_HOST_CHECK:nagios_command};%{DATA:nagios_hostname}

#Process host & service check
NAGIOS_EC_LINE_PROCESS_SERVICE_CHECK_RESULT %{NAGIOS_TYPE_EXTERNAL_COMMAND:nagios_type}: %{NAGIOS_EC_PROCESS_SERVICE_CHECK_RESULT:nagios_command};%{DATA:nagios_hostname};%{DATA:nagios_service};%{DATA:nagios_state};%{GREEDYDATA:nagios_check_result}
NAGIOS_EC_LINE_PROCESS_HOST_CHECK_RESULT %{NAGIOS_TYPE_EXTERNAL_COMMAND:nagios_type}: %{NAGIOS_EC_PROCESS_HOST_CHECK_RESULT:nagios_command};%{DATA:nagios_hostname};%{DATA:nagios_state};%{GREEDYDATA:nagios_check_result}

#Disable host & service notifications
NAGIOS_EC_LINE_DISABLE_HOST_SVC_NOTIFICATIONS %{NAGIOS_TYPE_EXTERNAL_COMMAND:nagios_type}: %{NAGIOS_EC_DISABLE_HOST_SVC_NOTIFICATIONS:nagios_command};%{GREEDYDATA:nagios_hostname}
NAGIOS_EC_LINE_DISABLE_HOST_NOTIFICATIONS %{NAGIOS_TYPE_EXTERNAL_COMMAND:nagios_type}: %{NAGIOS_EC_DISABLE_HOST_NOTIFICATIONS:nagios_command};%{GREEDYDATA:nagios_hostname}
NAGIOS_EC_LINE_DISABLE_SVC_NOTIFICATIONS %{NAGIOS_TYPE_EXTERNAL_COMMAND:nagios_type}: %{NAGIOS_EC_DISABLE_SVC_NOTIFICATIONS:nagios_command};%{DATA:nagios_hostname};%{GREEDYDATA:nagios_service}

#Enable host & service notifications
NAGIOS_EC_LINE_ENABLE_HOST_SVC_NOTIFICATIONS %{NAGIOS_TYPE_EXTERNAL_COMMAND:nagios_type}: %{NAGIOS_EC_ENABLE_HOST_SVC_NOTIFICATIONS:nagios_command};%{GREEDYDATA:nagios_hostname}
NAGIOS_EC_LINE_ENABLE_HOST_NOTIFICATIONS %{NAGIOS_TYPE_EXTERNAL_COMMAND:nagios_type}: %{NAGIOS_EC_ENABLE_HOST_NOTIFICATIONS:nagios_command};%{GREEDYDATA:nagios_hostname}
NAGIOS_EC_LINE_ENABLE_SVC_NOTIFICATIONS %{NAGIOS_TYPE_EXTERNAL_COMMAND:nagios_type}: %{NAGIOS_EC_ENABLE_SVC_NOTIFICATIONS:nagios_command};%{DATA:nagios_hostname};%{GREEDYDATA:nagios_service}

#Schedule host & service downtime
NAGIOS_EC_LINE_SCHEDULE_HOST_DOWNTIME %{NAGIOS_TYPE_EXTERNAL_COMMAND:nagios_type}: %{NAGIOS_EC_SCHEDULE_HOST_DOWNTIME:nagios_command};%{DATA:nagios_hostname};%{NUMBER:nagios_start_time};%{NUMBER:nagios_end_time};%{NUMBER:nagios_fixed};%{NUMBER:nagios_trigger_id};%{NUMBER:nagios_duration};%{DATA:author};%{DATA:comment}

#End matching line
NAGIOSLOGLINE %{NAGIOSTIME} (?:%{NAGIOS_WARNING}|%{NAGIOS_CURRENT_SERVICE_STATE}|%{NAGIOS_CURRENT_HOST_STATE}|%{NAGIOS_SERVICE_NOTIFICATION}|%{NAGIOS_HOST_NOTIFICATION}|%{NAGIOS_SERVICE_ALERT}|%{NAGIOS_HOST_ALERT}|%{NAGIOS_SERVICE_FLAPPING_ALERT}|%{NAGIOS_HOST_FLAPPING_ALERT}|%{NAGIOS_SERVICE_DOWNTIME_ALERT}|%{NAGIOS_HOST_DOWNTIME_ALERT}|%{NAGIOS_PASSIVE_SERVICE_CHECK}|%{NAGIOS_PASSIVE_HOST_CHECK}|%{NAGIOS_SERVICE_EVENT_HANDLER}|%{NAGIOS_HOST_EVENT_HANDLER}|%{NAGIOS_TIMEPERIOD_TRANSITION}|%{NAGIOS_EC_LINE_DISABLE_SVC_CHECK}|%{NAGIOS_EC_LINE_ENABLE_SVC_CHECK}|%{NAGIOS_EC_LINE_DISABLE_HOST_CHECK}|%{NAGIOS_EC_LINE_ENABLE_HOST_CHECK}|%{NAGIOS_EC_LINE_PROCESS_HOST_CHECK_RESULT}|%{NAGIOS_EC_LINE_PROCESS_SERVICE_CHECK_RESULT}|%{NAGIOS_EC_LINE_SCHEDULE_HOST_DOWNTIME}|%{NAGIOS_EC_LINE_DISABLE_HOST_SVC_NOTIFICATIONS}|%{NAGIOS_EC_LINE_ENABLE_HOST_SVC_NOTIFICATIONS}|%{NAGIOS_EC_LINE_DISABLE_HOST_NOTIFICATIONS}|%{NAGIOS_EC_LINE_ENABLE_HOST_NOTIFICATIONS}|%{NAGIOS_EC_LINE_DISABLE_SVC_NOTIFICATIONS}|%{NAGIOS_EC_LINE_ENABLE_SVC_NOTIFICATIONS})
@@ -1,19 +0,0 @@
NGUSERNAME [a-zA-Z\.\@\-\+_%]+
NGUSER %{NGUSERNAME}

# '$remote_addr - $remote_user [$time_local] '
# '"$request" $status $body_bytes_sent '
# '"$http_referer" "$http_user_agent"';

# 127.0.0.1 - - [28/Jan/2016:14:19:36 +0300] "GET /zero.html HTTP/1.1" 200 398 "-" "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/47.0.2526.111 Safari/537.36"

NOTDQUOTE [^"]*
DAY2 \d{2}

#NGINXERRTIME %{YEAR:year}/%{MONTHNUM2:month}/%{DAY2:day} %{HOUR:hour}:%{MINUTE:minute}:%{SECOND:second}
NGINXERRTIME %{YEAR}/%{MONTHNUM2}/%{DAY2} %{HOUR}:%{MINUTE}:%{SECOND}

NGINXACCESS %{IPORHOST:remote_addr} - %{NGUSER:remote_user} \[%{HTTPDATE:time_local}\] "%{WORD:method} %{URIPATHPARAM:request} HTTP/%{NUMBER:http_version}" %{NUMBER:status} %{NUMBER:body_bytes_sent} "%{NOTDQUOTE:http_referer}" "%{NOTDQUOTE:http_user_agent}"

# YYYY/MM/DD HH:MM:SS [LEVEL] PID#TID: *CID MESSAGE
NGINXERROR %{NGINXERRTIME:time} \[%{LOGLEVEL:loglevel}\] %{NONNEGINT:pid}#%{NONNEGINT:tid}: (\*%{NONNEGINT:cid} )?%{GREEDYDATA:message}
@@ -1,14 +0,0 @@

#DIR ^.*/
#FILE [^/].*$

#URI_SPLIT ^%{GREEDYDATA:request}\?%{GREEDYDATA:http_args}$
#FULLPATH_SPLITTER %{DIR:prefix_directory}%{FILE:file_name}


NAXSI_FMT ^NAXSI_FMT: ip=%{IPORHOST:src_ip}&server=%{IPORHOST:target_ip}&uri=%{PATH:http_path}&learning=\d&vers=%{DATA:naxsi_version}&total_processed=\d+&total_blocked=\d+&block=\d+(&cscore\d=%{WORD:score_label}&score\d=%{INT:score})+&zone0=%{WORD:zone}
#^NAXSI_FMT: ip=%{IPORHOST:src_ip}&server=%{IPORHOST:target_ip}&uri=%{PATH:http_path}&learning=\d&vers=%{DATA:naxsi_version}&total_processed=\d+&total_blocked=\d+&block=\d+(&cscore\d=%{WORD:score_label}&score\d=%{INT:score})+&cscore2
#^NAXSI_FMT: ip=%{IPORHOST:src_ip}&server=%{IPORHOST:target_ip}&uri=%{PATH:http_path}(&cscore\d=%{WORD:score_label}&score\d=%{INT:score})+&cscore2
#^NAXSI_FMT: ip=%{IPORHOST:src_ip}&server=%{IPORHOST:target_ip}&uri=%{PATH:http_path}&learning=\d&vers=%{DATA:naxsi_version}&total_processed=\d+&total_blocked=\d+&block=\d+(&cscore\d=%{WORD:score_label}&score\d=%{INT:score})+&cscore2

NAXSI_EXLOG ^NAXSI_EXLOG: ip=%{IPORHOST:naxsi_src_ip}&server=%{IPORHOST:naxsi_dst_ip}&uri=%{PATH:http_path}&id=%{INT:naxsi_id}&zone=%{WORD:naxsi_zone}&var_name=%{DATA:naxsi_var_name}&content=
@@ -1,2 +0,0 @@
# Default postgresql pg_log format pattern
POSTGRESQL %{DATESTAMP:timestamp} %{TZ} %{DATA:user_id} %{GREEDYDATA:connection_id} %{POSINT:pid}
@@ -1,18 +0,0 @@
RUUID \s{32}
# rails controller with action
RAILS_CONSTROLLER [^#]+
RAIL_ACTION \w+
RCONTROLLER %{RAILS_CONSTROLLER:controller}#%{RAIL_ACTION:action}

# this will often be the only line:
RAILS_TIMESTAMP %{YEAR}-%{MONTHNUM}-%{MONTHDAY} %{HOUR}:%{MINUTE}:%{SECOND} %{ISO8601_TIMEZONE}
RAILS3HEAD (?m)Started %{WORD:verb} "%{URIPATHPARAM:request}" for %{IPORHOST:clientip} at %{RAILS_TIMESTAMP:timestamp}
# for some a strange reason, params are stripped of {} - not sure that's a good idea.
RPROCESSING \W*Processing by %{RCONTROLLER} as %{NOTSPACE:format}(?:\W*Parameters: {%{DATA:params}}\W*)?
RAILS3PROFILE (?:\(Views: %{NUMBER:viewms}ms \| ActiveRecord: %{NUMBER:activerecordms}ms|\(ActiveRecord: %{NUMBER:activerecordms}ms)?
RAILS3FOOT Completed %{NUMBER:response}%{DATA} in %{NUMBER:totalms}ms %{RAILS3PROFILE}%{GREEDYDATA}

RAILS_CONTEXT (?:%{DATA}\n)*

# putting it all together
RAILS3 %{RAILS3HEAD}(?:%{RPROCESSING})?%{RAILS_CONTEXT:context}(?:%{RAILS3FOOT})?
@@ -1,21 +0,0 @@

#
# Format 1:
#
# [43569] 27 Aug 12:38:58.471 * RDB: 12 MB of memory used by copy-on-write
#

#
# Format 2:
#
# 31493:M 17 Sep 09:02:54.807 # Server started, Redis version 3.0.2
# 31493:M 17 Sep 09:02:54.807 # WARNING overcommit_memory is set to 0! Background save may fail under low memory condition. To fix this issue add 'vm.overcommit_memory = 1' to /etc/sysctl.conf and then reboot or run the command 'sysctl vm$
# 31493:M 17 Sep 09:02:54.807 # WARNING: The TCP backlog setting of 511 cannot be enforced because /proc/sys/net/core/somaxconn is set to the lower value of 128.
# 31493:M 17 Sep 09:02:54.807 * DB loaded from disk: 0.000 seconds
# 31493:M 17 Sep 09:02:54.807 * The server is now ready to accept connections on port 6379
#

REDISTIMESTAMP %{MONTHDAY} %{MONTH} %{TIME}
REDISLOG \[%{POSINT:pid}\] %{REDISTIMESTAMP:time} \*\s
REDISLOG1 %{REDISLOG}
REDISLOG2 %{POSINT:pid}:M %{REDISTIMESTAMP:time} [*#] %{GREEDYDATA:message}
@@ -1,2 +0,0 @@
RUBY_LOGLEVEL DEBUG|FATAL|ERROR|WARN|INFO
RUBY_LOGGER [DFEWI], \[%{TIMESTAMP_ISO8601:timestamp} #%{POSINT:pid}\] *%{RUBY_LOGLEVEL:loglevel} -- +%{DATA:progname}: %{GREEDYDATA:message}
@@ -1 +0,0 @@
SMB_AUTH_FAIL Auth:%{GREEDYDATA} user \[%{DATA:smb_domain}\]\\\[%{DATA:user}\]%{GREEDYDATA} status \[NT_STATUS_NO_SUCH_USER\]%{GREEDYDATA} remote host \[ipv4:%{IP:ip_source}
@@ -1,61 +0,0 @@
# sshd grok pattern

# Start/Stop
SSHD_LISTEN Server listening on %{IP:sshd_listen_ip} port %{NUMBER:sshd_listen_port}.
SSHD_TERMINATE Received signal %{NUMBER:sshd_signal}; terminating.

# SSH Tunnel
SSHD_TUNN_ERR1 error: connect_to %{IP:sshd_listen_ip} port %{NUMBER:sshd_listen_port}: failed.
SSHD_TUNN_ERR2 error: channel_setup_fwd_listener: cannot listen to port: %{NUMBER:sshd_listen_port}
SSHD_TUNN_ERR3 error: bind: Address already in use
SSHD_TUNN_ERR4 error: channel_setup_fwd_listener_tcpip: cannot listen to port: %{NUMBER:sshd_listen_port}
SSHD_TUNN_TIMEOUT Timeout, client not responding.

# Normal
SSHD_SUCCESS Accepted %{WORD:sshd_auth_type} for %{USERNAME:sshd_user} from %{IP:sshd_client_ip} port %{NUMBER:sshd_port} %{WORD:sshd_protocol}: %{GREEDYDATA:sshd_cipher}
SSHD_DISCONNECT Received disconnect from %{IP:sshd_client_ip} port %{NUMBER:sshd_port}:%{NUMBER:sshd_disconnect_code}: %{GREEDYDATA:sshd_disconnect_status}
SSHD_CONN_CLOSE Connection closed by %{IP:sshd_client_ip}$
SSHD_SESSION_OPEN pam_unix\(sshd:session\): session opened for user %{USERNAME:sshd_user} by \(uid=\d+\)
SSHD_SESSION_CLOSE pam_unix\(sshd:session\): session closed for user %{USERNAME:sshd_user}
SSHD_SESSION_FAIL pam_systemd\(sshd:session\): Failed to release session: %{GREEDYDATA:sshd_disconnect_status}
SSHD_LOGOUT_ERR syslogin_perform_logout: logout\(\) returned an error

# Probe
SSHD_REFUSE_CONN refused connect from %{DATA:sshd_client_hostname} \(%{IPORHOST:sshd_client_ip}\)
SSHD_TCPWRAP_FAIL1 warning: %{DATA:sshd_tcpd_file}, line %{NUMBER}: can't verify hostname: getaddrinfo\(%{DATA:sshd_paranoid_hostname}, %{DATA:sshd_sa_family}\) failed
SSHD_TCPWRAP_FAIL2 warning: %{DATA:sshd_tcpd_file}, line %{NUMBER}: host name/address mismatch: %{IPORHOST:sshd_client_ip} != %{HOSTNAME:sshd_paranoid_hostname}
SSHD_TCPWRAP_FAIL3 warning: %{DATA:sshd_tcpd_file}, line %{NUMBER}: host name/name mismatch: %{HOSTNAME:sshd_paranoid_hostname_1} != %{HOSTNAME:sshd_paranoid_hostname_2}
SSHD_TCPWRAP_FAIL4 warning: %{DATA:sshd_tcpd_file}, line %{NUMBER}: host name/name mismatch: reverse lookup results in non-FQDN %{HOSTNAME:sshd_paranoid_hostname}
SSHD_TCPWRAP_FAIL5 warning: can't get client address: Connection reset by peer
SSHD_FAIL Failed %{WORD:sshd_auth_type} for %{USERNAME:sshd_invalid_user} from %{IP:sshd_client_ip} port %{NUMBER:sshd_port} %{WORD:sshd_protocol}
SSHD_USER_FAIL Failed password for invalid user %{USERNAME:sshd_invalid_user} from %{IP:sshd_client_ip} port %{NUMBER:sshd_port} %{WORD:sshd_protocol}
SSHD_INVAL_USER Invalid user\s*%{USERNAME:sshd_invalid_user}? from %{IP:sshd_client_ip}

# preauth
SSHD_DISC_PREAUTH Disconnected from %{IP:sshd_client_ip} port %{NUMBER:sshd_port}\s*(?:\[%{GREEDYDATA:sshd_privsep}\]|)
SSHD_MAXE_PREAUTH error: maximum authentication attempts exceeded for (?:invalid user |)%{USERNAME:sshd_invalid_user} from %{IP:sshd_client_ip} port %{NUMBER:sshd_port} %{WORD:sshd_protocol}\s*(?:\[%{GREEDYDATA:sshd_privsep}\]|)
SSHD_DISR_PREAUTH Disconnecting: %{GREEDYDATA:sshd_disconnect_status} \[%{GREEDYDATA:sshd_privsep}\]
SSHD_INVA_PREAUTH input_userauth_request: invalid user %{USERNAME:sshd_invalid_user}?\s*(?:\[%{GREEDYDATA:sshd_privsep}\]|)
SSHD_REST_PREAUTH Connection reset by %{IP:sshd_client_ip} port %{NUMBER:sshd_port}\s*(?:\[%{GREEDYDATA:sshd_privsep}\]|)
SSHD_CLOS_PREAUTH Connection closed by %{IP:sshd_client_ip} port %{NUMBER:sshd_port}\s*(?:\[%{GREEDYDATA:sshd_privsep}\]|)
SSHD_FAIL_PREAUTH fatal: Unable to negotiate with %{IP:sshd_client_ip} port %{NUMBER:sshd_port}:\s*%{GREEDYDATA:sshd_disconnect_status}? \[%{GREEDYDATA:sshd_privsep}\]
SSHD_FAI2_PREAUTH fatal: %{GREEDYDATA:sshd_fatal_status}: Connection from %{IP:sshd_client_ip} port %{NUMBER:sshd_port}:\s*%{GREEDYDATA:sshd_disconnect_status}? \[%{GREEDYDATA:sshd_privsep}\]
SSHD_BADL_PREAUTH Bad packet length %{NUMBER:sshd_packet_length}. \[%{GREEDYDATA:sshd_privsep}\]

# Corrupted
SSHD_IDENT_FAIL Did not receive identification string from %{IP:sshd_client_ip}
SSHD_MAPB_FAIL Address %{IP:sshd_client_ip} maps to %{HOSTNAME:sshd_client_hostname}, but this does not map back to the address - POSSIBLE BREAK-IN ATTEMPT!
SSHD_RMAP_FAIL reverse mapping checking getaddrinfo for %{HOSTNAME:sshd_client_hostname} \[%{IP:sshd_client_ip}\] failed - POSSIBLE BREAK-IN ATTEMPT!
SSHD_TOOMANY_AUTH Disconnecting: Too many authentication failures for %{USERNAME:sshd_invalid_user}
SSHD_CORRUPT_MAC Corrupted MAC on input
SSHD_PACKET_CORRUPT Disconnecting: Packet corrupt
SSHD_BAD_VERSION Bad protocol version identification '%{GREEDYDATA}' from %{IP:sshd_client_ip}

####
SSHD_INIT %{SSHD_LISTEN}|%{SSHD_TERMINATE}
SSHD_TUNN %{SSHD_TUNN_ERR1}|%{SSHD_TUNN_ERR2}|%{SSHD_TUNN_ERR3}|%{SSHD_TUNN_ERR4}|%{SSHD_TUNN_TIMEOUT}
SSHD_NORMAL_LOG %{SSHD_SUCCESS}|%{SSHD_DISCONNECT}|%{SSHD_CONN_CLOSE}|%{SSHD_SESSION_OPEN}|%{SSHD_SESSION_CLOSE}|%{SSHD_SESSION_FAIL}|%{SSHD_LOGOUT_ERR}
SSHD_PROBE_LOG %{SSHD_REFUSE_CONN}|%{SSHD_TCPWRAP_FAIL1}|%{SSHD_TCPWRAP_FAIL2}|%{SSHD_TCPWRAP_FAIL3}|%{SSHD_TCPWRAP_FAIL4}|%{SSHD_TCPWRAP_FAIL5}|%{SSHD_FAIL}|%{SSHD_USER_FAIL}|%{SSHD_INVAL_USER}
SSHD_PREAUTH %{SSHD_DISC_PREAUTH}|%{SSHD_MAXE_PREAUTH}|%{SSHD_DISR_PREAUTH}|%{SSHD_INVA_PREAUTH}|%{SSHD_REST_PREAUTH}|%{SSHD_FAIL_PREAUTH}|%{SSHD_CLOS_PREAUTH}|%{SSHD_FAI2_PREAUTH}|%{SSHD_BADL_PREAUTH}
SSHD_CORRUPTED %{SSHD_IDENT_FAIL}|%{SSHD_MAPB_FAIL}|%{SSHD_RMAP_FAIL}|%{SSHD_TOOMANY_AUTH}|%{SSHD_CORRUPT_MAC}|%{SSHD_PACKET_CORRUPT}|%{SSHD_BAD_VERSION}
SSHD_LOG %{SSHD_INIT}|%{SSHD_NORMAL_LOG}|%{SSHD_PROBE_LOG}|%{SSHD_CORRUPTED}|%{SSHD_TUNN}|%{SSHD_PREAUTH}
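As a minimal, self-contained sketch of how one of these definitions behaves once expanded, the snippet below approximates SSHD_FAIL in Python; the stand-ins for %{WORD}, %{USERNAME}, %{IP} and %{NUMBER} are hand-simplified assumptions, not the exact grok definitions a real grok engine would substitute, and the input line is an invented example:

    import re

    # Rough stand-ins for %{WORD}, %{USERNAME}, %{IP} and %{NUMBER}
    SSHD_FAIL_RE = re.compile(
        r"Failed (?P<sshd_auth_type>\w+) for (?P<sshd_invalid_user>[\w.-]+) "
        r"from (?P<sshd_client_ip>\d{1,3}(?:\.\d{1,3}){3}) "
        r"port (?P<sshd_port>\d+) (?P<sshd_protocol>\w+)"
    )

    line = "Failed password for root from 192.0.2.10 port 51234 ssh2"  # example input
    match = SSHD_FAIL_RE.match(line)
    if match:
        print(match.groupdict())
        # -> {'sshd_auth_type': 'password', 'sshd_invalid_user': 'root',
        #     'sshd_client_ip': '192.0.2.10', 'sshd_port': '51234', 'sshd_protocol': 'ssh2'}

The composite entries such as SSHD_LOG simply alternate the individual patterns in the same way.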
@@ -1 +0,0 @@
TCPDUMP_OUTPUT %{GREEDYDATA:timestamp} IP %{IPORHOST:source_ip}\.%{INT:source_port} > %{IPORHOST:dest_ip}\.%{INT:dest_port}: Flags \[%{GREEDYDATA:tcpflags}\], seq
@@ -1,14 +0,0 @@
name: default_ip_remediation
#debug: true
filters:
 - Alert.Remediation == true && Alert.GetScope() == "Ip"
decisions:
 - type: ban
   duration: 4h
#duration_expr: Sprintf('%dh', (GetDecisionsCount(Alert.GetValue()) + 1) * 4)
# notifications:
# - slack_default # Set the webhook in /etc/crowdsec/notifications/slack.yaml before enabling this.
# - splunk_default # Set the splunk url and token in /etc/crowdsec/notifications/splunk.yaml before enabling this.
# - http_default # Set the required http parameters in /etc/crowdsec/notifications/http.yaml before enabling this.
# - email_default # Set the required email parameters in /etc/crowdsec/notifications/email.yaml before enabling this.
on_success: break
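For scale, the commented-out duration_expr above would grow bans for repeat offenders instead of the flat 4h: assuming GetDecisionsCount(Alert.GetValue()) returns 2 existing decisions for the offending IP, the expression evaluates to Sprintf('%dh', (2 + 1) * 4), i.e. a 12h ban.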