Skip to content

Commit

Permalink
Merge pull request #18 from 464d41/devel
Browse files Browse the repository at this point in the history
Devel
  • Loading branch information
464d41 authored Oct 29, 2020
2 parents 9098830 + 53c2ef4 commit eedd87e
Show file tree
Hide file tree
Showing 9 changed files with 6,079 additions and 2 deletions.
23 changes: 23 additions & 0 deletions .github/workflows/create-release.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,23 @@
# Workflow: create a draft GitHub Release whenever a version tag (v*) is pushed.
on:
  push:
    tags:
      - 'v*'

name: Create release

jobs:
  build:
    name: Create Release
    runs-on: ubuntu-latest
    steps:
      - name: Checkout code
        uses: actions/checkout@v2
      - name: Create Release
        # NOTE(review): actions/create-release is archived and unmaintained;
        # consider migrating to `gh release create` or a maintained action.
        uses: actions/create-release@v1
        env:
          # Token injected by Actions; grants permission to create the release.
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          tag_name: ${{ github.ref }}
          release_name: Release ${{ github.ref }}
          draft: true        # created as a draft; a maintainer publishes it manually
          prerelease: false
14 changes: 14 additions & 0 deletions fluentd/Dockerfile
Original file line number Diff line number Diff line change
@@ -0,0 +1,14 @@
# Fluentd image with the plugins required by config/fluentd.conf:
# elasticsearch output, grok parser, and geoip filter.
FROM fluent/fluentd:v1.11-1

# Root privileges are needed for apk/gem installation; dropped again below.
USER root

# Single RUN layer: install build deps as a virtual package (.build-deps),
# build/install the native-extension gems, then remove the build deps and
# gem caches so they do not bloat the final image. Only the runtime `geoip`
# library is installed permanently (second apk add).
RUN apk add --no-cache --update --virtual .build-deps \
sudo build-base ruby-dev \
geoip-dev geoip libmaxminddb automake autoconf libtool libc6-compat \
&& apk add geoip \
&& sudo gem install fluent-plugin-elasticsearch fluent-plugin-grok-parser fluent-plugin-geoip \
&& sudo gem sources --clear-all \
&& apk del .build-deps \
&& rm -rf /tmp/* /var/tmp/* /usr/lib/ruby/gems/*/cache/*.gem

# Run as the unprivileged user provided by the base image.
USER fluent
103 changes: 103 additions & 0 deletions fluentd/config/fluentd.conf
Original file line number Diff line number Diff line change
@@ -0,0 +1,103 @@
# TCP input: receives raw NGINX App Protect security log lines, one per event.
<source>
  @type tcp
  bind 0.0.0.0
  port 24224
  <parse>
    @type none
    message_key message
  </parse>
  # BUGFIX: the tag was "nginx-app-protect" (hyphens) but every <filter>/<match>
  # below uses the pattern nginx_app_protect.** (underscores), so no event ever
  # matched and all records were dropped by the router. Tag and patterns now agree.
  tag nginx_app_protect
  @label @NGINX_APP_PROTECT
</source>
<label @NGINX_APP_PROTECT>
  # Parse the NAP comma-separated key="value" log format into individual fields.
  # On parse failure the record is kept (reserve_data) and marked via grokfailure.
  <filter nginx_app_protect.**>
    @type parser
    key_name message
    reserve_data true
    <parse>
      @type grok
      grok_failure_key grokfailure
      <grok>
        pattern attack_type="%{DATA:attack_type}",blocking_exception_reason="%{DATA:blocking_exception_reason}",date_time="%{DATA:date_time}",dest_port="%{DATA:dest_port}",ip_client="%{DATA:ip_client}",is_truncated="%{DATA:is_truncated}",method="%{DATA:method}",policy_name="%{DATA:policy_name}",protocol="%{DATA:protocol}",request_status="%{DATA:request_status}",response_code="%{DATA:response_code}",severity="%{DATA:severity}",sig_cves="%{DATA:sig_cves}",sig_ids="%{DATA:sig_ids}",sig_names="%{DATA:sig_names}",sig_set_names="%{DATA:sig_set_names}",src_port="%{DATA:src_port}",sub_violations="%{DATA:sub_violations}",support_id="%{DATA:support_id}",threat_campaign_names="%{DATA:threat_campaign_names}",unit_hostname="%{DATA:unit_hostname}",uri="%{DATA:uri}",violation_rating="%{DATA:violation_rating}",vs_name="%{DATA:vs_name}",x_forwarded_for_header_value="%{DATA:x_forwarded_for_header_value}",outcome="%{DATA:outcome}",outcome_reason="%{DATA:outcome_reason}",violations="%{DATA:violations}",violation_details="%{DATA:violation_details}",request="%{DATA:request}"
      </grok>
    </parse>
  </filter>
  # Split multi-valued fields into arrays for Elasticsearch.
  # ROBUSTNESS: `.to_s` guards records where grok failed and the field is
  # absent — previously nil.split raised and the transformer logged an error.
  <filter nginx_app_protect.**>
    @type record_transformer
    enable_ruby
    <record>
      attack_type ${record["attack_type"].to_s.split(',')}
      sig_cves ${record["sig_cves"].to_s.split(',')}
      sig_ids ${record["sig_ids"].to_s.split(',')}
      sig_names ${record["sig_names"].to_s.split(',')}
      sig_set_names ${record["sig_set_names"].to_s.split(',')}
      threat_campaign_names ${record["threat_campaign_names"].to_s.split(',')}
      violations ${record["violations"].to_s.split(',')}
      sub_violations ${record["sub_violations"].to_s.split(',')}
    </record>
  </filter>
  # Derive the true client address: prefer X-Forwarded-For when present,
  # otherwise fall back to the direct connection IP.
  <filter nginx_app_protect.**>
    @type record_transformer
    enable_ruby
    <record>
      source_host ${if record["x_forwarded_for_header_value"]!="N/A";record["x_forwarded_for_header_value"];else;record["ip_client"];end;}
    </record>
  </filter>
  # Enrich with GeoIP data looked up from source_host (geoip2_c backend).
  <filter nginx_app_protect.**>
    @type geoip
    geoip_lookup_keys source_host
    backend_library geoip2_c
    <record>
      geoip.city_name ${city.names.en["source_host"]}
      geoip.latitude ${location.latitude["source_host"]}
      geoip.longitude ${location.longitude["source_host"]}
      geoip.country_code3 ${country.iso_code["source_host"]}
      geoip.country_name ${country.names.en["source_host"]}
      geoip.postal_code ${postal.code["source_host"]}
      geoip.region_code ${subdivisions.0.iso_code["source_host"]}
      geoip.region_name ${subdivisions.0.names.en["source_host"]}
    </record>
  </filter>
  # Ship to Elasticsearch (daily waf-logs-* indices) and mirror to stdout.
  <match nginx_app_protect.**>
    @type copy
    <store>
      @type elasticsearch
      <buffer>
        flush_mode immediate
      </buffer>
      scheme https
      host kibana.f5-demo.com
      port 443
      # NOTE(review): TLS certificate verification is disabled — acceptable for
      # a demo endpoint only; enable verification for production.
      ssl_verify false
      max_retry_get_es_version 3
      max_retry_putting_template 3
      reconnect_on_error true
      reload_on_failure true
      reload_connections false
      logstash_format true
      logstash_prefix waf-logs
    </store>
    <store>
      @type stdout
    </store>
  </match>
</label>
# Route Fluentd's own internal logs to stdout.
<label @FLUENT_LOG>
  <match fluent.*>
    @type stdout
  </match>
</label>
12 changes: 12 additions & 0 deletions helpers.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,12 @@
# ndjson2json: read Kibana saved-object NDJSON on stdin and expand the
# stringified JSON attributes (fields, visState, searchSourceJSON, panelsJSON)
# into real JSON objects so the export is human-readable/editable.
# Each `if ... then ...|=fromjson else . end` leaves the object untouched
# when the attribute is absent, so mixed object types pass through safely.
ndjson2json () {
jq '. | if .attributes.fields then .attributes.fields|=fromjson else . end
| if .attributes.visState then .attributes.visState|=fromjson else . end
| if .attributes.kibanaSavedObjectMeta.searchSourceJSON then .attributes.kibanaSavedObjectMeta.searchSourceJSON|=fromjson else . end
| if .attributes.panelsJSON then .attributes.panelsJSON|=fromjson else . end'
}
# json2ndjson: inverse of ndjson2json — re-stringify the same attributes
# (tojson) and emit compact output (-c), restoring the one-object-per-line
# NDJSON format Kibana expects for import.
json2ndjson () {
jq -c '. | if .attributes.fields then .attributes.fields|=tojson else . end
| if .attributes.visState then .attributes.visState|=tojson else . end
| if .attributes.kibanaSavedObjectMeta.searchSourceJSON then .attributes.kibanaSavedObjectMeta.searchSourceJSON|=tojson else . end
| if .attributes.panelsJSON then .attributes.panelsJSON|=tojson else . end'
}
Loading

0 comments on commit eedd87e

Please sign in to comment.