Update to libbeat 5.6
parent 7b88a2049b
commit 0fc9123310
vendor/github.com/elastic/beats/.appveyor.yml | 14 (generated, vendored)
@@ -6,8 +6,8 @@ os: Windows Server 2012 R2

# Environment variables
environment:
  GOROOT: c:\go1.7.4
  GOPATH: c:\gopath
  GVM_DL: https://github.com/andrewkroh/gvm/releases/download/v0.0.1/gvm-windows-amd64.exe
  PYWIN_DL: https://beats-files.s3.amazonaws.com/deps/pywin32-220.win32-py2.7.exe

matrix:
  - PROJ: github.com\elastic\beats\metricbeat

@@ -24,15 +24,19 @@ clone_folder: c:\gopath\src\github.com\elastic\beats

cache:
  - C:\ProgramData\chocolatey\bin -> .appveyor.yml
  - C:\ProgramData\chocolatey\lib -> .appveyor.yml
  - C:\go1.7.4 -> .appveyor.yml
  - C:\Users\appveyor\.gvm -> .go-version
  - C:\Windows\System32\gvm.exe -> .appveyor.yml
  - C:\tools\mingw64 -> .appveyor.yml
  - C:\pywin_inst.exe -> .appveyor.yml

# Scripts that run after cloning repository
install:
  - ps: c:\gopath\src\github.com\elastic\beats\libbeat\scripts\install-go.ps1 -version 1.7.4
  - set PATH=%GOROOT%\bin;%PATH%
  # AppVeyor installed mingw is 32-bit only.
  - ps: >-
      if(!(Test-Path "C:\Windows\System32\gvm.exe")) {
        wget "$env:GVM_DL" -Outfile C:\Windows\System32\gvm.exe
      }
  - ps: gvm --format=powershell $(Get-Content .go-version) | Invoke-Expression
  # AppVeyor installed mingw is 32-bit only so install 64-bit version.
  - ps: >-
      if(!(Test-Path "C:\tools\mingw64\bin\gcc.exe")) {
        cinst mingw > mingw-install.txt
vendor/github.com/elastic/beats/.gitignore | 3 (generated, vendored)
@@ -4,7 +4,8 @@
/build
/*/data
/*/logs
/*/_meta/kibana/index-pattern
/*/_meta/kibana/5.x/index-pattern
/*/_meta/kibana/default/index-pattern

# Files
.DS_Store
vendor/github.com/elastic/beats/.go-version | 1 (generated, vendored, new file)
@@ -0,0 +1 @@
1.7.6
vendor/github.com/elastic/beats/.travis.yml | 37 (generated, vendored)
@@ -12,73 +12,73 @@ env:
  global:
    # Cross-compile for amd64 only to speed up testing.
    - GOX_FLAGS="-arch amd64"
    - DOCKER_COMPOSE_VERSION: 1.9.0
    - &go_version 1.7.4
    - DOCKER_COMPOSE_VERSION=1.9.0
    - GO_VERSION="$(cat .go-version)"

matrix:
  include:
    # General checks
    - os: linux
      env: TARGETS="check"
      go: *go_version
      go: $GO_VERSION

    # Filebeat
    - os: linux
      env: TARGETS="-C filebeat testsuite"
      go: *go_version
      go: $GO_VERSION
    - os: osx
      env: TARGETS="TEST_ENVIRONMENT=0 -C filebeat testsuite"
      go: *go_version
      go: $GO_VERSION

    # Heartbeat
    - os: linux
      env: TARGETS="-C heartbeat testsuite"
      go: *go_version
      go: $GO_VERSION
    - os: osx
      env: TARGETS="TEST_ENVIRONMENT=0 -C heartbeat testsuite"
      go: *go_version
      go: $GO_VERSION

    # Libbeat
    - os: linux
      env: TARGETS="-C libbeat testsuite"
      go: *go_version
      go: $GO_VERSION
    - os: linux
      env: TARGETS="-C libbeat crosscompile"
      go: *go_version
      go: $GO_VERSION

    # Metricbeat
    - os: linux
      env: TARGETS="-C metricbeat testsuite"
      go: *go_version
      go: $GO_VERSION
    - os: osx
      env: TARGETS="TEST_ENVIRONMENT=0 -C metricbeat testsuite"
      go: *go_version
      go: $GO_VERSION
    - os: linux
      env: TARGETS="-C metricbeat crosscompile"
      go: *go_version
      go: $GO_VERSION

    # Packetbeat
    - os: linux
      env: TARGETS="-C packetbeat testsuite"
      go: *go_version
      go: $GO_VERSION

    # Winlogbeat
    - os: linux
      env: TARGETS="-C winlogbeat crosscompile"
      go: *go_version
      go: $GO_VERSION

    # Dashboards
    - os: linux
      env: TARGETS="-C libbeat/dashboards"
      go: *go_version
      go: $GO_VERSION

    # Generators
    - os: linux
      env: TARGETS="-C generator/metricbeat test"
      go: *go_version
      go: $GO_VERSION
    - os: linux
      env: TARGETS="-C generator/beat test"
      go: *go_version
      go: $GO_VERSION

addons:
  apt:
@@ -104,6 +104,9 @@ script:

notifications:
  slack:
    on_success: change
    on_failure: always
    on_pull_requests: false
    rooms:
      secure: "e25J5puEA31dOooTI4T+K+zrTs8XeWIGq2cgmiPt9u/g7eqWeQj1UJnVsr8GOu1RPDyuJZJHXqfrvuOYJTdHzXbwjD0JTbwwVVZMkkZW2SWZHG46HCXPiucjWXEr3hXJKBJDDpIx6VxrN7r17dejv1biQ8QuEFZfiB1H8kbH/ho="
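The .travis.yml change above replaces the hard-coded YAML anchor (&go_version 1.7.4) with a GO_VERSION read from the new .go-version file, so Travis, AppVeyor, and the Jenkins scripts all pin the same toolchain. A minimal sketch of the same lookup in a local shell, assuming gvm is installed and you run it from the repo root:

  GO_VERSION="$(cat .go-version)"   # resolves to 1.7.6 in this commit
  eval "$(gvm "$GO_VERSION")"       # gvm prints GOROOT/PATH exports; eval applies them
  go version                        # should now report go1.7.6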
vendor/github.com/elastic/beats/CHANGELOG.asciidoc | 295 (generated, vendored)
@@ -8,7 +8,7 @@
// Template, add newest changes here

=== Beats version HEAD
https://github.com/elastic/beats/compare/v5.3.1...master[Check the HEAD diff]
https://github.com/elastic/beats/compare/v5.6.5...5.6[Check the HEAD diff]

==== Breaking changes

@@ -24,20 +24,16 @@ https://github.com/elastic/beats/compare/v5.3.1...master[Check the HEAD diff]

*Winlogbeat*


==== Bugfixes

*Affecting all Beats*

*Filebeat*
- Properly shut down crawler in case one prospector is misconfigured. {pull}4037[4037]
- Fix panic in JSON decoding code if the input line is "null". {pull}4042[4042]

*Heartbeat*

*Metricbeat*


*Packetbeat*

*Winlogbeat*
@@ -70,7 +66,7 @@ https://github.com/elastic/beats/compare/v5.3.1...master[Check the HEAD diff]

*Winlogbeat*

==== Knwon Issue
==== Known Issue

*Affecting all Beats*

@@ -84,9 +80,283 @@ https://github.com/elastic/beats/compare/v5.3.1...master[Check the HEAD diff]

*Winlogbeat*


////////////////////////////////////////////////////////////

[[release-notes-5.6.5]]
=== Beats version 5.6.5
https://github.com/elastic/beats/compare/v5.6.4...v5.6.5[View commits]

==== Bugfixes

*Affecting all Beats*

- Fix duplicate batches of events in retry queue. {pull}5520[5520]

*Metricbeat*

- Clarify meaning of percentages reported by system core metricset. {pull}5565[5565]
- Fix map overwrite in docker diskio module. {issue}5582[5582]

[[release-notes-5.6.4]]
=== Beats version 5.6.4
https://github.com/elastic/beats/compare/v5.6.3...v5.6.4[View commits]

==== Bugfixes

*Affecting all Beats*

- Fix race condition in internal logging rotator. {pull}4519[4519]

*Packetbeat*

- Fix missing length check in the PostgreSQL module. {pull}5457[5457]

==== Added

*Affecting all Beats*

- Add support for enabling TLS renegotiation. {issue}4386[4386]
- Add setting to enable/disable the slow start in logstash output. {pull}5400[5400]

[[release-notes-5.6.3]]
=== Beats version 5.6.3
https://github.com/elastic/beats/compare/v5.6.2...v5.6.3[View commits]

No changes in this release.

[[release-notes-5.6.2]]
=== Beats version 5.6.2
https://github.com/elastic/beats/compare/v5.6.1...v5.6.2[View commits]

No changes in this release.

[[release-notes-5.6.1]]
=== Beats version 5.6.1
https://github.com/elastic/beats/compare/v5.6.0...v5.6.1[View commits]

No changes in this release.

[[release-notes-5.6.0]]
=== Beats version 5.6.0
https://github.com/elastic/beats/compare/v5.5.3...v5.6.0[View commits]

==== Breaking changes

*Affecting all Beats*

- The _all.norms setting in the Elasticsearch template is no longer disabled.
  This increases the storage size with one byte per document, but allows for a
  better upgrade experience to 6.0. {issue}4901[4901]


==== Bugfixes

*Filebeat*

- Fix issue where the `fileset.module` could have the wrong value. {issue}4761[4761]

*Packetbeat*

- Update flow timestamp on each packet being received. {issue}4895[4895]

*Metricbeat*

- Fix a debug statement that said a module wrapper had stopped when it hadn't. {pull}4264[4264]
- Use MemAvailable value from /proc/meminfo on Linux 3.14. {pull}4316[4316]
- Fix panic when events were dropped by filters. {issue}4327[4327]

==== Added

*Affecting all Beats*

- Add option to the import_dashboards script to load the dashboards via Kibana API. {pull}4682[4682]
- Add `logging.files` `permissions` option. {pull}4295[4295]

*Filebeat*

- Add support for loading Xpack Machine Learning configurations from the modules, and added sample configurations for the Nginx module. {pull}4506[4506] {pull}4609[4609]
- Add ability to parse nginx logs exposing the X-Forwarded-For header instead of the remote address. {pull}4351[4351]

*Metricbeat*

- Add `filesystem.ignore_types` to system module for ignoring filesystem types. {issue}4685[4685]

==== Deprecated

*Affecting all Beats*

- Loading more than one output is deprecated and will be removed in 6.0. {pull}4907[4907]

[[release-notes-5.5.3]]
=== Beats version 5.5.3
https://github.com/elastic/beats/compare/v5.5.2...v5.5.3[View commits]

No changes in this release.

[[release-notes-5.5.2]]
=== Beats version 5.5.2
https://github.com/elastic/beats/compare/v5.5.1...v5.5.2[View commits]

No changes in this release.

[[release-notes-5.5.1]]
=== Beats version 5.5.1
https://github.com/elastic/beats/compare/v5.5.0...v5.5.1[View commits]

==== Bugfixes

*Affecting all Beats*

- Normalize all times to UTC to ensure proper index naming. {issue}4569[4569]

[[release-notes-5.5.0]]
=== Beats version 5.5.0
https://github.com/elastic/beats/compare/v5.4.2...v5.5.0[View commits]

==== Breaking changes

*Affecting all Beats*

- Usage of field `_type` is now ignored and hardcoded to `doc`. {pull}3757[3757]

*Metricbeat*
- Change all `system.cpu.*.pct` metrics to be scaled by the number of CPU cores.
  This will make the CPU usage percentages from the system cpu metricset consistent
  with the system process metricset. The documentation for these metrics already
  stated that on multi-core systems the percentages could be greater than 100%. {pull}4544[4544]

==== Bugfixes

*Affecting all Beats*

- Fix console output. {pull}4045[4045]

*Filebeat*

- Allow string characters in user agent patch version (NGINX and Apache) {pull}4415[4415]

*Metricbeat*

- Fix type of field `haproxy.stat.check.health.last`. {issue}4407[4407]

*Packetbeat*

- Fix `packetbeat.interface` options that contain underscores (e.g. `with_vlans` or `bpf_filter`). {pull}4378[4378]
- Enabled /proc/net/tcp6 scanning and fixed ip v6 parsing. {pull}4442[4442]

==== Deprecated

*Filebeat*

- Deprecate `document_type` prospector config option as _type is removed in elasticsearch 6.0. Use fields instead. {pull}4225[4225]

*Winlogbeat*

- Deprecated metrics endpoint. It is superseded by a libbeat feature that can serve metrics on an HTTP endpoint. {pull}4145[4145]

[[release-notes-5.4.2]]
=== Beats version 5.4.2
https://github.com/elastic/beats/compare/v5.4.1...v5.4.2[View commits]

==== Bugfixes

*Affecting all Beats*

- Removed empty sections from the template files, causing indexing errors for array objects. {pull}4488[4488]

*Metricbeat*

- Fix issue affecting Windows services timing out at startup. {pull}4491[4491]
- Add filtering to system filesystem metricset to remove relative mountpoints like those
  from Linux network namespaces. {pull}4370[4370]

*Packetbeat*

- Clean configured geoip.paths before attempting to open the database. {pull}4306[4306]

[[release-notes-5.4.1]]
=== Beats version 5.4.1
https://github.com/elastic/beats/compare/v5.4.0...v5.4.1[View commits]

==== Bugfixes

*Affecting all Beats*

- Fix importing the dashboards when the limit for max open files is too low. {issue}4244[4244]
- Fix console output. {pull}4045[4045]

*Filebeat*

- Fix issue that new prospector was not reloaded on conflict. {pull}4128[4128]
- Fix grok pattern in filebeat module system/auth without hostname. {pull}4224[4224]
- Fix the Mysql slowlog parsing of IP addresses. {pull}4183[4183]

==== Added

*Affecting all Beats*

- Binaries upgraded to Go 1.7.6 which contains security fixes. {pull}4400[4400]

*Winlogbeat*

- Add the ability to use LevelRaw if Level isn't populated in the event XML. {pull}4257[4257]

[[release-notes-5.4.0]]
=== Beats version 5.4.0
https://github.com/elastic/beats/compare/v5.3.1...v5.4.0[View commits]

==== Bugfixes

*Affecting all Beats*

- Improve error message when downloading the dashboards fails. {pull}3805[3805]
- Fix potential Elasticsearch output URL parsing error if protocol scheme is missing. {pull}3671[3671]
- Downgrade Elasticsearch per batch item failure log to debug level. {issue}3953[3953]
- Make `@timestamp` accessible from format strings. {pull}3721[3721]

*Filebeat*

- Allow log lines without a program name in the Syslog fileset. {pull}3944[3944]
- Don't stop Filebeat when modules are used with the Logstash output. {pull}3929[3929]
- Properly shut down crawler in case one prospector is misconfigured. {pull}4037[4037]

*Metricbeat*

- Fixing panic on the Prometheus collector when label has a comma. {pull}3947[3947]
- Make system process metricset honor the `cpu_ticks` config option. {issue}3590[3590]

*Winlogbeat*

- Fix null terminators include in raw XML string when include_xml is enabled. {pull}3943[3943]

==== Added

*Affecting all Beats*

- Update index mappings to support future Elasticsearch 6.X. {pull}3778[3778]

*Filebeat*

- Add auditd module for reading audit logs on Linux. {pull}3750[3750] {pull}3941[3941]
- Add fileset for the Linux authorization logs. {pull}3669[3669]

*Heartbeat*

- Add default ports in HTTP monitor. {pull}3924[3924]

*Metricbeat*

- Add beta Jolokia module. {pull}3844[3844]
- Add dashboard for the MySQL module. {pull}3716[3716]
- Module configuration reloading is now beta instead of experimental. {pull}3841[3841]
- Marked http fields from the HAProxy module optional to improve compatibility with 1.5. {pull}3788[3788]
- Add support for custom HTTP headers and TLS for the Metricbeat modules. {pull}3945[3945]

*Packetbeat*

- Add DNS dashboard for an overview the DNS traffic. {pull}3883[3883]
- Add DNS Tunneling dashboard to highlight domains with large numbers of subdomains or high data volume. {pull}3884[3884]

[[release-notes-5.3.1]]
=== Beats version 5.3.1
https://github.com/elastic/beats/compare/v5.3.0...v5.3.1[View commits]
@@ -106,7 +376,6 @@ https://github.com/elastic/beats/compare/v5.3.0...v5.3.1[View commits]

- Avoid errors when some Apache status fields are missing. {issue}3074[3074]


[[release-notes-5.3.0]]
=== Beats version 5.3.0
https://github.com/elastic/beats/compare/v5.2.2...v5.3.0[View commits]
@@ -132,6 +401,10 @@ https://github.com/elastic/beats/compare/v5.2.2...v5.3.0[View commits]
*Affecting all Beats*

- Add `_id`, `_type`, `_index` and `_score` fields in the generated index pattern. {pull}3282[3282]
- Fix potential elasticsearch output URL parsing error if protocol scheme is missing. {pull}3671[3671]
- Improve error message when downloading the dashboards fails. {pull}3805[3805]
- Downgrade Elasticsearch per batch item failure log to debug level. {issue}3953[3953]
- Fix panic due to race condition in kafka output. {pull}4098[4098]

*Filebeat*
- Always use absolute path for event and registry. {pull}3328[3328]
@@ -143,6 +416,12 @@ https://github.com/elastic/beats/compare/v5.2.2...v5.3.0[View commits]

- Add error handling to system process metricset for when Linux cgroups are missing from the kernel. {pull}3692[3692]
- Add labels to the Docker healthcheck metricset output. {pull}3707[3707]
- Make system process metricset honor the cpu_ticks config option. {issue}3590[3590]
- Support common.Time in mapstriface.toTime() {pull}3812[3812]
- Fixing panic on prometheus collector when label has , {pull}3947[3947]
- Fix MongoDB dbstats fields mapping. {pull}4025[4025]

*Packetbeat*

*Winlogbeat*
vendor/github.com/elastic/beats/CONTRIBUTING.md | 2 (generated, vendored)
@@ -51,7 +51,7 @@ Beats](https://github.com/elastic/beats/blob/master/libbeat/docs/communitybeats.

The Beats are Go programs, so install the latest version of
[golang](http://golang.org/) if you don't have it already. The current Go version
used for development is Golang 1.7.4.
used for development is Golang 1.7.6.

The location where you clone is important. Please clone under the source
directory of your `GOPATH`. If you don't have `GOPATH` already set, you can
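Because Go 1.7.x predates Go modules, imports only resolve when the checkout sits at its canonical import path under GOPATH/src. A minimal sketch of a conforming clone, assuming GOPATH is already exported:

  # Place the repo where the github.com/elastic/beats import path expects it.
  mkdir -p "$GOPATH/src/github.com/elastic"
  cd "$GOPATH/src/github.com/elastic"
  git clone https://github.com/elastic/beats.git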
vendor/github.com/elastic/beats/Dockerfile | 2 (generated, vendored)
@@ -1,4 +1,4 @@
FROM golang:1.7.4
FROM golang:1.7.6
MAINTAINER Nicolas Ruflin <ruflin@elastic.co>

RUN set -x && \
vendor/github.com/elastic/beats/codecov.yml | 32 (generated, vendored)
@@ -1 +1,33 @@
# Enable coverage report message for diff on commit
# Docs can be found here: https://docs.codecov.io/v4.3.0/docs/commit-status
coverage:
  status:
    project:
      default:
        target: auto
        # Overall coverage should never drop more than 0.5%
        threshold: 0.5
        base: auto
        branches: null
        if_no_uploads: error
        if_not_found: success
        if_ci_failed: error
        only_pulls: false
        flags: null
        paths: null
    patch:
      default:
        target: auto
        # Allows PRs without tests, overall stats count
        threshold: 100
        base: auto
        branches: null
        if_no_uploads: error
        if_not_found: success
        if_ci_failed: error
        only_pulls: false
        flags: null
        paths: null

# Disable comments on Pull Requests
comment: false
vendor/github.com/elastic/beats/dev-tools/cherrypick_pr | 71 (generated, vendored)
@@ -1,17 +1,22 @@
#!/usr/bin/env python
"""Cherry pick and backport a PR"""

import sys
import argparse
from os.path import expanduser
import re
from subprocess import check_call, call, check_output
import requests

"""
usage = """
Example usage:

./dev-tools/cherrypick_pr 5.0 2565 6490604aa0cf7fa61932a90700e6ca988fc8a527
./dev-tools/cherrypick_pr --create_pr 5.0 2565 6490604aa0cf7fa61932a90700e6ca988fc8a527

In case of backporting errors, fix them, then run:

git cherry-pick --continue
./dev-tools/cherrypick_pr 5.0 2565 6490604aa0cf7fa61932a90700e6ca988fc8a527 --continue
./dev-tools/cherrypick_pr --create_pr 5.0 2565 6490604aa0cf7fa61932a90700e6ca988fc8a527 --continue

This script does the following:

@@ -20,8 +25,9 @@ This script does the following:
* calls the git cherry-pick command in this branch
* after fixing the merge errors (if needed), pushes the branch to your
  remote

You then just need to go to Github and open the PR.
* if the --create_pr flag is used, it uses the GitHub API to create the PR
  for you. Note that this requires you to have a Github token with the
  public_repo scope in the `~/.elastic/github.token` file

Note that you need to take the commit hashes from `git log` on the
from_branch, copying the IDs from Github doesn't work in case we squashed the
@@ -30,8 +36,11 @@ PR.


def main():
    """Main"""
    parser = argparse.ArgumentParser(
        description="Creates a PR for merging two branches")
        description="Creates a PR for merging two branches",
        formatter_class=argparse.RawDescriptionHelpFormatter,
        epilog=usage)
    parser.add_argument("to_branch",
                        help="To branch (e.g 5.0)")
    parser.add_argument("pr_number",
@@ -45,15 +54,18 @@ def main():
                        help="Continue after fixing merging errors.")
    parser.add_argument("--from_branch", default="master",
                        help="From branch")
    parser.add_argument("--create_pr", action="store_true",
                        help="Create a PR using the Github API " +
                             "(requires token in ~/.elastic/github.token)")
    args = parser.parse_args()

    print args
    print(args)

    tmp_branch = "backport_{}_{}".format(args.pr_number, args.to_branch)

    if not vars(args)["continue"]:
        if not args.yes and raw_input("This will destroy all local changes. " +
                "Continue? [y/n]: ") != "y":
                                      "Continue? [y/n]: ") != "y":
            return 1
        check_call("git reset --hard", shell=True)
        check_call("git clean -df", shell=True)
@@ -91,9 +103,46 @@ def main():
                   shell=True)
    check_call("git push --set-upstream {} {}"
               .format(remote, tmp_branch), shell=True)
    print("Done. Open PR by following this URL: \n\t" +
          "https://github.com/elastic/beats/compare/{}...{}:{}?expand=1"
          .format(args.to_branch, remote, tmp_branch))
    if not args.create_pr:
        print("Done. Open PR by following this URL: \n\t" +
              "https://github.com/elastic/beats/compare/{}...{}:{}?expand=1"
              .format(args.to_branch, remote, tmp_branch))
    else:
        token = open(expanduser("~/.elastic/github.token"), "r").read().strip()
        base = "https://api.github.com/repos/elastic/beats"
        session = requests.Session()
        session.headers.update({"Authorization": "token " + token})

        original_pr = session.get(base + "/pulls/" + args.pr_number).json()

        # get the github username from the remote where we pushed
        remote_url = check_output("git remote get-url {}".format(remote),
                                  shell=True)
        remote_user = re.search("github.com:(.+)/beats", remote_url).group(1)

        # create PR
        request = session.post(base + "/pulls", json=dict(
            title="Cherry-pick #{} to {}: {}".format(args.pr_number, args.to_branch, original_pr["title"]),
            head=remote_user + ":" + tmp_branch,
            base=args.to_branch,
            body="Cherry-pick of PR #{} to {} branch. Original message: \n\n{}"
                 .format(args.pr_number, args.to_branch, original_pr["body"])
        ))
        if request.status_code > 299:
            print("Creating PR failed: {}".format(request.json()))
            sys.exit(1)
        new_pr = request.json()

        # add labels
        session.post(
            base + "/issues/{}/labels".format(new_pr["number"]), json=["backport", "review"])

        # remove needs backport label from the original PR
        session.delete(base + "/issues/{}/labels/needs_backport".format(args.pr_number))

        print("\nDone. PR created: {}".format(new_pr["html_url"]))
        print("Please go and check it and add the review tags")


if __name__ == "__main__":
    sys.exit(main())
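The --create_pr path above reads ~/.elastic/github.token verbatim and strips surrounding whitespace, so a one-time setup like the following is enough (the token value is a placeholder; it needs the public_repo scope):

  mkdir -p ~/.elastic
  printf '%s\n' '<github-token-with-public_repo-scope>' > ~/.elastic/github.token
  chmod 600 ~/.elastic/github.token   # the file holds a credential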
vendor/github.com/elastic/beats/dev-tools/common.bash | 20 (generated, vendored)
@@ -27,7 +27,7 @@ err()

# Read the project's Go version and return it in the GO_VERSION variable.
# On failure it will exit.
get_go_version() {
  GO_VERSION=$(awk '/^:go-version:/{print $NF}' "${_sdir}/../libbeat/docs/version.asciidoc")
  GO_VERSION=$(cat "${_sdir}/../.go-version")
  if [ -z "$GO_VERSION" ]; then
    err "Failed to detect the project's Go version"
    exit 1
@@ -78,3 +78,21 @@ setup_go_path() {

  debug "GOPATH=${GOPATH}"
}

jenkins_setup() {
  : "${HOME:?Need to set HOME to a non-empty value.}"
  : "${WORKSPACE:?Need to set WORKSPACE to a non-empty value.}"

  # Setup Go.
  export GOPATH=${WORKSPACE}
  export PATH=${GOPATH}/bin:${PATH}
  if [ -f ".go-version" ]; then
    eval "$(gvm $(cat .go-version))"
  else
    eval "$(gvm 1.7.5)"
  fi

  # Workaround for Python virtualenv path being too long.
  export TEMP_PYTHON_ENV=$(mktemp -d)
  export PYTHON_ENV="${TEMP_PYTHON_ENV}/python-env"
}
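A short sketch of how a CI entry point consumes these helpers (this mirrors what jenkins_ci.sh further down actually does; the echo line is illustrative only):

  source ./dev-tools/common.bash
  get_go_version                 # sets GO_VERSION from .go-version
  echo "Using Go ${GO_VERSION}"
  jenkins_setup                  # exports GOPATH/PATH and activates Go via gvm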
vendor/github.com/elastic/beats/dev-tools/jenkins_ci | 145 (generated, vendored)
@@ -1,145 +0,0 @@
#!/usr/bin/env bash
set -e

# Script directory:
SDIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
SNAME="$(basename "$0")"

source "${SDIR}/common.bash"

usage() {
cat << EOF
  Usage: $SNAME [-d] [-h] [-v] [-r] [-w=WORKSPACE] (-g|-b|-c)

  Description: Executes a build using the project's Go version.

  Options:
    -w=WORKSPACE         Required. Specifies the path to the Jenkins workspace.
                         If not set then the WORKSPACE environment variable is
                         used. The workspace will be treated as the GOPATH.
    -b | --build         Perform a build which includes make targets: check,
                         testsuite, coverage-report, and docs.
    -c | --cleanup       Clean up after the build by removing the checkout of
                         elastic/docs and stopping any running containers
                         started by the build. This cannot be specified with
                         --build.
    -g | --go-version    Optional. Write the project's Go version to stdout
                         and then exits. Can be used to setup Go with
                         eval "\$(gimme \$(./jenkins_ci -g))".
    -i | --install-gimme Optional. Installs gimme to HOME/bin.
    -r | --race          Optional. Enable the Go race detector for tests that
                         are run.
    -d | --debug         Optional. Runs the script with 'set -x' to log a trace
                         of all commands and their arguments being executed.
    -v | --verbose       Optional. Enable verbose logging from this script to stderr.
    -h | --help          Optional. Print this usage information.

  Examples:
    Print project Go version: ./$SNAME --go-version
    Build with race detector: ./$SNAME -b -r
    Stop test environment:    ./$SNAME -c

  Jenkins Setup:

  1) Jenkins should be setup to checkout elastic/beats into
     \$WORKSPACE/src/github.com/elastic/
  2) The single build script should be added that executes
     \$WORKSPACE/src/github.com/elastic/beats/dev-tools/$SNAME -d -v -b --race
  3) A post build action should be added that executes
     \$WORKSPACE/src/github.com/elastic/beats/dev-tools/$SNAME -d -v -c
EOF
}

# Parse command line arguments.
parse_args() {
  for i in "$@"
  do
    case $i in
      -b|--build)
        BUILD=true
        shift
        ;;
      -c|--cleanup)
        CLEANUP=true
        shift
        ;;
      -d|--debug)
        set -x
        shift
        ;;
      -g|--go-version)
        get_go_version
        echo "${GO_VERSION}"
        exit 0
        ;;
      -h|--help)
        usage
        exit 1
        ;;
      -i|--install-gimme)
        install_gimme
        exit 0
        ;;
      -r|--race)
        export RACE_DETECTOR=1
        shift
        ;;
      -v|--verbose)
        VERBOSE=true
        shift
        ;;
      -w=*|--workspace=*)
        WORKSPACE="${i#*=}"
        shift
        ;;
      *)
        echo "Invalid argument: $i"
        usage
        exit 1
        ;;
    esac
  done

  if [ -z "$WORKSPACE" ]; then
    err "WORKSPACE env var must be set or --workspace must be specified"
    exit 1
  fi
}

build() {
  make check
  make testsuite
  make coverage-report
  make docs
}

cleanup() {
  # Remove the checkout of elastic/docs if it exists.
  rm -rf "${SDIR}/../build/docs"

  make stop-environments
}

main() {
  cd "${SDIR}/.."
  parse_args $*
  get_go_version
  setup_go_root ${GO_VERSION}
  setup_go_path ${WORKSPACE}

  if [ "$BUILD" == "true" ] && [ "$CLEANUP" == "true" ]; then
    err "--build and --cleanup cannot be used together"
    exit 1
  elif [ "$BUILD" == "true" ]; then
    chmod -R go-w "${GOPATH}/src/github.com/elastic/beats"
    build
  elif [ "$CLEANUP" == "true" ]; then
    cleanup
  else
    err "Use either --build or --cleanup"
    exit 1
  fi
}

umask 022
main $*
vendor/github.com/elastic/beats/dev-tools/jenkins_ci.ps1 | 61 (generated, vendored, new executable file)
@@ -0,0 +1,61 @@
function Exec
{
    param(
        [Parameter(Position=0,Mandatory=1)][scriptblock]$cmd,
        [Parameter(Position=1,Mandatory=0)][string]$errorMessage = ($msgs.error_bad_command -f $cmd)
    )

    & $cmd
    if ($LastExitCode -ne 0) {
        Write-Error $errorMessage
        exit $LastExitCode
    }
}

# Setup Go.
$env:GOPATH = $env:WORKSPACE
$env:PATH = "$env:GOPATH\bin;C:\tools\mingw64\bin;$env:PATH"
if (Test-Path -PathType leaf .go-version) {
    & gvm --format=powershell $(Get-Content .go-version) | Invoke-Expression
} else {
    & gvm --format=powershell 1.7.5 | Invoke-Expression
}

if (Test-Path "$env:beat") {
    cd "$env:beat"
} else {
    echo "$env:beat does not exist"
    New-Item -ItemType directory -Path build | Out-Null
    New-Item -Name build\TEST-empty.xml -ItemType File | Out-Null
    exit
}

if (Test-Path "build") { Remove-Item -Recurse -Force build }
New-Item -ItemType directory -Path build\coverage | Out-Null
New-Item -ItemType directory -Path build\system-tests | Out-Null
New-Item -ItemType directory -Path build\system-tests\run | Out-Null

exec { go get -u github.com/jstemmer/go-junit-report }

echo "Building $env:beat"
exec { go build } "Build FAILURE"

if ($env:beat -eq "metricbeat") {
    cp .\_meta\fields.common.yml .\_meta\fields.generated.yml
    python .\scripts\fields_collector.py | out-file -append -encoding UTF8 -filepath .\_meta\fields.generated.yml
} elseif ($env:beat -eq "libbeat") {
    cp .\_meta\fields.common.yml .\_meta\fields.generated.yml
    cat processors\*\_meta\fields.yml | Out-File -append -encoding UTF8 -filepath .\_meta\fields.generated.yml
    cp .\_meta\fields.generated.yml .\fields.yml
}

echo "Unit testing $env:beat"
go test -v $(go list ./... | select-string -Pattern "vendor" -NotMatch) 2>&1 | Out-File -encoding UTF8 build/TEST-go-unit.out
exec { Get-Content build/TEST-go-unit.out | go-junit-report.exe -set-exit-code | Out-File -encoding UTF8 build/TEST-go-unit.xml } "Unit test FAILURE"

echo "System testing $env:beat"
# TODO (elastic/beats#5050): Use a vendored copy of this.
exec { go get github.com/docker/libcompose }
exec { go test -race -c -cover -covermode=atomic -coverpkg ./... }
exec { cd tests/system }
exec { nosetests --with-timer --with-xunit --xunit-file=../../build/TEST-system.xml } "System test FAILURE"
vendor/github.com/elastic/beats/dev-tools/jenkins_ci.sh | 33 (generated, vendored, new executable file)
@@ -0,0 +1,33 @@
#!/usr/bin/env bash
set -euox pipefail

: "${HOME:?Need to set HOME to a non-empty value.}"
: "${WORKSPACE:?Need to set WORKSPACE to a non-empty value.}"
: "${beat:?Need to set beat to a non-empty value.}"

if [ ! -d "$beat" ]; then
  echo "$beat does not exist"
  mkdir -p build
  touch build/TEST-empty.xml
  exit
fi

source ./dev-tools/common.bash

jenkins_setup

cleanup() {
  echo "Running cleanup..."
  rm -rf $TEMP_PYTHON_ENV
  make stop-environment fix-permissions
  echo "Killing all running containers..."
  docker ps -q | xargs -r docker kill || true
  echo "Cleaning stopped docker containers and dangling images/networks/volumes..."
  docker system prune -f || true
  echo "Cleanup complete."
}
trap cleanup EXIT

rm -rf ${GOPATH}/pkg
cd ${beat}
RACE_DETECTOR=1 make clean check testsuite
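A sketch of how a Jenkins shell step might drive this script; the beat value selects the directory under test, and the WORKSPACE assignment here is only an example:

  export WORKSPACE="$PWD"
  export beat=filebeat
  ./dev-tools/jenkins_ci.sh   # runs make clean check testsuite with RACE_DETECTOR=1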
vendor/github.com/elastic/beats/dev-tools/jenkins_intake.sh | 18 (generated, vendored, new executable file)
@@ -0,0 +1,18 @@
#!/usr/bin/env bash
set -euox pipefail

: "${HOME:?Need to set HOME to a non-empty value.}"
: "${WORKSPACE:?Need to set WORKSPACE to a non-empty value.}"

source ./dev-tools/common.bash

jenkins_setup

cleanup() {
  echo "Running cleanup..."
  rm -rf $TEMP_PYTHON_ENV
  echo "Cleanup complete."
}
trap cleanup EXIT

make check
@@ -1,4 +1,4 @@
FROM tudorg/xgo-deb6-1.7.4
FROM tudorg/xgo-deb6-1.7.6

MAINTAINER Tudor Golubenco <tudor@elastic.co>
vendor/github.com/elastic/beats/dev-tools/packer/docker/xgo-image-deb6/build.sh | 2 (generated, vendored)
@@ -1,5 +1,5 @@
#!/bin/sh

docker build --rm=true -t tudorg/xgo-deb6-base base/ && \
docker build --rm=true -t tudorg/xgo-deb6-1.7.4 go-1.7.4/ &&
docker build --rm=true -t tudorg/xgo-deb6-1.7.6 go-1.7.6/ &&
docker build --rm=true -t tudorg/beats-builder-deb6 beats-builder
@@ -1,4 +1,4 @@
# Go cross compiler (xgo): Go 1.7.4 layer
# Go cross compiler (xgo): Go 1.7.6 layer
# Copyright (c) 2014 Péter Szilágyi. All rights reserved.
#
# Released under the MIT license.
@@ -9,7 +9,7 @@ MAINTAINER Tudor Golubenco <tudor@elastic.co>

# Configure the root Go distribution and bootstrap based on it
RUN \
  export ROOT_DIST="https://storage.googleapis.com/golang/go1.7.4.linux-amd64.tar.gz" && \
  export ROOT_DIST_SHA1="2e5baf03d1590e048c84d1d5b4b6f2540efaaea1" && \
  export ROOT_DIST="https://storage.googleapis.com/golang/go1.7.6.linux-amd64.tar.gz" && \
  export ROOT_DIST_SHA1="6a7014f34048d95ab60247814a1b8b98018810ff" && \
  \
  $BOOTSTRAP_PURE
vendor/github.com/elastic/beats/dev-tools/packer/docker/xgo-image/base/Dockerfile | 5 (generated, vendored)
@@ -29,14 +29,13 @@ RUN \
  binutils-multiarch rsync \
  --no-install-recommends

# Configure the container for OSX cross compilation
# Configure the container for OSX cross compilation
ENV OSX_SDK MacOSX10.11.sdk
ENV OSX_NDK_X86 /usr/local/osx-ndk-x86

RUN \
  OSX_SDK_PATH=https://s3.dockerproject.org/darwin/v2/$OSX_SDK.tar.xz && \
  $FETCH $OSX_SDK_PATH dd228a335194e3392f1904ce49aff1b1da26ca62 && \
  OSX_SDK_PATH=https://github.com/phracker/MacOSX-SDKs/releases/download/MacOSX10.11.sdk/MacOSX10.11.sdk.tar.xz && \
  $FETCH $OSX_SDK_PATH f3430e3d923644e66c0c13f7a48754e7b6aa2e3f && \
  \
  git clone https://github.com/tpoechtrager/osxcross.git && \
  mv `basename $OSX_SDK_PATH` /osxcross/tarballs/ && \
@@ -1,4 +1,4 @@
FROM tudorg/xgo-1.7.4
FROM tudorg/xgo-1.7.6

MAINTAINER Tudor Golubenco <tudor@elastic.co>
vendor/github.com/elastic/beats/dev-tools/packer/docker/xgo-image/build.sh | 2 (generated, vendored)
@@ -1,5 +1,5 @@
#!/bin/sh

docker build --rm=true -t tudorg/xgo-base base/ && \
docker build --rm=true -t tudorg/xgo-1.7.4 go-1.7.4/ &&
docker build --rm=true -t tudorg/xgo-1.7.6 go-1.7.6/ &&
docker build --rm=true -t tudorg/beats-builder beats-builder
@@ -1,4 +1,4 @@
# Go cross compiler (xgo): Go 1.7.4 layer
# Go cross compiler (xgo): Go 1.7.6 layer
# Copyright (c) 2014 Péter Szilágyi. All rights reserved.
#
# Released under the MIT license.
@@ -9,7 +9,7 @@ MAINTAINER Tudor Golubenco <tudor@elastic.co>

# Configure the root Go distribution and bootstrap based on it
RUN \
  export ROOT_DIST="https://storage.googleapis.com/golang/go1.7.4.linux-amd64.tar.gz" && \
  export ROOT_DIST_SHA1="2e5baf03d1590e048c84d1d5b4b6f2540efaaea1" && \
  export ROOT_DIST="https://storage.googleapis.com/golang/go1.7.6.linux-amd64.tar.gz" && \
  export ROOT_DIST_SHA1="6a7014f34048d95ab60247814a1b8b98018810ff" && \
  \
  $BOOTSTRAP_PURE
vendor/github.com/elastic/beats/dev-tools/packer/platforms/binary/run.sh.j2 | 1 (generated, vendored)
@@ -18,6 +18,7 @@ cp {{.beat_name}}-linux.yml /{{.beat_name}}-${VERSION}-linux-{{.bin_arch}}/{{.be
cp {{.beat_name}}-linux.full.yml /{{.beat_name}}-${VERSION}-linux-{{.bin_arch}}/{{.beat_name}}.full.yml
cp {{.beat_name}}.template.json /{{.beat_name}}-${VERSION}-linux-{{.bin_arch}}/
cp {{.beat_name}}.template-es2x.json /{{.beat_name}}-${VERSION}-linux-{{.bin_arch}}/
cp {{.beat_name}}.template-es6x.json /{{.beat_name}}-${VERSION}-linux-{{.bin_arch}}/

mkdir -p upload
tar czvf upload/{{.beat_name}}-${VERSION}-linux-{{.bin_arch}}.tar.gz /{{.beat_name}}-${VERSION}-linux-{{.bin_arch}}
vendor/github.com/elastic/beats/dev-tools/packer/platforms/centos/run.sh.j2 | 1 (generated, vendored)
@@ -38,6 +38,7 @@ fpm --force -s dir -t rpm \
  {{.beat_name}}-linux.full.yml=/etc/{{.beat_name}}/{{.beat_name}}.full.yml \
  {{.beat_name}}.template.json=/etc/{{.beat_name}}/{{.beat_name}}.template.json \
  {{.beat_name}}.template-es2x.json=/etc/{{.beat_name}}/{{.beat_name}}.template-es2x.json \
  {{.beat_name}}.template-es6x.json=/etc/{{.beat_name}}/{{.beat_name}}.template-es6x.json \
  ${RUNID}.service=/lib/systemd/system/{{.beat_name}}.service \
  god-linux-{{.arch}}=/usr/share/{{.beat_name}}/bin/{{.beat_name}}-god \
  import_dashboards-linux-{{.arch}}=/usr/share/{{.beat_name}}/scripts/import_dashboards
vendor/github.com/elastic/beats/dev-tools/packer/platforms/darwin/run.sh.j2 | 1 (generated, vendored)
@@ -18,6 +18,7 @@ cp {{.beat_name}}-darwin.yml /{{.beat_name}}-${VERSION}-darwin-x86_64/{{.beat_na
cp {{.beat_name}}-darwin.full.yml /{{.beat_name}}-${VERSION}-darwin-x86_64/{{.beat_name}}.full.yml
cp {{.beat_name}}.template.json /{{.beat_name}}-${VERSION}-darwin-x86_64/
cp {{.beat_name}}.template-es2x.json /{{.beat_name}}-${VERSION}-darwin-x86_64/
cp {{.beat_name}}.template-es6x.json /{{.beat_name}}-${VERSION}-darwin-x86_64/

mkdir -p upload
tar czvf upload/{{.beat_name}}-${VERSION}-darwin-x86_64.tar.gz /{{.beat_name}}-${VERSION}-darwin-x86_64
vendor/github.com/elastic/beats/dev-tools/packer/platforms/debian/run.sh.j2 | 1 (generated, vendored)
@@ -35,6 +35,7 @@ fpm --force -s dir -t deb \
  {{.beat_name}}-linux.full.yml=/etc/{{.beat_name}}/{{.beat_name}}.full.yml \
  {{.beat_name}}.template.json=/etc/{{.beat_name}}/{{.beat_name}}.template.json \
  {{.beat_name}}.template-es2x.json=/etc/{{.beat_name}}/{{.beat_name}}.template-es2x.json \
  {{.beat_name}}.template-es6x.json=/etc/{{.beat_name}}/{{.beat_name}}.template-es6x.json \
  ${RUNID}.service=/lib/systemd/system/{{.beat_name}}.service \
  god-linux-{{.arch}}=/usr/share/{{.beat_name}}/bin/{{.beat_name}}-god \
  import_dashboards-linux-{{.arch}}=/usr/share/{{.beat_name}}/scripts/import_dashboards
vendor/github.com/elastic/beats/dev-tools/packer/platforms/windows/run.sh.j2 | 1 (generated, vendored)
@@ -19,6 +19,7 @@ cp {{.beat_name}}-win.yml /{{.beat_name}}-${VERSION}-windows-{{.win_arch}}/{{.be
cp {{.beat_name}}-win.full.yml /{{.beat_name}}-${VERSION}-windows-{{.win_arch}}/{{.beat_name}}.full.yml
cp {{.beat_name}}.template.json /{{.beat_name}}-${VERSION}-windows-{{.win_arch}}/
cp {{.beat_name}}.template-es2x.json /{{.beat_name}}-${VERSION}-windows-{{.win_arch}}/
cp {{.beat_name}}.template-es6x.json /{{.beat_name}}-${VERSION}-windows-{{.win_arch}}/
cp install-service-{{.beat_name}}.ps1 /{{.beat_name}}-${VERSION}-windows-{{.win_arch}}/
cp uninstall-service-{{.beat_name}}.ps1 /{{.beat_name}}-${VERSION}-windows-{{.win_arch}}/
vendor/github.com/elastic/beats/dev-tools/packer/version.yml | 2 (generated, vendored)
@@ -1 +1 @@
version: "1.1.1"
version: "5.6.6"
vendor/github.com/elastic/beats/dev-tools/packer/xgo-scripts/before_build.sh | 1 (generated, vendored)
@@ -15,6 +15,7 @@ PREFIX=/build

# Copy template
cp $BEAT_NAME.template.json $PREFIX/$BEAT_NAME.template.json
cp $BEAT_NAME.template-es2x.json $PREFIX/$BEAT_NAME.template-es2x.json
cp $BEAT_NAME.template-es6x.json $PREFIX/$BEAT_NAME.template-es6x.json

# linux
cp $BEAT_NAME.yml $PREFIX/$BEAT_NAME-linux.yml
vendor/github.com/elastic/beats/filebeat/Dockerfile | 2 (generated, vendored)
@@ -1,4 +1,4 @@
FROM golang:1.7.4
FROM golang:1.7.6
MAINTAINER Nicolas Ruflin <ruflin@elastic.co>

RUN set -x && \
vendor/github.com/elastic/beats/filebeat/_meta/beat.full.yml | 418 (generated, vendored)
@ -1,418 +0,0 @@
|
||||
######################## Filebeat Configuration ############################
|
||||
|
||||
# This file is a full configuration example documenting all non-deprecated
|
||||
# options in comments. For a shorter configuration example, that contains only
|
||||
# the most common options, please see filebeat.yml in the same directory.
|
||||
#
|
||||
# You can find the full configuration reference here:
|
||||
# https://www.elastic.co/guide/en/beats/filebeat/index.html
|
||||
|
||||
|
||||
#========================== Modules configuration ============================
|
||||
filebeat.modules:
|
||||
|
||||
#------------------------------- System Module -------------------------------
|
||||
#- module: system
|
||||
# Syslog
|
||||
#syslog:
|
||||
#enabled: true
|
||||
|
||||
# Set custom paths for the log files. If left empty,
|
||||
# Filebeat will choose the paths depending on your OS.
|
||||
#var.paths:
|
||||
|
||||
# Prospector configuration (advanced). Any prospector configuration option
|
||||
# can be added under this section.
|
||||
#prospector:
|
||||
|
||||
# Authorization logs
|
||||
#auth:
|
||||
#enabled: true
|
||||
|
||||
# Set custom paths for the log files. If left empty,
|
||||
# Filebeat will choose the paths depending on your OS.
|
||||
#var.paths:
|
||||
|
||||
# Prospector configuration (advanced). Any prospector configuration option
|
||||
# can be added under this section.
|
||||
#prospector:
|
||||
|
||||
#------------------------------- Apache2 Module ------------------------------
|
||||
#- module: apache2
|
||||
# Access logs
|
||||
#access:
|
||||
#enabled: true
|
||||
|
||||
# Set custom paths for the log files. If left empty,
|
||||
# Filebeat will choose the paths depending on your OS.
|
||||
#var.paths:
|
||||
|
||||
# Prospector configuration (advanced). Any prospector configuration option
|
||||
# can be added under this section.
|
||||
#prospector:
|
||||
|
||||
# Error logs
|
||||
#error:
|
||||
#enabled: true
|
||||
|
||||
# Set custom paths for the log files. If left empty,
|
||||
# Filebeat will choose the paths depending on your OS.
|
||||
#var.paths:
|
||||
|
||||
# Prospector configuration (advanced). Any prospector configuration option
|
||||
# can be added under this section.
|
||||
#prospector:
|
||||
|
||||
#------------------------------- Auditd Module -------------------------------
|
||||
#- module: auditd
|
||||
#log:
|
||||
#enabled: true
|
||||
|
||||
# Set custom paths for the log files. If left empty,
|
||||
# Filebeat will choose the paths depending on your OS.
|
||||
#var.paths:
|
||||
|
||||
# Prospector configuration (advanced). Any prospector configuration option
|
||||
# can be added under this section.
|
||||
#prospector:
|
||||
|
||||
#------------------------------- Icinga Module -------------------------------
|
||||
#- module: icinga
|
||||
# Main logs
|
||||
#main:
|
||||
#enabled: true
|
||||
|
||||
# Set custom paths for the log files. If left empty,
|
||||
# Filebeat will choose the paths depending on your OS.
|
||||
#var.paths:
|
||||
|
||||
# Prospector configuration (advanced). Any prospector configuration option
|
||||
# can be added under this section.
|
||||
#prospector:
|
||||
|
||||
# Debug logs
|
||||
#debug:
|
||||
#enabled: true
|
||||
|
||||
# Set custom paths for the log files. If left empty,
|
||||
# Filebeat will choose the paths depending on your OS.
|
||||
#var.paths:
|
||||
|
||||
# Prospector configuration (advanced). Any prospector configuration option
|
||||
# can be added under this section.
|
||||
#prospector:
|
||||
|
||||
# Startup logs
|
||||
#startup:
|
||||
#enabled: true
|
||||
|
||||
# Set custom paths for the log files. If left empty,
|
||||
# Filebeat will choose the paths depending on your OS.
|
||||
#var.paths:
|
||||
|
||||
# Prospector configuration (advanced). Any prospector configuration option
|
||||
# can be added under this section.
|
||||
#prospector:
|
||||
|
||||
#-------------------------------- MySQL Module -------------------------------
|
||||
#- module: mysql
|
||||
# Error logs
|
||||
#error:
|
||||
#enabled: true
|
||||
|
||||
# Set custom paths for the log files. If left empty,
|
||||
# Filebeat will choose the paths depending on your OS.
|
||||
#var.paths:
|
||||
|
||||
# Prospector configuration (advanced). Any prospector configuration option
|
||||
# can be added under this section.
|
||||
#prospector:
|
||||
|
||||
# Slow logs
|
||||
#slowlog:
|
||||
#enabled: true
|
||||
|
||||
# Set custom paths for the log files. If left empty,
|
||||
# Filebeat will choose the paths depending on your OS.
|
||||
#var.paths:
|
||||
|
||||
# Prospector configuration (advanced). Any prospector configuration option
|
||||
# can be added under this section.
|
||||
#prospector:
|
||||
|
||||
#-------------------------------- Nginx Module -------------------------------
|
||||
#- module: nginx
|
||||
# Access logs
|
||||
#access:
|
||||
#enabled: true
|
||||
|
||||
# Set custom paths for the log files. If left empty,
|
||||
# Filebeat will choose the paths depending on your OS.
|
||||
#var.paths:
|
||||
|
||||
# Prospector configuration (advanced). Any prospector configuration option
|
||||
# can be added under this section.
|
||||
#prospector:
|
||||
|
||||
# Error logs
|
||||
#error:
|
||||
#enabled: true
|
||||
|
||||
# Set custom paths for the log files. If left empty,
|
||||
# Filebeat will choose the paths depending on your OS.
|
||||
#var.paths:
|
||||
|
||||
# Prospector configuration (advanced). Any prospector configuration option
|
||||
# can be added under this section.
|
||||
#prospector:
|
||||
|
||||
|
||||
#=========================== Filebeat prospectors =============================
|
||||
|
||||
# List of prospectors to fetch data.
|
||||
filebeat.prospectors:
|
||||
# Each - is a prospector. Most options can be set at the prospector level, so
|
||||
# you can use different prospectors for various configurations.
|
||||
# Below are the prospector specific configurations.
|
||||
|
||||
# Type of the files. Based on this the way the file is read is decided.
|
||||
# The different types cannot be mixed in one prospector
|
||||
#
|
||||
# Possible options are:
|
||||
# * log: Reads every line of the log file (default)
|
||||
# * stdin: Reads the standard in
|
||||
|
||||
#------------------------------ Log prospector --------------------------------
|
||||
- input_type: log
|
||||
|
||||
# Change to true to enable this prospector configuration.
|
||||
enabled: false
|
||||
|
||||
# Paths that should be crawled and fetched. Glob based paths.
|
||||
# To fetch all ".log" files from a specific level of subdirectories
|
||||
# /var/log/*/*.log can be used.
|
||||
# For each file found under this path, a harvester is started.
|
||||
# Make sure not file is defined twice as this can lead to unexpected behaviour.
|
||||
paths:
|
||||
- /var/log/*.log
|
||||
#- c:\programdata\elasticsearch\logs\*
|
||||
|
||||
# Configure the file encoding for reading files with international characters
|
||||
# following the W3C recommendation for HTML5 (http://www.w3.org/TR/encoding).
|
||||
# Some sample encodings:
|
||||
# plain, utf-8, utf-16be-bom, utf-16be, utf-16le, big5, gb18030, gbk,
|
||||
# hz-gb-2312, euc-kr, euc-jp, iso-2022-jp, shift-jis, ...
|
||||
#encoding: plain
|
||||
|
||||
|
||||
# Exclude lines. A list of regular expressions to match. It drops the lines that are
|
||||
# matching any regular expression from the list. The include_lines is called before
|
||||
# exclude_lines. By default, no lines are dropped.
|
||||
#exclude_lines: ['^DBG']
|
||||
|
||||
# Include lines. A list of regular expressions to match. It exports the lines that are
|
||||
# matching any regular expression from the list. The include_lines is called before
|
||||
# exclude_lines. By default, all the lines are exported.
|
||||
#include_lines: ['^ERR', '^WARN']
|
||||
|
||||
# Exclude files. A list of regular expressions to match. Filebeat drops the files that
|
||||
# are matching any regular expression from the list. By default, no files are dropped.
|
||||
#exclude_files: ['.gz$']
|
||||
|
||||
# Optional additional fields. These field can be freely picked
|
||||
# to add additional information to the crawled log files for filtering
|
||||
#fields:
|
||||
# level: debug
|
||||
# review: 1
|
||||
|
||||
# Set to true to store the additional fields as top level fields instead
|
||||
# of under the "fields" sub-dictionary. In case of name conflicts with the
|
||||
# fields added by Filebeat itself, the custom fields overwrite the default
|
||||
# fields.
|
||||
#fields_under_root: false
|
||||
|
||||
# Ignore files which were modified more then the defined timespan in the past.
|
||||
# ignore_older is disabled by default, so no files are ignored by setting it to 0.
|
||||
# Time strings like 2h (2 hours), 5m (5 minutes) can be used.
|
||||
#ignore_older: 0
|
||||
|
||||
# Type to be published in the 'type' field. For Elasticsearch output,
|
||||
# the type defines the document type these entries should be stored
|
||||
# in. Default: log
|
||||
#document_type: log
|
||||
|
||||
# How often the prospector checks for new files in the paths that are specified
|
||||
# for harvesting. Specify 1s to scan the directory as frequently as possible
|
||||
# without causing Filebeat to scan too frequently. Default: 10s.
|
||||
#scan_frequency: 10s
|
||||
|
||||
# Defines the buffer size every harvester uses when fetching the file
|
||||
#harvester_buffer_size: 16384
|
||||
|
||||
# Maximum number of bytes a single log event can have
|
||||
# All bytes after max_bytes are discarded and not sent. The default is 10MB.
|
||||
# This is especially useful for multiline log messages which can get large.
|
||||
#max_bytes: 10485760
|
||||
|
||||
### Recursive glob configuration
|
||||
|
||||
# Expand "**" patterns into regular glob patterns.
|
||||
#recursive_glob.enabled: true
|
||||
|
||||
### JSON configuration
|
||||
|
||||
# Decode JSON options. Enable this if your logs are structured in JSON.
|
||||
# JSON key on which to apply the line filtering and multiline settings. This key
|
||||
# must be top level and its value must be string, otherwise it is ignored. If
|
||||
# no text key is defined, the line filtering and multiline features cannot be used.
|
||||
#json.message_key:
|
||||
|
||||
# By default, the decoded JSON is placed under a "json" key in the output document.
|
||||
# If you enable this setting, the keys are copied top level in the output document.
|
||||
#json.keys_under_root: false
|
||||
|
||||
# If keys_under_root and this setting are enabled, then the values from the decoded
|
||||
# JSON object overwrite the fields that Filebeat normally adds (type, source, offset, etc.)
|
||||
# in case of conflicts.
|
||||
#json.overwrite_keys: false
|
||||
|
||||
# If this setting is enabled, Filebeat adds a "json_error" key in case of JSON
|
||||
# unmarshaling errors or when a text key is defined in the configuration but cannot
|
||||
# be used.
|
||||
#json.add_error_key: false
|
||||
|
||||
### Multiline options

# Multiline can be used for log messages spanning multiple lines. This is common
# for Java stack traces or C line continuations.

# The regexp pattern that has to be matched. The example pattern matches all lines starting with [
#multiline.pattern: ^\[

# Defines if the pattern set under pattern should be negated or not. Default is false.
#multiline.negate: false

# Match can be set to "after" or "before". It is used to define if lines should be appended to a pattern
# that was (not) matched before or after, or as long as a pattern is not matched based on negate.
# Note: After is the equivalent to previous and before is the equivalent to next in Logstash
#multiline.match: after

# The maximum number of lines that are combined into one event.
# If there are more than max_lines, the additional lines are discarded.
# Default is 500.
#multiline.max_lines: 500

# After the defined timeout, a multiline event is sent even if no new pattern was found to start a new event.
# Default is 5s.
#multiline.timeout: 5s

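# For example, a sketch of how these options might be combined to join Java
# stack traces (every line starting with whitespace is appended to the line
# before it); the pattern below is illustrative, not a shipped default:
#
#multiline.pattern: '^[[:space:]]'
#multiline.negate: false
#multiline.match: after
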
# Setting tail_files to true means filebeat starts reading new files at the end
# instead of the beginning. If this is used in combination with log rotation,
# this can mean that the first entries of a new file are skipped.
#tail_files: false

# The Ingest Node pipeline ID associated with this prospector. If this is set, it
# overwrites the pipeline option from the Elasticsearch output.
#pipeline:

# If symlinks is enabled, symlinks are opened and harvested. The harvester opens the
# original file for harvesting but reports the symlink name as the source.
#symlinks: false

# Backoff values define how aggressively filebeat crawls open files for updates.
# The default values can be used in most cases. Backoff defines how long Filebeat
# waits before checking a file again after EOF is reached. The default is 1s, which
# means the file is checked every second for new lines. This leads to near real-time
# crawling. Every time a new line appears, backoff is reset to the initial value.
#backoff: 1s

# Max backoff defines what the maximum backoff time is. After having backed off
# multiple times from checking a file, the waiting time will never exceed max_backoff,
# independent of the backoff factor. With max_backoff set to 10s, it takes at most 10s
# to read a new line that is added to a log file after Filebeat has backed off multiple times.
#max_backoff: 10s

# The backoff factor defines how fast the algorithm backs off. The bigger the backoff factor,
# the faster the max_backoff value is reached. If this value is set to 1, no backoff will happen.
# The backoff value is multiplied by backoff_factor each time until max_backoff is reached.
#backoff_factor: 2

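# With the defaults above, the wait after each empty read grows as
# 1s * 2 = 2s, 2s * 2 = 4s, then 8s, and is finally capped at max_backoff:
# 1s -> 2s -> 4s -> 8s -> 10s (not 16s). A new line resets the wait to 1s.
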
# Max number of harvesters that are started in parallel.
# Default is 0, which means unlimited.
#harvester_limit: 0

### Harvester closing options

# Close inactive closes the file handler after the predefined period of inactivity.
# The period starts when the last line of the file was read, not from the file's ModTime.
# Time strings like 2h (2 hours) and 5m (5 minutes) can be used.
#close_inactive: 5m

# Close renamed closes a file handler when the file is renamed or rotated.
# Note: Potential data loss. Make sure to read and understand the docs for this option.
#close_renamed: false

# When enabling this option, a file handler is closed immediately in case a file can't be found
# any more. In case the file shows up again later, harvesting will continue at the last known position
# after scan_frequency.
#close_removed: true

# Closes the file handler as soon as the harvester reaches the end of the file.
# By default this option is disabled.
# Note: Potential data loss. Make sure to read and understand the docs for this option.
#close_eof: false

### State options

# If the modification time of a file is older than clean_inactive, the file's
# state is removed from the registry.
# By default this is disabled.
#clean_inactive: 0

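# A sketch of how these options are commonly combined (values illustrative,
# not defaults); clean_inactive must be greater than ignore_older +
# scan_frequency, so that no state is removed while a file is still eligible
# for harvesting:
#
#ignore_older: 48h
#clean_inactive: 72h
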
# Removes the state of files which cannot be found on disk anymore immediately.
#clean_removed: true

# Close timeout closes the harvester after the predefined time.
# This applies regardless of whether the harvester has finished reading the file or not.
# By default this option is disabled.
# Note: Potential data loss. Make sure to read and understand the docs for this option.
#close_timeout: 0

# Defines if the prospector is enabled.
#enabled: true

#----------------------------- Stdin prospector -------------------------------
# Configuration to use stdin input
#- input_type: stdin

#========================= Filebeat global options ============================

# Event count spool threshold - forces network flush if exceeded.
#filebeat.spool_size: 2048

# Enable async publisher pipeline in filebeat (Experimental!)
#filebeat.publish_async: false

# Defines how often the spooler is flushed. After idle_timeout is reached, the
# spooler is flushed even if spool_size has not been reached.
#filebeat.idle_timeout: 5s

# Name of the registry file. If a relative path is used, it is considered relative to the
# data path.
#filebeat.registry_file: ${path.data}/registry

#
# These config files must have the full filebeat config part inside, but only
# the prospector part is processed. All global options like spool_size are ignored.
# The config_dir MUST point to a different directory than the one the main filebeat config file is in.
#filebeat.config_dir:

# How long filebeat waits on shutdown for the publisher to finish.
# Default is 0, not waiting.
#filebeat.shutdown_timeout: 0

# Enable filebeat config reloading
#filebeat.config.prospectors:
  #enabled: false
  #path: configs/*.yml
  #reload.enabled: true
  #reload.period: 10s
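
# For example, with reloading enabled as sketched below (paths illustrative),
# Filebeat rescans configs/*.yml every 10s and starts or stops prospectors to
# match the files it finds:
#
#filebeat.config.prospectors:
#  path: configs/*.yml
#  reload.enabled: true
#  reload.period: 10s
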
151
vendor/github.com/elastic/beats/filebeat/_meta/beat.yml
generated
vendored
@ -1,151 +0,0 @@
###################### Filebeat Configuration Example #########################

# This file is an example configuration file highlighting only the most common
# options. The filebeat.full.yml file from the same directory contains all the
# supported options with more comments. You can use it as a reference.
#
# You can find the full configuration reference here:
# https://www.elastic.co/guide/en/beats/filebeat/index.html


#========================== Modules configuration ============================
filebeat.modules:

#------------------------------- System Module -------------------------------
#- module: system
  # Syslog
  #syslog:
    #enabled: true

    # Set custom paths for the log files. If left empty,
    # Filebeat will choose the paths depending on your OS.
    #var.paths:

  # Authorization logs
  #auth:
    #enabled: true

    # Set custom paths for the log files. If left empty,
    # Filebeat will choose the paths depending on your OS.
    #var.paths:

#------------------------------- Apache2 Module ------------------------------
#- module: apache2
  # Access logs
  #access:
    #enabled: true

    # Set custom paths for the log files. If left empty,
    # Filebeat will choose the paths depending on your OS.
    #var.paths:

  # Error logs
  #error:
    #enabled: true

    # Set custom paths for the log files. If left empty,
    # Filebeat will choose the paths depending on your OS.
    #var.paths:

#------------------------------- Auditd Module -------------------------------
#- module: auditd
  #log:
    #enabled: true

    # Set custom paths for the log files. If left empty,
    # Filebeat will choose the paths depending on your OS.
    #var.paths:


#-------------------------------- MySQL Module -------------------------------
#- module: mysql
  # Error logs
  #error:
    #enabled: true

    # Set custom paths for the log files. If left empty,
    # Filebeat will choose the paths depending on your OS.
    #var.paths:

  # Slow logs
  #slowlog:
    #enabled: true

    # Set custom paths for the log files. If left empty,
    # Filebeat will choose the paths depending on your OS.
    #var.paths:

#-------------------------------- Nginx Module -------------------------------
#- module: nginx
  # Access logs
  #access:
    #enabled: true

    # Set custom paths for the log files. If left empty,
    # Filebeat will choose the paths depending on your OS.
    #var.paths:

  # Error logs
  #error:
    #enabled: true

    # Set custom paths for the log files. If left empty,
    # Filebeat will choose the paths depending on your OS.
    #var.paths:


# For more available modules and options, please see the filebeat.full.yml sample
# configuration file.

#=========================== Filebeat prospectors =============================

filebeat.prospectors:

# Each - is a prospector. Most options can be set at the prospector level, so
# you can use different prospectors for various configurations.
# Below are the prospector specific configurations.

- input_type: log

  # Change to true to enable this prospector configuration.
  enabled: false

  # Paths that should be crawled and fetched. Glob based paths.
  paths:
    - /var/log/*.log
    #- c:\programdata\elasticsearch\logs\*

  # Exclude lines. A list of regular expressions to match. It drops the lines that are
  # matching any regular expression from the list.
  #exclude_lines: ['^DBG']

  # Include lines. A list of regular expressions to match. It exports the lines that are
  # matching any regular expression from the list.
  #include_lines: ['^ERR', '^WARN']

  # Exclude files. A list of regular expressions to match. Filebeat drops the files that
  # are matching any regular expression from the list. By default, no files are dropped.
  #exclude_files: ['.gz$']

  # Optional additional fields. These fields can be freely picked
  # to add additional information to the crawled log files for filtering.
  #fields:
  #  level: debug
  #  review: 1

  ### Multiline options

  # Multiline can be used for log messages spanning multiple lines. This is common
  # for Java stack traces or C line continuations.

  # The regexp pattern that has to be matched. The example pattern matches all lines starting with [
  #multiline.pattern: ^\[

  # Defines if the pattern set under pattern should be negated or not. Default is false.
  #multiline.negate: false

  # Match can be set to "after" or "before". It is used to define if lines should be appended to a pattern
  # that was (not) matched before or after, or as long as a pattern is not matched based on negate.
  # Note: After is the equivalent to previous and before is the equivalent to next in Logstash
  #multiline.match: after
55
vendor/github.com/elastic/beats/filebeat/beater/filebeat.go
generated
vendored
@ -1,11 +1,12 @@
package beater

import (
	"errors"
	"flag"
	"fmt"
	"sync"

	"github.com/pkg/errors"

	"github.com/elastic/beats/libbeat/beat"
	"github.com/elastic/beats/libbeat/common"
	"github.com/elastic/beats/libbeat/logp"
@ -41,6 +42,9 @@ func New(b *beat.Beat, rawConfig *common.Config) (beat.Beater, error) {
	if err != nil {
		return nil, err
	}
	if !moduleRegistry.Empty() {
		logp.Info("Enabled modules/filesets: %s", moduleRegistry.InfoString())
	}

	moduleProspectors, err := moduleRegistry.GetProspectorConfigs()
	if err != nil {
@ -54,8 +58,16 @@ func New(b *beat.Beat, rawConfig *common.Config) (beat.Beater, error) {
	// Add prospectors created by the modules
	config.Prospectors = append(config.Prospectors, moduleProspectors...)

	if !config.ProspectorReload.Enabled() && len(config.Prospectors) == 0 {
		return nil, errors.New("No prospectors defined. What files do you want me to watch?")
	haveEnabledProspectors := false
	for _, prospector := range config.Prospectors {
		if prospector.Enabled() {
			haveEnabledProspectors = true
			break
		}
	}

	if !config.ProspectorReload.Enabled() && !haveEnabledProspectors {
		return nil, errors.New("No modules or prospectors enabled and configuration reloading disabled. What files do you want me to watch?")
	}

	if *once && config.ProspectorReload.Enabled() {
@ -67,15 +79,26 @@ func New(b *beat.Beat, rawConfig *common.Config) (beat.Beater, error) {
		config:         &config,
		moduleRegistry: moduleRegistry,
	}

	// register `setup` callback for ML jobs
	if !moduleRegistry.Empty() {
		b.SetupMLCallback = func(b *beat.Beat) error {
			return fb.loadModulesML(b)
		}
	}
	return fb, nil
}

// modulesSetup is called when modules are configured to do the initial
// loadModulesPipelines is called when modules are configured to do the initial
// setup.
func (fb *Filebeat) modulesSetup(b *beat.Beat) error {
func (fb *Filebeat) loadModulesPipelines(b *beat.Beat) error {
	esConfig := b.Config.Output["elasticsearch"]
	if esConfig == nil || !esConfig.Enabled() {
		return fmt.Errorf("Filebeat modules configured but the Elasticsearch output is not configured/enabled")
		logp.Warn("Filebeat is unable to load the Ingest Node pipelines for the configured" +
			" modules because the Elasticsearch output is not configured/enabled. If you have" +
			" already loaded the Ingest Node pipelines or are using Logstash pipelines, you" +
			" can ignore this warning.")
		return nil
	}
	esClient, err := elasticsearch.NewConnectedClient(esConfig)
	if err != nil {
@ -91,13 +114,31 @@ func (fb *Filebeat) modulesSetup(b *beat.Beat) error {
	return nil
}

func (fb *Filebeat) loadModulesML(b *beat.Beat) error {
	logp.Debug("machine-learning", "Setting up ML jobs for modules")

	esConfig := b.Config.Output["elasticsearch"]
	if esConfig == nil || !esConfig.Enabled() {
		logp.Warn("Filebeat is unable to load the Xpack Machine Learning configurations for the" +
			" modules because the Elasticsearch output is not configured/enabled.")
		return nil
	}

	esClient, err := elasticsearch.NewConnectedClient(esConfig)
	if err != nil {
		return errors.Errorf("Error creating Elasticsearch client: %v", err)
	}

	return fb.moduleRegistry.LoadML(esClient)
}

// Run allows the beater to be run as a beat.
func (fb *Filebeat) Run(b *beat.Beat) error {
	var err error
	config := fb.config

	if !fb.moduleRegistry.Empty() {
		err = fb.modulesSetup(b)
		err = fb.loadModulesPipelines(b)
		if err != nil {
			return err
		}
2
vendor/github.com/elastic/beats/filebeat/docs/command-line.asciidoc
generated
vendored
@ -1,4 +1,4 @@
[[filebeat-command-line]]
[[command-line-options]]
=== Command Line Options

The following command line option is specific to Filebeat.
435
vendor/github.com/elastic/beats/filebeat/docs/fields.asciidoc
generated
vendored
@ -13,6 +13,7 @@ This document describes the fields that are exported by Filebeat. They are
|
||||
grouped in the following categories:
|
||||
|
||||
* <<exported-fields-apache2>>
|
||||
* <<exported-fields-auditd>>
|
||||
* <<exported-fields-beat>>
|
||||
* <<exported-fields-cloud>>
|
||||
* <<exported-fields-log>>
|
||||
@ -152,7 +153,7 @@ The minor version of the user agent.
|
||||
[float]
|
||||
=== apache2.access.user_agent.patch
|
||||
|
||||
type: long
|
||||
type: keyword
|
||||
|
||||
The patch version of the user agent.
|
||||
|
||||
@ -230,6 +231,22 @@ type: geo_point
|
||||
The longitude and latitude.
|
||||
|
||||
|
||||
[float]
|
||||
=== apache2.access.geoip.region_name
|
||||
|
||||
type: keyword
|
||||
|
||||
The region name.
|
||||
|
||||
|
||||
[float]
|
||||
=== apache2.access.geoip.city_name
|
||||
|
||||
type: keyword
|
||||
|
||||
The city name.
|
||||
|
||||
|
||||
[float]
|
||||
== error Fields
|
||||
|
||||
@ -285,6 +302,154 @@ type: keyword
|
||||
The module producing the logged message.
|
||||
|
||||
|
||||
[[exported-fields-auditd]]
|
||||
== Auditd Fields
|
||||
|
||||
Module for parsing auditd logs.
|
||||
|
||||
|
||||
|
||||
[float]
|
||||
== auditd Fields
|
||||
|
||||
Fields from the auditd logs.
|
||||
|
||||
|
||||
|
||||
[float]
|
||||
== log Fields
|
||||
|
||||
Fields from the Linux audit log. Not all fields are documented here because they are dynamic and vary by audit event type.
|
||||
|
||||
|
||||
|
||||
[float]
|
||||
=== auditd.log.record_type
|
||||
|
||||
The audit event type.
|
||||
|
||||
|
||||
[float]
|
||||
=== auditd.log.old_auid
|
||||
|
||||
For login events this is the old audit ID used for the user prior to this login.
|
||||
|
||||
|
||||
[float]
|
||||
=== auditd.log.new_auid
|
||||
|
||||
For login events this is the new audit ID. The audit ID can be used to trace future events to the user even if their identity changes (like becoming root).
|
||||
|
||||
|
||||
[float]
|
||||
=== auditd.log.old_ses
|
||||
|
||||
For login events this is the old session ID used for the user prior to this login.
|
||||
|
||||
|
||||
[float]
|
||||
=== auditd.log.new_ses
|
||||
|
||||
For login events this is the new session ID. It can be used to tie a user to future events by session ID.
|
||||
|
||||
|
||||
[float]
|
||||
=== auditd.log.sequence
|
||||
|
||||
type: long
|
||||
|
||||
The audit event sequence number.
|
||||
|
||||
|
||||
[float]
|
||||
=== auditd.log.acct
|
||||
|
||||
The user account name associated with the event.
|
||||
|
||||
|
||||
[float]
|
||||
=== auditd.log.pid
|
||||
|
||||
The ID of the process.
|
||||
|
||||
|
||||
[float]
|
||||
=== auditd.log.ppid

The ID of the parent process.
|
||||
|
||||
|
||||
[float]
|
||||
=== auditd.log.items
|
||||
|
||||
The number of items in an event.
|
||||
|
||||
|
||||
[float]
|
||||
=== auditd.log.item
|
||||
|
||||
The item field indicates which item out of the total number of items. This number is zero-based; a value of 0 means it is the first item.
|
||||
|
||||
|
||||
[float]
|
||||
=== auditd.log.a0
|
||||
|
||||
The first argument to the system call.
|
||||
|
||||
|
||||
[float]
|
||||
=== auditd.log.res
|
||||
|
||||
The result of the system call (success or failure).
|
||||
|
||||
|
||||
[float]
|
||||
== geoip Fields
|
||||
|
||||
Contains GeoIP information gathered based on the `auditd.log.addr` field. Only present if the GeoIP Elasticsearch plugin is available and used.
|
||||
|
||||
|
||||
|
||||
[float]
|
||||
=== auditd.log.geoip.continent_name
|
||||
|
||||
type: keyword
|
||||
|
||||
The name of the continent.
|
||||
|
||||
|
||||
[float]
|
||||
=== auditd.log.geoip.city_name
|
||||
|
||||
type: keyword
|
||||
|
||||
The name of the city.
|
||||
|
||||
|
||||
[float]
|
||||
=== auditd.log.geoip.region_name
|
||||
|
||||
type: keyword
|
||||
|
||||
The name of the region.
|
||||
|
||||
|
||||
[float]
|
||||
=== auditd.log.geoip.country_iso_code
|
||||
|
||||
type: keyword
|
||||
|
||||
Country ISO code.
|
||||
|
||||
|
||||
[float]
|
||||
=== auditd.log.geoip.location
|
||||
|
||||
type: geo_point
|
||||
|
||||
The longitude and latitude.
|
||||
|
||||
|
||||
[[exported-fields-beat]]
|
||||
== Beat Fields
|
||||
|
||||
@ -616,12 +781,20 @@ Contains fields for the Nginx access logs.
|
||||
|
||||
|
||||
|
||||
[float]
|
||||
=== nginx.access.remote_ip_list
|
||||
|
||||
type: list
|
||||
|
||||
An array of remote IP addresses. It is a list because it is common to include, besides the client IP address, IP addresses from headers like `X-Forwarded-For`. See also the `remote_ip` field.
|
||||
|
||||
|
||||
[float]
|
||||
=== nginx.access.remote_ip
|
||||
|
||||
type: keyword
|
||||
|
||||
Client IP address.
|
||||
Client IP address. The first public IP address from the `remote_ip_list` array. If no public IP addresses are present, this field contains the first private IP address from the `remote_ip_list` array.
|
||||
|
||||
|
||||
[float]
|
||||
@ -726,7 +899,7 @@ The minor version of the user agent.
|
||||
[float]
|
||||
=== nginx.access.user_agent.patch
|
||||
|
||||
type: long
|
||||
type: keyword
|
||||
|
||||
The patch version of the user agent.
|
||||
|
||||
@ -804,6 +977,22 @@ type: geo_point
|
||||
The longitude and latitude.
|
||||
|
||||
|
||||
[float]
|
||||
=== nginx.access.geoip.region_name
|
||||
|
||||
type: keyword
|
||||
|
||||
The region name.
|
||||
|
||||
|
||||
[float]
|
||||
=== nginx.access.geoip.city_name
|
||||
|
||||
type: keyword
|
||||
|
||||
The city name.
|
||||
|
||||
|
||||
[float]
|
||||
== error Fields
|
||||
|
||||
@ -865,6 +1054,246 @@ Fields from the system log files.
|
||||
|
||||
|
||||
|
||||
[float]
|
||||
== auth Fields
|
||||
|
||||
Fields from the Linux authorization logs.
|
||||
|
||||
|
||||
|
||||
[float]
|
||||
=== system.auth.timestamp
|
||||
|
||||
The timestamp as read from the auth message.
|
||||
|
||||
|
||||
[float]
|
||||
=== system.auth.hostname
|
||||
|
||||
The hostname as read from the auth message.
|
||||
|
||||
|
||||
[float]
|
||||
=== system.auth.program
|
||||
|
||||
The process name as read from the auth message.
|
||||
|
||||
|
||||
[float]
|
||||
=== system.auth.pid
|
||||
|
||||
type: long
|
||||
|
||||
The PID of the process that sent the auth message.
|
||||
|
||||
|
||||
[float]
|
||||
=== system.auth.message
|
||||
|
||||
The message in the log line.
|
||||
|
||||
|
||||
[float]
|
||||
=== system.auth.user
|
||||
|
||||
The Unix user that this event refers to.
|
||||
|
||||
|
||||
[float]
|
||||
== ssh Fields
|
||||
|
||||
Fields specific to SSH login events.
|
||||
|
||||
|
||||
|
||||
[float]
|
||||
=== system.auth.ssh.event
|
||||
|
||||
The SSH login event. Can be one of "Accepted", "Failed", or "Invalid". "Accepted" means a successful login. "Invalid" means that the user is not configured on the system. "Failed" means that the SSH login attempt has failed.
|
||||
|
||||
|
||||
[float]
|
||||
=== system.auth.ssh.method
|
||||
|
||||
The SSH authentication method. Can be one of "password" or "publickey".
|
||||
|
||||
|
||||
[float]
|
||||
=== system.auth.ssh.ip
|
||||
|
||||
type: ip
|
||||
|
||||
The client IP from where the login attempt was made.
|
||||
|
||||
|
||||
[float]
|
||||
=== system.auth.ssh.dropped_ip
|
||||
|
||||
type: ip
|
||||
|
||||
The client IP from SSH connections that are open and immediately dropped.
|
||||
|
||||
|
||||
[float]
|
||||
=== system.auth.ssh.port
|
||||
|
||||
type: long
|
||||
|
||||
The client port from where the login attempt was made.
|
||||
|
||||
|
||||
[float]
|
||||
=== system.auth.ssh.signature
|
||||
|
||||
The signature of the client public key.
|
||||
|
||||
|
||||
[float]
|
||||
== geoip Fields
|
||||
|
||||
Contains GeoIP information gathered based on the `system.auth.ip` field. Only present if the GeoIP Elasticsearch plugin is available and used.
|
||||
|
||||
|
||||
|
||||
[float]
|
||||
=== system.auth.ssh.geoip.continent_name
|
||||
|
||||
type: keyword
|
||||
|
||||
The name of the continent.
|
||||
|
||||
|
||||
[float]
|
||||
=== system.auth.ssh.geoip.city_name
|
||||
|
||||
type: keyword
|
||||
|
||||
The name of the city.
|
||||
|
||||
|
||||
[float]
|
||||
=== system.auth.ssh.geoip.region_name
|
||||
|
||||
type: keyword
|
||||
|
||||
The name of the region.
|
||||
|
||||
|
||||
[float]
|
||||
=== system.auth.ssh.geoip.country_iso_code
|
||||
|
||||
type: keyword
|
||||
|
||||
Country ISO code.
|
||||
|
||||
|
||||
[float]
|
||||
=== system.auth.ssh.geoip.location
|
||||
|
||||
type: geo_point
|
||||
|
||||
The longitude and latitude.
|
||||
|
||||
|
||||
[float]
|
||||
== sudo Fields
|
||||
|
||||
Fields specific to events created by the `sudo` command.
|
||||
|
||||
|
||||
|
||||
[float]
|
||||
=== system.auth.sudo.error
|
||||
|
||||
example: user NOT in sudoers
|
||||
|
||||
The error message in case the sudo command failed.
|
||||
|
||||
|
||||
[float]
|
||||
=== system.auth.sudo.tty
|
||||
|
||||
The TTY where the sudo command is executed.
|
||||
|
||||
|
||||
[float]
|
||||
=== system.auth.sudo.pwd
|
||||
|
||||
The current directory where the sudo command is executed.
|
||||
|
||||
|
||||
[float]
|
||||
=== system.auth.sudo.user
|
||||
|
||||
example: root
|
||||
|
||||
The target user to which the sudo command is switching.
|
||||
|
||||
|
||||
[float]
|
||||
=== system.auth.sudo.command
|
||||
|
||||
The command executed via sudo.
|
||||
|
||||
|
||||
[float]
|
||||
== useradd Fields
|
||||
|
||||
Fields specific to events created by the `useradd` command.
|
||||
|
||||
|
||||
|
||||
[float]
|
||||
=== system.auth.useradd.name
|
||||
|
||||
The user name being added.
|
||||
|
||||
|
||||
[float]
|
||||
=== system.auth.useradd.uid
|
||||
|
||||
type: long
|
||||
|
||||
The user ID.
|
||||
|
||||
[float]
|
||||
=== system.auth.useradd.gid
|
||||
|
||||
type: long
|
||||
|
||||
The group ID.
|
||||
|
||||
[float]
|
||||
=== system.auth.useradd.home
|
||||
|
||||
The home folder for the new user.
|
||||
|
||||
[float]
|
||||
=== system.auth.useradd.shell
|
||||
|
||||
The default shell for the new user.
|
||||
|
||||
[float]
|
||||
== groupadd Fields
|
||||
|
||||
Fields specific to events created by the `groupadd` command.
|
||||
|
||||
|
||||
|
||||
[float]
|
||||
=== system.auth.groupadd.name
|
||||
|
||||
The name of the new group.
|
||||
|
||||
|
||||
[float]
|
||||
=== system.auth.groupadd.gid
|
||||
|
||||
type: long
|
||||
|
||||
The ID of the new group.
|
||||
|
||||
|
||||
[float]
|
||||
== syslog Fields
|
||||
|
||||
|
54
vendor/github.com/elastic/beats/filebeat/docs/getting-started.asciidoc
generated
vendored
@ -18,7 +18,7 @@ After installing the Elastic Stack, read the following topics to learn how to in
|
||||
* <<filebeat-starting>>
|
||||
* <<filebeat-index-pattern>>
|
||||
* <<filebeat-modules-quickstart>>
|
||||
* <<filebeat-command-line>>
|
||||
* <<command-line-options>>
|
||||
* <<directory-layout>>
|
||||
|
||||
[[filebeat-installation]]
|
||||
@ -27,17 +27,7 @@ After installing the Elastic Stack, read the following topics to learn how to in
|
||||
Before running Filebeat, you need to install and configure the Elastic stack. See
|
||||
{libbeat}/getting-started.html[Getting Started with Beats and the Elastic Stack].
|
||||
|
||||
To download and install Filebeat, use the commands that work with your system
|
||||
(<<deb, deb>> for Debian/Ubuntu, <<rpm, rpm>> for Redhat/Centos/Fedora, <<mac,
|
||||
mac>> for OS X, and <<win, win>> for Windows).
|
||||
|
||||
[NOTE]
|
||||
==================================================
|
||||
If you use Apt or Yum, you can <<setup-repositories,install Filebeat from our repositories>> to update to the newest version more easily.
|
||||
|
||||
See our https://www.elastic.co/downloads/beats/filebeat[download page] for other installation options, such as 32-bit images.
|
||||
|
||||
==================================================
|
||||
include::../../libbeat/docs/shared-download-and-install.asciidoc[]
|
||||
|
||||
[[deb]]
|
||||
*deb:*
|
||||
@ -96,6 +86,24 @@ tar xzvf filebeat-{version}-darwin-x86_64.tar.gz
|
||||
|
||||
endif::[]
|
||||
|
||||
[[docker]]
|
||||
*docker:*
|
||||
|
||||
ifeval::["{release-state}"=="unreleased"]
|
||||
|
||||
Version {stack-version} of {beatname_uc} has not yet been released.
|
||||
|
||||
endif::[]
|
||||
|
||||
ifeval::["{release-state}"!="unreleased"]
|
||||
|
||||
["source", "shell", subs="attributes"]
|
||||
------------------------------------------------
|
||||
docker pull {dockerimage}
|
||||
------------------------------------------------
|
||||
|
||||
endif::[]
|
||||
|
||||
[[win]]
|
||||
*win:*
|
||||
|
||||
@ -116,7 +124,8 @@ https://www.elastic.co/downloads/beats/filebeat[downloads page].
|
||||
|
||||
. Open a PowerShell prompt as an Administrator (right-click the PowerShell icon and select *Run As Administrator*). If you are running Windows XP, you may need to download and install PowerShell.
|
||||
|
||||
. Run the following commands to install Filebeat as a Windows service:
|
||||
. From the PowerShell prompt, run the following commands to install Filebeat as a
|
||||
Windows service:
|
||||
+
|
||||
[source,shell]
|
||||
----------------------------------------------------------------------
|
||||
@ -138,15 +147,7 @@ started, you can skip the content in this section, including the remaining
|
||||
getting started steps, and go directly to the <<filebeat-modules-quickstart>>
|
||||
page.
|
||||
|
||||
To configure Filebeat manually, you edit the configuration file. For rpm and deb,
|
||||
you'll find the configuration file at `/etc/filebeat/filebeat.yml`. For mac and
|
||||
win, look in the archive that you just extracted. There’s also a full example
|
||||
configuration file called `filebeat.full.yml` that shows all non-deprecated
|
||||
options.
|
||||
|
||||
See the
|
||||
{libbeat}/config-file-format.html[Config File Format] section of the
|
||||
_Beats Platform Reference_ for more about the structure of the config file.
|
||||
include::../../libbeat/docs/shared-configuring.asciidoc[]
|
||||
|
||||
Here is a sample of the `filebeat` section of the `filebeat.yml` file. Filebeat uses predefined
|
||||
default values for most configuration options.
|
||||
@ -222,7 +223,7 @@ include::../../libbeat/docs/shared-template-load.asciidoc[]
|
||||
Start Filebeat by issuing the appropriate command for your platform.
|
||||
|
||||
NOTE: If you use an init.d script to start Filebeat on deb or rpm, you can't
|
||||
specify command line flags (see <<filebeat-command-line>>). To specify flags,
|
||||
specify command line flags (see <<command-line-options>>). To specify flags,
|
||||
start Filebeat in the foreground.
|
||||
|
||||
*deb:*
|
||||
@ -239,6 +240,13 @@ sudo /etc/init.d/filebeat start
|
||||
sudo /etc/init.d/filebeat start
|
||||
----------------------------------------------------------------------
|
||||
|
||||
*docker:*
|
||||
|
||||
["source", "shell", subs="attributes"]
|
||||
----------------------------------------------------------------------
|
||||
docker run {dockerimage}
|
||||
----------------------------------------------------------------------
|
||||
|
||||
*mac:*
|
||||
|
||||
[source,shell]
|
||||
|
4
vendor/github.com/elastic/beats/filebeat/docs/how-filebeat-works.asciidoc
generated
vendored
@ -39,7 +39,9 @@ filebeat.prospectors:
|
||||
- /var/path2/*.log
|
||||
-------------------------------------------------------------------------------------
|
||||
|
||||
Filebeat currently supports two `prospector` types: `log` and `stdin`. Each prospector type can be defined multiple times. The `log` prospector checks each file to see whether a harvester needs to be started, whether one is already running, or whether the file can be ignored (see <<ignore-older,`ignore_older`>>). New files are only picked up if the size of the file has changed since the harvester was closed.
|
||||
Filebeat currently supports two `prospector` types: `log` and `stdin`. Each prospector type can be defined multiple times. The `log` prospector checks each file to see whether a harvester needs to be started, whether one is already running, or whether the file can be ignored (see <<ignore-older,`ignore_older`>>). New lines are only picked up if the size of the file has changed since the harvester was closed.
|
||||
|
||||
NOTE: Filebeat prospectors can only read local files. There is no functionality to connect to remote hosts to read stored files or logs.
|
||||
|
||||
[float]
|
||||
=== How Does Filebeat Keep the State of Files?
|
||||
|
BIN
vendor/github.com/elastic/beats/filebeat/docs/images/false-after-multi.png
generated
vendored
Normal file
Binary file not shown.
After Width: | Height: | Size: 3.0 KiB |
BIN
vendor/github.com/elastic/beats/filebeat/docs/images/false-before-multi.png
generated
vendored
Normal file
Binary file not shown.
After Width: | Height: | Size: 3.1 KiB |
BIN
vendor/github.com/elastic/beats/filebeat/docs/images/kibana-audit-auditd.png
generated
vendored
Normal file
Binary file not shown.
After Width: | Height: | Size: 129 KiB |
BIN
vendor/github.com/elastic/beats/filebeat/docs/images/true-after-multi.png
generated
vendored
Normal file
Binary file not shown.
After Width: | Height: | Size: 3.5 KiB |
BIN
vendor/github.com/elastic/beats/filebeat/docs/images/true-before-multi.png
generated
vendored
Normal file
Binary file not shown.
After Width: | Height: | Size: 3.6 KiB |
4
vendor/github.com/elastic/beats/filebeat/docs/index.asciidoc
generated
vendored
@ -15,6 +15,7 @@ include::../../libbeat/docs/version.asciidoc[]
|
||||
:beatname_lc: filebeat
|
||||
:beatname_uc: Filebeat
|
||||
:security: X-Pack Security
|
||||
:dockerimage: docker.elastic.co/beats/{beatname_lc}:{version}
|
||||
|
||||
include::./overview.asciidoc[]
|
||||
|
||||
@ -28,6 +29,8 @@ include::../../libbeat/docs/shared-directory-layout.asciidoc[]
|
||||
|
||||
include::../../libbeat/docs/repositories.asciidoc[]
|
||||
|
||||
include::./running-on-docker.asciidoc[]
|
||||
|
||||
include::./upgrading.asciidoc[]
|
||||
|
||||
include::./how-filebeat-works.asciidoc[]
|
||||
@ -40,6 +43,7 @@ include::./multiline.asciidoc[]
|
||||
|
||||
include::../../libbeat/docs/shared-config-ingest.asciidoc[]
|
||||
|
||||
:standalone:
|
||||
include::../../libbeat/docs/shared-env-vars.asciidoc[]
|
||||
|
||||
include::./multiple-prospectors.asciidoc[]
|
||||
|
2
vendor/github.com/elastic/beats/filebeat/docs/migration.asciidoc
generated
vendored
@ -304,7 +304,7 @@ options with Logstash Forwarder, make sure that you add your options to the
|
||||
configuration file. For naming changes, see <<renamed-options>>.
|
||||
|
||||
Filebeat does provide command line options that are common to all Beats. For more details about
|
||||
these options, see <<filebeat-command-line>>.
|
||||
these options, see <<command-line-options>>.
|
||||
|
||||
[[renamed-options]]
|
||||
[float]
|
||||
|
2
vendor/github.com/elastic/beats/filebeat/docs/modules-getting-started.asciidoc
generated
vendored
@ -116,4 +116,4 @@ Open the dashboard and explore the visualizations for your parsed logs.
|
||||
|
||||
Here's an example of the syslog dashboard:
|
||||
|
||||
image:./images/kibana-system.png[Sylog dashboard]
|
||||
image:./images/kibana-system.png[Syslog dashboard]
|
||||
|
9
vendor/github.com/elastic/beats/filebeat/docs/modules-overview.asciidoc
generated
vendored
@ -6,7 +6,7 @@ beta[]
|
||||
Filebeat modules simplify the collection, parsing, and visualization of common
|
||||
log formats.
|
||||
|
||||
A typical module (say, for the Nginx logs) is composed of one ore
|
||||
A typical module (say, for the Nginx logs) is composed of one or
|
||||
more filesets (in the case of Nginx, `access` and `error`). A fileset contains
|
||||
the following:
|
||||
|
||||
@ -32,6 +32,8 @@ NOTE: At the moment, Filebeat modules require using the Elasticsearch
|
||||
be able to also configure Logstash as a more powerful alternative to Ingest
|
||||
Node.
|
||||
|
||||
Filebeat modules require Elasticsearch 5.2 or later.
|
||||
|
||||
=== Tutorial
|
||||
|
||||
This tutorial assumes you have Elasticsearch and Kibana installed and
|
||||
@ -103,6 +105,7 @@ filebeat.modules:
|
||||
|
||||
Then you can start Filebeat simply with: `./filebeat -e`.
|
||||
|
||||
[[module-varialbe-overrides]]
|
||||
==== Variable overrides
|
||||
|
||||
Each module and fileset has a set of "variables" which allow adjusting their
|
||||
@ -129,10 +132,10 @@ Or via the configuration file:
|
||||
filebeat.modules:
|
||||
- module: nginx
|
||||
access:
|
||||
var.paths = ["/var/log/nginx/access.log*"]
|
||||
var.paths: ["/var/log/nginx/access.log*"]
|
||||
----------------------------------------------------------------------
|
||||
|
||||
The Nginx `access` fileset also has a `pipeline` variables which allows
|
||||
The Nginx `access` fileset also has a `pipeline` variable which allows
|
||||
selecting which of the available Ingest Node pipelines is used for parsing. At
|
||||
the moment, two such pipelines are available, one that requires the two ingest
|
||||
plugins (`ingest-geoip` and `ingest-user-agent`) and one that doesn't. If you
|
||||
|
2
vendor/github.com/elastic/beats/filebeat/docs/modules.asciidoc
generated
vendored
@ -7,6 +7,8 @@ This section contains an <<filebeat-modules-overview,overview>> of the Filebeat
|
||||
modules feature as well as details about each of the currently supported
|
||||
modules.
|
||||
|
||||
Filebeat modules require Elasticsearch 5.2 or later.
|
||||
|
||||
//pass macro block used here to remove Edit links from modules documentation because it is generated
|
||||
pass::[<?edit_url?>]
|
||||
include::modules_list.asciidoc[]
|
||||
|
1
vendor/github.com/elastic/beats/filebeat/docs/modules/apache2.asciidoc
generated
vendored
@ -47,6 +47,7 @@ An array of paths where to look for the log files. If left empty, Filebeat
|
||||
will choose the paths depending on your operating system.
|
||||
|
||||
|
||||
[float]
|
||||
=== Fields
|
||||
|
||||
For a description of each field in the metricset, see the
|
||||
|
42
vendor/github.com/elastic/beats/filebeat/docs/modules/auditd.asciidoc
generated
vendored
Normal file
@ -0,0 +1,42 @@
|
||||
////
|
||||
This file is generated! See scripts/docs_collector.py
|
||||
////
|
||||
|
||||
[[filebeat-module-auditd]]
|
||||
== Auditd module
|
||||
|
||||
This module collects and parses logs from the audit daemon (`auditd`).
|
||||
|
||||
[float]
|
||||
=== Compatibility
|
||||
|
||||
This module was tested with logs from `auditd` on OSes like CentOS 6 and
|
||||
CentOS 7.
|
||||
|
||||
This module is not available for Windows.
|
||||
|
||||
[float]
|
||||
=== Dashboard
|
||||
|
||||
This module comes with a sample dashboard showing an overview of the audit log
|
||||
data. You can build more specific dashboards that are tailored to the audit
|
||||
rules that you use on your systems.
|
||||
|
||||
image::./images/kibana-audit-auditd.png[]
|
||||
|
||||
[float]
|
||||
=== Syslog fileset settings
|
||||
|
||||
[float]
|
||||
==== var.paths
|
||||
|
||||
An array of paths where to look for the log files. If left empty, Filebeat
|
||||
will choose the paths depending on your operating system.
|
||||
|
||||
|
||||
[float]
|
||||
=== Fields
|
||||
|
||||
For a description of each field in the fileset, see the
|
||||
<<exported-fields-auditd,exported fields>> section.
|
||||
|
3
vendor/github.com/elastic/beats/filebeat/docs/modules/mysql.asciidoc
generated
vendored
@ -7,12 +7,14 @@ This file is generated! See scripts/docs_collector.py
|
||||
|
||||
This module collects and parses the slow logs and error logs created by https://www.mysql.com/[MySQL].
|
||||
|
||||
[float]
|
||||
=== Compatibility
|
||||
|
||||
The MySQL module was tested with logs from versions 5.5 and 5.7.
|
||||
|
||||
On Windows, the module was tested with MySQL installed from the Chocolatey repository.
|
||||
|
||||
[float]
|
||||
=== Dashboard
|
||||
|
||||
This module comes with a sample dashboard.
|
||||
@ -38,6 +40,7 @@ An array of paths where to look for the log files. If left empty, Filebeat
|
||||
will choose the paths depending on your operating systems.
|
||||
|
||||
|
||||
[float]
|
||||
=== Fields
|
||||
|
||||
For a description of each field in the fileset, see the
|
||||
|
1
vendor/github.com/elastic/beats/filebeat/docs/modules/nginx.asciidoc
generated
vendored
@ -47,6 +47,7 @@ will choose the paths depending on your operating systems.
|
||||
|
||||
|
||||
|
||||
[float]
|
||||
=== Fields
|
||||
|
||||
For a description of each field in the fileset, see the
|
||||
|
12
vendor/github.com/elastic/beats/filebeat/docs/modules/system.asciidoc
generated
vendored
@ -5,18 +5,21 @@ This file is generated! See scripts/docs_collector.py
|
||||
[[filebeat-module-system]]
|
||||
== System module
|
||||
|
||||
This module collects and parses logs created by system logging server of common Unix/Linux based
|
||||
distributions.
|
||||
This module collects and parses logs created by system logging server of common
|
||||
Unix/Linux based distributions.
|
||||
|
||||
[float]
|
||||
=== Compatibility
|
||||
|
||||
This module was tested with logs from OSes like Ubuntu 12.04, Centos 7, macOS Sierra, and others.
|
||||
This module was tested with logs from OSes like Ubuntu 12.04, Centos 7, and
|
||||
macOS Sierra.
|
||||
|
||||
This module is not available for Windows.
|
||||
|
||||
[float]
|
||||
=== Dashboard
|
||||
|
||||
This module comes with a sample dashboard.
|
||||
This module comes with a sample dashboard showing syslog data.
|
||||
|
||||
image::./images/kibana-system.png[]
|
||||
|
||||
@ -30,6 +33,7 @@ An array of paths where to look for the log files. If left empty, Filebeat
|
||||
will choose the paths depending on your operating systems.
|
||||
|
||||
|
||||
[float]
|
||||
=== Fields
|
||||
|
||||
For a description of each field in the fileset, see the
|
||||
|
2
vendor/github.com/elastic/beats/filebeat/docs/modules_list.asciidoc
generated
vendored
@ -4,6 +4,7 @@ This file is generated! See scripts/docs_collector.py
|
||||
|
||||
* <<filebeat-modules-overview>>
|
||||
* <<filebeat-module-apache2>>
|
||||
* <<filebeat-module-auditd>>
|
||||
* <<filebeat-module-mysql>>
|
||||
* <<filebeat-module-nginx>>
|
||||
* <<filebeat-module-system>>
|
||||
@ -14,6 +15,7 @@ This file is generated! See scripts/docs_collector.py
|
||||
|
||||
include::modules-overview.asciidoc[]
|
||||
include::modules/apache2.asciidoc[]
|
||||
include::modules/auditd.asciidoc[]
|
||||
include::modules/mysql.asciidoc[]
|
||||
include::modules/nginx.asciidoc[]
|
||||
include::modules/system.asciidoc[]
|
||||
|
13
vendor/github.com/elastic/beats/filebeat/docs/multiline.asciidoc
generated
vendored
@ -1,8 +1,15 @@
|
||||
[[multiline-examples]]
|
||||
== Managing Multiline Messages
|
||||
|
||||
You can specify `multiline` settings in the +{beatname_lc}.yml+ file to control how Filebeat deals with messages that
|
||||
span multiple lines. At a minimum, you need to configure:
|
||||
The files harvested by {beatname_uc} may contain messages that span multiple lines of text. In order to correctly handle
|
||||
these multiline events, you need to configure `multiline` settings in the +{beatname_lc}.yml+ file to specify which
|
||||
lines are part of a single event.
|
||||
|
||||
IMPORTANT: If you are sending multiline events to Logstash, use the options described here to handle multiline events
|
||||
before sending the event data to Logstash. Trying to implement multiline event handling in Logstash (for example, by
|
||||
using the Logstash multiline codec) may result in the mixing of streams and corrupted data.
|
||||
|
||||
At a minimum, you need to configure these `multiline` options:
|
||||
|
||||
* the `pattern` option, which specifies a regular expression. Depending on how you configure other multiline options,
|
||||
lines that match the specified regular expression are considered either continuations of a previous line or the start of a new multiline event. You can set the `negate` option to negate the pattern.
|
||||
@ -73,7 +80,7 @@ To consolidate these lines into a single event in Filebeat, use the following mu
|
||||
|
||||
[source,yaml]
|
||||
-------------------------------------------------------------------------------------
|
||||
multiline.pattern: '^[[:space:]]+|^Caused by:'
|
||||
multiline.pattern: '^[[:space:]]+(at|\.{3})\b|^Caused by:'
|
||||
multiline.negate: false
|
||||
multiline.match: after
|
||||
-------------------------------------------------------------------------------------
|
||||
|
4
vendor/github.com/elastic/beats/filebeat/docs/overview.asciidoc
generated
vendored
@ -1,10 +1,10 @@
|
||||
[[filebeat-overview]]
|
||||
== Overview
|
||||
|
||||
Filebeat is a log data shipper. Installed as an agent on your servers, Filebeat monitors the log directories or specific log files, tails the files,
|
||||
Filebeat is a log data shipper for local files. Installed as an agent on your servers, Filebeat monitors the log directories or specific log files, tails the files,
|
||||
and forwards them either to https://www.elastic.co/products/elasticsearch[Elasticsearch] or https://www.elastic.co/products/logstash[Logstash] for indexing.
|
||||
|
||||
Here's how Filebeat works: When you start Filebeat, it starts one or more prospectors that look in the paths you've specified for log files. For each log file that the prospector locates, Filebeat starts a harvester. Each harvester reads a single log file for new content and sends the new log data to the spooler, which aggregates the events and sends the aggregated data to the output that you've configured for Filebeat.
|
||||
Here's how Filebeat works: When you start Filebeat, it starts one or more prospectors that look in the local paths you've specified for log files. For each log file that the prospector locates, Filebeat starts a harvester. Each harvester reads a single log file for new content and sends the new log data to the spooler, which aggregates the events and sends the aggregated data to the output that you've configured for Filebeat.
|
||||
|
||||
image:./images/filebeat.png[Beats design]
|
||||
|
||||
|
@ -14,7 +14,6 @@ filebeat.prospectors:
|
||||
- input_type: log
|
||||
paths:
|
||||
- /var/log/apache/httpd-*.log
|
||||
document_type: apache
|
||||
|
||||
- input_type: log
|
||||
paths:
|
||||
@ -297,6 +296,8 @@ The default setting is 10s.
|
||||
[[filebeat-document-type]]
|
||||
===== document_type
|
||||
|
||||
deprecated[5.5,Use `fields` instead]
|
||||
|
||||
The event type to use for published lines read by harvesters. For Elasticsearch
|
||||
output, the value that you specify here is used to set the `type` field in the output
|
||||
document. The default value is `log`.
|
||||
@ -348,6 +349,10 @@ occur.
|
||||
[[multiline]]
|
||||
===== multiline
|
||||
|
||||
IMPORTANT: If you are sending multiline events to Logstash, use the options described here to handle multiline events
|
||||
before sending the event data to Logstash. Trying to implement multiline event handling in Logstash (for example, by
|
||||
using the Logstash multiline codec) may result in the mixing of streams and corrupted data.
|
||||
|
||||
Options that control how Filebeat deals with log messages that span multiple lines. Multiline messages are common in files that contain Java stack traces.
|
||||
|
||||
The following example shows how to configure Filebeat to handle a multiline message where the first line of the message begins with a bracket (`[`).
|
||||
@ -384,11 +389,11 @@ somewhat from the patterns supported by Logstash. See <<regexp-support>> for a l
|
||||
+
|
||||
[options="header"]
|
||||
|=======================
|
||||
|Setting for `negate` | Setting for `match` | Result
|
||||
|`false` | `after` | Consecutive lines that match the pattern are appended to the previous line that doesn't match.
|
||||
|`false` | `before` | Consecutive lines that match the pattern are prepended to the next line that doesn't match.
|
||||
|`true` | `after` | Consecutive lines that don't match the pattern are appended to the previous line that does match.
|
||||
|`true` | `before` | Consecutive lines that don't match the pattern are prepended to the next line that does match.
|
||||
|Setting for `negate` | Setting for `match` | Result | Example `pattern: ^b`
|
||||
|`false` | `after` | Consecutive lines that match the pattern are appended to the previous line that doesn't match. | image:./images/false-after-multi.png[Lines a b b c b b become "abb" and "cbb"]
|
||||
|`false` | `before` | Consecutive lines that match the pattern are prepended to the next line that doesn't match. | image:./images/false-before-multi.png[Lines b b a b b c become "bba" and "bbc"]
|
||||
|`true` | `after` | Consecutive lines that don't match the pattern are appended to the previous line that does match. | image:./images/true-after-multi.png[Lines b a c b d e become "bac" and "bde"]
|
||||
|`true` | `before` | Consecutive lines that don't match the pattern are prepended to the next line that does match. | image:./images/true-before-multi.png[Lines a c b d e b become "acb" and "deb"]
|
||||
|=======================
|
||||
+
|
||||
NOTE: The `after` setting is equivalent to `previous` in https://www.elastic.co/guide/en/logstash/current/plugins-codecs-multiline.html[Logstash], and `before` is equivalent to `next`.
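
A minimal sketch of the table's first row, using its example `pattern: ^b` (an
illustration, not part of the generated docs): with the settings below, the lines
a, b, b, c, b, b are consolidated into the events "abb" and "cbb".

[source,yaml]
-------------------------------------------------------------------------------------
multiline.pattern: '^b'
multiline.negate: false
multiline.match: after
-------------------------------------------------------------------------------------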
|
||||
@ -427,7 +432,7 @@ Because this option may lead to data loss, it is disabled by default.
|
||||
|
||||
===== backoff
|
||||
|
||||
The backoff options specify how aggressively Filebeat crawls new files for updates.
|
||||
The backoff options specify how aggressively Filebeat crawls open files for updates.
|
||||
You can use the default values in most cases.
|
||||
|
||||
The `backoff` option defines how long Filebeat
|
||||
|
1
vendor/github.com/elastic/beats/filebeat/docs/running-on-docker.asciidoc
generated
vendored
Normal file
@ -0,0 +1 @@
include::../../libbeat/docs/shared-docker.asciidoc[]
876
vendor/github.com/elastic/beats/filebeat/fields.yml
generated
vendored
@ -1,876 +0,0 @@
|
||||
|
||||
- key: beat
|
||||
title: Beat
|
||||
description: >
|
||||
Contains common beat fields available in all event types.
|
||||
fields:
|
||||
|
||||
- name: beat.name
|
||||
description: >
|
||||
The name of the Beat sending the log messages. If the Beat name is
|
||||
set in the configuration file, then that value is used. If it is not
|
||||
set, the hostname is used. To set the Beat name, use the `name`
|
||||
option in the configuration file.
|
||||
- name: beat.hostname
|
||||
description: >
|
||||
The hostname as returned by the operating system on which the Beat is
|
||||
running.
|
||||
- name: beat.timezone
|
||||
description: >
|
||||
The timezone as returned by the operating system on which the Beat is
|
||||
running.
|
||||
- name: beat.version
|
||||
description: >
|
||||
The version of the beat that generated this event.
|
||||
|
||||
- name: "@timestamp"
|
||||
type: date
|
||||
required: true
|
||||
format: date
|
||||
example: August 26th 2016, 12:35:53.332
|
||||
description: >
|
||||
The timestamp when the event log record was generated.
|
||||
|
||||
- name: tags
|
||||
description: >
|
||||
Arbitrary tags that can be set per Beat and per transaction
|
||||
type.
|
||||
|
||||
- name: fields
|
||||
type: object
|
||||
object_type: keyword
|
||||
description: >
|
||||
Contains user configurable fields.
|
||||
|
||||
- name: error
|
||||
type: group
|
||||
description: >
|
||||
Error fields containing additional info in case of errors.
|
||||
fields:
|
||||
- name: message
|
||||
type: text
|
||||
description: >
|
||||
Error message.
|
||||
- name: code
|
||||
type: long
|
||||
description: >
|
||||
Error code.
|
||||
- name: type
|
||||
type: keyword
|
||||
description: >
|
||||
Error type.
|
||||
- key: cloud
|
||||
title: Cloud Provider Metadata
|
||||
description: >
|
||||
Metadata from cloud providers added by the add_cloud_metadata processor.
|
||||
fields:
|
||||
|
||||
- name: meta.cloud.provider
|
||||
example: ec2
|
||||
description: >
|
||||
Name of the cloud provider. Possible values are ec2, gce, or digitalocean.
|
||||
|
||||
- name: meta.cloud.instance_id
|
||||
description: >
|
||||
Instance ID of the host machine.
|
||||
|
||||
- name: meta.cloud.machine_type
|
||||
example: t2.medium
|
||||
description: >
|
||||
Machine type of the host machine.
|
||||
|
||||
- name: meta.cloud.availability_zone
|
||||
example: us-east-1c
|
||||
description: >
|
||||
Availability zone in which this host is running.
|
||||
|
||||
- name: meta.cloud.project_id
|
||||
example: project-x
|
||||
description: >
|
||||
Name of the project in Google Cloud.
|
||||
|
||||
- name: meta.cloud.region
|
||||
description: >
|
||||
Region in which this host is running.
|
||||
- key: kubernetes
|
||||
title: Kubernetes info
|
||||
description: >
|
||||
Kubernetes metadata added by the kubernetes processor
|
||||
fields:
|
||||
- name: kubernetes.pod.name
|
||||
type: keyword
|
||||
description: >
|
||||
Kubernetes pod name
|
||||
|
||||
- name: kubernetes.namespace
|
||||
type: keyword
|
||||
description: >
|
||||
Kubernetes namespace
|
||||
|
||||
- name: kubernetes.labels
|
||||
type: object
|
||||
description: >
|
||||
Kubernetes labels map
|
||||
|
||||
- name: kubernetes.annotations
|
||||
type: object
|
||||
description: >
|
||||
Kubernetes annotations map
|
||||
|
||||
- name: kubernetes.container.name
|
||||
type: keyword
|
||||
description: >
|
||||
Kubernetes container name
|
||||
- key: log
  title: Log File Content
  description: >
    Contains log file lines.
  fields:
    - name: source
      type: keyword
      required: true
      description: >
        The file from which the line was read. This field contains the absolute path to the file.
        For example: `/var/log/system.log`.

    - name: offset
      type: long
      required: false
      description: >
        The file offset the reported line starts at.

    - name: message
      type: text
      ignore_above: 0
      required: true
      description: >
        The content of the line read from the log file.

    - name: type
      required: true
      description: >
        The name of the log event. This field is set to the value specified for the `document_type` option in the prospector section of the Filebeat config file.

    - name: input_type
      required: true
      description: >
        The input type from which the event was generated. This field is set to the value specified for the `input_type` option in the prospector section of the Filebeat config file.

    - name: error
      description: >
        Ingestion pipeline error message, added in case there are errors reported by
        the Ingest Node in Elasticsearch.

    - name: read_timestamp
      description: >
        In case the ingest pipeline parses the timestamp from the log contents, it stores
        the original `@timestamp` (representing the time when the log line was read) in this
        field.

    - name: fileset.module
      description: >
        The Filebeat module that generated this event.

    - name: fileset.name
      description: >
        The Filebeat fileset that generated this event.
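To illustrate where the type and input_type fields above come from, a minimal prospector sketch (the path and the document_type value are placeholders):

filebeat.prospectors:
  - input_type: log
    paths:
      - /var/log/system.log
    document_type: syslog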
- key: apache2
  title: "Apache2"
  description: >
    Apache2 Module
  short_config: true
  fields:
    - name: apache2
      type: group
      description: >
        Apache2 fields.
      fields:
        - name: access
          type: group
          description: >
            Contains fields for the Apache2 HTTPD access logs.
          fields:
            - name: remote_ip
              type: keyword
              description: >
                Client IP address.
            - name: user_name
              type: keyword
              description: >
                The user name used when basic authentication is used.
            - name: method
              type: keyword
              example: GET
              description: >
                The request HTTP method.
            - name: url
              type: keyword
              description: >
                The request HTTP URL.
            - name: http_version
              type: keyword
              description: >
                The HTTP version.
            - name: response_code
              type: long
              description: >
                The HTTP response code.
            - name: body_sent.bytes
              type: long
              format: bytes
              description: >
                The number of bytes of the server response body.
            - name: referrer
              type: keyword
              description: >
                The HTTP referrer.
            - name: agent
              type: text
              description: >
                Contains the un-parsed user agent string. Only present if the user
                agent Elasticsearch plugin is not available or not used.
            - name: user_agent
              type: group
              description: >
                Contains the parsed User agent field. Only present if the user
                agent Elasticsearch plugin is available and used.
              fields:
                - name: device
                  type: keyword
                  description: >
                    The name of the physical device.
                - name: major
                  type: long
                  description: >
                    The major version of the user agent.
                - name: minor
                  type: long
                  description: >
                    The minor version of the user agent.
                - name: patch
                  type: long
                  description: >
                    The patch version of the user agent.
                - name: name
                  type: keyword
                  example: Chrome
                  description: >
                    The name of the user agent.
                - name: os
                  type: keyword
                  description: >
                    The name of the operating system.
                - name: os_major
                  type: long
                  description: >
                    The major version of the operating system.
                - name: os_minor
                  type: long
                  description: >
                    The minor version of the operating system.
                - name: os_name
                  type: keyword
                  description: >
                    The name of the operating system.
            - name: geoip
              type: group
              description: >
                Contains GeoIP information gathered based on the remote_ip field.
                Only present if the GeoIP Elasticsearch plugin is available and
                used.
              fields:
                - name: continent_name
                  type: keyword
                  description: >
                    The name of the continent.
                - name: country_iso_code
                  type: keyword
                  description: >
                    Country ISO code.
                - name: location
                  type: geo_point
                  description: >
                    The longitude and latitude.

        - name: error
          type: group
          description: >
            Fields from the Apache error logs.
          fields:
            - name: level
              type: keyword
              description: >
                The severity level of the message.
            - name: client
              type: keyword
              description: >
                The IP address of the client that generated the error.
            - name: message
              type: text
              description: >
                The logged message.
            - name: pid
              type: long
              description: >
                The process ID.
            - name: tid
              type: long
              description: >
                The thread ID.
            - name: module
              type: keyword
              description: >
                The module producing the logged message.
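A short module configuration sketch for reference, since the apache2 fields above are produced by the corresponding Filebeat module; the path globs are placeholders, and leaving var.paths empty lets Filebeat pick OS defaults:

filebeat.modules:
  - module: apache2
    access:
      var.paths: ["/var/log/apache2/access.log*"]
    error:
      var.paths: ["/var/log/apache2/error.log*"]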
- key: auditd
  title: "Auditd"
  description: >
    Module for parsing auditd logs.
  short_config: true
  fields:
    - name: auditd
      type: group
      description: >
        Fields from the auditd logs.
      fields:
        - name: log
          type: group
          description: >
            Fields from the Linux audit log. Not all fields are documented here because
            they are dynamic and vary by audit event type.
          fields:
            - name: record_type
              description: >
                The audit event type.
            - name: old_auid
              description: >
                For login events this is the old audit ID used for the user prior to
                this login.
            - name: new_auid
              description: >
                For login events this is the new audit ID. The audit ID can be used to
                trace future events to the user even if their identity changes (like
                becoming root).
            - name: old_ses
              description: >
                For login events this is the old session ID used for the user prior to
                this login.
            - name: new_ses
              description: >
                For login events this is the new session ID. It can be used to tie a
                user to future events by session ID.
            - name: sequence
              type: long
              description: >
                The audit event sequence number.
            - name: acct
              description: >
                The user account name associated with the event.
            - name: pid
              description: >
                The ID of the process.
            - name: ppid
              description: >
                The ID of the parent process.
            - name: items
              description: >
                The number of items in an event.
            - name: item
              description: >
                The item field indicates which item out of the total number of items.
                This number is zero-based; a value of 0 means it is the first item.
            - name: a0
              description: >
                The first argument to the system call.
            - name: res
              description: >
                The result of the system call (success or failure).
            - name: geoip
              type: group
              description: >
                Contains GeoIP information gathered based on the `auditd.log.addr`
                field. Only present if the GeoIP Elasticsearch plugin is available and
                used.
              fields:
                - name: continent_name
                  type: keyword
                  description: >
                    The name of the continent.
                - name: city_name
                  type: keyword
                  description: >
                    The name of the city.
                - name: region_name
                  type: keyword
                  description: >
                    The name of the region.
                - name: country_iso_code
                  type: keyword
                  description: >
                    Country ISO code.
                - name: location
                  type: geo_point
                  description: >
                    The longitude and latitude.
- key: icinga
  title: "Icinga"
  description: >
    Icinga Module
  fields:
    - name: icinga
      type: group
      description: >
      fields:
        - name: debug
          type: group
          description: >
            Contains fields for the Icinga debug logs.
          fields:
            - name: facility
              type: keyword
              description: >
                Specifies what component of Icinga logged the message.
            - name: severity
              type: keyword
              description: >
                Possible values are "debug", "notice", "information", "warning" or
                "critical".
            - name: message
              type: text
              description: >
                The logged message.

        - name: main
          type: group
          description: >
            Contains fields for the Icinga main logs.
          fields:
            - name: facility
              type: keyword
              description: >
                Specifies what component of Icinga logged the message.
            - name: severity
              type: keyword
              description: >
                Possible values are "debug", "notice", "information", "warning" or
                "critical".
            - name: message
              type: text
              description: >
                The logged message.

        - name: startup
          type: group
          description: >
            Contains fields for the Icinga startup logs.
          fields:
            - name: facility
              type: keyword
              description: >
                Specifies what component of Icinga logged the message.
            - name: severity
              type: keyword
              description: >
                Possible values are "debug", "notice", "information", "warning" or
                "critical".
            - name: message
              type: text
              description: >
                The logged message.
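For reference, the three Icinga filesets documented above (debug, main, startup) map onto a module configuration like this sketch; the fileset names come from the field groups above, and an unset var.paths falls back to OS defaults:

filebeat.modules:
  - module: icinga
    debug:
      enabled: true
    main:
      enabled: true
    startup:
      enabled: true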
- key: mysql
  title: "MySQL"
  description: >
    Module for parsing the MySQL log files.
  short_config: true
  fields:
    - name: mysql
      type: group
      description: >
        Fields from the MySQL log files.
      fields:
        - name: error
          type: group
          description: >
            Contains fields from the MySQL error logs.
          fields:
            - name: timestamp
              description: >
                The timestamp from the log line.
            - name: thread_id
              type: long
              description: >
                As of MySQL 5.7.2, this is the thread id. For MySQL versions prior to 5.7.2, this
                field contains the process id.
            - name: level
              example: "Warning"
              description:
                The log level.
            - name: message
              type: text
              description: >
                The logged message.

        - name: slowlog
          type: group
          description: >
            Contains fields from the MySQL slow logs.
          fields:
            - name: user
              description: >
                The MySQL user that created the query.
            - name: host
              description: >
                The host from where the user that created the query logged in.
            - name: ip
              description: >
                The IP address from where the user that created the query logged in.
            - name: query_time.sec
              type: float
              description: >
                The total time the query took, in seconds, as a floating point number.
            - name: lock_time.sec
              type: float
              description: >
                The amount of time the query waited for the lock to be available. The
                value is in seconds, as a floating point number.
            - name: rows_sent
              type: long
              description: >
                The number of rows returned by the query.
            - name: rows_examined
              type: long
              description: >
                The number of rows scanned by the query.
            - name: timestamp
              type: long
              description: >
                The unix timestamp taken from the `SET timestamp` query.
            - name: query
              description: >
                The slow query.
            - name: id
              type: long
              description: >
                The connection ID for the query.
- key: nginx
  title: "Nginx"
  description: >
    Module for parsing the Nginx log files.
  short_config: true
  fields:
    - name: nginx
      type: group
      description: >
        Fields from the Nginx log files.
      fields:
        - name: access
          type: group
          description: >
            Contains fields for the Nginx access logs.
          fields:
            - name: remote_ip
              type: keyword
              description: >
                Client IP address.
            - name: user_name
              type: keyword
              description: >
                The user name used when basic authentication is used.
            - name: method
              type: keyword
              example: GET
              description: >
                The request HTTP method.
            - name: url
              type: keyword
              description: >
                The request HTTP URL.
            - name: http_version
              type: keyword
              description: >
                The HTTP version.
            - name: response_code
              type: long
              description: >
                The HTTP response code.
            - name: body_sent.bytes
              type: long
              format: bytes
              description: >
                The number of bytes of the server response body.
            - name: referrer
              type: keyword
              description: >
                The HTTP referrer.
            - name: agent
              type: text
              description: >
                Contains the un-parsed user agent string. Only present if the user
                agent Elasticsearch plugin is not available or not used.
            - name: user_agent
              type: group
              description: >
                Contains the parsed User agent field. Only present if the user
                agent Elasticsearch plugin is available and used.
              fields:
                - name: device
                  type: keyword
                  description: >
                    The name of the physical device.
                - name: major
                  type: long
                  description: >
                    The major version of the user agent.
                - name: minor
                  type: long
                  description: >
                    The minor version of the user agent.
                - name: patch
                  type: long
                  description: >
                    The patch version of the user agent.
                - name: name
                  type: keyword
                  example: Chrome
                  description: >
                    The name of the user agent.
                - name: os
                  type: keyword
                  description: >
                    The name of the operating system.
                - name: os_major
                  type: long
                  description: >
                    The major version of the operating system.
                - name: os_minor
                  type: long
                  description: >
                    The minor version of the operating system.
                - name: os_name
                  type: keyword
                  description: >
                    The name of the operating system.
            - name: geoip
              type: group
              description: >
                Contains GeoIP information gathered based on the remote_ip field.
                Only present if the GeoIP Elasticsearch plugin is available and
                used.
              fields:
                - name: continent_name
                  type: keyword
                  description: >
                    The name of the continent.
                - name: country_iso_code
                  type: keyword
                  description: >
                    Country ISO code.
                - name: location
                  type: geo_point
                  description: >
                    The longitude and latitude.

        - name: error
          type: group
          description: >
            Contains fields for the Nginx error logs.
          fields:
            - name: level
              type: keyword
              description: >
                Error level (e.g. error, critical).
            - name: pid
              type: long
              description: >
                Process identifier (PID).
            - name: tid
              type: long
              description: >
                Thread identifier.
            - name: connection_id
              type: long
              description: >
                Connection identifier.
            - name: message
              type: text
              description: >
                The error message
- key: system
  title: "System"
  description: >
    Module for parsing system log files.
  short_config: true
  fields:
    - name: system
      type: group
      description: >
        Fields from the system log files.
      fields:
        - name: auth
          type: group
          description: >
            Fields from the Linux authorization logs.
          fields:
            - name: timestamp
              description: >
                The timestamp as read from the auth message.
            - name: hostname
              description: >
                The hostname as read from the auth message.
            - name: program
              description: >
                The process name as read from the auth message.
            - name: pid
              type: long
              description: >
                The PID of the process that sent the auth message.
            - name: message
              description: >
                The message in the log line.
            - name: user
              description: >
                The Unix user that this event refers to.

            - name: ssh
              type: group
              description: >
                Fields specific to SSH login events.
              fields:
                - name: event
                  description: >
                    The SSH login event. Can be one of "Accepted", "Failed", or "Invalid". "Accepted"
                    means a successful login. "Invalid" means that the user is not configured on the
                    system. "Failed" means that the SSH login attempt has failed.
                - name: method
                  description: >
                    The SSH authentication method. Can be one of "password" or "publickey".
                - name: ip
                  type: ip
                  description: >
                    The client IP from where the login attempt was made.
                - name: dropped_ip
                  type: ip
                  description: >
                    The client IP from SSH connections that are open and immediately dropped.
                - name: port
                  type: long
                  description: >
                    The client port from where the login attempt was made.
                - name: signature
                  description: >
                    The signature of the client public key.
                - name: geoip
                  type: group
                  description: >
                    Contains GeoIP information gathered based on the `system.auth.ip` field.
                    Only present if the GeoIP Elasticsearch plugin is available and
                    used.
                  fields:
                    - name: continent_name
                      type: keyword
                      description: >
                        The name of the continent.
                    - name: city_name
                      type: keyword
                      description: >
                        The name of the city.
                    - name: region_name
                      type: keyword
                      description: >
                        The name of the region.
                    - name: country_iso_code
                      type: keyword
                      description: >
                        Country ISO code.
                    - name: location
                      type: geo_point
                      description: >
                        The longitude and latitude.

            - name: sudo
              type: group
              description: >
                Fields specific to events created by the `sudo` command.
              fields:
                - name: error
                  example: user NOT in sudoers
                  description: >
                    The error message in case the sudo command failed.
                - name: tty
                  description: >
                    The TTY where the sudo command is executed.
                - name: pwd
                  description: >
                    The current directory where the sudo command is executed.
                - name: user
                  example: root
                  description: >
                    The target user to which the sudo command is switching.
                - name: command
                  description: >
                    The command executed via sudo.

            - name: useradd
              type: group
              description: >
                Fields specific to events created by the `useradd` command.
              fields:
                - name: name
                  description: >
                    The user name being added.
                - name: uid
                  type: long
                  description:
                    The user ID.
                - name: gid
                  type: long
                  description:
                    The group ID.
                - name: home
                  description:
                    The home folder for the new user.
                - name: shell
                  description:
                    The default shell for the new user.

            - name: groupadd
              type: group
              description: >
                Fields specific to events created by the `groupadd` command.
              fields:
                - name: name
                  description: >
                    The name of the new group.
                - name: gid
                  type: long
                  description: >
                    The ID of the new group.

        - name: syslog
          type: group
          description: >
            Contains fields from the syslog system logs.
          fields:
            - name: timestamp
              description: >
                The timestamp as read from the syslog message.
            - name: hostname
              description: >
                The hostname as read from the syslog message.
            - name: program
              description: >
                The process name as read from the syslog message.
            - name: pid
              description: >
                The PID of the process that sent the syslog message.
            - name: message
              description: >
                The message in the log line.
45
vendor/github.com/elastic/beats/filebeat/filebeat.full.yml
generated
vendored
@ -56,6 +56,19 @@ filebeat.modules:
    # can be added under this section.
    #prospector:

#------------------------------- Auditd Module -------------------------------
#- module: auditd
  #log:
    #enabled: true

    # Set custom paths for the log files. If left empty,
    # Filebeat will choose the paths depending on your OS.
    #var.paths:

    # Prospector configuration (advanced). Any prospector configuration option
    # can be added under this section.
    #prospector:

#-------------------------------- MySQL Module -------------------------------
#- module: mysql
  # Error logs
@ -510,6 +523,14 @@ output.elasticsearch:
  # Path to the Elasticsearch 2.x version of the template file.
  #template.versions.2x.path: "${path.config}/filebeat.template-es2x.json"

  # If set to true, filebeat checks the Elasticsearch version at connect time, and if it
  # is 6.x, it loads the file specified by the template.versions.6x.path setting. The
  # default is true.
  #template.versions.6x.enabled: true

  # Path to the Elasticsearch 6.x version of the template file.
  #template.versions.6x.path: "${path.config}/filebeat.template-es6x.json"

  # Use SSL settings for HTTPS. Default is true.
  #ssl.enabled: true
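Taken together, the new settings above let one configuration serve both Elasticsearch 5.x and 6.x clusters; a hedged sketch of the uncommented form, using the default paths shipped above:

output.elasticsearch:
  template.versions.6x.enabled: true
  template.versions.6x.path: "${path.config}/filebeat.template-es6x.json"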
@ -542,6 +563,10 @@ output.elasticsearch:
  # Configure curve types for ECDHE based cipher suites
  #ssl.curve_types: []

  # Configure what types of renegotiation are supported. Valid options are
  # never, once, and freely. Default is never.
  #ssl.renegotiation: never


#----------------------------- Logstash output ---------------------------------
#output.logstash:
@ -564,6 +589,11 @@ output.elasticsearch:
  # new batches.
  #pipelining: 0

  # If enabled only a subset of events in a batch of events is transferred per
  # transaction. The number of events to be sent increases up to `bulk_max_size`
  # if no error is encountered.
  #slow_start: false

  # Optional index name. The default index name is set to the name of the beat
  # in all lowercase.
  #index: 'filebeat'
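The slow_start option documented above changes how the Logstash output ramps batch sizes back up after errors; a hedged sketch of enabling it (the host is a placeholder):

output.logstash:
  hosts: ["localhost:5044"]
  bulk_max_size: 2048
  slow_start: true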
@ -606,6 +636,10 @@ output.elasticsearch:
  # Configure curve types for ECDHE based cipher suites
  #ssl.curve_types: []

  # Configure what types of renegotiation are supported. Valid options are
  # never, once, and freely. Default is never.
  #ssl.renegotiation: never

#------------------------------- Kafka output ----------------------------------
#output.kafka:
  # Boolean flag to enable or disable the output module.
@ -741,6 +775,10 @@ output.elasticsearch:
  # Configure curve types for ECDHE based cipher suites
  #ssl.curve_types: []

  # Configure what types of renegotiation are supported. Valid options are
  # never, once, and freely. Default is never.
  #ssl.renegotiation: never

#------------------------------- Redis output ----------------------------------
#output.redis:
  # Boolean flag to enable or disable the output module.
@ -838,6 +876,10 @@ output.elasticsearch:
  # Configure curve types for ECDHE based cipher suites
  #ssl.curve_types: []

  # Configure what types of renegotiation are supported. Valid options are
  # never, once, and freely. Default is never.
  #ssl.renegotiation: never


#------------------------------- File output -----------------------------------
#output.file:
@ -980,3 +1022,6 @@ logging.files:
  # Number of rotated log files to keep. Oldest files will be deleted first.
  #keepfiles: 7

  # The permissions mask to apply when rotating log files. The default value is 0600.
  # Must be a valid Unix-style file permissions mask expressed in octal notation.
  #permissions: 0600
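The new permissions option above rounds out the log-rotation settings; a hedged sketch of a locked-down logging section (values mirror the documented defaults):

logging.files:
  keepfiles: 7
  permissions: 0600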
288
vendor/github.com/elastic/beats/filebeat/filebeat.template-es2x.json
generated
vendored
@ -7,7 +7,7 @@
        }
      },
      "_meta": {
        "version": "5.3.2"
        "version": "5.6.6"
      },
      "date_detection": false,
      "dynamic_templates": [
@ -46,6 +46,11 @@
                },
                "geoip": {
                  "properties": {
                    "city_name": {
                      "ignore_above": 1024,
                      "index": "not_analyzed",
                      "type": "string"
                    },
                    "continent_name": {
                      "ignore_above": 1024,
                      "index": "not_analyzed",
@ -58,6 +63,11 @@
                    },
                    "location": {
                      "type": "geo_point"
                    },
                    "region_name": {
                      "ignore_above": 1024,
                      "index": "not_analyzed",
                      "type": "string"
                    }
                  }
                },
@ -124,7 +134,9 @@
                      "type": "string"
                    },
                    "patch": {
                      "type": "long"
                      "ignore_above": 1024,
                      "index": "not_analyzed",
                      "type": "string"
                    }
                  }
                },
@ -169,6 +181,104 @@
            }
          }
        },
        "auditd": {
          "properties": {
            "log": {
              "properties": {
                "a0": {
                  "ignore_above": 1024,
                  "index": "not_analyzed",
                  "type": "string"
                },
                "acct": {
                  "ignore_above": 1024,
                  "index": "not_analyzed",
                  "type": "string"
                },
                "geoip": {
                  "properties": {
                    "city_name": {
                      "ignore_above": 1024,
                      "index": "not_analyzed",
                      "type": "string"
                    },
                    "continent_name": {
                      "ignore_above": 1024,
                      "index": "not_analyzed",
                      "type": "string"
                    },
                    "country_iso_code": {
                      "ignore_above": 1024,
                      "index": "not_analyzed",
                      "type": "string"
                    },
                    "location": {
                      "type": "geo_point"
                    },
                    "region_name": {
                      "ignore_above": 1024,
                      "index": "not_analyzed",
                      "type": "string"
                    }
                  }
                },
                "item": {
                  "ignore_above": 1024,
                  "index": "not_analyzed",
                  "type": "string"
                },
                "items": {
                  "ignore_above": 1024,
                  "index": "not_analyzed",
                  "type": "string"
                },
                "new_auid": {
                  "ignore_above": 1024,
                  "index": "not_analyzed",
                  "type": "string"
                },
                "new_ses": {
                  "ignore_above": 1024,
                  "index": "not_analyzed",
                  "type": "string"
                },
                "old_auid": {
                  "ignore_above": 1024,
                  "index": "not_analyzed",
                  "type": "string"
                },
                "old_ses": {
                  "ignore_above": 1024,
                  "index": "not_analyzed",
                  "type": "string"
                },
                "pid": {
                  "ignore_above": 1024,
                  "index": "not_analyzed",
                  "type": "string"
                },
                "ppid": {
                  "ignore_above": 1024,
                  "index": "not_analyzed",
                  "type": "string"
                },
                "record_type": {
                  "ignore_above": 1024,
                  "index": "not_analyzed",
                  "type": "string"
                },
                "res": {
                  "ignore_above": 1024,
                  "index": "not_analyzed",
                  "type": "string"
                },
                "sequence": {
                  "type": "long"
                }
              }
            }
          }
        },
        "beat": {
          "properties": {
            "hostname": {
@ -193,9 +303,6 @@
          "index": "not_analyzed",
          "type": "string"
        },
        "fields": {
          "properties": {}
        },
        "fileset": {
          "properties": {
            "module": {
@ -358,6 +465,11 @@
                },
                "geoip": {
                  "properties": {
                    "city_name": {
                      "ignore_above": 1024,
                      "index": "not_analyzed",
                      "type": "string"
                    },
                    "continent_name": {
                      "ignore_above": 1024,
                      "index": "not_analyzed",
@ -370,6 +482,11 @@
                    },
                    "location": {
                      "type": "geo_point"
                    },
                    "region_name": {
                      "ignore_above": 1024,
                      "index": "not_analyzed",
                      "type": "string"
                    }
                  }
                },
@ -436,7 +553,9 @@
                      "type": "string"
                    },
                    "patch": {
                      "type": "long"
                      "ignore_above": 1024,
                      "index": "not_analyzed",
                      "type": "string"
                    }
                  }
                },
@ -489,6 +608,163 @@
        },
        "system": {
          "properties": {
            "auth": {
              "properties": {
                "groupadd": {
                  "properties": {
                    "gid": {
                      "type": "long"
                    },
                    "name": {
                      "ignore_above": 1024,
                      "index": "not_analyzed",
                      "type": "string"
                    }
                  }
                },
                "hostname": {
                  "ignore_above": 1024,
                  "index": "not_analyzed",
                  "type": "string"
                },
                "message": {
                  "ignore_above": 1024,
                  "index": "not_analyzed",
                  "type": "string"
                },
                "pid": {
                  "type": "long"
                },
                "program": {
                  "ignore_above": 1024,
                  "index": "not_analyzed",
                  "type": "string"
                },
                "ssh": {
                  "properties": {
                    "dropped_ip": {
                      "ignore_above": 1024,
                      "index": "not_analyzed",
                      "type": "string"
                    },
                    "event": {
                      "ignore_above": 1024,
                      "index": "not_analyzed",
                      "type": "string"
                    },
                    "geoip": {
                      "properties": {
                        "city_name": {
                          "ignore_above": 1024,
                          "index": "not_analyzed",
                          "type": "string"
                        },
                        "continent_name": {
                          "ignore_above": 1024,
                          "index": "not_analyzed",
                          "type": "string"
                        },
                        "country_iso_code": {
                          "ignore_above": 1024,
                          "index": "not_analyzed",
                          "type": "string"
                        },
                        "location": {
                          "type": "geo_point"
                        },
                        "region_name": {
                          "ignore_above": 1024,
                          "index": "not_analyzed",
                          "type": "string"
                        }
                      }
                    },
                    "ip": {
                      "ignore_above": 1024,
                      "index": "not_analyzed",
                      "type": "string"
                    },
                    "method": {
                      "ignore_above": 1024,
                      "index": "not_analyzed",
                      "type": "string"
                    },
                    "port": {
                      "type": "long"
                    },
                    "signature": {
                      "ignore_above": 1024,
                      "index": "not_analyzed",
                      "type": "string"
                    }
                  }
                },
                "sudo": {
                  "properties": {
                    "command": {
                      "ignore_above": 1024,
                      "index": "not_analyzed",
                      "type": "string"
                    },
                    "error": {
                      "ignore_above": 1024,
                      "index": "not_analyzed",
                      "type": "string"
                    },
                    "pwd": {
                      "ignore_above": 1024,
                      "index": "not_analyzed",
                      "type": "string"
                    },
                    "tty": {
                      "ignore_above": 1024,
                      "index": "not_analyzed",
                      "type": "string"
                    },
                    "user": {
                      "ignore_above": 1024,
                      "index": "not_analyzed",
                      "type": "string"
                    }
                  }
                },
                "timestamp": {
                  "ignore_above": 1024,
                  "index": "not_analyzed",
                  "type": "string"
                },
                "user": {
                  "ignore_above": 1024,
                  "index": "not_analyzed",
                  "type": "string"
                },
                "useradd": {
                  "properties": {
                    "gid": {
                      "type": "long"
                    },
                    "home": {
                      "ignore_above": 1024,
                      "index": "not_analyzed",
                      "type": "string"
                    },
                    "name": {
                      "ignore_above": 1024,
                      "index": "not_analyzed",
                      "type": "string"
                    },
                    "shell": {
                      "ignore_above": 1024,
                      "index": "not_analyzed",
                      "type": "string"
                    },
                    "uid": {
                      "type": "long"
                    }
                  }
                }
              }
            },
            "syslog": {
              "properties": {
                "hostname": {
691
vendor/github.com/elastic/beats/filebeat/filebeat.template-es6x.json
generated
vendored
Normal file
@ -0,0 +1,691 @@
{
  "mappings": {
    "_default_": {
      "_meta": {
        "version": "5.6.6"
      },
      "date_detection": false,
      "dynamic_templates": [
        {
          "strings_as_keyword": {
            "mapping": {
              "ignore_above": 1024,
              "type": "keyword"
            },
            "match_mapping_type": "string"
          }
        }
      ],
      "properties": {
        "@timestamp": {
          "type": "date"
        },
        "apache2": {
          "properties": {
            "access": {
              "properties": {
                "agent": {
                  "norms": false,
                  "type": "text"
                },
                "body_sent": {
                  "properties": {
                    "bytes": {
                      "type": "long"
                    }
                  }
                },
                "geoip": {
                  "properties": {
                    "city_name": {
                      "ignore_above": 1024,
                      "type": "keyword"
                    },
                    "continent_name": {
                      "ignore_above": 1024,
                      "type": "keyword"
                    },
                    "country_iso_code": {
                      "ignore_above": 1024,
                      "type": "keyword"
                    },
                    "location": {
                      "type": "geo_point"
                    },
                    "region_name": {
                      "ignore_above": 1024,
                      "type": "keyword"
                    }
                  }
                },
                "http_version": {
                  "ignore_above": 1024,
                  "type": "keyword"
                },
                "method": {
                  "ignore_above": 1024,
                  "type": "keyword"
                },
                "referrer": {
                  "ignore_above": 1024,
                  "type": "keyword"
                },
                "remote_ip": {
                  "ignore_above": 1024,
                  "type": "keyword"
                },
                "response_code": {
                  "type": "long"
                },
                "url": {
                  "ignore_above": 1024,
                  "type": "keyword"
                },
                "user_agent": {
                  "properties": {
                    "device": {
                      "ignore_above": 1024,
                      "type": "keyword"
                    },
                    "major": {
                      "type": "long"
                    },
                    "minor": {
                      "type": "long"
                    },
                    "name": {
                      "ignore_above": 1024,
                      "type": "keyword"
                    },
                    "os": {
                      "ignore_above": 1024,
                      "type": "keyword"
                    },
                    "os_major": {
                      "type": "long"
                    },
                    "os_minor": {
                      "type": "long"
                    },
                    "os_name": {
                      "ignore_above": 1024,
                      "type": "keyword"
                    },
                    "patch": {
                      "ignore_above": 1024,
                      "type": "keyword"
                    }
                  }
                },
                "user_name": {
                  "ignore_above": 1024,
                  "type": "keyword"
                }
              }
            },
            "error": {
              "properties": {
                "client": {
                  "ignore_above": 1024,
                  "type": "keyword"
                },
                "level": {
                  "ignore_above": 1024,
                  "type": "keyword"
                },
                "message": {
                  "norms": false,
                  "type": "text"
                },
                "module": {
                  "ignore_above": 1024,
                  "type": "keyword"
                },
                "pid": {
                  "type": "long"
                },
                "tid": {
                  "type": "long"
                }
              }
            }
          }
        },
        "auditd": {
          "properties": {
            "log": {
              "properties": {
                "a0": {
                  "ignore_above": 1024,
                  "type": "keyword"
                },
                "acct": {
                  "ignore_above": 1024,
                  "type": "keyword"
                },
                "geoip": {
                  "properties": {
                    "city_name": {
                      "ignore_above": 1024,
                      "type": "keyword"
                    },
                    "continent_name": {
                      "ignore_above": 1024,
                      "type": "keyword"
                    },
                    "country_iso_code": {
                      "ignore_above": 1024,
                      "type": "keyword"
                    },
                    "location": {
                      "type": "geo_point"
                    },
                    "region_name": {
                      "ignore_above": 1024,
                      "type": "keyword"
                    }
                  }
                },
                "item": {
                  "ignore_above": 1024,
                  "type": "keyword"
                },
                "items": {
                  "ignore_above": 1024,
                  "type": "keyword"
                },
                "new_auid": {
                  "ignore_above": 1024,
                  "type": "keyword"
                },
                "new_ses": {
                  "ignore_above": 1024,
                  "type": "keyword"
                },
                "old_auid": {
                  "ignore_above": 1024,
                  "type": "keyword"
                },
                "old_ses": {
                  "ignore_above": 1024,
                  "type": "keyword"
                },
                "pid": {
                  "ignore_above": 1024,
                  "type": "keyword"
                },
                "ppid": {
                  "ignore_above": 1024,
                  "type": "keyword"
                },
                "record_type": {
                  "ignore_above": 1024,
                  "type": "keyword"
                },
                "res": {
                  "ignore_above": 1024,
                  "type": "keyword"
                },
                "sequence": {
                  "type": "long"
                }
              }
            }
          }
        },
        "beat": {
          "properties": {
            "hostname": {
              "ignore_above": 1024,
              "type": "keyword"
            },
            "name": {
              "ignore_above": 1024,
              "type": "keyword"
            },
            "version": {
              "ignore_above": 1024,
              "type": "keyword"
            }
          }
        },
        "error": {
          "ignore_above": 1024,
          "type": "keyword"
        },
        "fileset": {
          "properties": {
            "module": {
              "ignore_above": 1024,
              "type": "keyword"
            },
            "name": {
              "ignore_above": 1024,
              "type": "keyword"
            }
          }
        },
        "input_type": {
          "ignore_above": 1024,
          "type": "keyword"
        },
        "message": {
          "norms": false,
          "type": "text"
        },
        "meta": {
          "properties": {
            "cloud": {
              "properties": {
                "availability_zone": {
                  "ignore_above": 1024,
                  "type": "keyword"
                },
                "instance_id": {
                  "ignore_above": 1024,
                  "type": "keyword"
                },
                "machine_type": {
                  "ignore_above": 1024,
                  "type": "keyword"
                },
                "project_id": {
                  "ignore_above": 1024,
                  "type": "keyword"
                },
                "provider": {
                  "ignore_above": 1024,
                  "type": "keyword"
                },
                "region": {
                  "ignore_above": 1024,
                  "type": "keyword"
                }
              }
            }
          }
        },
        "mysql": {
          "properties": {
            "error": {
              "properties": {
                "level": {
                  "ignore_above": 1024,
                  "type": "keyword"
                },
                "message": {
                  "norms": false,
                  "type": "text"
                },
                "thread_id": {
                  "type": "long"
                },
                "timestamp": {
                  "ignore_above": 1024,
                  "type": "keyword"
                }
              }
            },
            "slowlog": {
              "properties": {
                "host": {
                  "ignore_above": 1024,
                  "type": "keyword"
                },
                "id": {
                  "type": "long"
                },
                "ip": {
                  "ignore_above": 1024,
                  "type": "keyword"
                },
                "lock_time": {
                  "properties": {
                    "sec": {
                      "type": "float"
                    }
                  }
                },
                "query": {
                  "ignore_above": 1024,
                  "type": "keyword"
                },
                "query_time": {
                  "properties": {
                    "sec": {
                      "type": "float"
                    }
                  }
                },
                "rows_examined": {
                  "type": "long"
                },
                "rows_sent": {
                  "type": "long"
                },
                "timestamp": {
                  "type": "long"
                },
                "user": {
                  "ignore_above": 1024,
                  "type": "keyword"
                }
              }
            }
          }
        },
        "nginx": {
          "properties": {
            "access": {
              "properties": {
                "agent": {
                  "norms": false,
                  "type": "text"
                },
                "body_sent": {
                  "properties": {
                    "bytes": {
                      "type": "long"
                    }
                  }
                },
                "geoip": {
                  "properties": {
                    "city_name": {
                      "ignore_above": 1024,
                      "type": "keyword"
                    },
                    "continent_name": {
                      "ignore_above": 1024,
                      "type": "keyword"
                    },
                    "country_iso_code": {
                      "ignore_above": 1024,
                      "type": "keyword"
                    },
                    "location": {
                      "type": "geo_point"
                    },
                    "region_name": {
                      "ignore_above": 1024,
                      "type": "keyword"
                    }
                  }
                },
                "http_version": {
                  "ignore_above": 1024,
                  "type": "keyword"
                },
                "method": {
                  "ignore_above": 1024,
                  "type": "keyword"
                },
                "referrer": {
                  "ignore_above": 1024,
                  "type": "keyword"
                },
                "remote_ip": {
                  "ignore_above": 1024,
                  "type": "keyword"
                },
                "response_code": {
                  "type": "long"
                },
                "url": {
                  "ignore_above": 1024,
                  "type": "keyword"
                },
                "user_agent": {
                  "properties": {
                    "device": {
                      "ignore_above": 1024,
                      "type": "keyword"
                    },
                    "major": {
                      "type": "long"
                    },
                    "minor": {
                      "type": "long"
                    },
                    "name": {
                      "ignore_above": 1024,
                      "type": "keyword"
                    },
                    "os": {
                      "ignore_above": 1024,
                      "type": "keyword"
                    },
                    "os_major": {
                      "type": "long"
                    },
                    "os_minor": {
                      "type": "long"
                    },
                    "os_name": {
                      "ignore_above": 1024,
                      "type": "keyword"
                    },
                    "patch": {
                      "ignore_above": 1024,
                      "type": "keyword"
                    }
                  }
                },
                "user_name": {
                  "ignore_above": 1024,
                  "type": "keyword"
                }
              }
            },
            "error": {
              "properties": {
                "connection_id": {
                  "type": "long"
                },
                "level": {
                  "ignore_above": 1024,
                  "type": "keyword"
                },
                "message": {
                  "norms": false,
                  "type": "text"
                },
                "pid": {
                  "type": "long"
                },
                "tid": {
                  "type": "long"
                }
              }
            }
          }
        },
        "offset": {
          "type": "long"
        },
        "read_timestamp": {
          "ignore_above": 1024,
          "type": "keyword"
        },
        "source": {
          "ignore_above": 1024,
          "type": "keyword"
        },
        "system": {
          "properties": {
            "auth": {
              "properties": {
                "groupadd": {
                  "properties": {
                    "gid": {
                      "type": "long"
                    },
                    "name": {
                      "ignore_above": 1024,
                      "type": "keyword"
                    }
                  }
                },
                "hostname": {
                  "ignore_above": 1024,
                  "type": "keyword"
                },
                "message": {
                  "ignore_above": 1024,
                  "type": "keyword"
                },
                "pid": {
                  "type": "long"
                },
                "program": {
                  "ignore_above": 1024,
                  "type": "keyword"
                },
                "ssh": {
                  "properties": {
                    "dropped_ip": {
                      "type": "ip"
                    },
                    "event": {
                      "ignore_above": 1024,
                      "type": "keyword"
                    },
                    "geoip": {
                      "properties": {
                        "city_name": {
                          "ignore_above": 1024,
                          "type": "keyword"
                        },
                        "continent_name": {
                          "ignore_above": 1024,
                          "type": "keyword"
                        },
                        "country_iso_code": {
                          "ignore_above": 1024,
                          "type": "keyword"
                        },
                        "location": {
                          "type": "geo_point"
                        },
                        "region_name": {
                          "ignore_above": 1024,
                          "type": "keyword"
                        }
                      }
                    },
                    "ip": {
                      "type": "ip"
                    },
                    "method": {
                      "ignore_above": 1024,
                      "type": "keyword"
                    },
                    "port": {
                      "type": "long"
                    },
                    "signature": {
                      "ignore_above": 1024,
                      "type": "keyword"
                    }
                  }
                },
                "sudo": {
                  "properties": {
                    "command": {
                      "ignore_above": 1024,
                      "type": "keyword"
                    },
                    "error": {
                      "ignore_above": 1024,
                      "type": "keyword"
                    },
                    "pwd": {
                      "ignore_above": 1024,
                      "type": "keyword"
                    },
                    "tty": {
                      "ignore_above": 1024,
                      "type": "keyword"
                    },
                    "user": {
                      "ignore_above": 1024,
                      "type": "keyword"
                    }
                  }
                },
                "timestamp": {
                  "ignore_above": 1024,
                  "type": "keyword"
                },
                "user": {
                  "ignore_above": 1024,
                  "type": "keyword"
                },
                "useradd": {
                  "properties": {
                    "gid": {
                      "type": "long"
                    },
                    "home": {
                      "ignore_above": 1024,
                      "type": "keyword"
                    },
                    "name": {
                      "ignore_above": 1024,
                      "type": "keyword"
                    },
                    "shell": {
                      "ignore_above": 1024,
                      "type": "keyword"
                    },
                    "uid": {
                      "type": "long"
                    }
                  }
                }
              }
            },
            "syslog": {
              "properties": {
                "hostname": {
                  "ignore_above": 1024,
                  "type": "keyword"
                },
                "message": {
                  "ignore_above": 1024,
                  "type": "keyword"
                },
                "pid": {
                  "ignore_above": 1024,
                  "type": "keyword"
                },
                "program": {
                  "ignore_above": 1024,
                  "type": "keyword"
                },
                "timestamp": {
                  "ignore_above": 1024,
                  "type": "keyword"
                }
              }
            }
          }
        },
        "tags": {
          "ignore_above": 1024,
          "type": "keyword"
        },
        "type": {
          "ignore_above": 1024,
          "type": "keyword"
        }
      }
    }
  },
  "order": 0,
  "settings": {
    "index.mapping.total_fields.limit": 10000,
    "index.refresh_interval": "5s"
  },
  "template": "filebeat-*"
}
244
vendor/github.com/elastic/beats/filebeat/filebeat.template.json
generated
vendored
@ -1,11 +1,8 @@
{
  "mappings": {
    "_default_": {
      "_all": {
        "norms": false
      },
      "_meta": {
        "version": "5.3.2"
        "version": "5.6.6"
      },
      "date_detection": false,
      "dynamic_templates": [
@ -40,6 +37,10 @@
                },
                "geoip": {
                  "properties": {
                    "city_name": {
                      "ignore_above": 1024,
                      "type": "keyword"
                    },
                    "continent_name": {
                      "ignore_above": 1024,
                      "type": "keyword"
@ -50,6 +51,10 @@
                    },
                    "location": {
                      "type": "geo_point"
                    },
                    "region_name": {
                      "ignore_above": 1024,
                      "type": "keyword"
                    }
                  }
                },
@ -107,7 +112,8 @@
                      "type": "keyword"
                    },
                    "patch": {
                      "type": "long"
                      "ignore_above": 1024,
                      "type": "keyword"
                    }
                  }
                },
@ -145,6 +151,88 @@
            }
          }
        },
        "auditd": {
          "properties": {
            "log": {
              "properties": {
                "a0": {
                  "ignore_above": 1024,
                  "type": "keyword"
                },
                "acct": {
                  "ignore_above": 1024,
                  "type": "keyword"
                },
                "geoip": {
                  "properties": {
                    "city_name": {
                      "ignore_above": 1024,
                      "type": "keyword"
                    },
                    "continent_name": {
                      "ignore_above": 1024,
                      "type": "keyword"
                    },
                    "country_iso_code": {
                      "ignore_above": 1024,
                      "type": "keyword"
                    },
                    "location": {
                      "type": "geo_point"
                    },
                    "region_name": {
                      "ignore_above": 1024,
                      "type": "keyword"
                    }
                  }
                },
                "item": {
                  "ignore_above": 1024,
                  "type": "keyword"
                },
                "items": {
                  "ignore_above": 1024,
                  "type": "keyword"
                },
                "new_auid": {
                  "ignore_above": 1024,
                  "type": "keyword"
                },
                "new_ses": {
                  "ignore_above": 1024,
                  "type": "keyword"
                },
                "old_auid": {
                  "ignore_above": 1024,
                  "type": "keyword"
                },
                "old_ses": {
                  "ignore_above": 1024,
                  "type": "keyword"
                },
                "pid": {
                  "ignore_above": 1024,
                  "type": "keyword"
                },
                "ppid": {
                  "ignore_above": 1024,
                  "type": "keyword"
                },
                "record_type": {
                  "ignore_above": 1024,
                  "type": "keyword"
                },
                "res": {
                  "ignore_above": 1024,
                  "type": "keyword"
                },
                "sequence": {
                  "type": "long"
                }
              }
            }
          }
        },
        "beat": {
          "properties": {
            "hostname": {
@ -165,9 +253,6 @@
          "ignore_above": 1024,
          "type": "keyword"
        },
        "fields": {
          "properties": {}
        },
        "fileset": {
          "properties": {
            "module": {
@ -306,6 +391,10 @@
                },
                "geoip": {
                  "properties": {
                    "city_name": {
                      "ignore_above": 1024,
                      "type": "keyword"
                    },
                    "continent_name": {
                      "ignore_above": 1024,
                      "type": "keyword"
@ -316,6 +405,10 @@
                    },
                    "location": {
                      "type": "geo_point"
                    },
                    "region_name": {
                      "ignore_above": 1024,
                      "type": "keyword"
                    }
                  }
                },
@ -373,7 +466,8 @@
                      "type": "keyword"
                    },
                    "patch": {
                      "type": "long"
                      "ignore_above": 1024,
                      "type": "keyword"
                    }
                  }
                },
@ -419,6 +513,138 @@
        },
        "system": {
          "properties": {
            "auth": {
              "properties": {
                "groupadd": {
                  "properties": {
                    "gid": {
                      "type": "long"
                    },
                    "name": {
                      "ignore_above": 1024,
                      "type": "keyword"
                    }
                  }
                },
                "hostname": {
                  "ignore_above": 1024,
                  "type": "keyword"
                },
                "message": {
                  "ignore_above": 1024,
                  "type": "keyword"
                },
                "pid": {
                  "type": "long"
                },
                "program": {
                  "ignore_above": 1024,
                  "type": "keyword"
                },
                "ssh": {
                  "properties": {
                    "dropped_ip": {
                      "type": "ip"
                    },
                    "event": {
                      "ignore_above": 1024,
                      "type": "keyword"
                    },
                    "geoip": {
                      "properties": {
                        "city_name": {
                          "ignore_above": 1024,
                          "type": "keyword"
                        },
                        "continent_name": {
                          "ignore_above": 1024,
                          "type": "keyword"
                        },
                        "country_iso_code": {
                          "ignore_above": 1024,
                          "type": "keyword"
                        },
                        "location": {
                          "type": "geo_point"
                        },
                        "region_name": {
                          "ignore_above": 1024,
                          "type": "keyword"
                        }
                      }
                    },
                    "ip": {
                      "type": "ip"
                    },
                    "method": {
                      "ignore_above": 1024,
                      "type": "keyword"
                    },
                    "port": {
                      "type": "long"
                    },
                    "signature": {
                      "ignore_above": 1024,
                      "type": "keyword"
                    }
                  }
                },
                "sudo": {
                  "properties": {
                    "command": {
                      "ignore_above": 1024,
                      "type": "keyword"
                    },
                    "error": {
                      "ignore_above": 1024,
                      "type": "keyword"
                    },
                    "pwd": {
                      "ignore_above": 1024,
                      "type": "keyword"
                    },
                    "tty": {
                      "ignore_above": 1024,
                      "type": "keyword"
                    },
                    "user": {
                      "ignore_above": 1024,
                      "type": "keyword"
                    }
                  }
                },
                "timestamp": {
                  "ignore_above": 1024,
                  "type": "keyword"
                },
                "user": {
                  "ignore_above": 1024,
                  "type": "keyword"
                },
                "useradd": {
                  "properties": {
                    "gid": {
                      "type": "long"
                    },
                    "home": {
                      "ignore_above": 1024,
                      "type": "keyword"
                    },
                    "name": {
                      "ignore_above": 1024,
                      "type": "keyword"
                    },
                    "shell": {
                      "ignore_above": 1024,
                      "type": "keyword"
                    },
                    "uid": {
                      "type": "long"
                    }
                  }
                }
              }
            },
            "syslog": {
              "properties": {
                "hostname": {
2
vendor/github.com/elastic/beats/filebeat/fileset/config.go
generated
vendored
@ -15,5 +15,3 @@ type FilesetConfig struct {
	Var        map[string]interface{} `config:"var"`
	Prospector map[string]interface{} `config:"prospector"`
}

var defaultFilesetConfig = FilesetConfig{}
32
vendor/github.com/elastic/beats/filebeat/fileset/fileset.go
generated
vendored
@ -17,6 +17,7 @@ import (
	"text/template"

	"github.com/elastic/beats/libbeat/common"
	mlimporter "github.com/elastic/beats/libbeat/ml-importer"
)

// Fileset struct is the representation of a fileset.
@ -74,11 +75,17 @@ func (fs *Fileset) Read(beatVersion string) error {
// manifest structure is the representation of the manifest.yml file from the
// fileset.
type manifest struct {
	ModuleVersion  string                   `config:"module_version"`
	Vars           []map[string]interface{} `config:"var"`
	IngestPipeline string                   `config:"ingest_pipeline"`
	Prospector     string                   `config:"prospector"`
	Requires       struct {
	ModuleVersion   string                   `config:"module_version"`
	Vars            []map[string]interface{} `config:"var"`
	IngestPipeline  string                   `config:"ingest_pipeline"`
	Prospector      string                   `config:"prospector"`
	MachineLearning []struct {
		Name       string `config:"name"`
		Job        string `config:"job"`
		Datafeed   string `config:"datafeed"`
		MinVersion string `config:"min_version"`
	} `config:"machine_learning"`
	Requires struct {
		Processors []ProcessorRequirement `config:"processors"`
	} `config:"requires"`
}
@ -310,3 +317,18 @@ func removeExt(path string) string {
func (fs *Fileset) GetRequiredProcessors() []ProcessorRequirement {
	return fs.manifest.Requires.Processors
}

// GetMLConfigs returns the list of machine-learning configurations declared
// by this fileset.
func (fs *Fileset) GetMLConfigs() []mlimporter.MLConfig {
	var mlConfigs []mlimporter.MLConfig
	for _, ml := range fs.manifest.MachineLearning {
		mlConfigs = append(mlConfigs, mlimporter.MLConfig{
			ID:           fmt.Sprintf("filebeat-%s-%s-%s", fs.mcfg.Module, fs.name, ml.Name),
			JobPath:      filepath.Join(fs.modulePath, fs.name, ml.Job),
			DatafeedPath: filepath.Join(fs.modulePath, fs.name, ml.Datafeed),
			MinVersion:   ml.MinVersion,
		})
	}
	return mlConfigs
}
72
vendor/github.com/elastic/beats/filebeat/fileset/modules.go
generated
vendored
@ -2,15 +2,17 @@ package fileset

import (
	"encoding/json"
	"errors"
	"fmt"
	"io/ioutil"
	"os"
	"path/filepath"
	"strings"

	"github.com/pkg/errors"

	"github.com/elastic/beats/libbeat/common"
	"github.com/elastic/beats/libbeat/logp"
	mlimporter "github.com/elastic/beats/libbeat/ml-importer"
	"github.com/elastic/beats/libbeat/paths"
)

@ -20,7 +22,7 @@ type ModuleRegistry struct {

// newModuleRegistry reads and loads the configured module into the registry.
func newModuleRegistry(modulesPath string,
	moduleConfigs []ModuleConfig,
	moduleConfigs []*ModuleConfig,
	overrides *ModuleOverrides,
	beatVersion string) (*ModuleRegistry, error) {

@ -41,11 +43,7 @@ func newModuleRegistry(modulesPath string,
		for _, filesetName := range moduleFilesets {
			fcfg, exists := mcfg.Filesets[filesetName]
			if !exists {
				fcfg = &defaultFilesetConfig
			}

			if fcfg.Enabled != nil && (*fcfg.Enabled) == false {
				continue
				fcfg = &FilesetConfig{}
			}

			fcfg, err = applyOverrides(fcfg, mcfg.Module, filesetName, overrides)
@ -53,7 +51,11 @@ func newModuleRegistry(modulesPath string,
				return nil, fmt.Errorf("Error applying overrides on fileset %s/%s: %v", mcfg.Module, filesetName, err)
			}

			fileset, err := New(modulesPath, filesetName, &mcfg, fcfg)
			if fcfg.Enabled != nil && (*fcfg.Enabled) == false {
				continue
			}

			fileset, err := New(modulesPath, filesetName, mcfg, fcfg)
			if err != nil {
				return nil, err
			}
@ -98,13 +100,13 @@ func NewModuleRegistry(moduleConfigs []*common.Config, beatVersion string) (*Mod
	if err != nil {
		return nil, err
	}
	mcfgs := []ModuleConfig{}
	mcfgs := []*ModuleConfig{}
	for _, moduleConfig := range moduleConfigs {
		mcfg, err := mcfgFromConfig(moduleConfig)
		if err != nil {
			return nil, fmt.Errorf("Error unpacking module config: %v", err)
		}
		mcfgs = append(mcfgs, *mcfg)
		mcfgs = append(mcfgs, mcfg)
	}
	mcfgs, err = appendWithoutDuplicates(mcfgs, modulesCLIList)
	if err != nil {
@ -207,7 +209,7 @@ func applyOverrides(fcfg *FilesetConfig,

// appendWithoutDuplicates appends basic module configuration for each module in the
// modules list, unless the same module is already loaded.
func appendWithoutDuplicates(moduleConfigs []ModuleConfig, modules []string) ([]ModuleConfig, error) {
func appendWithoutDuplicates(moduleConfigs []*ModuleConfig, modules []string) ([]*ModuleConfig, error) {
	if len(modules) == 0 {
		return moduleConfigs, nil
	}
@ -224,7 +226,7 @@ func appendWithoutDuplicates(moduleConfigs []ModuleConfig, modules []string) ([]
	// add the non duplicates to the list
	for _, module := range modules {
		if _, exists := modulesMap[module]; !exists {
			moduleConfigs = append(moduleConfigs, ModuleConfig{Module: module})
			moduleConfigs = append(moduleConfigs, &ModuleConfig{Module: module})
		}
	}
	return moduleConfigs, nil
@ -250,6 +252,7 @@ func (reg *ModuleRegistry) GetProspectorConfigs() ([]*common.Config, error) {
type PipelineLoader interface {
	LoadJSON(path string, json map[string]interface{}) ([]byte, error)
	Request(method, path string, pipeline string, params map[string]string, body interface{}) (int, []byte, error)
	GetVersion() string
}

// LoadPipelines loads the pipelines for each configured fileset.
@ -279,6 +282,26 @@ func (reg *ModuleRegistry) LoadPipelines(esClient PipelineLoader) error {
	return nil
}

// InfoString returns the enabled modules and filesets in a single string, ready to
// be shown to the user
func (reg *ModuleRegistry) InfoString() string {
	var result string
	for module, filesets := range reg.registry {
		var filesetNames string
		for name, _ := range filesets {
			if filesetNames != "" {
				filesetNames += ", "
			}
			filesetNames += name
		}
		if result != "" {
			result += ", "
		}
		result += fmt.Sprintf("%s (%s)", module, filesetNames)
	}
	return result
}

// checkAvailableProcessors calls the /_nodes/ingest API and verifies that all processors listed
// in the requiredProcessors list are available in Elasticsearch. Returns nil if all required
// processors are available.
@ -418,6 +441,31 @@ func interpretError(initialErr error, body []byte) error {
	return fmt.Errorf("couldn't load pipeline: %v. Response body: %s", initialErr, body)
}

// LoadML loads the machine-learning configurations into Elasticsearch, if Xpack is available
func (reg *ModuleRegistry) LoadML(esClient PipelineLoader) error {
	haveXpack, err := mlimporter.HaveXpackML(esClient)
	if err != nil {
		return errors.Errorf("Error checking if xpack is available: %v", err)
	}
	if !haveXpack {
		logp.Warn("Xpack Machine Learning is not enabled")
		return nil
	}

	for module, filesets := range reg.registry {
		for name, fileset := range filesets {
			for _, mlConfig := range fileset.GetMLConfigs() {
				err = mlimporter.ImportMachineLearningJob(esClient, &mlConfig)
				if err != nil {
|
||||
return errors.Errorf("Error loading ML config from %s/%s: %v", module, name, err)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (reg *ModuleRegistry) Empty() bool {
|
||||
count := 0
|
||||
for _, filesets := range reg.registry {
|
||||
|
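To make the dedup semantics concrete, here is a standalone Go sketch with stripped-down stand-ins for the types above (illustration only, not the vendored code): modules named on the CLI are appended unless an enabled config for them already exists, and a disabled config does not count as loaded, matching the "disabled config" test case in modules_test.go below.

package main

import "fmt"

// Stripped-down stand-in for the real ModuleConfig (illustration only).
type ModuleConfig struct {
	Module  string
	Enabled *bool
}

// Mirrors appendWithoutDuplicates: CLI modules are appended unless an
// enabled config for the same module is already present.
func appendWithoutDuplicates(configs []*ModuleConfig, modules []string) []*ModuleConfig {
	present := map[string]bool{}
	for _, c := range configs {
		if c.Enabled != nil && !*c.Enabled {
			continue // a disabled config does not count as loaded
		}
		present[c.Module] = true
	}
	for _, m := range modules {
		if !present[m] {
			configs = append(configs, &ModuleConfig{Module: m})
		}
	}
	return configs
}

func main() {
	f := false
	configs := []*ModuleConfig{{Module: "moduleB", Enabled: &f}}
	for _, c := range appendWithoutDuplicates(configs, []string{"moduleA", "moduleB"}) {
		fmt.Println(c.Module, c.Enabled == nil)
	}
	// Output: moduleB false, moduleA true, moduleB true — the disabled
	// moduleB config stays, and a fresh enabled one is appended.
}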
4
vendor/github.com/elastic/beats/filebeat/fileset/modules_integration_test.go
generated
vendored
@ -58,8 +58,8 @@ func TestSetupNginx(t *testing.T) {
	modulesPath, err := filepath.Abs("../module")
	assert.NoError(t, err)

	configs := []ModuleConfig{
		{Module: "nginx"},
	configs := []*ModuleConfig{
		&ModuleConfig{Module: "nginx"},
	}

	reg, err := newModuleRegistry(modulesPath, configs, nil, "5.2.0")
70
vendor/github.com/elastic/beats/filebeat/fileset/modules_test.go
generated
vendored
@ -25,10 +25,11 @@ func TestNewModuleRegistry(t *testing.T) {
	modulesPath, err := filepath.Abs("../module")
	assert.NoError(t, err)

	configs := []ModuleConfig{
		{Module: "nginx"},
		{Module: "mysql"},
		{Module: "system"},
	configs := []*ModuleConfig{
		&ModuleConfig{Module: "nginx"},
		&ModuleConfig{Module: "mysql"},
		&ModuleConfig{Module: "system"},
		&ModuleConfig{Module: "auditd"},
	}

	reg, err := newModuleRegistry(modulesPath, configs, nil, "5.2.0")
@ -36,9 +37,10 @@ func TestNewModuleRegistry(t *testing.T) {
	assert.NotNil(t, reg)

	expectedModules := map[string][]string{
		"auditd": {"log"},
		"nginx":  {"access", "error"},
		"mysql":  {"slowlog", "error"},
		"system": {"syslog"},
		"system": {"syslog", "auth"},
	}

	assert.Equal(t, len(expectedModules), len(reg.registry))
@ -55,8 +57,16 @@ func TestNewModuleRegistry(t *testing.T) {

	for module, filesets := range reg.registry {
		for name, fileset := range filesets {
			_, err = fileset.getProspectorConfig()
			cfg, err := fileset.getProspectorConfig()
			assert.NoError(t, err, fmt.Sprintf("module: %s, fileset: %s", module, name))

			moduleName, err := cfg.String("_module_name", -1)
			assert.NoError(t, err)
			assert.Equal(t, module, moduleName)

			filesetName, err := cfg.String("_fileset_name", -1)
			assert.NoError(t, err)
			assert.Equal(t, name, filesetName)
		}
	}
}
@ -67,8 +77,8 @@ func TestNewModuleRegistryConfig(t *testing.T) {

	falseVar := false

	configs := []ModuleConfig{
		{
	configs := []*ModuleConfig{
		&ModuleConfig{
			Module: "nginx",
			Filesets: map[string]*FilesetConfig{
				"access": {
@ -81,7 +91,7 @@ func TestNewModuleRegistryConfig(t *testing.T) {
				},
			},
		},
		{
		&ModuleConfig{
			Module:  "mysql",
			Enabled: &falseVar,
		},
@ -98,7 +108,7 @@ func TestNewModuleRegistryConfig(t *testing.T) {
	assert.NotContains(t, reg.registry["nginx"], "error")
}

func TestAppplyOverrides(t *testing.T) {
func TestApplyOverrides(t *testing.T) {

	falseVar := false
	trueVar := true
@ -189,24 +199,24 @@ func TestAppendWithoutDuplicates(t *testing.T) {
	falseVar := false
	tests := []struct {
		name     string
		configs  []ModuleConfig
		configs  []*ModuleConfig
		modules  []string
		expected []ModuleConfig
		expected []*ModuleConfig
	}{
		{
			name:    "just modules",
			configs: []ModuleConfig{},
			configs: []*ModuleConfig{},
			modules: []string{"moduleA", "moduleB", "moduleC"},
			expected: []ModuleConfig{
				{Module: "moduleA"},
				{Module: "moduleB"},
				{Module: "moduleC"},
			expected: []*ModuleConfig{
				&ModuleConfig{Module: "moduleA"},
				&ModuleConfig{Module: "moduleB"},
				&ModuleConfig{Module: "moduleC"},
			},
		},
		{
			name: "eliminate a duplicate, no override",
			configs: []ModuleConfig{
				{
			configs: []*ModuleConfig{
				&ModuleConfig{
					Module: "moduleB",
					Filesets: map[string]*FilesetConfig{
						"fileset": {
@ -218,8 +228,8 @@ func TestAppendWithoutDuplicates(t *testing.T) {
				},
			},
			modules: []string{"moduleA", "moduleB", "moduleC"},
			expected: []ModuleConfig{
				{
			expected: []*ModuleConfig{
				&ModuleConfig{
					Module: "moduleB",
					Filesets: map[string]*FilesetConfig{
						"fileset": {
@ -229,14 +239,14 @@ func TestAppendWithoutDuplicates(t *testing.T) {
						},
					},
				},
				{Module: "moduleA"},
				{Module: "moduleC"},
				&ModuleConfig{Module: "moduleA"},
				&ModuleConfig{Module: "moduleC"},
			},
		},
		{
			name: "disabled config",
			configs: []ModuleConfig{
				{
			configs: []*ModuleConfig{
				&ModuleConfig{
					Module:  "moduleB",
					Enabled: &falseVar,
					Filesets: map[string]*FilesetConfig{
@ -249,8 +259,8 @@ func TestAppendWithoutDuplicates(t *testing.T) {
				},
			},
			modules: []string{"moduleA", "moduleB", "moduleC"},
			expected: []ModuleConfig{
				{
			expected: []*ModuleConfig{
				&ModuleConfig{
					Module:  "moduleB",
					Enabled: &falseVar,
					Filesets: map[string]*FilesetConfig{
@ -261,9 +271,9 @@ func TestAppendWithoutDuplicates(t *testing.T) {
						},
					},
				},
				{Module: "moduleA"},
				{Module: "moduleB"},
				{Module: "moduleC"},
				&ModuleConfig{Module: "moduleA"},
				&ModuleConfig{Module: "moduleB"},
				&ModuleConfig{Module: "moduleC"},
			},
		},
	}
8
vendor/github.com/elastic/beats/filebeat/harvester/config.go
generated
vendored
@ -2,6 +2,7 @@ package harvester

import (
	"fmt"
	"sync"
	"time"

	cfg "github.com/elastic/beats/filebeat/config"
@ -57,6 +58,8 @@ type harvesterConfig struct {
	Fileset string `config:"_fileset_name"` // hidden option to set the fileset name
}

var onceCheck sync.Once

func (config *harvesterConfig) Validate() error {

	// DEPRECATED: remove in 6.0
@ -87,5 +90,10 @@ func (config *harvesterConfig) Validate() error {
		return fmt.Errorf("When using the JSON decoder and line filtering together, you need to specify a message_key value")
	}

	if config.DocumentType != "log" {
		onceCheck.Do(func() {
			logp.Warn("DEPRECATED: document_type is deprecated. Use fields instead.")
		})
	}
	return nil
}
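The sync.Once guard above ensures the deprecation warning is emitted at most once per process even though Validate runs once per prospector. A minimal self-contained sketch of the same pattern (with logp swapped for the standard log package, purely for illustration):

package main

import (
	"log"
	"sync"
)

var onceCheck sync.Once

func validateDocumentType(documentType string) {
	if documentType != "log" {
		// Do runs the callback only on the first call; later calls are no-ops,
		// so the warning appears once per process, not once per prospector.
		onceCheck.Do(func() {
			log.Println("DEPRECATED: document_type is deprecated. Use fields instead.")
		})
	}
}

func main() {
	validateDocumentType("syslog")
	validateDocumentType("syslog") // prints nothing the second time
}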
2
vendor/github.com/elastic/beats/filebeat/harvester/reader/multiline.go
generated
vendored
@ -66,7 +66,7 @@ func NewMultiline(
		return nil, fmt.Errorf("unknown matcher type: %s", config.Match)
	}

	matcher, err := matcherType(config.Pattern)
	matcher, err := matcherType(*config.Pattern)
	if err != nil {
		return nil, err
	}
7
vendor/github.com/elastic/beats/filebeat/harvester/reader/multiline_config.go
generated
vendored
@ -9,13 +9,14 @@ import (

type MultilineConfig struct {
	Negate   bool           `config:"negate"`
	Match    string         `config:"match" validate:"required"`
	Match    string         `config:"match" validate:"required"`
	MaxLines *int           `config:"max_lines"`
	Pattern  match.Matcher  `config:"pattern"`
	Timeout  *time.Duration `config:"timeout" validate:"positive"`
	Pattern  *match.Matcher `config:"pattern" validate:"required"`
	Timeout  *time.Duration `config:"timeout" validate:"positive"`
}

func (c *MultilineConfig) Validate() error {

	if c.Match != "after" && c.Match != "before" {
		return fmt.Errorf("unknown matcher type: %s", c.Match)
	}
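The switch to *match.Matcher with validate:"required" lets the config loader tell "pattern not set" (a nil pointer, now rejected) apart from a zero-value matcher. A minimal standalone sketch of the same idea, using a hypothetical stand-in type rather than the real config machinery:

package main

import "fmt"

type matcher struct{ expr string }

type multilineConfig struct {
	Pattern *matcher // nil means "not configured"
}

func validate(c multilineConfig) error {
	if c.Pattern == nil {
		return fmt.Errorf("multiline.pattern is required")
	}
	return nil
}

func main() {
	fmt.Println(validate(multilineConfig{}))                    // multiline.pattern is required
	fmt.Println(validate(multilineConfig{&matcher{`^[ \t]+`}})) // <nil>
}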
18
vendor/github.com/elastic/beats/filebeat/harvester/reader/multiline_test.go
generated
vendored
@ -24,9 +24,10 @@ func (p bufferSource) Stat() (os.FileInfo, error) { return nil, errors.New("unkn
func (p bufferSource) Continuable() bool { return false }

func TestMultilineAfterOK(t *testing.T) {
	pattern := match.MustCompile(`^[ \t] +`) // next line is indented by spaces
	testMultilineOK(t,
		MultilineConfig{
			Pattern: match.MustCompile(`^[ \t] +`), // next line is indented by spaces
			Pattern: &pattern,
			Match:   "after",
		},
		2,
@ -36,9 +37,10 @@ func TestMultilineAfterOK(t *testing.T) {
}

func TestMultilineBeforeOK(t *testing.T) {
	pattern := match.MustCompile(`\\$`) // previous line ends with \

	testMultilineOK(t,
		MultilineConfig{
			Pattern: match.MustCompile(`\\$`), // previous line ends with \
			Pattern: &pattern,
			Match:   "before",
		},
		2,
@ -48,9 +51,11 @@ func TestMultilineBeforeOK(t *testing.T) {
}

func TestMultilineAfterNegateOK(t *testing.T) {
	pattern := match.MustCompile(`^-`) // first line starts with '-' at beginning of line

	testMultilineOK(t,
		MultilineConfig{
			Pattern: match.MustCompile(`^-`), // first line starts with '-' at beginning of line
			Pattern: &pattern,
			Negate:  true,
			Match:   "after",
		},
@ -61,9 +66,11 @@ func TestMultilineAfterNegateOK(t *testing.T) {
}

func TestMultilineBeforeNegateOK(t *testing.T) {
	pattern := match.MustCompile(`;$`) // last line ends with ';'

	testMultilineOK(t,
		MultilineConfig{
			Pattern: match.MustCompile(`;$`), // last line ends with ';'
			Pattern: &pattern,
			Negate:  true,
			Match:   "before",
		},
@ -74,9 +81,10 @@ func TestMultilineBeforeNegateOK(t *testing.T) {
}

func TestMultilineBeforeNegateOKWithEmptyLine(t *testing.T) {
	pattern := match.MustCompile(`;$`) // last line ends with ';'
	testMultilineOK(t,
		MultilineConfig{
			Pattern: match.MustCompile(`;$`), // last line ends with ';'
			Pattern: &pattern,
			Negate:  true,
			Match:   "before",
		},
167
vendor/github.com/elastic/beats/filebeat/module/apache2/_meta/kibana/default/dashboard/Filebeat-apache2.json
generated
vendored
Normal file
@ -0,0 +1,167 @@
|
||||
{
|
||||
"objects": [
|
||||
{
|
||||
"attributes": {
|
||||
"description": "",
|
||||
"kibanaSavedObjectMeta": {
|
||||
"searchSourceJSON": "{\"filter\":[]}"
|
||||
},
|
||||
"savedSearchId": "Apache2-access-logs",
|
||||
"title": "Apache2 access unique IPs map",
|
||||
"uiStateJSON": "{\"mapCenter\":[14.944784875088372,5.09765625]}",
|
||||
"version": 1,
|
||||
"visState": "{\"title\":\"Apache2 access unique IPs map\",\"type\":\"tile_map\",\"params\":{\"mapType\":\"Scaled Circle Markers\",\"isDesaturated\":true,\"addTooltip\":true,\"heatMaxZoom\":16,\"heatMinOpacity\":0.1,\"heatRadius\":25,\"heatBlur\":15,\"heatNormalizeData\":true,\"legendPosition\":\"bottomright\",\"mapZoom\":2,\"mapCenter\":[15,5],\"wms\":{\"enabled\":false,\"url\":\"https://basemap.nationalmap.gov/arcgis/services/USGSTopo/MapServer/WMSServer\",\"options\":{\"version\":\"1.3.0\",\"layers\":\"0\",\"format\":\"image/png\",\"transparent\":true,\"attribution\":\"Maps provided by USGS\",\"styles\":\"\"}}},\"aggs\":[{\"id\":\"1\",\"enabled\":true,\"type\":\"cardinality\",\"schema\":\"metric\",\"params\":{\"field\":\"apache2.access.remote_ip\"}},{\"id\":\"2\",\"enabled\":true,\"type\":\"geohash_grid\",\"schema\":\"segment\",\"params\":{\"field\":\"apache2.access.geoip.location\",\"autoPrecision\":true}}],\"listeners\":{}}"
|
||||
},
|
||||
"id": "Apache2-access-unique-IPs-map",
|
||||
"type": "visualization",
|
||||
"version": 4
|
||||
},
|
||||
{
|
||||
"attributes": {
|
||||
"description": "",
|
||||
"kibanaSavedObjectMeta": {
|
||||
"searchSourceJSON": "{\"filter\":[]}"
|
||||
},
|
||||
"savedSearchId": "Apache2-access-logs",
|
||||
"title": "Apache2 response codes of top URLs",
|
||||
"uiStateJSON": "{\"vis\":{\"colors\":{\"200\":\"#7EB26D\",\"404\":\"#EF843C\"}}}",
|
||||
"version": 1,
|
||||
"visState": "{\"title\":\"Apache2 response codes of top URLs\",\"type\":\"pie\",\"params\":{\"shareYAxis\":true,\"addTooltip\":true,\"addLegend\":true,\"legendPosition\":\"right\",\"isDonut\":false},\"aggs\":[{\"id\":\"1\",\"enabled\":true,\"type\":\"count\",\"schema\":\"metric\",\"params\":{}},{\"id\":\"3\",\"enabled\":true,\"type\":\"terms\",\"schema\":\"split\",\"params\":{\"field\":\"apache2.access.url\",\"size\":5,\"order\":\"desc\",\"orderBy\":\"1\",\"customLabel\":\"URL\",\"row\":false}},{\"id\":\"2\",\"enabled\":true,\"type\":\"terms\",\"schema\":\"segment\",\"params\":{\"field\":\"apache2.access.response_code\",\"size\":5,\"order\":\"desc\",\"orderBy\":\"1\"}}],\"listeners\":{}}"
|
||||
},
|
||||
"id": "Apache2-response-codes-of-top-URLs",
|
||||
"type": "visualization",
|
||||
"version": 4
|
||||
},
|
||||
{
|
||||
"attributes": {
|
||||
"description": "",
|
||||
"kibanaSavedObjectMeta": {
|
||||
"searchSourceJSON": "{\"filter\":[]}"
|
||||
},
|
||||
"savedSearchId": "Apache2-access-logs",
|
||||
"title": "Apache2 browsers",
|
||||
"uiStateJSON": "{}",
|
||||
"version": 1,
|
||||
"visState": "{\"title\":\"Apache2 browsers\",\"type\":\"pie\",\"params\":{\"shareYAxis\":true,\"addTooltip\":true,\"addLegend\":true,\"legendPosition\":\"bottom\",\"isDonut\":true},\"aggs\":[{\"id\":\"1\",\"enabled\":true,\"type\":\"cardinality\",\"schema\":\"metric\",\"params\":{\"field\":\"apache2.access.remote_ip\"}},{\"id\":\"2\",\"enabled\":true,\"type\":\"terms\",\"schema\":\"segment\",\"params\":{\"field\":\"apache2.access.user_agent.name\",\"size\":5,\"order\":\"desc\",\"orderBy\":\"1\"}},{\"id\":\"3\",\"enabled\":true,\"type\":\"terms\",\"schema\":\"segment\",\"params\":{\"field\":\"apache2.access.user_agent.major\",\"size\":5,\"order\":\"desc\",\"orderBy\":\"1\"}}],\"listeners\":{}}"
|
||||
},
|
||||
"id": "Apache2-browsers",
|
||||
"type": "visualization",
|
||||
"version": 4
|
||||
},
|
||||
{
|
||||
"attributes": {
|
||||
"description": "",
|
||||
"kibanaSavedObjectMeta": {
|
||||
"searchSourceJSON": "{\"filter\":[]}"
|
||||
},
|
||||
"savedSearchId": "Apache2-access-logs",
|
||||
"title": "Apache2 operating systems",
|
||||
"uiStateJSON": "{}",
|
||||
"version": 1,
|
||||
"visState": "{\"title\":\"Apache2 operating systems\",\"type\":\"pie\",\"params\":{\"shareYAxis\":true,\"addTooltip\":true,\"addLegend\":true,\"legendPosition\":\"bottom\",\"isDonut\":true},\"aggs\":[{\"id\":\"1\",\"enabled\":true,\"type\":\"cardinality\",\"schema\":\"metric\",\"params\":{\"field\":\"apache2.access.remote_ip\"}},{\"id\":\"2\",\"enabled\":true,\"type\":\"terms\",\"schema\":\"segment\",\"params\":{\"field\":\"apache2.access.user_agent.os_name\",\"size\":5,\"order\":\"desc\",\"orderBy\":\"1\"}},{\"id\":\"3\",\"enabled\":true,\"type\":\"terms\",\"schema\":\"segment\",\"params\":{\"field\":\"apache2.access.user_agent.os_major\",\"size\":5,\"order\":\"desc\",\"orderBy\":\"1\"}}],\"listeners\":{}}"
|
||||
},
|
||||
"id": "Apache2-operating-systems",
|
||||
"type": "visualization",
|
||||
"version": 4
|
||||
},
|
||||
{
|
||||
"attributes": {
|
||||
"description": "",
|
||||
"kibanaSavedObjectMeta": {
|
||||
"searchSourceJSON": "{\"filter\":[]}"
|
||||
},
|
||||
"savedSearchId": "Apache2-errors-log",
|
||||
"title": "Apache2 error logs over time",
|
||||
"uiStateJSON": "{}",
|
||||
"version": 1,
|
||||
"visState": "{\"title\":\"Apache2 error logs over time\",\"type\":\"histogram\",\"params\":{\"shareYAxis\":true,\"addTooltip\":true,\"addLegend\":true,\"legendPosition\":\"right\",\"scale\":\"linear\",\"mode\":\"stacked\",\"times\":[],\"addTimeMarker\":false,\"defaultYExtents\":false,\"setYExtents\":false,\"yAxis\":{}},\"aggs\":[{\"id\":\"1\",\"enabled\":true,\"type\":\"count\",\"schema\":\"metric\",\"params\":{}},{\"id\":\"2\",\"enabled\":true,\"type\":\"date_histogram\",\"schema\":\"segment\",\"params\":{\"field\":\"@timestamp\",\"interval\":\"auto\",\"customInterval\":\"2h\",\"min_doc_count\":1,\"extended_bounds\":{}}},{\"id\":\"3\",\"enabled\":true,\"type\":\"terms\",\"schema\":\"group\",\"params\":{\"field\":\"apache2.error.level\",\"size\":5,\"order\":\"desc\",\"orderBy\":\"1\"}}],\"listeners\":{}}"
|
||||
},
|
||||
"id": "Apache2-error-logs-over-time",
|
||||
"type": "visualization",
|
||||
"version": 4
|
||||
},
|
||||
{
|
||||
"attributes": {
|
||||
"description": "",
|
||||
"kibanaSavedObjectMeta": {
|
||||
"searchSourceJSON": "{\"filter\":[]}"
|
||||
},
|
||||
"savedSearchId": "Apache2-access-logs",
|
||||
"title": "Apache2 response codes over time",
|
||||
"uiStateJSON": "{\"vis\":{\"colors\":{\"200\":\"#629E51\",\"404\":\"#EF843C\"}}}",
|
||||
"version": 1,
|
||||
"visState": "{\"title\":\"Apache2 response codes over time\",\"type\":\"histogram\",\"params\":{\"shareYAxis\":true,\"addTooltip\":true,\"addLegend\":true,\"legendPosition\":\"right\",\"scale\":\"linear\",\"mode\":\"stacked\",\"times\":[],\"addTimeMarker\":false,\"defaultYExtents\":false,\"setYExtents\":false,\"yAxis\":{}},\"aggs\":[{\"id\":\"1\",\"enabled\":true,\"type\":\"count\",\"schema\":\"metric\",\"params\":{}},{\"id\":\"2\",\"enabled\":true,\"type\":\"date_histogram\",\"schema\":\"segment\",\"params\":{\"field\":\"@timestamp\",\"interval\":\"auto\",\"customInterval\":\"2h\",\"min_doc_count\":1,\"extended_bounds\":{}}},{\"id\":\"3\",\"enabled\":true,\"type\":\"terms\",\"schema\":\"group\",\"params\":{\"field\":\"apache2.access.response_code\",\"size\":5,\"order\":\"desc\",\"orderBy\":\"1\"}}],\"listeners\":{}}"
|
||||
},
|
||||
"id": "Apache2-response-codes-over-time",
|
||||
"type": "visualization",
|
||||
"version": 4
|
||||
},
|
||||
{
|
||||
"attributes": {
|
||||
"columns": [
|
||||
"apache2.error.client",
|
||||
"apache2.error.level",
|
||||
"apache2.error.module",
|
||||
"apache2.error.message"
|
||||
],
|
||||
"description": "",
|
||||
"hits": 0,
|
||||
"kibanaSavedObjectMeta": {
|
||||
"searchSourceJSON": "{\"index\":\"filebeat-*\",\"query\":{\"query_string\":{\"query\":\"_exists_:apache2.error\",\"analyze_wildcard\":true}},\"filter\":[],\"highlight\":{\"pre_tags\":[\"@kibana-highlighted-field@\"],\"post_tags\":[\"@/kibana-highlighted-field@\"],\"fields\":{\"*\":{}},\"require_field_match\":false,\"fragment_size\":2147483647}}"
|
||||
},
|
||||
"sort": [
|
||||
"@timestamp",
|
||||
"desc"
|
||||
],
|
||||
"title": "Apache2 errors log",
|
||||
"version": 1
|
||||
},
|
||||
"id": "Apache2-errors-log",
|
||||
"type": "search",
|
||||
"version": 8
|
||||
},
|
||||
{
|
||||
"attributes": {
|
||||
"columns": [
|
||||
"apache2.access.remote_ip",
|
||||
"apache2.access.method",
|
||||
"apache2.access.url",
|
||||
"apache2.access.response_code"
|
||||
],
|
||||
"description": "",
|
||||
"hits": 0,
|
||||
"kibanaSavedObjectMeta": {
|
||||
"searchSourceJSON": "{\"index\":\"filebeat-*\",\"query\":{\"query_string\":{\"query\":\"_exists_:apache2.access\",\"analyze_wildcard\":true}},\"filter\":[],\"highlight\":{\"pre_tags\":[\"@kibana-highlighted-field@\"],\"post_tags\":[\"@/kibana-highlighted-field@\"],\"fields\":{\"*\":{}},\"require_field_match\":false,\"fragment_size\":2147483647}}"
|
||||
},
|
||||
"sort": [
|
||||
"@timestamp",
|
||||
"desc"
|
||||
],
|
||||
"title": "Apache2 access logs",
|
||||
"version": 1
|
||||
},
|
||||
"id": "Apache2-access-logs",
|
||||
"type": "search",
|
||||
"version": 20
|
||||
},
|
||||
{
|
||||
"attributes": {
|
||||
"description": "",
|
||||
"hits": 0,
|
||||
"kibanaSavedObjectMeta": {
|
||||
"searchSourceJSON": "{\"filter\":[{\"query\":{\"query_string\":{\"analyze_wildcard\":true,\"query\":\"*\"}}}]}"
|
||||
},
|
||||
"optionsJSON": "{\"darkTheme\":false}",
|
||||
"panelsJSON": "[{\"col\":1,\"id\":\"Apache2-access-unique-IPs-map\",\"panelIndex\":1,\"row\":1,\"size_x\":12,\"size_y\":3,\"type\":\"visualization\"},{\"col\":1,\"id\":\"Apache2-response-codes-of-top-URLs\",\"panelIndex\":2,\"row\":6,\"size_x\":8,\"size_y\":3,\"type\":\"visualization\"},{\"col\":9,\"id\":\"Apache2-browsers\",\"panelIndex\":3,\"row\":6,\"size_x\":4,\"size_y\":3,\"type\":\"visualization\"},{\"col\":11,\"id\":\"Apache2-operating-systems\",\"panelIndex\":4,\"row\":4,\"size_x\":2,\"size_y\":2,\"type\":\"visualization\"},{\"col\":1,\"id\":\"Apache2-error-logs-over-time\",\"panelIndex\":5,\"row\":9,\"size_x\":12,\"size_y\":2,\"type\":\"visualization\"},{\"col\":1,\"id\":\"Apache2-response-codes-over-time\",\"panelIndex\":6,\"row\":4,\"size_x\":10,\"size_y\":2,\"type\":\"visualization\"},{\"id\":\"Apache2-errors-log\",\"type\":\"search\",\"panelIndex\":7,\"size_x\":12,\"size_y\":3,\"col\":1,\"row\":11,\"columns\":[\"apache2.error.client\",\"apache2.error.level\",\"apache2.error.module\",\"apache2.error.message\"],\"sort\":[\"@timestamp\",\"desc\"]}]",
|
||||
"timeRestore": false,
|
||||
"title": "Filebeat Apache2 Dashboard",
|
||||
"uiStateJSON": "{\"P-1\":{\"mapCenter\":[40.713955826286046,-0.17578125]}}",
|
||||
"version": 1
|
||||
},
|
||||
"id": "Filebeat-Apache2-Dashboard",
|
||||
"type": "dashboard",
|
||||
"version": 4
|
||||
}
|
||||
],
|
||||
"version": "6.0.0-alpha3-SNAPSHOT"
|
||||
}
|
10
vendor/github.com/elastic/beats/filebeat/module/apache2/access/_meta/fields.yml
generated
vendored
@ -61,7 +61,7 @@
      description: >
        The minor version of the user agent.
    - name: patch
      type: long
      type: keyword
      description: >
        The patch version of the user agent.
    - name: name
@ -104,4 +104,12 @@
      type: geo_point
      description: >
        The longitude and latitude.
    - name: region_name
      type: keyword
      description: >
        The region name.
    - name: city_name
      type: keyword
      description: >
        The city name.
1
vendor/github.com/elastic/beats/filebeat/module/apache2/access/test/test.log
generated
vendored
@ -1,3 +1,4 @@
::1 - - [26/Dec/2016:16:16:29 +0200] "GET /favicon.ico HTTP/1.1" 404 209
192.168.33.1 - - [26/Dec/2016:16:22:13 +0000] "GET /hello HTTP/1.1" 404 499 "-" "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.12; rv:50.0) Gecko/20100101 Firefox/50.0"
::1 - - [26/Dec/2016:16:16:48 +0200] "-" 408 -
172.17.0.1 - - [29/May/2017:19:02:48 +0000] "GET /stringpatch HTTP/1.1" 404 612 "-" "Mozilla/5.0 (Windows NT 6.1; rv:15.0) Gecko/20120716 Firefox/15.0a2" "-"
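The newly added fourth line exercises a combined-log-format variant with a trailing field and a non-numeric user-agent patch level. Informally, the ingest pipeline maps it roughly as follows (a sketch, not part of the commit; field names come from the fields.yml above, values from the line itself):

172.17.0.1                    -> apache2.access.remote_ip
GET /stringpatch HTTP/1.1     -> ...method, ...url, ...http_version
404 / 612                     -> ...response_code, ...body_sent.bytes
"... Firefox/15.0a2"          -> ...user_agent.* (patch "a2", hence the keyword type)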
319
vendor/github.com/elastic/beats/filebeat/module/apache2/access/test/test.log-expected.json
generated
vendored
@ -1,140 +1,193 @@
|
||||
[
|
||||
{
|
||||
"_index": "filebeat-2016.12.27",
|
||||
"_type": "log",
|
||||
"_id": "AVlBCaYsqYg9cc5KQfcT",
|
||||
"_score": null,
|
||||
"_source": {
|
||||
"@timestamp": "2016-12-26T14:16:29.000Z",
|
||||
"offset": 73,
|
||||
"apache2": {
|
||||
"access": {
|
||||
"response_code": "404",
|
||||
"remote_ip": "::1",
|
||||
"method": "GET",
|
||||
"user_name": "-",
|
||||
"http_version": "1.1",
|
||||
"body_sent": {
|
||||
"bytes": "209"
|
||||
{
|
||||
"_id": "AVlBCaYsqYg9cc5KQfcT",
|
||||
"_index": "filebeat-2016.12.27",
|
||||
"_score": null,
|
||||
"_source": {
|
||||
"@timestamp": "2016-12-26T14:16:29.000Z",
|
||||
"apache2": {
|
||||
"access": {
|
||||
"body_sent": {
|
||||
"bytes": "209"
|
||||
},
|
||||
"http_version": "1.1",
|
||||
"method": "GET",
|
||||
"remote_ip": "::1",
|
||||
"response_code": "404",
|
||||
"url": "/favicon.ico",
|
||||
"user_name": "-"
|
||||
}
|
||||
},
|
||||
"beat": {
|
||||
"hostname": "192-168-0-7.rdsnet.ro",
|
||||
"name": "192-168-0-7.rdsnet.ro",
|
||||
"version": "6.0.0-alpha1"
|
||||
},
|
||||
"fields": {
|
||||
"pipeline_id": "apache2-access-with_plugins",
|
||||
"source_type": "apache2-access"
|
||||
},
|
||||
"offset": 73,
|
||||
"prospector": {
|
||||
"type": "log"
|
||||
},
|
||||
"read_timestamp": "2016-12-27T15:52:23.304Z",
|
||||
"source": "module/apache2/access/test/test.log"
|
||||
},
|
||||
"url": "/favicon.ico"
|
||||
}
|
||||
},
|
||||
"beat": {
|
||||
"hostname": "192-168-0-7.rdsnet.ro",
|
||||
"name": "192-168-0-7.rdsnet.ro",
|
||||
"version": "6.0.0-alpha1"
|
||||
},
|
||||
"input_type": "log",
|
||||
"read_timestamp": "2016-12-27T15:52:23.304Z",
|
||||
"source": "module/apache2/access/test/test.log",
|
||||
"fields": {
|
||||
"pipeline_id": "apache2-access-with_plugins",
|
||||
"source_type": "apache2-access"
|
||||
},
|
||||
"type": "log"
|
||||
},
|
||||
"fields": {
|
||||
"@timestamp": [
|
||||
1482761789000
|
||||
]
|
||||
},
|
||||
"sort": [
|
||||
1482761789000
|
||||
]
|
||||
},
|
||||
{
|
||||
"_index": "filebeat-2016.12.27",
|
||||
"_type": "log",
|
||||
"_id": "AVlBCaYsqYg9cc5KQfcU",
|
||||
"_score": null,
|
||||
"_source": {
|
||||
"@timestamp": "2016-12-26T16:22:13.000Z",
|
||||
"offset": 238,
|
||||
"apache2": {
|
||||
"access": {
|
||||
"referrer": "-",
|
||||
"response_code": "404",
|
||||
"remote_ip": "192.168.33.1",
|
||||
"method": "GET",
|
||||
"user_name": "-",
|
||||
"http_version": "1.1",
|
||||
"body_sent": {
|
||||
"bytes": "499"
|
||||
"_type": "log",
|
||||
"fields": {
|
||||
"@timestamp": [
|
||||
1482761789000
|
||||
]
|
||||
},
|
||||
"url": "/hello",
|
||||
"user_agent": {
|
||||
"major": "50",
|
||||
"minor": "0",
|
||||
"os": "Mac OS X 10.12",
|
||||
"os_minor": "12",
|
||||
"os_major": "10",
|
||||
"name": "Firefox",
|
||||
"os_name": "Mac OS X",
|
||||
"device": "Other"
|
||||
}
|
||||
}
|
||||
"sort": [
|
||||
1482761789000
|
||||
]
|
||||
},
|
||||
"beat": {
|
||||
"hostname": "192-168-0-7.rdsnet.ro",
|
||||
"name": "192-168-0-7.rdsnet.ro",
|
||||
"version": "6.0.0-alpha1"
|
||||
{
|
||||
"_id": "AVlBCaYsqYg9cc5KQfcU",
|
||||
"_index": "filebeat-2016.12.27",
|
||||
"_score": null,
|
||||
"_source": {
|
||||
"@timestamp": "2016-12-26T16:22:13.000Z",
|
||||
"apache2": {
|
||||
"access": {
|
||||
"body_sent": {
|
||||
"bytes": "499"
|
||||
},
|
||||
"http_version": "1.1",
|
||||
"method": "GET",
|
||||
"referrer": "-",
|
||||
"remote_ip": "192.168.33.1",
|
||||
"response_code": "404",
|
||||
"url": "/hello",
|
||||
"user_agent": {
|
||||
"device": "Other",
|
||||
"major": "50",
|
||||
"minor": "0",
|
||||
"name": "Firefox",
|
||||
"os": "Mac OS X 10.12",
|
||||
"os_major": "10",
|
||||
"os_minor": "12",
|
||||
"os_name": "Mac OS X"
|
||||
},
|
||||
"user_name": "-"
|
||||
}
|
||||
},
|
||||
"beat": {
|
||||
"hostname": "192-168-0-7.rdsnet.ro",
|
||||
"name": "192-168-0-7.rdsnet.ro",
|
||||
"version": "6.0.0-alpha1"
|
||||
},
|
||||
"fields": {
|
||||
"pipeline_id": "apache2-access-with_plugins",
|
||||
"source_type": "apache2-access"
|
||||
},
|
||||
"offset": 238,
|
||||
"prospector": {
|
||||
"type": "log"
|
||||
},
|
||||
"read_timestamp": "2016-12-27T15:52:23.304Z",
|
||||
"source": "module/apache2/access/test/test.log"
|
||||
},
|
||||
"_type": "log",
|
||||
"fields": {
|
||||
"@timestamp": [
|
||||
1482769333000
|
||||
]
|
||||
},
|
||||
"sort": [
|
||||
1482769333000
|
||||
]
|
||||
},
|
||||
"input_type": "log",
|
||||
"read_timestamp": "2016-12-27T15:52:23.304Z",
|
||||
"source": "module/apache2/access/test/test.log",
|
||||
"fields": {
|
||||
"pipeline_id": "apache2-access-with_plugins",
|
||||
"source_type": "apache2-access"
|
||||
{
|
||||
"_id": "AVlBCaYsqYg9cc5KQfc-",
|
||||
"_index": "filebeat-2016.12.27",
|
||||
"_score": null,
|
||||
"_source": {
|
||||
"@timestamp": "2016-12-26T14:16:48.000Z",
|
||||
"apache2": {
|
||||
"access": {
|
||||
"remote_ip": "::1",
|
||||
"response_code": "408",
|
||||
"user_name": "-"
|
||||
}
|
||||
},
|
||||
"beat": {
|
||||
"hostname": "192-168-0-7.rdsnet.ro",
|
||||
"name": "192-168-0-7.rdsnet.ro",
|
||||
"version": "6.0.0-alpha1"
|
||||
},
|
||||
"fields": {
|
||||
"pipeline_id": "apache2-access-with_plugins",
|
||||
"source_type": "apache2-access"
|
||||
},
|
||||
"offset": 285,
|
||||
"prospector": {
|
||||
"type": "log"
|
||||
},
|
||||
"read_timestamp": "2016-12-27T16:04:58.319Z",
|
||||
"source": "module/apache2/access/test/test.log"
|
||||
},
|
||||
"_type": "log",
|
||||
"fields": {
|
||||
"@timestamp": [
|
||||
1482761808000
|
||||
]
|
||||
},
|
||||
"sort": [
|
||||
1482761808000
|
||||
]
|
||||
},
|
||||
"type": "log"
|
||||
},
|
||||
"fields": {
|
||||
"@timestamp": [
|
||||
1482769333000
|
||||
]
|
||||
},
|
||||
"sort": [
|
||||
1482769333000
|
||||
]
|
||||
},
|
||||
{
|
||||
"_index": "filebeat-2016.12.27",
|
||||
"_type": "log",
|
||||
"_id": "AVlBCaYsqYg9cc5KQfc-",
|
||||
"_score": null,
|
||||
"_source": {
|
||||
"@timestamp": "2016-12-26T14:16:48.000Z",
|
||||
"offset": 285,
|
||||
"apache2": {
|
||||
"access": {
|
||||
"response_code": "408",
|
||||
"remote_ip": "::1",
|
||||
"user_name": "-"
|
||||
}
|
||||
},
|
||||
"beat": {
|
||||
"hostname": "192-168-0-7.rdsnet.ro",
|
||||
"name": "192-168-0-7.rdsnet.ro",
|
||||
"version": "6.0.0-alpha1"
|
||||
},
|
||||
"input_type": "log",
|
||||
"read_timestamp": "2016-12-27T16:04:58.319Z",
|
||||
"source": "module/apache2/access/test/test.log",
|
||||
"fields": {
|
||||
"pipeline_id": "apache2-access-with_plugins",
|
||||
"source_type": "apache2-access"
|
||||
},
|
||||
"type": "log"
|
||||
},
|
||||
"fields": {
|
||||
"@timestamp": [
|
||||
1482761808000
|
||||
]
|
||||
},
|
||||
"sort": [
|
||||
1482761808000
|
||||
]
|
||||
}
|
||||
{
|
||||
"_id": "AVxVs7QZsqw9BQCgtCgc",
|
||||
"_index": "filebeat-6.0.0-alpha2-2017.05.29",
|
||||
"_score": null,
|
||||
"_source": {
|
||||
"@timestamp": "2017-05-29T19:02:48.000Z",
|
||||
"apache2": {
|
||||
"access": {
|
||||
"body_sent": {
|
||||
"bytes": "612"
|
||||
},
|
||||
"http_version": "1.1",
|
||||
"method": "GET",
|
||||
"referrer": "-",
|
||||
"remote_ip": "172.17.0.1",
|
||||
"response_code": "404",
|
||||
"url": "/stringpatch",
|
||||
"user_agent": {
|
||||
"device": "Other",
|
||||
"major": "15",
|
||||
"minor": "0",
|
||||
"name": "Firefox Alpha",
|
||||
"os": "Windows 7",
|
||||
"os_name": "Windows 7",
|
||||
"patch": "a2"
|
||||
},
|
||||
"user_name": "-"
|
||||
}
|
||||
},
|
||||
"beat": {
|
||||
"hostname": "X1",
|
||||
"name": "X1",
|
||||
"version": "6.0.0-alpha2"
|
||||
},
|
||||
"offset": 443,
|
||||
"prospector": {
|
||||
"type": "log"
|
||||
},
|
||||
"read_timestamp": "2017-05-29T19:34:14.378Z",
|
||||
"source": "/home/exekias/go/src/github.com/elastic/beats/filebeat/apache2.log"
|
||||
},
|
||||
"_type": "doc",
|
||||
"fields": {
|
||||
"@timestamp": [
|
||||
1496084568000
|
||||
]
|
||||
},
|
||||
"sort": [
|
||||
1496084568000
|
||||
]
|
||||
}
|
||||
]
|
||||
|
||||
|
11
vendor/github.com/elastic/beats/filebeat/module/auditd/_meta/config.full.yml
generated
vendored
Normal file
@ -0,0 +1,11 @@
#- module: auditd
  #log:
    #enabled: true

    # Set custom paths for the log files. If left empty,
    # Filebeat will choose the paths depending on your OS.
    #var.paths:

    # Prospector configuration (advanced). Any prospector configuration option
    # can be added under this section.
    #prospector:
vendor/github.com/elastic/beats/filebeat/module/auditd/_meta/config.yml
generated
vendored
Normal file
1
vendor/github.com/elastic/beats/filebeat/module/auditd/_meta/config.yml
generated
vendored
Normal file
@ -0,0 +1 @@
|
||||
#- module: auditd
|
29
vendor/github.com/elastic/beats/filebeat/module/auditd/_meta/docs.asciidoc
generated
vendored
Normal file
@ -0,0 +1,29 @@
== Auditd module

This module collects and parses logs from the audit daemon (`auditd`).

[float]
=== Compatibility

This module was tested with logs from `auditd` on OSes like CentOS 6 and
CentOS 7.

This module is not available for Windows.

[float]
=== Dashboard

This module comes with a sample dashboard showing an overview of the audit log
data. You can build more specific dashboards that are tailored to the audit
rules that you use on your systems.

image::./images/kibana-audit-auditd.png[]

[float]
=== Log fileset settings

[float]
==== var.paths

An array of paths where to look for the log files. If left empty, Filebeat
will choose the paths depending on your operating system.
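For example, enabling the module with a custom log location might look like this in the Filebeat modules configuration (the path is illustrative, not part of this commit):

- module: auditd
  log:
    enabled: true
    # Hypothetical custom location; by default Filebeat picks an OS-specific path.
    var.paths: ["/var/log/audit/audit.log*"]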
10
vendor/github.com/elastic/beats/filebeat/module/auditd/_meta/fields.yml
generated
vendored
Normal file
@ -0,0 +1,10 @@
- key: auditd
  title: "Auditd"
  description: >
    Module for parsing auditd logs.
  fields:
    - name: auditd
      type: group
      description: >
        Fields from the auditd logs.
      fields:
13
vendor/github.com/elastic/beats/filebeat/module/auditd/_meta/kibana/5.x/dashboard/dfbb49f0-0a0f-11e7-8a62-2d05eaaac5cb.json
generated
vendored
Normal file
@ -0,0 +1,13 @@
|
||||
{
|
||||
"hits": 0,
|
||||
"timeRestore": false,
|
||||
"description": "",
|
||||
"title": "Filebeat Auditd",
|
||||
"uiStateJSON": "{\"P-2\":{\"vis\":{\"params\":{\"sort\":{\"columnIndex\":null,\"direction\":null}}}}}",
|
||||
"panelsJSON": "[{\"col\":1,\"id\":\"6295bdd0-0a0e-11e7-825f-6748cda7d858\",\"panelIndex\":1,\"row\":1,\"size_x\":4,\"size_y\":4,\"type\":\"visualization\"},{\"col\":9,\"id\":\"5ebdbe50-0a0f-11e7-825f-6748cda7d858\",\"panelIndex\":2,\"row\":1,\"size_x\":4,\"size_y\":4,\"type\":\"visualization\"},{\"col\":1,\"id\":\"2bb0fa70-0a11-11e7-9e84-43da493ad0c7\",\"panelIndex\":3,\"row\":5,\"size_x\":6,\"size_y\":3,\"type\":\"visualization\"},{\"col\":7,\"id\":\"d1726930-0a7f-11e7-8b04-eb22a5669f27\",\"panelIndex\":5,\"row\":5,\"size_x\":6,\"size_y\":3,\"type\":\"visualization\"},{\"col\":5,\"id\":\"c5411910-0a87-11e7-8b04-eb22a5669f27\",\"panelIndex\":6,\"row\":1,\"size_x\":4,\"size_y\":4,\"type\":\"visualization\"},{\"size_x\":12,\"size_y\":3,\"panelIndex\":7,\"type\":\"search\",\"id\":\"4ac0a370-0a11-11e7-8b04-eb22a5669f27\",\"col\":1,\"row\":8,\"columns\":[\"auditd.log.record_type\",\"auditd.log.sequence\",\"auditd.log.acct\"],\"sort\":[\"@timestamp\",\"desc\"]}]",
|
||||
"optionsJSON": "{\"darkTheme\":false}",
|
||||
"version": 1,
|
||||
"kibanaSavedObjectMeta": {
|
||||
"searchSourceJSON": "{\"filter\":[{\"query\":{\"query_string\":{\"analyze_wildcard\":true,\"query\":\"*\"}}}],\"highlightAll\":true,\"version\":true}"
|
||||
}
|
||||
}
|
18
vendor/github.com/elastic/beats/filebeat/module/auditd/_meta/kibana/5.x/search/4ac0a370-0a11-11e7-8b04-eb22a5669f27.json
generated
vendored
Normal file
@ -0,0 +1,18 @@
|
||||
{
|
||||
"sort": [
|
||||
"@timestamp",
|
||||
"desc"
|
||||
],
|
||||
"hits": 0,
|
||||
"description": "",
|
||||
"title": "Audit Events",
|
||||
"version": 1,
|
||||
"kibanaSavedObjectMeta": {
|
||||
"searchSourceJSON": "{\"index\":\"filebeat-*\",\"highlightAll\":true,\"version\":true,\"query\":{\"query_string\":{\"query\":\"_exists_:auditd.log\",\"analyze_wildcard\":true}},\"filter\":[]}"
|
||||
},
|
||||
"columns": [
|
||||
"auditd.log.record_type",
|
||||
"auditd.log.sequence",
|
||||
"auditd.log.acct"
|
||||
]
|
||||
}
|
10
vendor/github.com/elastic/beats/filebeat/module/auditd/_meta/kibana/5.x/visualization/2bb0fa70-0a11-11e7-9e84-43da493ad0c7.json
generated
vendored
Normal file
@ -0,0 +1,10 @@
|
||||
{
|
||||
"visState": "{\"type\":\"timelion\",\"title\":\"Audit Event Results\",\"params\":{\"expression\":\".es(q=\\\"_exists_:auditd.log NOT auditd.log.res:failure\\\").label(\\\"Success\\\") .es(q=\\\"auditd.log.res:failed\\\").label(\\\"Failure\\\").title(\\\"Audit Event Results\\\")\",\"interval\":\"auto\"}}",
|
||||
"description": "",
|
||||
"title": "Audit Event Results",
|
||||
"uiStateJSON": "{}",
|
||||
"version": 1,
|
||||
"kibanaSavedObjectMeta": {
|
||||
"searchSourceJSON": "{}"
|
||||
}
|
||||
}
|
10
vendor/github.com/elastic/beats/filebeat/module/auditd/_meta/kibana/5.x/visualization/5ebdbe50-0a0f-11e7-825f-6748cda7d858.json
generated
vendored
Normal file
@ -0,0 +1,10 @@
|
||||
{
|
||||
"visState": "{\"title\":\"Audit Top Exec Commands\",\"type\":\"table\",\"params\":{\"perPage\":10,\"showPartialRows\":false,\"showMeticsAtAllLevels\":false,\"sort\":{\"columnIndex\":null,\"direction\":null},\"showTotal\":false,\"totalFunc\":\"sum\"},\"aggs\":[{\"id\":\"1\",\"enabled\":true,\"type\":\"count\",\"schema\":\"metric\",\"params\":{}},{\"id\":\"2\",\"enabled\":true,\"type\":\"terms\",\"schema\":\"bucket\",\"params\":{\"field\":\"auditd.log.a0\",\"size\":30,\"order\":\"desc\",\"orderBy\":\"1\",\"customLabel\":\"Command (arg 0)\"}}],\"listeners\":{}}",
|
||||
"description": "",
|
||||
"title": "Audit Top Exec Commands",
|
||||
"uiStateJSON": "{\"vis\":{\"params\":{\"sort\":{\"columnIndex\":null,\"direction\":null}}}}",
|
||||
"version": 1,
|
||||
"kibanaSavedObjectMeta": {
|
||||
"searchSourceJSON": "{\"index\":\"filebeat-*\",\"query\":{\"query_string\":{\"query\":\"auditd.log.record_type:EXECVE\",\"analyze_wildcard\":true}},\"filter\":[]}"
|
||||
}
|
||||
}
|
10
vendor/github.com/elastic/beats/filebeat/module/auditd/_meta/kibana/5.x/visualization/6295bdd0-0a0e-11e7-825f-6748cda7d858.json
generated
vendored
Normal file
10
vendor/github.com/elastic/beats/filebeat/module/auditd/_meta/kibana/5.x/visualization/6295bdd0-0a0e-11e7-825f-6748cda7d858.json
generated
vendored
Normal file
@ -0,0 +1,10 @@
|
||||
{
|
||||
"visState": "{\n \"title\": \"Audit Event Types\",\n \"type\": \"pie\",\n \"params\": {\n \"addTooltip\": true,\n \"addLegend\": true,\n \"legendPosition\": \"right\",\n \"isDonut\": true\n },\n \"aggs\": [\n {\n \"id\": \"1\",\n \"enabled\": true,\n \"type\": \"count\",\n \"schema\": \"metric\",\n \"params\": {}\n },\n {\n \"id\": \"2\",\n \"enabled\": true,\n \"type\": \"terms\",\n \"schema\": \"segment\",\n \"params\": {\n \"field\": \"auditd.log.record_type\",\n \"size\": 50,\n \"order\": \"desc\",\n \"orderBy\": \"1\"\n }\n }\n ],\n \"listeners\": {}\n}",
|
||||
"description": "",
|
||||
"title": "Audit Event Types",
|
||||
"uiStateJSON": "{}",
|
||||
"version": 1,
|
||||
"kibanaSavedObjectMeta": {
|
||||
"searchSourceJSON": "{\n \"index\": \"filebeat-*\",\n \"query\": {\n \"query_string\": {\n \"query\": \"*\",\n \"analyze_wildcard\": true\n }\n },\n \"filter\": []\n}"
|
||||
}
|
||||
}
|
10
vendor/github.com/elastic/beats/filebeat/module/auditd/_meta/kibana/5.x/visualization/c5411910-0a87-11e7-8b04-eb22a5669f27.json
generated
vendored
Normal file
@ -0,0 +1,10 @@
|
||||
{
|
||||
"visState": "{\"title\":\"Audit Event Account Tag Cloud\",\"type\":\"tagcloud\",\"params\":{\"scale\":\"linear\",\"orientation\":\"single\",\"minFontSize\":15,\"maxFontSize\":42,\"hideLabel\":false},\"aggs\":[{\"id\":\"1\",\"enabled\":true,\"type\":\"count\",\"schema\":\"metric\",\"params\":{}},{\"id\":\"2\",\"enabled\":true,\"type\":\"terms\",\"schema\":\"segment\",\"params\":{\"field\":\"auditd.log.acct\",\"size\":15,\"order\":\"desc\",\"orderBy\":\"1\"}}],\"listeners\":{}}",
|
||||
"description": "",
|
||||
"title": "Audit Event Account Tag Cloud",
|
||||
"uiStateJSON": "{}",
|
||||
"version": 1,
|
||||
"kibanaSavedObjectMeta": {
|
||||
"searchSourceJSON": "{\"index\":\"filebeat-*\",\"query\":{\"query_string\":{\"query\":\"*\",\"analyze_wildcard\":true}},\"filter\":[]}"
|
||||
}
|
||||
}
|
10
vendor/github.com/elastic/beats/filebeat/module/auditd/_meta/kibana/5.x/visualization/d1726930-0a7f-11e7-8b04-eb22a5669f27.json
generated
vendored
Normal file
@ -0,0 +1,10 @@
|
||||
{
|
||||
"visState": "{\"title\":\"Audit Event Address Geo Location\",\"type\":\"tile_map\",\"params\":{\"mapType\":\"Scaled Circle Markers\",\"isDesaturated\":true,\"addTooltip\":true,\"heatMaxZoom\":16,\"heatMinOpacity\":0.1,\"heatRadius\":25,\"heatBlur\":15,\"heatNormalizeData\":true,\"legendPosition\":\"bottomright\",\"mapZoom\":2,\"mapCenter\":[15,5],\"wms\":{\"enabled\":false,\"url\":\"https://basemap.nationalmap.gov/arcgis/services/USGSTopo/MapServer/WMSServer\",\"options\":{\"version\":\"1.3.0\",\"layers\":\"0\",\"format\":\"image/png\",\"transparent\":true,\"attribution\":\"Maps provided by USGS\",\"styles\":\"\"}}},\"aggs\":[{\"id\":\"1\",\"enabled\":true,\"type\":\"count\",\"schema\":\"metric\",\"params\":{}},{\"id\":\"2\",\"enabled\":true,\"type\":\"geohash_grid\",\"schema\":\"segment\",\"params\":{\"field\":\"auditd.log.geoip.location\",\"autoPrecision\":true,\"precision\":2}}],\"listeners\":{}}",
|
||||
"description": "",
|
||||
"title": "Audit Event Address Geo Location",
|
||||
"uiStateJSON": "{}",
|
||||
"version": 1,
|
||||
"kibanaSavedObjectMeta": {
|
||||
"searchSourceJSON": "{\"index\":\"filebeat-*\",\"query\":{\"query_string\":{\"query\":\"*\",\"analyze_wildcard\":true}},\"filter\":[]}"
|
||||
}
|
||||
}
|
121
vendor/github.com/elastic/beats/filebeat/module/auditd/_meta/kibana/default/dashboard/Filebeat-auditd.json
generated
vendored
Normal file
@ -0,0 +1,121 @@
|
||||
{
|
||||
"objects": [
|
||||
{
|
||||
"attributes": {
|
||||
"description": "",
|
||||
"kibanaSavedObjectMeta": {
|
||||
"searchSourceJSON": "{\n \"index\": \"filebeat-*\",\n \"query\": {\n \"query_string\": {\n \"query\": \"*\",\n \"analyze_wildcard\": true\n }\n },\n \"filter\": []\n}"
|
||||
},
|
||||
"title": "Audit Event Types",
|
||||
"uiStateJSON": "{}",
|
||||
"version": 1,
|
||||
"visState": "{\n \"title\": \"Audit Event Types\",\n \"type\": \"pie\",\n \"params\": {\n \"addTooltip\": true,\n \"addLegend\": true,\n \"legendPosition\": \"right\",\n \"isDonut\": true\n },\n \"aggs\": [\n {\n \"id\": \"1\",\n \"enabled\": true,\n \"type\": \"count\",\n \"schema\": \"metric\",\n \"params\": {}\n },\n {\n \"id\": \"2\",\n \"enabled\": true,\n \"type\": \"terms\",\n \"schema\": \"segment\",\n \"params\": {\n \"field\": \"auditd.log.record_type\",\n \"size\": 50,\n \"order\": \"desc\",\n \"orderBy\": \"1\"\n }\n }\n ],\n \"listeners\": {}\n}"
|
||||
},
|
||||
"id": "6295bdd0-0a0e-11e7-825f-6748cda7d858",
|
||||
"type": "visualization",
|
||||
"version": 4
|
||||
},
|
||||
{
|
||||
"attributes": {
|
||||
"description": "",
|
||||
"kibanaSavedObjectMeta": {
|
||||
"searchSourceJSON": "{\"index\":\"filebeat-*\",\"query\":{\"query_string\":{\"query\":\"auditd.log.record_type:EXECVE\",\"analyze_wildcard\":true}},\"filter\":[]}"
|
||||
},
|
||||
"title": "Audit Top Exec Commands",
|
||||
"uiStateJSON": "{\"vis\":{\"params\":{\"sort\":{\"columnIndex\":null,\"direction\":null}}}}",
|
||||
"version": 1,
|
||||
"visState": "{\"title\":\"Audit Top Exec Commands\",\"type\":\"table\",\"params\":{\"perPage\":10,\"showPartialRows\":false,\"showMeticsAtAllLevels\":false,\"sort\":{\"columnIndex\":null,\"direction\":null},\"showTotal\":false,\"totalFunc\":\"sum\"},\"aggs\":[{\"id\":\"1\",\"enabled\":true,\"type\":\"count\",\"schema\":\"metric\",\"params\":{}},{\"id\":\"2\",\"enabled\":true,\"type\":\"terms\",\"schema\":\"bucket\",\"params\":{\"field\":\"auditd.log.a0\",\"size\":30,\"order\":\"desc\",\"orderBy\":\"1\",\"customLabel\":\"Command (arg 0)\"}}],\"listeners\":{}}"
|
||||
},
|
||||
"id": "5ebdbe50-0a0f-11e7-825f-6748cda7d858",
|
||||
"type": "visualization",
|
||||
"version": 4
|
||||
},
|
||||
{
|
||||
"attributes": {
|
||||
"description": "",
|
||||
"kibanaSavedObjectMeta": {
|
||||
"searchSourceJSON": "{}"
|
||||
},
|
||||
"title": "Audit Event Results",
|
||||
"uiStateJSON": "{}",
|
||||
"version": 1,
|
||||
"visState": "{\"type\":\"timelion\",\"title\":\"Audit Event Results\",\"params\":{\"expression\":\".es(q=\\\"_exists_:auditd.log NOT auditd.log.res:failure\\\").label(\\\"Success\\\") .es(q=\\\"auditd.log.res:failed\\\").label(\\\"Failure\\\").title(\\\"Audit Event Results\\\")\",\"interval\":\"auto\"}}"
|
||||
},
|
||||
"id": "2bb0fa70-0a11-11e7-9e84-43da493ad0c7",
|
||||
"type": "visualization",
|
||||
"version": 4
|
||||
},
|
||||
{
|
||||
"attributes": {
|
||||
"description": "",
|
||||
"kibanaSavedObjectMeta": {
|
||||
"searchSourceJSON": "{\"index\":\"filebeat-*\",\"query\":{\"query_string\":{\"query\":\"*\",\"analyze_wildcard\":true}},\"filter\":[]}"
|
||||
},
|
||||
"title": "Audit Event Address Geo Location",
|
||||
"uiStateJSON": "{}",
|
||||
"version": 1,
|
||||
"visState": "{\"title\":\"Audit Event Address Geo Location\",\"type\":\"tile_map\",\"params\":{\"mapType\":\"Scaled Circle Markers\",\"isDesaturated\":true,\"addTooltip\":true,\"heatMaxZoom\":16,\"heatMinOpacity\":0.1,\"heatRadius\":25,\"heatBlur\":15,\"heatNormalizeData\":true,\"legendPosition\":\"bottomright\",\"mapZoom\":2,\"mapCenter\":[15,5],\"wms\":{\"enabled\":false,\"url\":\"https://basemap.nationalmap.gov/arcgis/services/USGSTopo/MapServer/WMSServer\",\"options\":{\"version\":\"1.3.0\",\"layers\":\"0\",\"format\":\"image/png\",\"transparent\":true,\"attribution\":\"Maps provided by USGS\",\"styles\":\"\"}}},\"aggs\":[{\"id\":\"1\",\"enabled\":true,\"type\":\"count\",\"schema\":\"metric\",\"params\":{}},{\"id\":\"2\",\"enabled\":true,\"type\":\"geohash_grid\",\"schema\":\"segment\",\"params\":{\"field\":\"auditd.log.geoip.location\",\"autoPrecision\":true,\"precision\":2}}],\"listeners\":{}}"
|
||||
},
|
||||
"id": "d1726930-0a7f-11e7-8b04-eb22a5669f27",
|
||||
"type": "visualization",
|
||||
"version": 4
|
||||
},
|
||||
{
|
||||
"attributes": {
|
||||
"description": "",
|
||||
"kibanaSavedObjectMeta": {
|
||||
"searchSourceJSON": "{\"index\":\"filebeat-*\",\"query\":{\"query_string\":{\"query\":\"*\",\"analyze_wildcard\":true}},\"filter\":[]}"
|
||||
},
|
||||
"title": "Audit Event Account Tag Cloud",
|
||||
"uiStateJSON": "{}",
|
||||
"version": 1,
|
||||
"visState": "{\"title\":\"Audit Event Account Tag Cloud\",\"type\":\"tagcloud\",\"params\":{\"scale\":\"linear\",\"orientation\":\"single\",\"minFontSize\":15,\"maxFontSize\":42,\"hideLabel\":false},\"aggs\":[{\"id\":\"1\",\"enabled\":true,\"type\":\"count\",\"schema\":\"metric\",\"params\":{}},{\"id\":\"2\",\"enabled\":true,\"type\":\"terms\",\"schema\":\"segment\",\"params\":{\"field\":\"auditd.log.acct\",\"size\":15,\"order\":\"desc\",\"orderBy\":\"1\"}}],\"listeners\":{}}"
|
||||
},
|
||||
"id": "c5411910-0a87-11e7-8b04-eb22a5669f27",
|
||||
"type": "visualization",
|
||||
"version": 4
|
||||
},
|
||||
{
|
||||
"attributes": {
|
||||
"columns": [
|
||||
"auditd.log.record_type",
|
||||
"auditd.log.sequence",
|
||||
"auditd.log.acct"
|
||||
],
|
||||
"description": "",
|
||||
"hits": 0,
|
||||
"kibanaSavedObjectMeta": {
|
||||
"searchSourceJSON": "{\"index\":\"filebeat-*\",\"highlightAll\":true,\"version\":true,\"query\":{\"query_string\":{\"query\":\"_exists_:auditd.log\",\"analyze_wildcard\":true}},\"filter\":[]}"
|
||||
},
|
||||
"sort": [
|
||||
"@timestamp",
|
||||
"desc"
|
||||
],
|
||||
"title": "Audit Events",
|
||||
"version": 1
|
||||
},
|
||||
"id": "4ac0a370-0a11-11e7-8b04-eb22a5669f27",
|
||||
"type": "search",
|
||||
"version": 4
|
||||
},
|
||||
{
|
||||
"attributes": {
|
||||
"description": "",
|
||||
"hits": 0,
|
||||
"kibanaSavedObjectMeta": {
|
||||
"searchSourceJSON": "{\"filter\":[{\"query\":{\"query_string\":{\"analyze_wildcard\":true,\"query\":\"*\"}}}],\"highlightAll\":true,\"version\":true}"
|
||||
},
|
||||
"optionsJSON": "{\"darkTheme\":false}",
|
||||
"panelsJSON": "[{\"col\":1,\"id\":\"6295bdd0-0a0e-11e7-825f-6748cda7d858\",\"panelIndex\":1,\"row\":1,\"size_x\":4,\"size_y\":4,\"type\":\"visualization\"},{\"col\":9,\"id\":\"5ebdbe50-0a0f-11e7-825f-6748cda7d858\",\"panelIndex\":2,\"row\":1,\"size_x\":4,\"size_y\":4,\"type\":\"visualization\"},{\"col\":1,\"id\":\"2bb0fa70-0a11-11e7-9e84-43da493ad0c7\",\"panelIndex\":3,\"row\":5,\"size_x\":6,\"size_y\":3,\"type\":\"visualization\"},{\"col\":7,\"id\":\"d1726930-0a7f-11e7-8b04-eb22a5669f27\",\"panelIndex\":5,\"row\":5,\"size_x\":6,\"size_y\":3,\"type\":\"visualization\"},{\"col\":5,\"id\":\"c5411910-0a87-11e7-8b04-eb22a5669f27\",\"panelIndex\":6,\"row\":1,\"size_x\":4,\"size_y\":4,\"type\":\"visualization\"},{\"size_x\":12,\"size_y\":3,\"panelIndex\":7,\"type\":\"search\",\"id\":\"4ac0a370-0a11-11e7-8b04-eb22a5669f27\",\"col\":1,\"row\":8,\"columns\":[\"auditd.log.record_type\",\"auditd.log.sequence\",\"auditd.log.acct\"],\"sort\":[\"@timestamp\",\"desc\"]}]",
|
||||
"timeRestore": false,
|
||||
"title": "Filebeat Auditd",
|
||||
"uiStateJSON": "{\"P-2\":{\"vis\":{\"params\":{\"sort\":{\"columnIndex\":null,\"direction\":null}}}}}",
|
||||
"version": 1
|
||||
},
|
||||
"id": "dfbb49f0-0a0f-11e7-8a62-2d05eaaac5cb",
|
||||
"type": "dashboard",
|
||||
"version": 4
|
||||
}
|
||||
],
|
||||
"version": "6.0.0-alpha3-SNAPSHOT"
|
||||
}
|
79
vendor/github.com/elastic/beats/filebeat/module/auditd/log/_meta/fields.yml
generated
vendored
Normal file
79
vendor/github.com/elastic/beats/filebeat/module/auditd/log/_meta/fields.yml
generated
vendored
Normal file
@ -0,0 +1,79 @@
|
||||
- name: log
|
||||
type: group
|
||||
description: >
|
||||
Fields from the Linux audit log. Not all fields are documented here because
|
||||
they are dynamic and vary by audit event type.
|
||||
fields:
|
||||
- name: record_type
|
||||
description: >
|
||||
The audit event type.
|
||||
- name: old_auid
|
||||
description: >
|
||||
For login events this is the old audit ID used for the user prior to
|
||||
this login.
|
||||
- name: new_auid
|
||||
description: >
|
||||
For login events this is the new audit ID. The audit ID can be used to
|
||||
trace future events to the user even if their identity changes (like
|
||||
becoming root).
|
||||
- name: old_ses
|
||||
description: >
|
||||
For login events this is the old session ID used for the user prior to
|
||||
this login.
|
||||
- name: new_ses
|
||||
description: >
|
||||
For login events this is the new session ID. It can be used to tie a
|
||||
user to future events by session ID.
|
||||
- name: sequence
|
||||
type: long
|
||||
description: >
|
||||
The audit event sequence number.
|
||||
- name: acct
|
||||
description: >
|
||||
The user account name associated with the event.
|
||||
- name: pid
|
||||
description: >
|
||||
The ID of the process.
|
||||
- name: ppid
|
||||
description: >
|
||||
The ID of the process.
|
    - name: items
      description: >
        The number of items in an event.
    - name: item
      description: >
        The item field indicates which item out of the total number of items.
        This number is zero-based; a value of 0 means it is the first item.
    - name: a0
      description: >
        The first argument to the system call.
    - name: res
      description: >
        The result of the system call (success or failure).
    - name: geoip
      type: group
      description: >
        Contains GeoIP information gathered based on the `auditd.log.addr`
        field. Only present if the GeoIP Elasticsearch plugin is available and
        used.
      fields:
        - name: continent_name
          type: keyword
          description: >
            The name of the continent.
        - name: city_name
          type: keyword
          description: >
            The name of the city.
        - name: region_name
          type: keyword
          description: >
            The name of the region.
        - name: country_iso_code
          type: keyword
          description: >
            Country ISO code.
        - name: location
          type: geo_point
          description: >
            The longitude and latitude.
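Illustratively (not an exact render, since the exact output depends on the ingest pipeline that follows), the first USER_AUTH sample line in the test log at the end of this diff would produce a document roughly like the sketch below. Note that only `sequence` is converted to a number; the other values stay as strings, and a `geoip` block would be attached under `auditd.log.geoip` when the ingest-geoip plugin is installed.

    {
      "@timestamp": "2017-03-16T04:02:40.072Z",
      "auditd": {
        "log": {
          "record_type": "USER_AUTH",
          "sequence": 19623789,
          "pid": "28281",
          "uid": "0",
          "auid": "700",
          "ses": "6793",
          "op": "success",
          "acct": "admin",
          "exe": "/usr/sbin/sshd",
          "addr": "96.241.146.97",
          "terminal": "ssh",
          "res": "success"
        }
      }
    }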
6
vendor/github.com/elastic/beats/filebeat/module/auditd/log/config/log.yml
generated
vendored
Normal file
@ -0,0 +1,6 @@
input_type: log
paths:
{{ range $i, $path := .paths }}
 - {{$path}}
{{ end }}
exclude_files: [".gz$"]
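This prospector config is a Go text/template: the {{ range }} block expands the paths variable supplied by the module manifest. With the default Linux path defined in the manifest below, the rendered configuration would look roughly like:

    input_type: log
    paths:
     - /var/log/audit/audit.log*
    exclude_files: [".gz$"]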
101
vendor/github.com/elastic/beats/filebeat/module/auditd/log/ingest/pipeline.json
generated
vendored
Normal file
@ -0,0 +1,101 @@
{
  "description": "Pipeline for parsing Linux auditd logs",
  "processors": [
    {
      "grok": {
        "field": "message",
        "pattern_definitions": {
          "AUDIT_TYPE": "^type=%{NOTSPACE:auditd.log.record_type}",
          "AUDIT_PREFIX": "%{AUDIT_TYPE} msg=audit\\(%{NUMBER:auditd.log.epoch}:%{NUMBER:auditd.log.sequence}\\):(%{DATA})?",
          "AUDIT_KEY_VALUES": "%{WORD}=%{GREEDYDATA}"
        },
        "patterns": [
          "%{AUDIT_PREFIX} %{AUDIT_KEY_VALUES:auditd.log.kv} old auid=%{NUMBER:auditd.log.old_auid} new auid=%{NUMBER:auditd.log.new_auid} old ses=%{NUMBER:auditd.log.old_ses} new ses=%{NUMBER:auditd.log.new_ses}",
          "%{AUDIT_PREFIX} %{AUDIT_KEY_VALUES:auditd.log.kv} msg=['\"](%{DATA:auditd.log.msg}\\s+)?%{AUDIT_KEY_VALUES:auditd.log.sub_kv}['\"]",
          "%{AUDIT_PREFIX} %{AUDIT_KEY_VALUES:auditd.log.kv}",
          "%{AUDIT_PREFIX}",
          "%{AUDIT_TYPE} %{AUDIT_KEY_VALUES:auditd.log.kv}"
        ]
      }
    },
    {
      "kv": {
        "field": "auditd.log.kv",
        "field_split": "\\s+",
        "value_split": "=",
        "target_field": "auditd.log"
      }
    },
    {
      "kv": {
        "field": "auditd.log.sub_kv",
        "field_split": "\\s+",
        "value_split": "=",
        "target_field": "auditd.log",
        "ignore_missing": true
      }
    },
    {
      "remove": {
        "field": "auditd.log.kv",
        "ignore_failure": true
      }
    },
    {
      "remove": {
        "field": "auditd.log.sub_kv",
        "ignore_failure": true
      }
    },
    {
      "remove": {
        "field": "message",
        "ignore_failure": true
      }
    },
    {
      "date": {
        "field": "auditd.log.epoch",
        "target_field": "@timestamp",
        "formats": [
          "UNIX"
        ],
        "ignore_failure": true
      }
    },
    {
      "remove": {
        "field": "auditd.log.epoch",
        "ignore_failure": true
      }
    },
    {
      "convert": {
        "field": "auditd.log.sequence",
        "type": "integer",
        "ignore_missing": true
      }
    },
    {
      "script": {
        "lang": "painless",
        "inline": " String trimQuotes(def v) {\n if (v.startsWith(\"'\") || v.startsWith('\"')) {\n v = v.substring(1, v.length());\n }\n if (v.endsWith(\"'\") || v.endsWith('\"')) {\n v = v.substring(0, v.length()-1);\n } \n return v;\n }\n \n boolean isHexAscii(String v) {\n def len = v.length();\n if (len == 0 || len % 2 != 0) {\n return false; \n }\n \n for (int i = 0 ; i < len ; i++) {\n if (Character.digit(v.charAt(i), 16) == -1) {\n return false;\n }\n }\n\n return true;\n }\n \n String convertHexToString(String hex) {\n\t StringBuilder sb = new StringBuilder();\n\n for (int i=0; i < hex.length() - 1; i+=2) {\n String output = hex.substring(i, (i + 2));\n int decimal = Integer.parseInt(output, 16);\n sb.append((char)decimal);\n }\n\n return sb.toString();\n }\n \n def possibleHexKeys = ['exe', 'cmd'];\n \n def audit = ctx.auditd.get(\"log\");\n Iterator entries = audit.entrySet().iterator();\n while (entries.hasNext()) {\n def e = entries.next();\n def k = e.getKey();\n def v = e.getValue(); \n\n // Remove entries whose value is ?\n if (v == \"?\" || v == \"(null)\" || v == \"\") {\n entries.remove();\n continue;\n }\n \n // Convert hex values to ASCII.\n if (possibleHexKeys.contains(k) && isHexAscii(v)) {\n v = convertHexToString(v);\n audit.put(k, v);\n }\n \n // Trim quotes.\n if (v instanceof String) {\n v = trimQuotes(v);\n audit.put(k, v);\n }\n \n // Convert arch.\n if (k == \"arch\" && v == \"c000003e\") {\n audit.put(k, \"x86_64\");\n }\n }"
      }
    },
    {
      "geoip": {
        "field": "auditd.log.addr",
        "target_field": "auditd.log.geoip",
        "ignore_failure": true
      }
    }
  ],
  "on_failure": [
    {
      "set": {
        "field": "error",
        "value": "{{ _ingest.on_failure_message }}"
      }
    }
  ]
}
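A convenient way to verify these processors against the sample lines further below is Elasticsearch's ingest simulate API. A minimal sketch follows; the pipeline body is deliberately elided here, so substitute the full JSON above in its place:

    POST _ingest/pipeline/_simulate
    {
      "pipeline": { ...contents of pipeline.json... },
      "docs": [
        {"_source": {"message": "type=LOGIN msg=audit(1489636960.072:19623791): pid=28281 uid=0 old auid=700 new auid=700 old ses=6793 new ses=12286"}}
      ]
    }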
15
vendor/github.com/elastic/beats/filebeat/module/auditd/log/manifest.yml
generated
vendored
Normal file
@ -0,0 +1,15 @@
module_version: 1.0

var:
  - name: paths
    default:
      - /var/log/audit/audit.log*
    os.darwin: [""]
    os.windows: []

ingest_pipeline: ingest/pipeline.json
prospector: config/log.yml

requires.processors:
  - name: geoip
    plugin: ingest-geoip
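Once shipped with Filebeat, a module defined by a manifest like this is typically enabled either on the command line (-modules=auditd) or in filebeat.yml. A minimal sketch, assuming the default path from the manifest above:

    filebeat.modules:
      - module: auditd
        log:
          enabled: true
          var.paths: ["/var/log/audit/audit.log*"]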
12
vendor/github.com/elastic/beats/filebeat/module/auditd/log/test/audit-rhel6.log
generated
vendored
Normal file
@ -0,0 +1,12 @@
type=USER_END msg=audit(1489519230.178:19600327): user pid=4121 uid=0 auid=700 ses=11988 msg='op=PAM:session_close acct="root" exe="/usr/bin/sudo" hostname=? addr=? terminal=? res=success'
type=CRED_DISP msg=audit(1489519230.178:19600328): user pid=4121 uid=0 auid=700 ses=11988 msg='op=PAM:setcred acct="root" exe="/usr/bin/sudo" hostname=? addr=? terminal=? res=success'
type=USER_CMD msg=audit(1489519256.192:19600329): user pid=4151 uid=497 auid=700 ses=11988 msg='cwd="/" cmd=2F7573722F6C696236342F6E6167696F732F706C7567696E732F636865636B5F617374657269736B5F7369705F7065657273202D7020323032 terminal=? res=success'
type=CRED_ACQ msg=audit(1489519256.193:19600330): user pid=4151 uid=0 auid=700 ses=11988 msg='op=PAM:setcred acct="root" exe="/usr/bin/sudo" hostname=? addr=? terminal=? res=success'
type=USER_START msg=audit(1489519256.193:19600331): user pid=4151 uid=0 auid=700 ses=11988 msg='op=PAM:session_open acct="root" exe="/usr/bin/sudo" hostname=? addr=? terminal=? res=success'
type=MAC_IPSEC_EVENT msg=audit(1489519382.529:19600354): op=SPD-add auid=4294967295 ses=4294967295 res=1 src=10.100.0.0 src_prefixlen=16 dst=10.100.4.0 dst_prefixlen=22
type=SYSCALL msg=audit(1489519382.529:19600354): arch=c000003e syscall=44 success=yes exit=184 a0=9 a1=7f564ee6d2a0 a2=b8 a3=0 items=0 ppid=1240 pid=1275 auid=4294967295 uid=0 gid=0 euid=0 suid=0 fsuid=0 egid=0 sgid=0 fsgid=0 tty=(none) ses=4294967295 comm="charon" exe=2F7573722F6C6962657865632F7374726F6E677377616E2F636861726F6E202864656C6574656429 key=(null)
type=LOGIN msg=audit(1489636960.072:19623791): pid=28281 uid=0 old auid=700 new auid=700 old ses=6793 new ses=12286
type=CRYPTO_KEY_USER msg=audit(1489636960.070:19623788): user pid=28281 uid=0 auid=700 ses=6793 msg='op=destroy kind=session fp=? direction=both spid=28282 suid=74 rport=58994 laddr=107.170.139.210 lport=50022 exe="/usr/sbin/sshd" hostname=? addr=96.241.146.97 terminal=? res=success'
type=USER_AUTH msg=audit(1489636960.072:19623789): user pid=28281 uid=0 auid=700 ses=6793 msg='op=success acct="admin" exe="/usr/sbin/sshd" hostname=? addr=96.241.146.97 terminal=ssh res=success'
type=USER_AUTH msg=audit(1489636977.804:19623807): user pid=28395 uid=0 auid=700 ses=12286 msg='op=PAM:authentication acct="root" exe="/bin/su" hostname=? addr=? terminal=pts/0 res=success'
type=USER_ACCT msg=audit(1489636977.805:19623808): user pid=28395 uid=0 auid=700 ses=12286 msg='op=PAM:accounting acct="root" exe="/bin/su" hostname=? addr=? terminal=pts/0 res=success'
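The LOGIN line above is the only sample that exercises the dedicated old/new grok pattern in the pipeline. Illustratively, assuming grok matches as described, it should come out of the pipeline with fields along these lines:

    {
      "@timestamp": "2017-03-16T04:02:40.072Z",
      "auditd": {
        "log": {
          "record_type": "LOGIN",
          "sequence": 19623791,
          "pid": "28281",
          "uid": "0",
          "old_auid": "700",
          "new_auid": "700",
          "old_ses": "6793",
          "new_ses": "12286"
        }
      }
    }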