Update to libbeat 5.2.2

Blerim Sheqa 2017-03-15 09:54:31 +01:00
parent a1233e99c0
commit 40d092ef76
489 changed files with 25336 additions and 2105 deletions

View File

@@ -1,13 +1,17 @@
 BEATNAME=icingabeat
 BEAT_DIR=github.com/icinga/icingabeat
 BEAT_DESCRIPTION=Icingabeat ships Icinga 2 events and states to Elasticsearch or Logstash.
+BEAT_VENDOR=Icinga
+BEAT_DOC_URL=https://github.com/Icinga/icingabeat
 SYSTEM_TESTS=false
 TEST_ENVIRONMENT=false
 ES_BEATS?=./vendor/github.com/elastic/beats
 GOPACKAGES=$(shell glide novendor)
 PREFIX?=.
-SNAPSHOT=false
+#TARGETS="linux/amd64 linux/386 windows/amd64 windows/386 darwin/amd64"
+#PACKAGES=${BEATNAME}/deb ${BEATNAME}/rpm ${BEATNAME}/darwin ${BEATNAME}/win ${BEATNAME}/bin
+#SNAPSHOT=false
 # Path to the libbeat Makefile
 -include $(ES_BEATS)/libbeat/scripts/Makefile
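The variables above only parameterize the shared build logic pulled in by the final `-include`; as a minimal sketch, a build could be driven like this from the icingabeat checkout (the `testsuite` and `package` target names come from the vendored libbeat Makefile and are assumptions here):

# Build the beat and run its test suite through the vendored libbeat rules:
make
make testsuite

# The commented-out variables can also be overridden per invocation, e.g.:
make package TARGETS="linux/amd64" SNAPSHOT=false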

View File

@@ -6,7 +6,7 @@ os: Windows Server 2012 R2
 # Environment variables
 environment:
-GOROOT: c:\go1.7.1
+GOROOT: c:\go1.7.4
 GOPATH: c:\gopath
 PYWIN_DL: https://beats-files.s3.amazonaws.com/deps/pywin32-220.win32-py2.7.exe
 matrix:
@@ -24,13 +24,13 @@ clone_folder: c:\gopath\src\github.com\elastic\beats
 cache:
 - C:\ProgramData\chocolatey\bin -> .appveyor.yml
 - C:\ProgramData\chocolatey\lib -> .appveyor.yml
-- C:\go1.7.1 -> .appveyor.yml
+- C:\go1.7.4 -> .appveyor.yml
 - C:\tools\mingw64 -> .appveyor.yml
 - C:\pywin_inst.exe -> .appveyor.yml
 # Scripts that run after cloning repository
 install:
-- ps: c:\gopath\src\github.com\elastic\beats\libbeat\scripts\install-go.ps1 -version 1.7.1
+- ps: c:\gopath\src\github.com\elastic\beats\libbeat\scripts\install-go.ps1 -version 1.7.4
 - set PATH=%GOROOT%\bin;%PATH%
 # AppVeyor installed mingw is 32-bit only.
 - ps: >-
@@ -51,7 +51,7 @@ install:
 - set PYTHONPATH=C:\Python27
 - set PATH=%PYTHONPATH%;%PYTHONPATH%\Scripts;%PATH%
 - python --version
-- pip install jinja2 nose nose-timer PyYAML redis
+- pip install jinja2 nose nose-timer PyYAML redis elasticsearch
 - easy_install C:/pywin_inst.exe
 # To run your custom scripts instead of automatic MSBuild

View File

@@ -5,63 +5,80 @@ services:
 language: go
-go:
-- 1.7.1
 # Make sure project can also be built on travis for clones of the repo
 go_import_path: github.com/elastic/beats
-os:
-- linux
-- osx
 env:
-matrix:
-- TARGETS="check"
-- TARGETS="-C filebeat testsuite"
-- TARGETS="-C heartbeat testsuite"
-- TARGETS="-C libbeat testsuite"
-- TARGETS="-C metricbeat testsuite"
-- TARGETS="-C packetbeat testsuite"
-- TARGETS="-C libbeat crosscompile"
-- TARGETS="-C metricbeat crosscompile"
-- TARGETS="-C winlogbeat crosscompile"
-- TARGETS="-C libbeat/dashboards"
-- TARGETS="-C generate/metricbeat/metricset test"
-- TARGETS="-C generate/beat test"
 global:
 # Cross-compile for amd64 only to speed up testing.
 - GOX_FLAGS="-arch amd64"
-- DOCKER_COMPOSE_VERSION: 1.8.1
+- DOCKER_COMPOSE_VERSION: 1.9.0
+- &go_version 1.7.4
 matrix:
-exclude:
-- os: osx
+include:
+# General checks
+- os: linux
 env: TARGETS="check"
+go: *go_version
+# Filebeat
+- os: linux
+env: TARGETS="-C filebeat testsuite"
+go: *go_version
 - os: osx
-env: TARGETS="-C filebeat crosscompile"
-- os: osx
-env: TARGETS="-C libbeat crosscompile"
-- os: osx
-env: TARGETS="-C metricbeat crosscompile"
-- os: osx
-env: TARGETS="-C winlogbeat crosscompile"
+env: TARGETS="TEST_ENVIRONMENT=0 -C filebeat testsuite"
+go: *go_version
+# Heartbeat
+- os: linux
+env: TARGETS="-C heartbeat testsuite"
+go: *go_version
 - os: osx
+env: TARGETS="TEST_ENVIRONMENT=0 -C heartbeat testsuite"
+go: *go_version
+# Libbeat
+- os: linux
 env: TARGETS="-C libbeat testsuite"
-- os: osx
-env: TARGETS="-C hearbeat testsuite"
-- os: osx
+go: *go_version
+- os: linux
+env: TARGETS="-C libbeat crosscompile"
+go: *go_version
+# Metricbeat
+- os: linux
 env: TARGETS="-C metricbeat testsuite"
+go: *go_version
 - os: osx
+env: TARGETS="TEST_ENVIRONMENT=0 -C metricbeat testsuite"
+go: *go_version
+- os: linux
+env: TARGETS="-C metricbeat crosscompile"
+go: *go_version
+# Packetbeat
+- os: linux
+env: TARGETS="-C packetbeat testsuite"
+go: *go_version
+# Winlogbeat
+- os: linux
+env: TARGETS="-C winlogbeat crosscompile"
+go: *go_version
+# Dashboards
+- os: linux
 env: TARGETS="-C libbeat/dashboards"
-- os: osx
+go: *go_version
+# Generators
+- os: linux
 env: TARGETS="-C generate/metricbeat/metricset test"
-- os: osx
+go: *go_version
+- os: linux
 env: TARGETS="-C generate/beat test"
-fast_finish: true
-allow_failures:
-- env: TARGETS="-C libbeat crosscompile"
-- env: TARGETS="-C filebeat crosscompile"
+go: *go_version
 addons:
 apt:
@@ -71,20 +88,14 @@ addons:
 - geoip-database
 before_install:
-# Update to most recent docker version
-- if [[ "$TRAVIS_OS_NAME" == "linux" ]]; then
-  sudo apt-get update;
-  sudo apt-cache search docker;
-  sudo apt-get -o Dpkg::Options::="--force-confnew" install -y docker-engine;
-  fi
 # Docker-compose installation
 - sudo rm /usr/local/bin/docker-compose || true
 - curl -L https://github.com/docker/compose/releases/download/${DOCKER_COMPOSE_VERSION}/docker-compose-`uname -s`-`uname -m` > docker-compose
 - chmod +x docker-compose
 - sudo mv docker-compose /usr/local/bin
-install:
-- true
+# Skips installations step
+install: true
 script:
 - make $TARGETS
@@ -97,6 +108,7 @@ notifications:
 after_success:
 # Copy full.cov to coverage.txt because codecov.io requires this file
 - test -f filebeat/build/coverage/full.cov && bash <(curl -s https://codecov.io/bash) -f filebeat/build/coverage/full.cov
-- test -f packetbeat/build/coverage/full.cov && bash <(curl -s https://codecov.io/bash) -f packetbeat/build/coverage/full.cov
+- test -f heartbeat/build/coverage/full.cov && bash <(curl -s https://codecov.io/bash) -f heartbeat/build/coverage/full.cov
 - test -f libbeat/build/coverage/full.cov && bash <(curl -s https://codecov.io/bash) -f libbeat/build/coverage/full.cov
 - test -f metricbeat/build/coverage/full.cov && bash <(curl -s https://codecov.io/bash) -f metricbeat/build/coverage/full.cov
+- test -f packetbeat/build/coverage/full.cov && bash <(curl -s https://codecov.io/bash) -f packetbeat/build/coverage/full.cov

View File

@@ -8,31 +8,37 @@
 // Template, add newest changes here
 === Beats version HEAD
-https://github.com/elastic/beats/compare/v5.1.2...5.1[Check the HEAD diff]
+https://github.com/elastic/beats/compare/v5.2.0...master[Check the HEAD diff]
 ==== Breaking changes
 *Affecting all Beats*
+*Filebeat*
+*Heartbeat*
 *Metricbeat*
 *Packetbeat*
-*Filebeat*
 *Winlogbeat*
 ==== Bugfixes
 *Affecting all Beats*
+*Filebeat*
+*Heartbeat*
 *Metricbeat*
-*Packetbeat*
+- Fix go routine leak in docker module. {pull}3492[3492]
+- Fix bug docker module hanging when docker container killed. {issue}3610[3610]
+- Set timeout to period instead of 1s by default as documented.
-*Filebeat*
-- Fix registry migration issue from old states were files were only harvested after second restart. {pull}3322[3322]
+*Packetbeat*
 *Winlogbeat*
@@ -40,34 +46,118 @@ https://github.com/elastic/beats/compare/v5.1.2...5.1[Check the HEAD diff]
 *Affecting all Beats*
+*Heartbeat*
 *Metricbeat*
-*Packetbeat*
 *Filebeat*
+*Packetbeat*
 *Winlogbeat*
 ==== Deprecated
 *Affecting all Beats*
+*Filebeat*
+*Heartbeat*
 *Metricbeat*
 *Packetbeat*
-*Filebeat*
 *Winlogbeat*
 ////////////////////////////////////////////////////////////
+[[release-notes-5.2.1]]
+=== Beats version 5.2.1
+https://github.com/elastic/beats/compare/v5.2.0...v5.2.1[View commits]
+==== Bugfixes
+*Metricbeat*
+- Fix go routine leak in docker module. {pull}3492[3492]
+*Packetbeat*
+- Fix error in the NFS sample dashboard. {pull}3548[3548]
+*Winlogbeat*
+- Fix error in the Winlogbeat sample dashboard. {pull}3548[3548]
+[[release-notes-5.2.0]]
+=== Beats version 5.2.0
+https://github.com/elastic/beats/compare/v5.1.2...v5.2.0[View commits]
+==== Bugfixes
+*Affecting all Beats*
+- Fix overwriting explicit empty config sections. {issue}2918[2918]
+*Filebeat*
+- Fix alignment issue where Filebeat compiled with Go 1.7.4 was crashing on 32 bits system. {issue}3273[3273]
+*Metricbeat*
+- Fix service times-out at startup. {pull}3056[3056]
+- Kafka module case sensitive host name matching. {pull}3193[3193]
+- Fix interface conversion panic in couchbase module {pull}3272[3272]
+*Packetbeat*
+- Fix issue where some Cassandra visualizations were showing data from all protocols. {issue}3314[3314]
+==== Added
+*Affecting all Beats*
+- Add support for passing list and dictionary settings via -E flag.
+- Support for parsing list and dictionary setting from environment variables.
+- Added new flags to import_dashboards (-cacert, -cert, -key, -insecure). {pull}3139[3139] {pull}3163[3163]
+- The limit for the number of fields is increased via the mapping template. {pull}3275[3275]
+- Updated to Go 1.7.4. {pull}3277[3277]
+- Added a NOTICE file containing the notices and licenses of the dependencies. {pull}3334[3334].
+*Heartbeat*
+- First release, containing monitors for ICMP, TCP, and HTTP.
+*Filebeat*
+- Add enabled config option to prospectors. {pull}3157[3157]
+- Add target option for decoded_json_field. {pull}3169[3169]
+*Metricbeat*
+- Kafka module broker matching enhancements. {pull}3129[3129]
+- Add a couchbase module with metricsets for node, cluster and bucket. {pull}3081[3081]
+- Export number of cores for CPU module. {pull}3192[3192]
+- Experimental Prometheus module. {pull}3202[3202]
+- Add system socket module that reports all TCP sockets. {pull}3246[3246]
+- Kafka consumer groups metricset. {pull}3240[3240]
+*Winlogbeat*
+- Reduced amount of memory allocated while reading event log records. {pull}3113[3113] {pull}3118[3118]
 [[release-notes-5.1.2]]
 === Beats version 5.1.2
 https://github.com/elastic/beats/compare/v5.1.1...v5.1.2[View commits]
 ==== Bugfixes
+*Filebeat*
+- Fix registry migration issue from old states where files were only harvested after second restart. {pull}3322[3322]
 *Packetbeat*
 - Fix error on importing dashboards due to colons in the Cassandra dashboard. {issue}3140[3140]
@@ -77,6 +167,7 @@ https://github.com/elastic/beats/compare/v5.1.1...v5.1.2[View commits]
 - Fix for "The array bounds are invalid" error when reading large events. {issue}3076[3076]
 [[release-notes-5.1.1]]
 === Beats version 5.1.1
 https://github.com/elastic/beats/compare/v5.0.2...v5.1.1[View commits]
@@ -118,6 +209,7 @@ https://github.com/elastic/beats/compare/v5.0.2...v5.1.1[View commits]
 - Add support for MongoDB 3.4 and WiredTiger metrics. {pull}2999[2999]
 - Add experimental kafka module with partition metricset. {pull}2969[2969]
 - Add raw config option for mysql/status metricset. {pull}3001[3001]
+- Add command fields for mysql/status metricset. {pull}3251[3251]
 *Filebeat*
@@ -177,7 +269,7 @@ https://github.com/elastic/beats/compare/v5.0.0...v5.0.1[View commits]
 *Metricbeat*
-- Add username and password config options to the PostgreSQL module. {pull}2890[2890]
+- Add username and password config options to the PostgreSQL module. {pull}2889[2890]
 - Add username and password config options to the MongoDB module. {pull}2889[2889]
 - Add system core metricset for Windows. {pull}2883[2883]

View File

@@ -51,7 +51,7 @@ Beats](https://github.com/elastic/beats/blob/master/libbeat/docs/communitybeats.
 The Beats are Go programs, so install the latest version of
 [golang](http://golang.org/) if you don't have it already. The current Go version
-used for development is Golang 1.7.1.
+used for development is Golang 1.7.4.
 The location where you clone is important. Please clone under the source
 directory of your `GOPATH`. If you don't have `GOPATH` already set, you can

View File

@@ -1,4 +1,4 @@
-FROM golang:1.7.1
+FROM golang:1.7.4
 MAINTAINER Nicolas Ruflin <ruflin@elastic.co>
 RUN set -x && \
@@ -11,5 +11,5 @@ COPY libbeat/scripts/docker-entrypoint.sh /entrypoint.sh
 RUN mkdir -p /etc/pki/tls/certs
 COPY testing/environments/docker/logstash/pki/tls/certs/logstash.crt /etc/pki/tls/certs/logstash.crt
-# Create a copy of the respository inside the container.
+# Create a copy of the repository inside the container.
 COPY . /go/src/github.com/elastic/beats/

View File

@@ -3,7 +3,7 @@ BUILD_DIR=build
 COVERAGE_DIR=${BUILD_DIR}/coverage
 BEATS=packetbeat filebeat winlogbeat metricbeat heartbeat
 PROJECTS=libbeat ${BEATS}
-PROJECTS_ENV=libbeat metricbeat
+PROJECTS_ENV=libbeat filebeat metricbeat
 SNAPSHOT?=yes
 # Runs complete testsuites (unit, system, integration) for all beats with coverage and race detection.
@@ -113,3 +113,9 @@ upload-package:
 .PHONY: release-upload
 upload-release:
 	aws s3 cp --recursive --acl public-read build/upload s3://download.elasticsearch.org/beats/
+.PHONY: notice
+notice:
+	python dev-tools/generate_notice.py .

vendor/github.com/elastic/beats/NOTICE (new vendored file, 1483 lines; file diff suppressed because it is too large)

vendor/github.com/elastic/beats/dev-tools/common.bash (new vendored file, 80 lines)

@@ -0,0 +1,80 @@
#
# File: common.bash
#
# Common bash routines.
#
# Script directory:
_sdir="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
# debug "msg"
# Write a debug message to stderr.
debug()
{
if [ "$VERBOSE" == "true" ]; then
echo "DEBUG: $1" >&2
fi
}
# err "msg"
# Write an error message to stderr.
err()
{
echo "ERROR: $1" >&2
}
# get_go_version
# Read the project's Go version and return it in the GO_VERSION variable.
# On failure it will exit.
get_go_version() {
GO_VERSION=$(awk '/^:go-version:/{print $NF}' "${_sdir}/../libbeat/docs/version.asciidoc")
if [ -z "$GO_VERSION" ]; then
err "Failed to detect the project's Go version"
exit 1
fi
}
# install_gimme
# Install gimme to HOME/bin.
install_gimme() {
# Install gimme
if [ ! -f "${HOME}/bin/gimme" ]; then
mkdir -p ${HOME}/bin
curl -sL -o ${HOME}/bin/gimme https://raw.githubusercontent.com/travis-ci/gimme/v1.1.0/gimme
chmod +x ${HOME}/bin/gimme
fi
GIMME="${HOME}/bin/gimme"
debug "Gimme version $(${GIMME} version)"
}
# setup_go_root "version"
# This configures the Go version being used. It sets GOROOT and adds
# GOROOT/bin to the PATH. It uses gimme to download the Go version if
# it does not already exist in the ~/.gimme dir.
setup_go_root() {
local version=${1}
install_gimme
# Setup GOROOT and add go to the PATH.
${GIMME} "${version}" > /dev/null
source "${HOME}/.gimme/envs/go${version}.env" 2> /dev/null
debug "$(go version)"
}
# setup_go_path "gopath"
# This sets GOPATH and adds GOPATH/bin to the PATH.
setup_go_path() {
local gopath="${1}"
if [ -z "$gopath" ]; then return; fi
# Setup GOPATH.
export GOPATH="${gopath}"
# Add GOPATH to PATH.
export PATH="${GOPATH}/bin:${PATH}"
debug "GOPATH=${GOPATH}"
}
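A minimal sketch of how a CI script might consume these helpers, assuming it sits next to common.bash and that WORKSPACE is already exported (jenkins_ci below does exactly this):

#!/usr/bin/env bash
set -e
SDIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
source "${SDIR}/common.bash"

VERBOSE=true                    # make debug() print to stderr
get_go_version                  # sets GO_VERSION from libbeat/docs/version.asciidoc
setup_go_root "${GO_VERSION}"   # fetches Go via gimme if needed, sets GOROOT and PATH
setup_go_path "${WORKSPACE}"    # sets GOPATH and prepends GOPATH/bin to PATH
go version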

View File

@@ -0,0 +1,98 @@
import glob
import sys
import os
import datetime
import argparse

def read_file(filename):
    if not os.path.isfile(filename):
        print("File not found {}".format(filename))
        return ""
    with open(filename, 'r') as f:
        file_content = f.read()
    return file_content

def get_library_name(license):
    lib = ""
    path = os.path.dirname(license)
    # get the last three directories
    for i in range(0, 3):
        path, x = os.path.split(path)
        if len(lib) == 0:
            lib = x
        elif len(x) > 0:
            lib = x + "/" + lib
    return lib

def add_licenses(f, licenses):
    for license in licenses:
        for license_file in glob.glob(license):
            f.write("\n--------------------------------------------------------------------\n")
            f.write("{}\n".format(get_library_name(license_file)))
            f.write("--------------------------------------------------------------------\n")
            copyright = read_file(license_file)
            if "Apache License" not in copyright:
                f.write(copyright)
            else:
                # it's an Apache License, so include only the NOTICE file
                f.write("Apache License\n\n")
                for notice_file in glob.glob(os.path.join(os.path.dirname(license_file), "NOTICE*")):
                    f.write("-------{}-----\n".format(os.path.basename(notice_file)))
                    f.write(read_file(notice_file))

def create_notice(filename, beat, copyright, licenses):
    now = datetime.datetime.now()
    with open(filename, "w+") as f:
        # Add header
        f.write("{}\n".format(beat))
        f.write("Copyright 2014-{0} {1}\n".format(now.year, copyright))
        f.write("\n")
        f.write("This product includes software developed by The Apache Software \nFoundation (http://www.apache.org/).\n\n")
        # Add licenses for 3rd party libraries
        f.write("==========================================================================\n")
        f.write("Third party libraries used by the Beats project:\n")
        f.write("==========================================================================\n\n")
        add_licenses(f, licenses)

if __name__ == "__main__":
    parser = argparse.ArgumentParser(
        description="Generate the NOTICE file from all vendor directories available in a given directory")
    parser.add_argument("vendor",
                        help="directory where to search for vendor directories")
    parser.add_argument("-b", "--beat", default="Elastic Beats",
                        help="Beat name")
    parser.add_argument("-c", "--copyright", default="Elasticsearch BV",
                        help="copyright owner")
    args = parser.parse_args()

    cwd = os.getcwd()
    notice = os.path.join(cwd, "NOTICE")
    licenses = []
    for root, dirs, files in os.walk(args.vendor):
        if 'vendor' in dirs:
            license = os.path.join(os.path.join(root, 'vendor'),
                                   '**/**/**/LICENSE*')
            licenses.append(license)
    print("Get the licenses available from {}".format(licenses))
    create_notice(notice, args.beat, args.copyright, licenses)
    print("Available at {}\n".format(notice))
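The repository Makefile above wires this script up as `make notice`; invoked directly it looks like the following (the `-b`/`-c` values in the second call are illustrative):

# Scan all vendor/ trees under the current directory and write ./NOTICE:
python dev-tools/generate_notice.py .

# Override the beat name and copyright owner (hypothetical values):
python dev-tools/generate_notice.py . -b Icingabeat -c "Icinga"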

View File

@@ -1,20 +1,37 @@
 #!/usr/bin/env python
 import os
+import re
 import argparse
-pattern = '''const defaultBeatVersion = "'''
+pattern = re.compile(r'(const\s|)\w*(v|V)ersion\s=\s"(?P<version>.*)"')
+vendored_libbeat = os.path.normpath("vendor/github.com/elastic/beats")
+def get_filepath(filename):
+    script_directory = os.path.abspath(os.path.dirname(os.path.realpath(__file__)))
+    index = script_directory.find(vendored_libbeat)
+    if index > 0:
+        # Community beat detected
+        filename = os.path.join(script_directory[:index], filename)
+        if os.path.exists(filename):
+            return filename  # Community beat version exists
+    return os.path.abspath(os.path.join(script_directory, os.pardir, "libbeat", "beat", "version.go"))
 def main():
     parser = argparse.ArgumentParser(
         description="Prints the current version at stdout.")
-    parser.parse_args()
-    dir = os.path.dirname(os.path.realpath(__file__))
-    with open(dir + "/../libbeat/beat/version.go", "r") as f:
+    args = parser.parse_args()
+    goversion_filepath = get_filepath("version.go")
+    with open(goversion_filepath, "r") as f:
         for line in f:
-            if line.startswith(pattern):
-                print(line[len(pattern):-2]) # -2 for \n and the final quote
+            match = pattern.match(line)
+            if match:
+                print(match.group('version'))
+                return
+    print("No version found in file {}".format(goversion_filepath))
 if __name__ == "__main__":
     main()
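A sketch of what the change buys: the old prefix match only recognized libbeat's own version constant, while the new regex and get_filepath() also handle a community beat's root-level version.go. Both layouts below would print the same version (the file contents shown are illustrative):

# libbeat itself: libbeat/beat/version.go contains
#     const defaultBeatVersion = "5.2.2"
# a community beat vendoring libbeat: <project root>/version.go contains
#     const appVersion = "5.2.2"
python dev-tools/get_version.py    # prints: 5.2.2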

vendor/github.com/elastic/beats/dev-tools/jenkins_ci (new vendored executable file, 143 lines)

@@ -0,0 +1,143 @@
#!/usr/bin/env bash
set -e
# Script directory:
SDIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
SNAME="$(basename "$0")"
source "${SDIR}/common.bash"
usage() {
cat << EOF
Usage: $SNAME [-d] [-h] [-v] [-r] [-w=WORKSPACE] (-g|-b|-c)
Description: Executes a build using the project's Go version.
Options:
-w=WORKSPACE Required. Specifies the path to the Jenkins workspace.
If not set then the WORKSPACE environment variable is
used. The workspace will be treated as the GOPATH.
-b | --build Perform a build which includes make targets: check,
testsuite, coverage-report, and docs.
-c | --cleanup Clean up after the build by removing the checkout of
elastic/docs and stopping any running containers
started by the build. This cannot be specified with
--build.
-g | --go-version Optional. Write the project's Go version to stdout
and then exit. Can be used to set up Go with
eval "\$(gimme \$(./jenkins_ci -g))".
-i | --install-gimme Optional. Installs gimme to HOME/bin.
-r | --race Optional. Enable the Go race detector for tests that
are run.
-d | --debug Optional. Runs the script with 'set -x' to log a trace
of all commands and their arguments being executed.
-v | --verbose Optional. Enable verbose logging from this script to stderr.
-h | --help Optional. Print this usage information.
Examples:
Print project Go version: ./$SNAME --go-version
Build with race detector: ./$SNAME -b -r
Stop test environment: ./$SNAME -c
Jenkins Setup:
1) Jenkins should be setup to checkout elastic/beats into
\$WORKSPACE/src/github.com/elastic/
2) The single build script should be added that executes
\$WORKSPACE/src/github.com/elastic/beats/dev-tools/$SNAME -d -v -b --race
3) A post build action should be added that executes
\$WORKSPACE/src/github.com/elastic/beats/dev-tools/$SNAME -d -v -c
EOF
}
# Parse command line arguments.
parse_args() {
for i in "$@"
do
case $i in
-b|--build)
BUILD=true
shift
;;
-c|--cleanup)
CLEANUP=true
shift
;;
-d|--debug)
set -x
shift
;;
-g|--go-version)
get_go_version
echo "${GO_VERSION}"
exit 0
;;
-h|--help)
usage
exit 1
;;
-i|--install-gimme)
install_gimme
exit 0
;;
-r|--race)
export RACE_DETECTOR=1
shift
;;
-v|--verbose)
VERBOSE=true
shift
;;
-w=*|--workspace=*)
WORKSPACE="${i#*=}"
shift
;;
*)
echo "Invalid argument: $i"
usage
exit 1
;;
esac
done
if [ -z "$WORKSPACE" ]; then
err "WORKSPACE env var must be set or --workspace must be specified"
exit 1
fi
}
build() {
make check
make testsuite
make coverage-report
make docs
}
cleanup() {
# Remove the checkout of elastic/docs if it exists.
rm -rf "${SDIR}/../build/docs"
make stop-environments
}
main() {
cd "${SDIR}/.."
parse_args $*
get_go_version
setup_go_root ${GO_VERSION}
setup_go_path ${WORKSPACE}
if [ "$BUILD" == "true" ] && [ "$CLEANUP" == "true" ]; then
err "--build and --cleanup cannot be used together"
exit 1
elif [ "$BUILD" == "true" ]; then
build
elif [ "$CLEANUP" == "true" ]; then
cleanup
else
err "Use either --build or --cleanup"
exit 1
fi
}
main $*

View File

@@ -1,6 +1,9 @@
 BUILDID?=$(shell git rev-parse HEAD)
 SNAPSHOT?=yes
+BEATS_BUILDER_IMAGE?=tudorg/beats-builder
+BEATS_BUILDER_DEB6_IMAGE?=tudorg/beats-builder-deb6
 makefile_abspath:=$(abspath $(lastword $(MAKEFILE_LIST)))
 packer_absdir=$(shell dirname ${makefile_abspath})
 beat_abspath=${GOPATH}/src/${BEAT_DIR}
@@ -73,29 +76,29 @@ run-interactive-builder-deb6:
 	docker run -t -i -v $(shell pwd)/build:/build \
 		-v $(shell pwd)/xgo-scripts/:/scripts \
 		-v $(shell pwd)/../..:/source \
-		--entrypoint=bash tudorg/beats-builder-deb6
+		--entrypoint=bash ${BEATS_BUILDER_DEB6_IMAGE}
 .PHONY: run-interactive-builder
 run-interactive-builder:
 	docker run -t -i -v $(shell pwd)/build:/build \
 		-v $(packer_absdir)/xgo-scripts/:/scripts \
 		-v $(shell pwd)/../..:/source \
-		--entrypoint=bash tudorg/beats-builder
+		--entrypoint=bash ${BEATS_BUILDER_IMAGE}
 .PHONY: images
 images: xgo-image fpm-image go-daemon-image
 .PHONY: push-images
 push-images:
-	docker push tudorg/beats-builder
-	docker push tudorg/beats-builder-deb6
+	docker push ${BEATS_BUILDER_IMAGE}
+	docker push ${BEATS_BUILDER_DEB6_IMAGE}
 	docker push tudorg/fpm
 	docker push tudorg/go-daemon
 .PHONY: pull-images
 pull-images:
-	docker pull tudorg/beats-builder
-	docker pull tudorg/beats-builder-deb6
+	docker pull ${BEATS_BUILDER_IMAGE}
+	docker pull ${BEATS_BUILDER_DEB6_IMAGE}
 	docker pull tudorg/fpm
 	docker pull tudorg/go-daemon
@@ -115,8 +118,8 @@ endef
 .PHONY: clean-images
 clean-images:
-	@$(call rm-image,tudorg/beats-builder-deb6)
-	@$(call rm-image,tudorg/beats-builder)
+	@$(call rm-image, ${BEATS_BUILDER_DEB6_IMAGE})
+	@$(call rm-image, ${BEATS_BUILDER_IMAGE})
 .PHONY: clean
 clean:
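Because the image names are now `?=` variables, a fork can point the packer targets at its own registry without editing the Makefile; a hypothetical override:

# Build and publish the builder images under a custom namespace (names are illustrative):
make images push-images \
    BEATS_BUILDER_IMAGE=myregistry/beats-builder \
    BEATS_BUILDER_DEB6_IMAGE=myregistry/beats-builder-deb6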

View File

@@ -1,4 +1,4 @@
-FROM tudorg/xgo-deb6-1.7.1
+FROM tudorg/xgo-deb6-1.7.4
 MAINTAINER Tudor Golubenco <tudor@elastic.co>

View File

@@ -1,5 +1,5 @@
 #!/bin/sh
 docker build --rm=true -t tudorg/xgo-deb6-base base/ && \
-docker build --rm=true -t tudorg/xgo-deb6-1.7.1 go-1.7.1/ &&
+docker build --rm=true -t tudorg/xgo-deb6-1.7.4 go-1.7.4/ &&
 docker build --rm=true -t tudorg/beats-builder-deb6 beats-builder

View File

@@ -0,0 +1,15 @@
# Go cross compiler (xgo): Go 1.7.4 layer
# Copyright (c) 2014 Péter Szilágyi. All rights reserved.
#
# Released under the MIT license.
FROM tudorg/xgo-deb6-base
MAINTAINER Tudor Golubenco <tudor@elastic.co>
# Configure the root Go distribution and bootstrap based on it
RUN \
export ROOT_DIST="https://storage.googleapis.com/golang/go1.7.4.linux-amd64.tar.gz" && \
export ROOT_DIST_SHA1="2e5baf03d1590e048c84d1d5b4b6f2540efaaea1" && \
\
$BOOTSTRAP_PURE

View File

@@ -121,7 +121,6 @@ if [ "$TARGETS" == "" ]; then
 fi
-built_targets=0
 for TARGET in $TARGETS; do
 # Split the target into platform and architecture
 XGOOS=`echo $TARGET | cut -d '/' -f 1`
@@ -134,23 +133,20 @@ for TARGET in $TARGETS; do
 export PKG_CONFIG_PATH=/usr/aarch64-linux-gnu/lib/pkgconfig
 GOOS=linux GOARCH=amd64 CGO_ENABLED=${CGO_ENABLED} go get -d ./$PACK
-sh -c "GOOS=linux GOARCH=amd64 CGO_ENABLED=${CGO_ENABLED} go build $V $R $LDARGS -o $NAME-linux-amd64$R ./$PACK"
+sh -c "GOOS=linux GOARCH=amd64 CGO_ENABLED=${CGO_ENABLED} go build $V $R $LDARGS -o /build/$NAME-linux-amd64$R ./$PACK"
-built_targets=$((built_targets+1))
 fi
 if ([ $XGOOS == "." ] || [ $XGOOS == "linux" ]) && ([ $XGOARCH == "." ] || [ $XGOARCH == "386" ]); then
 echo "Compiling $PACK for linux/386..."
 CFLAGS=-m32 CXXFLAGS=-m32 LDFLAGS=-m32 HOST=i686-linux PREFIX=/usr/local $BUILD_DEPS /deps $LIST_DEPS
 GOOS=linux GOARCH=386 CGO_ENABLED=${CGO_ENABLED} go get -d ./$PACK
-sh -c "GOOS=linux GOARCH=386 CGO_ENABLED=${CGO_ENABLED} go build $V $R $LDARGS -o $NAME-linux-386$R ./$PACK"
+sh -c "GOOS=linux GOARCH=386 CGO_ENABLED=${CGO_ENABLED} go build $V $R $LDARGS -o /build/$NAME-linux-386$R ./$PACK"
-built_targets=$((built_targets+1))
 fi
 if ([ $XGOOS == "." ] || [ $XGOOS == "linux" ]) && ([ $XGOARCH == "." ] || [ $XGOARCH == "arm" ]); then
 echo "Compiling $PACK for linux/arm..."
 CC=arm-linux-gnueabi-gcc CXX=rm-linux-gnueabi-g++ HOST=arm-linux PREFIX=/usr/local/arm $BUILD_DEPS /deps $LIST_DEPS
 CC=arm-linux-gnueabi-gcc CXX=rm-linux-gnueabi-g++ GOOS=linux GOARCH=arm CGO_ENABLED=${CGO_ENABLED} GOARM=5 go get -d ./$PACK
-CC=arm-linux-gnueabi-gcc CXX=rm-linux-gnueabi-g++ GOOS=linux GOARCH=arm CGO_ENABLED=${CGO_ENABLED} GOARM=5 go build $V -o $NAME-linux-arm ./$PACK
+CC=arm-linux-gnueabi-gcc CXX=rm-linux-gnueabi-g++ GOOS=linux GOARCH=arm CGO_ENABLED=${CGO_ENABLED} GOARM=5 go build $V -o /build/$NAME-linux-arm ./$PACK
-built_targets=$((built_targets+1))
 fi
 # Check and build for Windows targets
@@ -174,8 +170,7 @@ for TARGET in $TARGETS; do
 export PKG_CONFIG_PATH=/usr/x86_64-w64-mingw32/lib/pkgconfig
 CC=x86_64-w64-mingw32-gcc CXX=x86_64-w64-mingw32-g++ GOOS=windows GOARCH=amd64 CGO_ENABLED=${CGO_ENABLED} CGO_CFLAGS="$CGO_NTDEF" CGO_CXXFLAGS="$CGO_NTDEF" go get -d ./$PACK
-CC=x86_64-w64-mingw32-gcc CXX=x86_64-w64-mingw32-g++ GOOS=windows GOARCH=amd64 CGO_ENABLED=${CGO_ENABLED} CGO_CFLAGS="$CGO_NTDEF" CGO_CXXFLAGS="$CGO_NTDEF" go build $V $R -o $NAME-windows-amd64$R.exe ./$PACK
+CC=x86_64-w64-mingw32-gcc CXX=x86_64-w64-mingw32-g++ GOOS=windows GOARCH=amd64 CGO_ENABLED=${CGO_ENABLED} CGO_CFLAGS="$CGO_NTDEF" CGO_CXXFLAGS="$CGO_NTDEF" go build $V $R -o /build/$NAME-windows-amd64$R.exe ./$PACK
-built_targets=$((built_targets+1))
 fi
 if [ $XGOARCH == "." ] || [ $XGOARCH == "386" ]; then
@@ -184,8 +179,7 @@ for TARGET in $TARGETS; do
 export PKG_CONFIG_PATH=/usr/i686-w64-mingw32/lib/pkgconfig
 CC=i686-w64-mingw32-gcc CXX=i686-w64-mingw32-g++ GOOS=windows GOARCH=386 CGO_ENABLED=${CGO_ENABLED} CGO_CFLAGS="$CGO_NTDEF" CGO_CXXFLAGS="$CGO_NTDEF" go get -d ./$PACK
-CC=i686-w64-mingw32-gcc CXX=i686-w64-mingw32-g++ GOOS=windows GOARCH=386 CGO_ENABLED=${CGO_ENABLED} CGO_CFLAGS="$CGO_NTDEF" CGO_CXXFLAGS="$CGO_NTDEF" go build $V -o $NAME-windows-386.exe ./$PACK
+CC=i686-w64-mingw32-gcc CXX=i686-w64-mingw32-g++ GOOS=windows GOARCH=386 CGO_ENABLED=${CGO_ENABLED} CGO_CFLAGS="$CGO_NTDEF" CGO_CXXFLAGS="$CGO_NTDEF" go build $V -o /build/$NAME-windows-386.exe ./$PACK
-built_targets=$((built_targets+1))
 fi
 fi
@@ -194,22 +188,14 @@ for TARGET in $TARGETS; do
 echo "Compiling $PACK for darwin/amd64..."
 CC=o64-clang CXX=o64-clang++ HOST=x86_64-apple-darwin10 PREFIX=/usr/local $BUILD_DEPS /deps $LIST_DEPS
 CC=o64-clang CXX=o64-clang++ GOOS=darwin GOARCH=amd64 CGO_ENABLED=${CGO_ENABLED} go get -d ./$PACK
-CC=o64-clang CXX=o64-clang++ GOOS=darwin GOARCH=amd64 CGO_ENABLED=${CGO_ENABLED} go build -ldflags=-s $V $R -o $NAME-darwin-amd64$R ./$PACK
+CC=o64-clang CXX=o64-clang++ GOOS=darwin GOARCH=amd64 CGO_ENABLED=${CGO_ENABLED} go build -ldflags=-s $V $R -o /build/$NAME-darwin-amd64$R ./$PACK
-built_targets=$((built_targets+1))
 fi
 if ([ $XGOOS == "." ] || [ $XGOOS == "darwin" ]) && ([ $XGOARCH == "." ] || [ $XGOARCH == "386" ]); then
 echo "Compiling for darwin/386..."
 CC=o32-clang CXX=o32-clang++ HOST=i386-apple-darwin10 PREFIX=/usr/local $BUILD_DEPS /deps $LIST_DEPS
 CC=o32-clang CXX=o32-clang++ GOOS=darwin GOARCH=386 CGO_ENABLED=${CGO_ENABLED} go get -d ./$PACK
-CC=o32-clang CXX=o32-clang++ GOOS=darwin GOARCH=386 CGO_ENABLED=${CGO_ENABLED} go build $V -o $NAME-darwin-386 ./$PACK
+CC=o32-clang CXX=o32-clang++ GOOS=darwin GOARCH=386 CGO_ENABLED=${CGO_ENABLED} go build $V -o /build/$NAME-darwin-386 ./$PACK
-built_targets=$((built_targets+1))
 fi
 done
-# The binary files are the last created files
-echo "Moving $built_targets $PACK binaries to host folder..."
-ls -t | head -n $built_targets
-cp `ls -t | head -n $built_targets ` /build
 echo "Build process completed"

View File

@@ -1,4 +1,4 @@
-FROM tudorg/xgo-1.7.1
+FROM tudorg/xgo-1.7.4
 MAINTAINER Tudor Golubenco <tudor@elastic.co>

View File

@@ -1,5 +1,5 @@
 #!/bin/sh
 docker build --rm=true -t tudorg/xgo-base base/ && \
-docker build --rm=true -t tudorg/xgo-1.7.1 go-1.7.1/ &&
+docker build --rm=true -t tudorg/xgo-1.7.4 go-1.7.4/ &&
 docker build --rm=true -t tudorg/beats-builder beats-builder

View File

@@ -0,0 +1,15 @@
# Go cross compiler (xgo): Go 1.7.4 layer
# Copyright (c) 2014 Péter Szilágyi. All rights reserved.
#
# Released under the MIT license.
FROM tudorg/xgo-base
MAINTAINER Tudor Golubenco <tudor@elastic.co>
# Configure the root Go distribution and bootstrap based on it
RUN \
export ROOT_DIST="https://storage.googleapis.com/golang/go1.7.4.linux-amd64.tar.gz" && \
export ROOT_DIST_SHA1="2e5baf03d1590e048c84d1d5b4b6f2540efaaea1" && \
\
$BOOTSTRAP_PURE

View File

@@ -11,7 +11,7 @@ if [ "$SNAPSHOT" = "yes" ]; then
 fi
 mkdir /{{.beat_name}}-${VERSION}-linux-{{.bin_arch}}
-cp -R homedir/. /{{.beat_name}}-${VERSION}-linux-{{.bin_arch}}/
+cp -a homedir/. /{{.beat_name}}-${VERSION}-linux-{{.bin_arch}}/
 install -D -m 755 import_dashboards-linux-{{.arch}} /{{.beat_name}}-${VERSION}-linux-{{.bin_arch}}/scripts/import_dashboards
 cp {{.beat_name}}-linux-{{.arch}} /{{.beat_name}}-${VERSION}-linux-{{.bin_arch}}/{{.beat_name}}
 cp {{.beat_name}}-linux.yml /{{.beat_name}}-${VERSION}-linux-{{.bin_arch}}/{{.beat_name}}.yml

View File

@@ -24,8 +24,8 @@ RPM_VERSION=`echo ${VERSION} | sed 's/-/_/g'`
 fpm --force -s dir -t rpm \
   -n {{.beat_name}} -v ${RPM_VERSION} \
   --architecture {{.rpm_arch}} \
-  --vendor "Elastic" \
-  --license "ASL 2.0" \
+  --vendor "{{.beat_vendor}}" \
+  --license "{{.beat_license}}" \
   --description "{{.beat_description}}" \
   --url {{.beat_url}} \
   --rpm-init /tmp/{{.beat_name}}.init \

View File

@@ -1,6 +1,6 @@
 [Unit]
 Description={{.beat_name}}
-Documentation=https://www.elastic.co/guide/en/beats/{{.beat_name}}/current/index.html
+Documentation={{.beat_doc_url}}
 Wants=network-online.target
 After=network-online.target

View File

@@ -11,7 +11,7 @@ if [ "$SNAPSHOT" = "yes" ]; then
 fi
 mkdir /{{.beat_name}}-${VERSION}-darwin-x86_64
-cp -R homedir/. /{{.beat_name}}-${VERSION}-darwin-x86_64/
+cp -a homedir/. /{{.beat_name}}-${VERSION}-darwin-x86_64/
 install -D -m 755 import_dashboards-darwin-{{.arch}} /{{.beat_name}}-${VERSION}-darwin-x86_64/scripts/import_dashboards
 cp {{.beat_name}}-darwin-amd64 /{{.beat_name}}-${VERSION}-darwin-x86_64/{{.beat_name}}
 cp {{.beat_name}}-darwin.yml /{{.beat_name}}-${VERSION}-darwin-x86_64/{{.beat_name}}.yml

View File

@@ -20,8 +20,8 @@ fi
 # create deb
 fpm --force -s dir -t deb \
   -n {{.beat_name}} -v ${VERSION} \
-  --vendor "Elastic" \
-  --license "ASL 2.0" \
+  --vendor "{{.beat_vendor}}" \
+  --license "{{.beat_license}}" \
   --architecture {{.deb_arch}} \
   --description "{{.beat_description}}" \
   --url {{.beat_url}} \

View File

@@ -1,6 +1,6 @@
 [Unit]
 Description={{.beat_name}}
-Documentation=https://www.elastic.co/guide/en/beats/{{.beat_name}}/current/index.html
+Documentation={{.beat_doc_url}}
 Wants=network-online.target
 After=network-online.target

View File

@@ -11,7 +11,7 @@ if [ "$SNAPSHOT" = "yes" ]; then
 fi
 mkdir /{{.beat_name}}-${VERSION}-windows-{{.win_arch}}
-cp -R homedir/. /{{.beat_name}}-${VERSION}-windows-{{.win_arch}}/
+cp -a homedir/. /{{.beat_name}}-${VERSION}-windows-{{.win_arch}}/
 install -D -m 755 import_dashboards-windows-{{.arch}} /{{.beat_name}}-${VERSION}-windows-{{.win_arch}}/scripts/import_dashboards.exe
 cp {{.beat_name}}-windows-{{.arch}}.exe /{{.beat_name}}-${VERSION}-windows-{{.win_arch}}/{{.beat_name}}.exe
 unix2dos {{.beat_name}}-win.yml

View File

@@ -1 +1 @@
-version: "0.2.0"
+version: "5.2.2"

View File

@@ -46,7 +46,7 @@ fi
 # with git 1.7
 git clone https://github.com/tsg/gotpl.git /go/src/github.com/tsg/gotpl
 mkdir -p /go/src/gopkg.in/yaml.v2
-#cp -r /go/src/github.com/elastic/beats/vendor/gopkg.in/yaml.v2 /go/src/gopkg.in/
 cp -r $LIBBEAT_PATH/../vendor/gopkg.in/yaml.v2 /go/src/gopkg.in/
 go install github.com/tsg/gotpl
@@ -54,7 +54,7 @@ go install github.com/tsg/gotpl
 cat ${LIBBEAT_PATH}/docs/version.asciidoc >> ${PREFIX}/package.yml
 # Make variable naming of doc-branch compatible with gotpl. Generate and copy README.md into homedir
 sed -i -e 's/:doc-branch/doc_branch/g' ${PREFIX}/package.yml
-/go/bin/gotpl ${LIBBEAT_PATH}/../dev-tools/packer/readme.md.j2 < ${PREFIX}/package.yml > ${PREFIX}/homedir/README.md
+/go/bin/gotpl /templates/readme.md.j2 < ${PREFIX}/package.yml > ${PREFIX}/homedir/README.md
 # Copy template
 cp $BEATNAME.template.json $PREFIX/$BEATNAME.template.json

vendor/github.com/elastic/beats/dev-tools/run_with_go_ver (new vendored executable file, 17 lines)

@@ -0,0 +1,17 @@
#!/usr/bin/env bash
#
# This sets up Go based on the project's Go version. It will configure
# GOROOT and add GOROOT/bin to PATH before executing the given command.
#
# Example usage: ./run_with_go_ver go version
#
set -e
# Script directory:
SDIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
source "${SDIR}/common.bash"
get_go_version
setup_go_root ${GO_VERSION}
bash -c "$*"

View File

@@ -1,38 +1,70 @@
 #!/usr/bin/env python
-import os
 import argparse
+import os
+import re
+import sys
 from subprocess import check_call
-template_go = '''package beat
-const defaultBeatVersion = "{}"
+vendored_libbeat = os.path.normpath("vendor/github.com/elastic/beats")
+goversion_template = '''package main
+const appVersion = "{version}"
 '''
-template_packer = '''version: "{version}"
+goversion_template_libbeat = '''package beat
+const defaultBeatVersion = "{version}"
 '''
+yamlversion_template = '''version: "{version}"
+'''
+def get_rootfolder():
+    vendored_libbeat = os.path.normpath("vendor/github.com/elastic/beats")
+    script_directory = os.path.abspath(os.path.dirname(os.path.realpath(__file__)))
+    index = script_directory.find(vendored_libbeat)
+    if index > 0:
+        # Community beat detected, version files are stored at the root folder of the project
+        return os.path.abspath(script_directory[:index])
+    # Libbeat detected
+    return os.path.dirname(script_directory)
+def create_from_template(filename, template, version):
+    with open(filename, "w") as f:
+        f.write(template.format(version=version))
+    print("Set version {} in file {}".format(version, filename))
 def main():
     parser = argparse.ArgumentParser(
         description="Used to set the current version. Doesn't commit changes.")
     parser.add_argument("version",
                         help="The new version")
     args = parser.parse_args()
-    dir = os.path.dirname(os.path.realpath(__file__))
-    with open(dir + "/../libbeat/beat/version.go", "w") as f:
-        f.write(template_go.format(args.version))
     version = args.version
-    with open(dir + "/packer/version.yml", "w") as f:
-        f.write(template_packer.format(
-            version=version,
-        ))
-    # Updates all files with the new templates
-    os.chdir(dir + "/../")
+    is_libbeat = vendored_libbeat not in os.path.realpath(__file__)
+    if is_libbeat:
+        goversion_filepath = os.path.join(get_rootfolder(), "libbeat", "beat", "version.go")
+        ymlversion_filepath = os.path.join(get_rootfolder(), "dev-tools", "packer", "version.yml")
+        go_template = goversion_template_libbeat
+    else:
+        goversion_filepath = os.path.join(get_rootfolder(), "version.go")
+        ymlversion_filepath = os.path.join(get_rootfolder(), "version.yml")
+        go_template = goversion_template
+    # Create version.go and version.yml files
+    create_from_template(goversion_filepath, go_template, version)
+    create_from_template(ymlversion_filepath, yamlversion_template, version)
+    # Updates all version files with the new templates
+    os.chdir(get_rootfolder())
     print("Update build files")
     check_call("make update", shell=True)
 if __name__ == "__main__":
     main()
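A sketch of the two call sites this now supports: run from inside elastic/beats it rewrites libbeat/beat/version.go and dev-tools/packer/version.yml, while from a community beat (where the script lives under vendor/github.com/elastic/beats) it rewrites the project-root version.go and version.yml instead:

# Inside elastic/beats:
python dev-tools/set_version.py 5.2.2

# Inside a community beat that vendors libbeat:
python vendor/github.com/elastic/beats/dev-tools/set_version.py 5.2.2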

View File

@@ -5,4 +5,5 @@
 filebeat
 build
+_meta/kibana
 /tests/load/logs

vendor/github.com/elastic/beats/filebeat/Dockerfile (new vendored file, 14 lines)

@@ -0,0 +1,14 @@
FROM golang:1.7.4
MAINTAINER Nicolas Ruflin <ruflin@elastic.co>
RUN set -x && \
apt-get update && \
apt-get install -y --no-install-recommends \
netcat python-pip virtualenv && \
apt-get clean
# Setup work environment
ENV FILEBEAT_PATH /go/src/github.com/elastic/beats/filebeat
RUN mkdir -p $FILEBEAT_PATH/build/coverage
WORKDIR $FILEBEAT_PATH

View File

@@ -3,7 +3,7 @@
 BEATNAME?=filebeat
 BEAT_DESCRIPTION?=Filebeat sends log files to Logstash or directly to Elasticsearch.
 SYSTEM_TESTS=true
-TEST_ENVIRONMENT=false
+TEST_ENVIRONMENT?=true
 GOX_FLAGS='-arch=amd64 386 arm ppc64 ppc64le'
 include ../libbeat/scripts/Makefile
@@ -12,6 +12,32 @@ include ../libbeat/scripts/Makefile
 .PHONY: before-build
 before-build:
-# Collects all dependencies and then calls update
+# Collects all module dashboards
+.PHONY: kibana
+kibana:
+	# To not remove index-pattern as generated by update
+	-rm -r _meta/kibana/dashboard _meta/kibana/search _meta/kibana/visualization
+	mkdir -p _meta/kibana
+	-cp -r module/*/_meta/kibana _meta/
+# Collects all module and dataset fields
+.PHONY: fields
+fields:
+	mkdir -p _meta/
+	cat ${ES_BEATS}/filebeat/_meta/fields.common.yml > _meta/fields.generated.yml
+	. ${PYTHON_ENV}/bin/activate; python ${ES_BEATS}/metricbeat/scripts/fields_collector.py >> _meta/fields.generated.yml
+# Runs all collection steps and updates afterwards
 .PHONY: collect
 collect:
+	# dummy implementation to avoid adding the modules until released
+	mkdir -p _meta/
+	cat ${ES_BEATS}/filebeat/_meta/fields.common.yml > _meta/fields.generated.yml
+# Creates a new fileset. Requires the params MODULE and FILESET
+.PHONY: create-fileset
+create-fileset: python-env
+	. ${PYTHON_ENV}/bin/activate; python ${ES_BEATS}/filebeat/scripts/create_fileset.py --path=$(PWD) --es_beats=$(ES_BEATS) --module=$(MODULE) --fileset=$(FILESET)

View File

@@ -205,6 +205,9 @@ filebeat.prospectors:
 # Note: Potential data loss. Make sure to read and understand the docs for this option.
 #close_timeout: 0
+# Defines if the prospector is enabled
+#enabled: true
 #----------------------------- Stdin prospector -------------------------------
 # Configuration to use stdin input
 #- input_type: stdin

View File

@@ -0,0 +1,34 @@
- key: log
  title: Log File Content
  description: >
    Contains log file lines.
  fields:
    - name: source
      type: keyword
      required: true
      description: >
        The file from which the line was read. This field contains the full path to the file.
        For example: `/var/log/system.log`.
    - name: offset
      type: long
      required: false
      description: >
        The file offset the reported line starts at.
    - name: message
      type: text
      ignore_above: 0
      required: true
      description: >
        The content of the line read from the log file.
    - name: type
      required: true
      description: >
        The name of the log event. This field is set to the value specified for the `document_type` option in the prospector section of the Filebeat config file.
    - name: input_type
      required: true
      description: >
        The input type from which the event was generated. This field is set to the value specified for the `input_type` option in the prospector section of the Filebeat config file.

View File

@@ -40,7 +40,9 @@ func (c *Crawler) Start(states file.States, once bool) error {
 		if err != nil {
 			return fmt.Errorf("Error in initing prospector: %s", err)
 		}
-		c.prospectors = append(c.prospectors, prospector)
+		if prospector.IsEnabled() {
+			c.prospectors = append(c.prospectors, prospector)
+		}
 	}
 	logp.Info("Loading Prospectors completed. Number of prospectors: %v", len(c.prospectors))

View File

@@ -0,0 +1,18 @@
version: '2'
services:
  beat:
    build: ${PWD}/.
    depends_on:
      - elasticsearch
    env_file:
      - ${PWD}/build/test.env
    working_dir: /go/src/github.com/elastic/beats/filebeat
    volumes:
      - ${PWD}/..:/go/src/github.com/elastic/beats/
    command: make
    entrypoint: /go/src/github.com/elastic/beats/filebeat/docker-entrypoint.sh
  elasticsearch:
    extends:
      file: ../testing/environments/${TESTING_ENVIRONMENT}.yml
      service: elasticsearch
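A hedged example of driving this compose file: TESTING_ENVIRONMENT selects a file under ../testing/environments/, and the service's default command is `make`; the environment name shown is an assumption:

# Build the filebeat test container and run `make` against the chosen
# Elasticsearch environment definition:
TESTING_ENVIRONMENT=latest docker-compose run beat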

View File

@@ -0,0 +1,69 @@
#!/bin/bash
set -e
# This script is the entrypoint to the filebeat Docker container. This will
# verify that all services are running before executing the command provided
# to the docker container.
setDefaults() {
# Use default ports and hosts if not specified.
: ${ES_HOST:=localhost}
: ${ES_PORT:=9200}
}
es_url() {
local auth
auth=""
if [ -n "$ES_USER" ]; then
auth="$ES_USER"
if [ -n "$ES_PASS" ]; then
auth="$auth:$ES_PASS"
fi
auth="$auth@"
fi
if [ "$SHIELD" == "true" ]; then
code=$(curl --write-out "%{http_code}\n" --silent --output /dev/null "http://${ES_HOST}:${ES_PORT}/")
if [ $code != 401 ]; then
echo "Shield does not seem to be running"
exit 1
fi
fi
echo "http://${auth}${ES_HOST}:${ES_PORT}"
}
# Wait for elasticsearch to start. It requires that the status be either
# green or yellow.
waitForElasticsearch() {
echo -n "Waiting on elasticsearch($(es_url)) to start."
for ((i=1;i<=60;i++))
do
health=$(curl --silent "$(es_url)/_cat/health" | awk '{print $4}')
if [[ "$health" == "green" ]] || [[ "$health" == "yellow" ]]
then
echo
echo "Elasticsearch is ready!"
return 0
fi
echo -n '.'
sleep 1
done
echo
echo >&2 'Elasticsearch is not running or is not healthy.'
echo >&2 "Address: $(es_url)"
echo >&2 "$health"
exit 1
}
# Main
setDefaults
# Services need to test outputs
# Wait until all services are started
waitForElasticsearch
exec "$@"
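Since the entrypoint only gates on Elasticsearch health before exec'ing its arguments, it can be exercised directly; a sketch (host, port, and credential values are illustrative):

# Wait for Elasticsearch on a non-default host, then run the given command:
ES_HOST=elasticsearch ES_PORT=9200 ./docker-entrypoint.sh make

# With Shield enabled, the script first expects a 401 from the root URL:
SHIELD=true ES_USER=beats ES_PASS=secret ./docker-entrypoint.sh make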

View File

@@ -30,7 +30,7 @@ mac>> for OS X, and <<win, win>> for Windows).
 [NOTE]
 ==================================================
-If you use Apt or Yum, you can {libbeat}/setup-repositories.html[install Filebeat from our repositories] to update to the newest version more easily.
+If you use Apt or Yum, you can <<setup-repositories,install Filebeat from our repositories>> to update to the newest version more easily.
 See our https://www.elastic.co/downloads/beats/filebeat[download page] for other installation options, such as 32-bit images.
@@ -94,7 +94,8 @@ see <<filebeat-configuration-details>>.
 To configure Filebeat, you edit the configuration file. For rpm and deb, you'll
 find the configuration file at `/etc/filebeat/filebeat.yml`. For mac and win, look in
-the archive that you just extracted.
+the archive that you just extracted. There's also a full example configuration file
+called `filebeat.full.yml` that shows all non-deprecated options.
 Here is a sample of the `filebeat` section of the `filebeat.yml` file. Filebeat uses predefined
 default values for most configuration options.
@@ -166,6 +167,10 @@ include::../../libbeat/docs/shared-template-load.asciidoc[]
 Start Filebeat by issuing the appropriate command for your platform.
+NOTE: If you use an init.d script to start Filebeat on deb or rpm, you can't
+specify command line flags (see <<filebeat-command-line>>). To specify flags,
+start Filebeat in the foreground.
 *deb:*
 [source,shell]
@@ -203,7 +208,12 @@ Filebeat is now ready to send log files to your defined output.
 We don't offer prebuilt dashboards for visualizing Filebeat data. However, to make it easier for you to explore Filebeat
 data in Kibana, we've created a Filebeat index pattern: `filebeat-*`. To load this pattern, you can use the script
-that's provided for importing dashboards.
+that's provided for importing dashboards:
+[source,shell]
+----------------------------------------------------------------------
+./scripts/import_dashboards -only-index
+----------------------------------------------------------------------
 For more information about running the script, see
 {libbeat}/import-dashboards.html[Importing Existing Beat Dashboards] in the _Beats Platform Reference_.

View File

@@ -1,6 +1,6 @@
 = Filebeat Reference
-include::./version.asciidoc[]
+include::../../libbeat/docs/version.asciidoc[]
 :libbeat: http://www.elastic.co/guide/en/beats/libbeat/{doc-branch}
 :packetbeat: http://www.elastic.co/guide/en/beats/packetbeat/{doc-branch}
@@ -8,7 +8,7 @@ include::./version.asciidoc[]
 :filebeat: http://www.elastic.co/guide/en/beats/filebeat/{doc-branch}
 :winlogbeat: http://www.elastic.co/guide/en/beats/winlogbeat/{doc-branch}
 :elasticsearch: https://www.elastic.co/guide/en/elasticsearch/reference/{doc-branch}
-:securitydoc: https://www.elastic.co/guide/en/x-pack/5.0
+:securitydoc: https://www.elastic.co/guide/en/x-pack/5.2
 :version: {stack-version}
 :beatname_lc: filebeat
 :beatname_uc: Filebeat
@@ -22,6 +22,8 @@ include::./command-line.asciidoc[]
 include::../../libbeat/docs/shared-directory-layout.asciidoc[]
+include::../../libbeat/docs/repositories.asciidoc[]
 include::./upgrading.asciidoc[]
 include::./how-filebeat-works.asciidoc[]
@@ -240,7 +240,9 @@ When this option is enabled, Filebeat closes a file as soon as the end of a file
WARNING: Only use this option if you understand that data loss is a potential side effect. Another side effect is that multiline events might not be completely sent before the timeout expires.
-When this option is enabled, Filebeat gives every harvester a predefined lifetime. Regardless of where the reader is in the file, reading will stop after the `close_timeout` period has elapsed. This option can be useful for older log files when you want to spend only a predefined amount of time on the files. If you set `close_timeout` to equal `ignore_older`, the file will not be picked up if it's modified while the harvester is closed. This combination of settings normally leads to data loss, and the complete file is not sent.
+When this option is enabled, Filebeat gives every harvester a predefined lifetime. Regardless of where the reader is in the file, reading will stop after the `close_timeout` period has elapsed. This option can be useful for older log files when you want to spend only a predefined amount of time on the files. While `close_timeout` closes the file after the predefined timeout, the prospector starts a new harvester again per the defined `scan_frequency` if the file is still being updated, and the `close_timeout` countdown for the new harvester starts over.
+If you set `close_timeout` to equal `ignore_older`, the file will not be picked up if it's modified while the harvester is closed. This combination of settings normally leads to data loss, and the complete file is not sent.
When you use `close_timeout` for logs that contain multiline events, the harvester might stop in the middle of a multiline event, which means that only parts of the event will be sent. If the harvester is started again and the file still exists, only the second part of the event will be sent.
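To make the interplay concrete, a hedged sketch of a prospector combining these options (values are illustrative only, not from this commit):

[source,yaml]
-------------------------------------------------------------------------------------
filebeat.prospectors:
- input_type: log
  paths:
    - /var/log/app/*.log
  close_timeout: 5m
  # setting ignore_older equal to close_timeout normally leads to data loss
  ignore_older: 1h
-------------------------------------------------------------------------------------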
@@ -320,16 +322,11 @@ Example configuration:
[source,yaml]
-------------------------------------------------------------------------------------
-json.message_key: log
json.keys_under_root: true
json.add_error_key: true
+json.message_key: log
-------------------------------------------------------------------------------------
-*`message_key`*:: JSON key on which to apply the line filtering and multiline settings. This key must be top level
-and its value must be string, otherwise it is ignored. If no text key is defined, the line
-filtering and multiline features cannot be used.
*`keys_under_root`*:: By default, the decoded JSON is placed under a "json" key in the output document.
If you enable this setting, the keys are copied top level in the output document. The default is false.
@@ -337,7 +334,14 @@ If you enable this setting, the keys are copied top level in the output document
JSON object overwrite the fields that Filebeat normally adds (type, source, offset, etc.) in case of conflicts.
*`add_error_key`*:: If this setting is enabled, Filebeat adds a "json_error" key in case of JSON
-unmarshaling errors or when a text key is defined in the configuration but cannot be used.
+unmarshalling errors or when a `message_key` is defined in the configuration but cannot be used.
+*`message_key`*:: An optional configuration setting that specifies a JSON key on
+which to apply the line filtering and multiline settings. If specified, the
+key must be at the top level in the JSON object and the value associated with
+the key must be a string, otherwise no filtering or multiline aggregation will
+occur.
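Taken together with `overwrite_keys`, an illustrative prospector-level snippet (not part of this commit) could look like:

[source,yaml]
-------------------------------------------------------------------------------------
json.message_key: log
json.keys_under_root: true
json.overwrite_keys: true
json.add_error_key: true
-------------------------------------------------------------------------------------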
[[multiline]]
===== multiline
@@ -460,6 +464,10 @@ Currently if a new harvester can be started again, the harvester is picked rando
This configuration option applies per prospector. You can use this option to indirectly set higher priorities on certain prospectors
by assigning a higher limit of harvesters.
+===== enabled
+The `enabled` option can be used with each prospector to define whether the prospector is enabled. By default, enabled is set to true.
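For example, a prospector can be kept in the configuration but switched off (path is a placeholder):

[source,yaml]
-------------------------------------------------------------------------------------
filebeat.prospectors:
- input_type: log
  paths:
    - /var/log/noisy/*.log
  enabled: false
-------------------------------------------------------------------------------------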
[[configuration-global-options]]
=== Filebeat Global Configuration
@@ -503,7 +511,7 @@ filebeat.idle_timeout: 5s
===== registry_file
The name of the registry file. If a relative path is used, it is considered relative to the
-data path. See the <<directory-layout>> section for details. The default is `registry`.
+data path. See the <<directory-layout>> section for details. The default is `${path.data}/registry`.
[source,yaml]
-------------------------------------------------------------------------------------
@@ -512,6 +520,9 @@ filebeat.registry_file: registry
It is not possible to use a symlink as registry file.
+NOTE: The registry file is only updated when new events are flushed, not on a predefined period.
+That means that if some states have an expired TTL, they are only removed when new events are processed.
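A minimal sketch of overriding the registry location (the path shown is illustrative):

[source,yaml]
-------------------------------------------------------------------------------------
filebeat.registry_file: /var/lib/filebeat/registry
-------------------------------------------------------------------------------------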
===== config_dir
@@ -9,12 +9,18 @@ The following topics describe how to secure communication between Filebeat and o
* <<securing-communication-elasticsearch>>
* <<configuring-ssl-logstash>>
+//sets block macro for https.asciidoc included in next section
--
[[securing-communication-elasticsearch]]
== Securing Communication With Elasticsearch
include::../../libbeat/docs/https.asciidoc[]
+//sets block macro for shared-ssl-logstash-config.asciidoc included in next section
[[configuring-ssl-logstash]]
== Securing Communication With Logstash by Using SSL
include::../../libbeat/docs/shared-ssl-logstash-config.asciidoc[]
@@ -10,6 +10,8 @@ following tips:
* <<enable-filebeat-debugging>>
* <<faq>>
+//sets block macro for getting-help.asciidoc included in next section
--
[[getting-help]]
@@ -17,6 +19,8 @@ following tips:
include::../../libbeat/docs/getting-help.asciidoc[]
+//sets block macro for debugging.asciidoc included in next section
[[enable-filebeat-debugging]]
== Debugging
@@ -205,6 +205,9 @@ filebeat.prospectors:
# Note: Potential data loss. Make sure to read and understand the docs for this option.
#close_timeout: 0
+# Defines whether the prospector is enabled
+#enabled: true
#----------------------------- Stdin prospector -------------------------------
# Configuration to use stdin input
#- input_type: stdin
vendor/github.com/elastic/beats/filebeat/filebeat.py (new vendored executable file, 197 lines)
@@ -0,0 +1,197 @@
#!/usr/bin/env python
import argparse
import sys
import os
import yaml
import requests
import tempfile
import subprocess
import socket
from jinja2 import Template


def main():
    parser = argparse.ArgumentParser(
        description="PROTOTYPE: start filebeat with a module configuration")
    parser.add_argument("--modules", default="",
                        help="Comma-separated list of modules to load")
    parser.add_argument("--es", default="http://localhost:9200",
                        help="Elasticsearch URL")
    parser.add_argument("--index", default=None,
                        help="Elasticsearch index")
    parser.add_argument("--registry", default=None,
                        help="Registry file to use")
    parser.add_argument("-M", nargs="*", type=str, default=None,
                        help="Variables overrides. e.g. path=/test")
    parser.add_argument("--once", action="store_true",
                        help="Run filebeat with the -once flag")
    args = parser.parse_args()
    print(args)

    # changing directory because we use paths relative to the binary
    os.chdir(os.path.dirname(sys.argv[0]))

    modules = [m for m in args.modules.split(",") if m]
    if len(modules) == 0:
        print("You need to specify at least one module")
        sys.exit(1)

    load_dashboards(args)
    load_datasets(args, modules)


def load_dashboards(args):
    cmd = ["../libbeat/dashboards/import_dashboards",
           "-dir", "_meta/kibana",
           "-es", args.es]
    subprocess.Popen(cmd).wait()


def load_datasets(args, modules):
    # collect a prospector configuration snippet for every fileset
    prospectors = ""
    for module in modules:
        path = os.path.join("module", module)
        if not os.path.isdir(path):
            print("Module {} not found".format(module))
            sys.exit(1)
        print("Found module {} in {}".format(module, path))

        filesets = [name for name in os.listdir(path) if
                    os.path.isfile(os.path.join(path, name, "manifest.yml"))]
        print("Found filesets: {}".format(filesets))

        for fileset in filesets:
            prospectors += load_fileset(args, module, fileset,
                                        os.path.join(path, fileset))

    print("Generated configuration: {}".format(prospectors))
    run_filebeat(args, prospectors)


def load_fileset(args, module, fileset, path):
    manifest = yaml.load(file(os.path.join(path, "manifest.yml"), "r"))
    var = evaluate_vars(args, manifest["vars"], module, fileset)
    var["beat"] = dict(module=module, fileset=fileset, path=path, args=args)
    print("Evaluated variables: {}".format(var))
    load_pipeline(var, manifest["ingest_pipeline"])
    generate_prospectors(var, manifest["prospectors"])
    return var["beat"]["prospectors"]


def evaluate_vars(args, var_in, module, fileset):
    var = {
        "builtin": get_builtin_vars()
    }
    for name, vals in var_in.items():
        var[name] = vals["default"]
        if sys.platform == "darwin" and "os.darwin" in vals:
            var[name] = vals["os.darwin"]
        elif sys.platform == "win32" and "os.windows" in vals:
            var[name] = vals["os.windows"]

        if isinstance(var[name], basestring):
            var[name] = Template(var[name]).render(var)
        elif isinstance(var[name], list):
            # only supports array of strings atm
            var[name] = [Template(x).render(var) for x in var[name]]

    # overrides given via -M module.fileset.key=value
    if args.M is not None:
        for pair in args.M:
            key, val = pair.partition("=")[::2]
            if key.startswith("{}.{}.".format(module, fileset)):
                key = key[len("{}.{}.".format(module, fileset)):]
                # this is a hack in the prototype only, because
                # here we don't know the type of each variable.
                if key == "paths":
                    val = val.split(",")
                var[key] = val

    return var


def get_builtin_vars():
    host = socket.gethostname()
    # separate the domain
    hostname, _, domain = host.partition(".")
    return {
        "hostname": hostname,
        "domain": domain
    }


def load_pipeline(var, pipeline):
    path = os.path.join(var["beat"]["path"], Template(pipeline).render(var))
    print("Loading ingest pipeline: {}".format(path))
    var["beat"]["pipeline_id"] = var["beat"]["module"] + '-' + var["beat"]["fileset"] + \
        '-' + os.path.splitext(os.path.basename(path))[0]
    print("Pipeline id: {}".format(var["beat"]["pipeline_id"]))
    with open(path, "r") as f:
        contents = f.read()
        r = requests.put("{}/_ingest/pipeline/{}"
                         .format(var["beat"]["args"].es,
                                 var["beat"]["pipeline_id"]),
                         data=contents)
        if r.status_code >= 300:
            print("Error posting pipeline: {}".format(r.text))
            sys.exit(1)


def run_filebeat(args, prospectors):
    cfg_template = """
filebeat.prospectors:
{{prospectors}}
output.elasticsearch.hosts: ["{{es}}"]
output.elasticsearch.pipeline: "%{[fields.pipeline_id]}"
"""
    if args.index:
        cfg_template += "\noutput.elasticsearch.index: {}".format(args.index)
    if args.once:
        cfg_template += "\nfilebeat.idle_timeout: 0.5s"
    if args.registry:
        cfg_template += "\nfilebeat.registry_file: {}".format(args.registry)

    fd, fname = tempfile.mkstemp(suffix=".yml", prefix="filebeat-",
                                 text=True)
    with open(fname, "w") as cfgfile:
        cfgfile.write(Template(cfg_template).render(
            dict(prospectors=prospectors, es=args.es)))
    print("Wrote configuration file: {}".format(cfgfile.name))
    os.close(fd)

    cmd = ["./filebeat.test", "-systemTest",
           "-e", "-c", cfgfile.name, "-d", "*"]
    if args.once:
        cmd.append("-once")
    print("Starting filebeat: " + " ".join(cmd))
    subprocess.Popen(cmd).wait()


def generate_prospectors(var, prospectors):
    var["beat"]["prospectors"] = ""
    for pr in prospectors:
        path = os.path.join(var["beat"]["path"], Template(pr).render(var))
        with open(path, "r") as f:
            contents = Template(f.read()).render(var)
            if var["beat"]["args"].once:
                # speed up -once runs by closing files early and scanning often
                contents += "\n  close_eof: true"
                contents += "\n  scan_frequency: 0.2s"
                if "multiline" in contents:
                    contents += "\n  multiline.timeout: 0.2s"
            var["beat"]["prospectors"] += "\n" + contents


if __name__ == "__main__":
    sys.exit(main())
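For context, the configuration that this prototype renders and feeds to `filebeat.test` looks roughly like the following (a hypothetical example for the mysql/error fileset; the actual paths and pipeline id depend on the manifest, the host, and any -M overrides):

[source,yaml]
----------------------------------------------------------------------
filebeat.prospectors:
- input_type: log
  paths:
    - /var/log/mysql/error.log*
  exclude_files: [".gz$"]
  fields:
    source_type: mysql-error
    pipeline_id: mysql-error-pipeline
output.elasticsearch.hosts: ["http://localhost:9200"]
output.elasticsearch.pipeline: "%{[fields.pipeline_id]}"
----------------------------------------------------------------------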
@@ -7,7 +7,7 @@
}
},
"_meta": {
-"version": "5.1.2"
+"version": "5.2.2"
},
"dynamic_templates": [
{
@@ -5,7 +5,7 @@
"norms": false
},
"_meta": {
-"version": "5.1.2"
+"version": "5.2.2"
},
"dynamic_templates": [
{
@@ -98,6 +98,7 @@
},
"order": 0,
"settings": {
+"index.mapping.total_fields.limit": 10000,
"index.refresh_interval": "5s"
},
"template": "filebeat-*"
@@ -1,13 +1,12 @@
package input
import (
-	"fmt"
	"time"
	"github.com/elastic/beats/filebeat/harvester/reader"
	"github.com/elastic/beats/filebeat/input/file"
	"github.com/elastic/beats/libbeat/common"
-	"github.com/elastic/beats/libbeat/logp"
+	"github.com/elastic/beats/libbeat/common/jsontransform"
)
// Event is sent to the output and must contain all relevant information
@@ -80,43 +79,6 @@ func mergeJSONFields(e *Event, event common.MapStr, jsonFields common.MapStr) {
	// Delete existing json key
	delete(event, "json")
-	for k, v := range jsonFields {
-		if e.JSONConfig.OverwriteKeys {
-			if k == "@timestamp" {
-				vstr, ok := v.(string)
-				if !ok {
-					logp.Err("JSON: Won't overwrite @timestamp because value is not string")
-					event[reader.JsonErrorKey] = "@timestamp not overwritten (not string)"
-					continue
-				}
-				// @timestamp must be of format RFC3339
-				ts, err := time.Parse(time.RFC3339, vstr)
-				if err != nil {
-					logp.Err("JSON: Won't overwrite @timestamp because of parsing error: %v", err)
-					event[reader.JsonErrorKey] = fmt.Sprintf("@timestamp not overwritten (parse error on %s)", vstr)
-					continue
-				}
-				event[k] = common.Time(ts)
-			} else if k == "type" {
-				vstr, ok := v.(string)
-				if !ok {
-					logp.Err("JSON: Won't overwrite type because value is not string")
-					event[reader.JsonErrorKey] = "type not overwritten (not string)"
-					continue
-				}
-				if len(vstr) == 0 || vstr[0] == '_' {
-					logp.Err("JSON: Won't overwrite type because value is empty or starts with an underscore")
-					event[reader.JsonErrorKey] = fmt.Sprintf("type not overwritten (invalid value [%s])", vstr)
-					continue
-				}
-				event[k] = vstr
-			} else {
-				event[k] = v
-			}
-		} else if _, exists := event[k]; !exists {
-			event[k] = v
-		}
-	}
+	jsontransform.WriteJSONKeys(event, jsonFields, e.JSONConfig.OverwriteKeys, reader.JsonErrorKey)
}
}
@@ -0,0 +1,10 @@
- key: mysql
  title: "MySQL"
  description: >
    Module for parsing the MySQL log files.
  fields:
    - name: mysql
      type: group
      description: >
        Fields from the MySQL log files.
      fields:
@@ -0,0 +1,13 @@
{
"hits": 0,
"timeRestore": false,
"description": "",
"title": "Filebeat MySQL Dashboard",
"uiStateJSON": "{\"P-1\":{\"vis\":{\"params\":{\"sort\":{\"columnIndex\":null,\"direction\":null}}}}}",
"panelsJSON": "[{\"col\":1,\"id\":\"MySQL-slowest-queries\",\"panelIndex\":1,\"row\":8,\"size_x\":6,\"size_y\":4,\"type\":\"visualization\"},{\"col\":1,\"id\":\"MySQL-Slow-queries-over-time\",\"panelIndex\":2,\"row\":1,\"size_x\":6,\"size_y\":3,\"type\":\"visualization\"},{\"col\":7,\"id\":\"MySQL-error-logs\",\"panelIndex\":3,\"row\":1,\"size_x\":6,\"size_y\":3,\"type\":\"visualization\"},{\"col\":7,\"columns\":[\"mysql.error.level\",\"mysql.error.message\"],\"id\":\"Filebeat-MySQL-error-log\",\"panelIndex\":4,\"row\":8,\"size_x\":6,\"size_y\":5,\"sort\":[\"@timestamp\",\"desc\"],\"type\":\"search\"},{\"col\":7,\"id\":\"MySQL-Error-logs-levels\",\"panelIndex\":5,\"row\":4,\"size_x\":6,\"size_y\":4,\"type\":\"visualization\"},{\"col\":1,\"id\":\"MySQL-Slow-logs-by-count\",\"panelIndex\":6,\"row\":4,\"size_x\":6,\"size_y\":4,\"type\":\"visualization\"}]",
"optionsJSON": "{\"darkTheme\":false}",
"version": 1,
"kibanaSavedObjectMeta": {
"searchSourceJSON": "{\"filter\":[{\"query\":{\"query_string\":{\"analyze_wildcard\":true,\"query\":\"*\"}}}]}"
}
}
@@ -0,0 +1,16 @@
{
"sort": [
"@timestamp",
"desc"
],
"hits": 0,
"description": "",
"title": "Filebeat MySQL Slow log",
"version": 1,
"kibanaSavedObjectMeta": {
"searchSourceJSON": "{\"index\":\"filebeat-*\",\"query\":{\"query_string\":{\"analyze_wildcard\":true,\"query\":\"_exists_:mysql.slowlog\"}},\"filter\":[],\"highlight\":{\"pre_tags\":[\"@kibana-highlighted-field@\"],\"post_tags\":[\"@/kibana-highlighted-field@\"],\"fields\":{\"*\":{}},\"require_field_match\":false,\"fragment_size\":2147483647}}"
},
"columns": [
"_source"
]
}
@@ -0,0 +1,17 @@
{
"sort": [
"@timestamp",
"desc"
],
"hits": 0,
"description": "",
"title": "Filebeat MySQL error log",
"version": 1,
"kibanaSavedObjectMeta": {
"searchSourceJSON": "{\"index\":\"filebeat-*\",\"query\":{\"query_string\":{\"query\":\"_exists_:mysql.error\",\"analyze_wildcard\":true}},\"filter\":[],\"highlight\":{\"pre_tags\":[\"@kibana-highlighted-field@\"],\"post_tags\":[\"@/kibana-highlighted-field@\"],\"fields\":{\"*\":{}},\"require_field_match\":false,\"fragment_size\":2147483647}}"
},
"columns": [
"mysql.error.level",
"mysql.error.message"
]
}
@@ -0,0 +1,11 @@
{
"visState": "{\"title\":\"MySQL Error logs levels\",\"type\":\"pie\",\"params\":{\"shareYAxis\":true,\"addTooltip\":true,\"addLegend\":true,\"legendPosition\":\"bottom\",\"isDonut\":false},\"aggs\":[{\"id\":\"1\",\"enabled\":true,\"type\":\"count\",\"schema\":\"metric\",\"params\":{}},{\"id\":\"2\",\"enabled\":true,\"type\":\"terms\",\"schema\":\"segment\",\"params\":{\"field\":\"mysql.error.level\",\"size\":5,\"order\":\"desc\",\"orderBy\":\"1\"}}],\"listeners\":{}}",
"description": "",
"title": "MySQL Error logs levels",
"uiStateJSON": "{\"vis\":{\"colors\":{\"Note\":\"#9AC48A\",\"Warning\":\"#F9934E\",\"ERROR\":\"#E24D42\"}}}",
"version": 1,
"savedSearchId": "Filebeat-MySQL-error-log",
"kibanaSavedObjectMeta": {
"searchSourceJSON": "{\"filter\":[]}"
}
}
@@ -0,0 +1,11 @@
{
"visState": "{\"title\":\"MySQL Slow logs by count\",\"type\":\"pie\",\"params\":{\"shareYAxis\":true,\"addTooltip\":true,\"addLegend\":true,\"legendPosition\":\"bottom\",\"isDonut\":false},\"aggs\":[{\"id\":\"1\",\"enabled\":true,\"type\":\"count\",\"schema\":\"metric\",\"params\":{}},{\"id\":\"2\",\"enabled\":true,\"type\":\"terms\",\"schema\":\"segment\",\"params\":{\"field\":\"mysql.slowlog.query\",\"size\":5,\"order\":\"desc\",\"orderBy\":\"1\"}}],\"listeners\":{}}",
"description": "",
"title": "MySQL Slow logs by count",
"uiStateJSON": "{}",
"version": 1,
"savedSearchId": "Filebeat-MySQL-Slow-log",
"kibanaSavedObjectMeta": {
"searchSourceJSON": "{\"filter\":[]}"
}
}
@@ -0,0 +1,11 @@
{
"visState": "{\"title\":\"MySQL Slow queries over time\",\"type\":\"histogram\",\"params\":{\"shareYAxis\":true,\"addTooltip\":true,\"addLegend\":true,\"legendPosition\":\"right\",\"scale\":\"linear\",\"mode\":\"stacked\",\"times\":[],\"addTimeMarker\":false,\"defaultYExtents\":false,\"setYExtents\":false,\"yAxis\":{}},\"aggs\":[{\"id\":\"1\",\"enabled\":true,\"type\":\"count\",\"schema\":\"metric\",\"params\":{\"customLabel\":\"Slow queries\"}},{\"id\":\"2\",\"enabled\":true,\"type\":\"date_histogram\",\"schema\":\"segment\",\"params\":{\"field\":\"@timestamp\",\"interval\":\"auto\",\"customInterval\":\"2h\",\"min_doc_count\":1,\"extended_bounds\":{}}}],\"listeners\":{}}",
"description": "",
"title": "MySQL Slow queries over time",
"uiStateJSON": "{\"vis\":{\"colors\":{\"Slow queries\":\"#EF843C\"}}}",
"version": 1,
"savedSearchId": "Filebeat-MySQL-Slow-log",
"kibanaSavedObjectMeta": {
"searchSourceJSON": "{\"filter\":[]}"
}
}
@@ -0,0 +1,11 @@
{
"visState": "{\"title\":\"MySQL error logs\",\"type\":\"histogram\",\"params\":{\"shareYAxis\":true,\"addTooltip\":true,\"addLegend\":true,\"legendPosition\":\"right\",\"scale\":\"linear\",\"mode\":\"stacked\",\"times\":[],\"addTimeMarker\":false,\"defaultYExtents\":false,\"setYExtents\":false,\"yAxis\":{}},\"aggs\":[{\"id\":\"1\",\"enabled\":true,\"type\":\"count\",\"schema\":\"metric\",\"params\":{\"customLabel\":\"Error logs\"}},{\"id\":\"2\",\"enabled\":true,\"type\":\"date_histogram\",\"schema\":\"segment\",\"params\":{\"field\":\"@timestamp\",\"interval\":\"auto\",\"customInterval\":\"2h\",\"min_doc_count\":1,\"extended_bounds\":{}}}],\"listeners\":{}}",
"description": "",
"title": "MySQL error logs",
"uiStateJSON": "{\"vis\":{\"colors\":{\"Count\":\"#447EBC\",\"Error logs\":\"#1F78C1\"}}}",
"version": 1,
"savedSearchId": "Filebeat-MySQL-error-log",
"kibanaSavedObjectMeta": {
"searchSourceJSON": "{\"filter\":[]}"
}
}
@@ -0,0 +1,11 @@
{
"visState": "{\"title\":\"MySQL slowest queries\",\"type\":\"table\",\"params\":{\"perPage\":10,\"showPartialRows\":false,\"showMeticsAtAllLevels\":false,\"sort\":{\"columnIndex\":null,\"direction\":null},\"showTotal\":false,\"totalFunc\":\"sum\"},\"aggs\":[{\"id\":\"1\",\"enabled\":true,\"type\":\"max\",\"schema\":\"metric\",\"params\":{\"field\":\"mysql.slowlog.query_time.sec\",\"customLabel\":\"Query time\"}},{\"id\":\"2\",\"enabled\":true,\"type\":\"terms\",\"schema\":\"bucket\",\"params\":{\"field\":\"mysql.slowlog.query\",\"size\":5,\"order\":\"desc\",\"orderBy\":\"1\",\"customLabel\":\"Query\"}},{\"id\":\"3\",\"enabled\":true,\"type\":\"terms\",\"schema\":\"bucket\",\"params\":{\"field\":\"mysql.slowlog.user\",\"size\":5,\"order\":\"desc\",\"orderBy\":\"1\",\"customLabel\":\"User\"}}],\"listeners\":{}}",
"description": "",
"title": "MySQL slowest queries",
"uiStateJSON": "{\"vis\":{\"params\":{\"sort\":{\"columnIndex\":null,\"direction\":null}}}}",
"version": 1,
"savedSearchId": "Filebeat-MySQL-Slow-log",
"kibanaSavedObjectMeta": {
"searchSourceJSON": "{\"filter\":[]}"
}
}
@@ -0,0 +1,21 @@
- name: error
  type: group
  description: >
    Contains fields from the MySQL error logs.
  fields:
    - name: timestamp
      description: >
        The timestamp from the log line.
    - name: thread_id
      type: long
      description: >
        As of MySQL 5.7.2, this is the thread id. For MySQL versions prior to 5.7.2, this
        field contains the process id.
    - name: level
      example: "Warning"
      description: >
        The log level.
    - name: message
      type: text
      description: >
        The logged message.
@@ -0,0 +1,9 @@
- input_type: log
  paths:
{%- for path in paths %}
    - {{path}}
{%- endfor %}
  exclude_files: [".gz$"]
  fields:
    source_type: mysql-error
    pipeline_id: {{beat.pipeline_id}}
@@ -0,0 +1,50 @@
{
"description": "Pipeline for parsing MySQL error logs. It currently uses message1 and message2 to workaround bug https://github.com/elastic/elasticsearch/issues/22117",
"processors": [{
"grok": {
"field": "message",
"patterns": [
"%{LOCALDATETIME:mysql.error.timestamp} (\\[%{DATA:mysql.error.level}\\] )?%{GREEDYDATA:mysql.error.message}",
"%{DATA:mysql.error.timestamp} %{NUMBER:mysql.error.thread_id} \\[%{DATA:mysql.error.level}\\] %{GREEDYDATA:mysql.error.message1}",
"%{GREEDYDATA:mysql.error.message2}"
],
"ignore_missing": true,
"pattern_definitions": {
"LOCALDATETIME": "[0-9]+ %{TIME}",
"GREEDYDATA1": ".*"
}
}
}, {
"remove": {
"field": "message"
}
}, {
"rename": {
"field": "mysql.error.message1",
"target_field": "mysql.error.message",
"ignore_failure": true
}
}, {
"rename": {
"field": "mysql.error.message2",
"target_field": "mysql.error.message",
"ignore_failure": true
}
}, {
"date": {
"field": "mysql.error.timestamp",
"target_field": "@timestamp",
"formats": [
"ISO8601",
"YYMMdd H:m:s"
],
"ignore_failure": true
}
}],
"on_failure" : [{
"set" : {
"field" : "error",
"value" : "{{ _ingest.on_failure_message }}"
}
}]
}
@@ -0,0 +1,15 @@
module_version: 1.0
vars:
  paths:
    default:
      - /var/log/mysql/error.log*
      - /var/log/mysqld.log*
    os.darwin:
      - /usr/local/var/mysql/{{builtin.hostname}}.{{builtin.domain}}.err*
    os.windows:
      - "c:/programdata/MySQL/MySQL Server*/error.log*"
ingest_pipeline: ingest/pipeline.json
prospectors:
  - config/error.yml
@@ -0,0 +1,187 @@
161209 13:08:33 mysqld_safe Starting mysqld daemon with databases from /usr/local/var/mysql
2016-12-09T12:08:33.335060Z 0 [Warning] TIMESTAMP with implicit DEFAULT value is deprecated. Please use --explicit_defaults_for_timestamp server option (see documentation for more details).
2016-12-09T12:08:33.335892Z 0 [Warning] Insecure configuration for --secure-file-priv: Current value does not restrict location of generated files. Consider setting it to a valid, non-empty path.
2016-12-09T12:08:33.336610Z 0 [Note] /usr/local/Cellar/mysql/5.7.10/bin/mysqld (mysqld 5.7.10) starting as process 61571 ...
2016-12-09T12:08:33.345527Z 0 [Warning] Setting lower_case_table_names=2 because file system for /usr/local/var/mysql/ is case insensitive
2016-12-09T12:08:33.351596Z 0 [Note] InnoDB: Mutexes and rw_locks use GCC atomic builtins
2016-12-09T12:08:33.351632Z 0 [Note] InnoDB: Uses event mutexes
2016-12-09T12:08:33.351638Z 0 [Note] InnoDB: GCC builtin __atomic_thread_fence() is used for memory barrier
2016-12-09T12:08:33.351641Z 0 [Note] InnoDB: Compressed tables use zlib 1.2.3
2016-12-09T12:08:33.352714Z 0 [Note] InnoDB: Number of pools: 1
2016-12-09T12:08:33.354153Z 0 [Note] InnoDB: Using CPU crc32 instructions
2016-12-09T12:08:33.366818Z 0 [Note] InnoDB: Initializing buffer pool, total size = 128M, instances = 1, chunk size = 128M
2016-12-09T12:08:33.379566Z 0 [Note] InnoDB: Completed initialization of buffer pool
2016-12-09T12:08:33.401031Z 0 [Note] InnoDB: Highest supported file format is Barracuda.
2016-12-09T12:08:33.402267Z 0 [Note] InnoDB: Log scan progressed past the checkpoint lsn 2498863
2016-12-09T12:08:33.402289Z 0 [Note] InnoDB: Doing recovery: scanned up to log sequence number 2498872
2016-12-09T12:08:33.402433Z 0 [Note] InnoDB: Doing recovery: scanned up to log sequence number 2498872
2016-12-09T12:08:33.402440Z 0 [Note] InnoDB: Database was not shutdown normally!
2016-12-09T12:08:33.402443Z 0 [Note] InnoDB: Starting crash recovery.
2016-12-09T12:08:33.549180Z 0 [Note] InnoDB: Removed temporary tablespace data file: "ibtmp1"
2016-12-09T12:08:33.549206Z 0 [Note] InnoDB: Creating shared tablespace for temporary tables
2016-12-09T12:08:33.549317Z 0 [Note] InnoDB: Setting file './ibtmp1' size to 12 MB. Physically writing the file full; Please wait ...
2016-12-09T12:08:33.585175Z 0 [Note] InnoDB: File './ibtmp1' size is now 12 MB.
2016-12-09T12:08:33.588126Z 0 [Note] InnoDB: 96 redo rollback segment(s) found. 96 redo rollback segment(s) are active.
2016-12-09T12:08:33.588151Z 0 [Note] InnoDB: 32 non-redo rollback segment(s) are active.
2016-12-09T12:08:33.588453Z 0 [Note] InnoDB: Waiting for purge to start
2016-12-09T12:08:33.641851Z 0 [Note] InnoDB: 5.7.10 started; log sequence number 2498872
2016-12-09T12:08:33.642289Z 0 [Note] InnoDB: Loading buffer pool(s) from /usr/local/var/mysql/ib_buffer_pool
2016-12-09T12:08:33.642503Z 0 [Note] InnoDB: not started
2016-12-09T12:08:33.643066Z 0 [Note] Plugin 'FEDERATED' is disabled.
2016-12-09T12:08:33.652949Z 0 [Note] InnoDB: Buffer pool(s) load completed at 161209 13:08:33
2016-12-09T12:08:33.662950Z 0 [Note] Found ca.pem, server-cert.pem and server-key.pem in data directory. Trying to enable SSL support using them.
2016-12-09T12:08:33.662974Z 0 [Note] Skipping generation of SSL certificates as certificate files are present in data directory.
2016-12-09T12:08:33.665405Z 0 [Warning] CA certificate ca.pem is self signed.
2016-12-09T12:08:33.665842Z 0 [Note] Skipping generation of RSA key pair as key files are present in data directory.
2016-12-09T12:08:33.698843Z 0 [Note] Server hostname (bind-address): '*'; port: 3306
2016-12-09T12:08:33.699354Z 0 [Note] IPv6 is available.
2016-12-09T12:08:33.699553Z 0 [Note] - '::' resolves to '::';
2016-12-09T12:08:33.699665Z 0 [Note] Server socket created on IP: '::'.
2016-12-09T12:08:33.784625Z 0 [Note] Event Scheduler: Loaded 0 events
2016-12-09T12:08:33.784722Z 0 [Note] /usr/local/Cellar/mysql/5.7.10/bin/mysqld: ready for connections.
Version: '5.7.10' socket: '/tmp/mysql.sock' port: 3306 Homebrew
2016-12-09T22:21:02.443689Z 0 [Note] InnoDB: page_cleaner: 1000ms intended loop took 772568ms. The settings might not be optimal. (flushed=0 and evicted=0, during the time.)
2016-12-09T22:36:49.017929Z 0 [Note] InnoDB: page_cleaner: 1000ms intended loop took 898642ms. The settings might not be optimal. (flushed=0 and evicted=0, during the time.)
2016-12-09T23:37:34.021038Z 0 [Note] InnoDB: page_cleaner: 1000ms intended loop took 3596603ms. The settings might not be optimal. (flushed=0 and evicted=0, during the time.)
2016-12-10T00:17:54.198501Z 0 [Note] InnoDB: page_cleaner: 1000ms intended loop took 2371678ms. The settings might not be optimal. (flushed=0 and evicted=0, during the time.)
2016-12-10T01:18:38.017222Z 0 [Note] InnoDB: page_cleaner: 1000ms intended loop took 3597590ms. The settings might not be optimal. (flushed=0 and evicted=0, during the time.)
2016-12-10T01:39:00.017683Z 0 [Note] InnoDB: page_cleaner: 1000ms intended loop took 1173583ms. The settings might not be optimal. (flushed=0 and evicted=0, during the time.)
2016-12-10T02:39:45.021071Z 0 [Note] InnoDB: page_cleaner: 1000ms intended loop took 3597610ms. The settings might not be optimal. (flushed=0 and evicted=0, during the time.)
2016-12-10T02:49:08.015573Z 0 [Note] InnoDB: page_cleaner: 1000ms intended loop took 515469ms. The settings might not be optimal. (flushed=0 and evicted=0, during the time.)
2016-12-10T03:24:15.016664Z 0 [Note] InnoDB: page_cleaner: 1000ms intended loop took 2059611ms. The settings might not be optimal. (flushed=0 and evicted=0, during the time.)
2016-12-10T04:25:00.016866Z 0 [Note] InnoDB: page_cleaner: 1000ms intended loop took 3597614ms. The settings might not be optimal. (flushed=0 and evicted=0, during the time.)
2016-12-10T04:34:24.021797Z 0 [Note] InnoDB: page_cleaner: 1000ms intended loop took 515589ms. The settings might not be optimal. (flushed=0 and evicted=0, during the time.)
2016-12-10T04:39:18.022366Z 0 [Note] InnoDB: page_cleaner: 1000ms intended loop took 246613ms. The settings might not be optimal. (flushed=0 and evicted=0, during the time.)
2016-12-10T05:40:03.016821Z 0 [Note] InnoDB: page_cleaner: 1000ms intended loop took 3598614ms. The settings might not be optimal. (flushed=0 and evicted=0, during the time.)
2016-12-10T06:40:48.025959Z 0 [Note] InnoDB: page_cleaner: 1000ms intended loop took 3595608ms. The settings might not be optimal. (flushed=0 and evicted=0, during the time.)
2016-12-10T06:45:55.018094Z 0 [Note] InnoDB: page_cleaner: 1000ms intended loop took 258594ms. The settings might not be optimal. (flushed=0 and evicted=0, during the time.)
2016-12-10T07:46:40.016090Z 0 [Note] InnoDB: page_cleaner: 1000ms intended loop took 3598632ms. The settings might not be optimal. (flushed=0 and evicted=0, during the time.)
2016-12-10T07:56:04.016254Z 0 [Note] InnoDB: page_cleaner: 1000ms intended loop took 515603ms. The settings might not be optimal. (flushed=0 and evicted=0, during the time.)
2016-12-10T08:56:49.390467Z 0 [Note] InnoDB: page_cleaner: 1000ms intended loop took 3597607ms. The settings might not be optimal. (flushed=0 and evicted=0, during the time.)
2016-12-10T09:06:11.019025Z 0 [Note] InnoDB: page_cleaner: 1000ms intended loop took 515633ms. The settings might not be optimal. (flushed=0 and evicted=0, during the time.)
2016-12-10T10:06:56.015782Z 0 [Note] InnoDB: page_cleaner: 1000ms intended loop took 3597617ms. The settings might not be optimal. (flushed=0 and evicted=0, during the time.)
2016-12-10T10:16:18.022349Z 0 [Note] InnoDB: page_cleaner: 1000ms intended loop took 514638ms. The settings might not be optimal. (flushed=0 and evicted=0, during the time.)
2016-12-10T11:17:02.165133Z 0 [Note] InnoDB: page_cleaner: 1000ms intended loop took 3595614ms. The settings might not be optimal. (flushed=0 and evicted=0, during the time.)
2016-12-10T11:30:44.018136Z 0 [Note] InnoDB: page_cleaner: 1000ms intended loop took 773594ms. The settings might not be optimal. (flushed=0 and evicted=0, during the time.)
2016-12-10T12:03:24.017458Z 0 [Note] InnoDB: page_cleaner: 1000ms intended loop took 1912617ms. The settings might not be optimal. (flushed=0 and evicted=0, during the time.)
2016-12-10T12:06:40.015089Z 0 [Note] InnoDB: page_cleaner: 1000ms intended loop took 150375ms. The settings might not be optimal. (flushed=0 and evicted=0, during the time.)
2016-12-10T12:24:37.025219Z 0 [Note] InnoDB: page_cleaner: 1000ms intended loop took 1030636ms. The settings might not be optimal. (flushed=0 and evicted=0, during the time.)
2016-12-10T13:25:22.017729Z 0 [Note] InnoDB: page_cleaner: 1000ms intended loop took 3596603ms. The settings might not be optimal. (flushed=0 and evicted=0, during the time.)
2016-12-10T13:39:05.016348Z 0 [Note] InnoDB: page_cleaner: 1000ms intended loop took 774598ms. The settings might not be optimal. (flushed=0 and evicted=0, during the time.)
2016-12-10T14:39:50.178488Z 0 [Note] InnoDB: page_cleaner: 1000ms intended loop took 3597787ms. The settings might not be optimal. (flushed=0 and evicted=0, during the time.)
2016-12-10T14:49:14.023629Z 0 [Note] InnoDB: page_cleaner: 1000ms intended loop took 515462ms. The settings might not be optimal. (flushed=0 and evicted=0, during the time.)
2016-12-10T15:49:59.022762Z 0 [Note] InnoDB: page_cleaner: 1000ms intended loop took 3597628ms. The settings might not be optimal. (flushed=0 and evicted=0, during the time.)
2016-12-10T15:59:23.014556Z 0 [Note] InnoDB: page_cleaner: 1000ms intended loop took 515609ms. The settings might not be optimal. (flushed=0 and evicted=0, during the time.)
2016-12-10T17:00:08.019274Z 0 [Note] InnoDB: page_cleaner: 1000ms intended loop took 3598607ms. The settings might not be optimal. (flushed=0 and evicted=0, during the time.)
2016-12-10T17:09:30.026900Z 0 [Note] InnoDB: page_cleaner: 1000ms intended loop took 515633ms. The settings might not be optimal. (flushed=0 and evicted=0, during the time.)
2016-12-10T17:48:20.017991Z 0 [Note] InnoDB: page_cleaner: 1000ms intended loop took 2282610ms. The settings might not be optimal. (flushed=0 and evicted=0, during the time.)
2016-12-10T18:00:05.183218Z 0 [Note] InnoDB: page_cleaner: 1000ms intended loop took 515227ms. The settings might not be optimal. (flushed=0 and evicted=0, during the time.)
2016-12-10T18:54:13.016955Z 0 [Note] InnoDB: page_cleaner: 1000ms intended loop took 3200608ms. The settings might not be optimal. (flushed=0 and evicted=0, during the time.)
2016-12-10T20:13:03.016738Z 0 [Note] InnoDB: page_cleaner: 1000ms intended loop took 3089523ms. The settings might not be optimal. (flushed=0 and evicted=0, during the time.)
2016-12-10T20:50:11.201413Z 0 [Note] InnoDB: page_cleaner: 1000ms intended loop took 2180623ms. The settings might not be optimal. (flushed=0 and evicted=0, during the time.)
2016-12-10T20:53:54.016961Z 0 [Note] InnoDB: page_cleaner: 1000ms intended loop took 176629ms. The settings might not be optimal. (flushed=0 and evicted=0, during the time.)
2016-12-10T21:03:18.023529Z 0 [Note] InnoDB: page_cleaner: 1000ms intended loop took 516622ms. The settings might not be optimal. (flushed=0 and evicted=0, during the time.)
2016-12-10T22:04:03.021479Z 0 [Note] InnoDB: page_cleaner: 1000ms intended loop took 3598602ms. The settings might not be optimal. (flushed=0 and evicted=0, during the time.)
2016-12-10T22:13:57.015306Z 0 [Note] InnoDB: page_cleaner: 1000ms intended loop took 545611ms. The settings might not be optimal. (flushed=0 and evicted=0, during the time.)
2016-12-10T22:49:59.020252Z 0 [Note] InnoDB: page_cleaner: 1000ms intended loop took 2114631ms. The settings might not be optimal. (flushed=0 and evicted=0, during the time.)
2016-12-10T23:12:12.023076Z 0 [Note] InnoDB: page_cleaner: 1000ms intended loop took 1287614ms. The settings might not be optimal. (flushed=0 and evicted=0, during the time.)
2016-12-11T00:12:57.015297Z 0 [Note] InnoDB: page_cleaner: 1000ms intended loop took 3595581ms. The settings might not be optimal. (flushed=0 and evicted=0, during the time.)
2016-12-11T00:26:41.053666Z 0 [Note] InnoDB: page_cleaner: 1000ms intended loop took 773622ms. The settings might not be optimal. (flushed=0 and evicted=0, during the time.)
2016-12-11T00:47:44.015939Z 0 [Note] InnoDB: page_cleaner: 1000ms intended loop took 1215572ms. The settings might not be optimal. (flushed=0 and evicted=0, during the time.)
2016-12-11T00:49:50.017378Z 0 [Note] InnoDB: page_cleaner: 1000ms intended loop took 79642ms. The settings might not be optimal. (flushed=0 and evicted=0, during the time.)
2016-12-11T01:20:40.031015Z 0 [Note] InnoDB: page_cleaner: 1000ms intended loop took 1803651ms. The settings might not be optimal. (flushed=0 and evicted=0, during the time.)
2016-12-11T02:21:24.021184Z 0 [Note] InnoDB: page_cleaner: 1000ms intended loop took 3595607ms. The settings might not be optimal. (flushed=0 and evicted=0, during the time.)
2016-12-11T02:26:30.015089Z 0 [Note] InnoDB: page_cleaner: 1000ms intended loop took 257596ms. The settings might not be optimal. (flushed=0 and evicted=0, during the time.)
2016-12-11T03:18:55.018844Z 0 [Note] InnoDB: page_cleaner: 1000ms intended loop took 3097591ms. The settings might not be optimal. (flushed=0 and evicted=0, during the time.)
2016-12-11T04:15:14.022467Z 0 [Note] InnoDB: page_cleaner: 1000ms intended loop took 3331614ms. The settings might not be optimal. (flushed=0 and evicted=0, during the time.)
2016-12-11T04:20:52.016318Z 0 [Note] InnoDB: page_cleaner: 1000ms intended loop took 289611ms. The settings might not be optimal. (flushed=0 and evicted=0, during the time.)
2016-12-11T04:25:56.035073Z 0 [Note] InnoDB: page_cleaner: 1000ms intended loop took 257653ms. The settings might not be optimal. (flushed=0 and evicted=0, during the time.)
2016-12-11T05:26:41.020131Z 0 [Note] InnoDB: page_cleaner: 1000ms intended loop took 3598198ms. The settings might not be optimal. (flushed=0 and evicted=0, during the time.)
2016-12-11T05:36:05.024444Z 0 [Note] InnoDB: page_cleaner: 1000ms intended loop took 515624ms. The settings might not be optimal. (flushed=0 and evicted=0, during the time.)
2016-12-11T06:36:50.017205Z 0 [Note] InnoDB: page_cleaner: 1000ms intended loop took 3598619ms. The settings might not be optimal. (flushed=0 and evicted=0, during the time.)
2016-12-11T06:46:14.016702Z 0 [Note] InnoDB: page_cleaner: 1000ms intended loop took 515611ms. The settings might not be optimal. (flushed=0 and evicted=0, during the time.)
2016-12-11T07:22:43.020901Z 0 [Note] InnoDB: page_cleaner: 1000ms intended loop took 2141603ms. The settings might not be optimal. (flushed=0 and evicted=0, during the time.)
2016-12-11T07:26:22.018814Z 0 [Note] InnoDB: page_cleaner: 1000ms intended loop took 172601ms. The settings might not be optimal. (flushed=0 and evicted=0, during the time.)
2016-12-11T07:35:45.031855Z 0 [Note] InnoDB: page_cleaner: 1000ms intended loop took 516617ms. The settings might not be optimal. (flushed=0 and evicted=0, during the time.)
2016-12-11T08:36:30.015495Z 0 [Note] InnoDB: page_cleaner: 1000ms intended loop took 3597620ms. The settings might not be optimal. (flushed=0 and evicted=0, during the time.)
2016-12-11T08:45:54.017653Z 0 [Note] InnoDB: page_cleaner: 1000ms intended loop took 515622ms. The settings might not be optimal. (flushed=0 and evicted=0, during the time.)
2016-12-11T09:46:39.018813Z 0 [Note] InnoDB: page_cleaner: 1000ms intended loop took 3598618ms. The settings might not be optimal. (flushed=0 and evicted=0, during the time.)
2016-12-11T09:56:03.014615Z 0 [Note] InnoDB: page_cleaner: 1000ms intended loop took 515592ms. The settings might not be optimal. (flushed=0 and evicted=0, during the time.)
2016-12-11T10:56:48.042131Z 0 [Note] InnoDB: page_cleaner: 1000ms intended loop took 3598651ms. The settings might not be optimal. (flushed=0 and evicted=0, during the time.)
2016-12-11T11:06:12.025718Z 0 [Note] InnoDB: page_cleaner: 1000ms intended loop took 515588ms. The settings might not be optimal. (flushed=0 and evicted=0, during the time.)
2016-12-11T12:06:56.018725Z 0 [Note] InnoDB: page_cleaner: 1000ms intended loop took 3597614ms. The settings might not be optimal. (flushed=0 and evicted=0, during the time.)
2016-12-11T12:16:19.014169Z 0 [Note] InnoDB: page_cleaner: 1000ms intended loop took 515625ms. The settings might not be optimal. (flushed=0 and evicted=0, during the time.)
2016-12-11T13:17:04.016183Z 0 [Note] InnoDB: page_cleaner: 1000ms intended loop took 3597594ms. The settings might not be optimal. (flushed=0 and evicted=0, during the time.)
2016-12-11T13:26:28.023088Z 0 [Note] InnoDB: page_cleaner: 1000ms intended loop took 514629ms. The settings might not be optimal. (flushed=0 and evicted=0, during the time.)
2016-12-11T14:27:13.164509Z 0 [Note] InnoDB: page_cleaner: 1000ms intended loop took 3597613ms. The settings might not be optimal. (flushed=0 and evicted=0, during the time.)
2016-12-11T14:36:38.015297Z 0 [Note] InnoDB: page_cleaner: 1000ms intended loop took 514596ms. The settings might not be optimal. (flushed=0 and evicted=0, during the time.)
2016-12-11T15:01:44.026173Z 0 [Note] InnoDB: page_cleaner: 1000ms intended loop took 1460642ms. The settings might not be optimal. (flushed=0 and evicted=0, during the time.)
2016-12-11T15:15:28.015652Z 0 [Note] InnoDB: page_cleaner: 1000ms intended loop took 773648ms. The settings might not be optimal. (flushed=0 and evicted=0, during the time.)
2016-12-11T16:16:13.027289Z 0 [Note] InnoDB: page_cleaner: 1000ms intended loop took 3597657ms. The settings might not be optimal. (flushed=0 and evicted=0, during the time.)
2016-12-11T16:30:51.021667Z 0 [Note] InnoDB: page_cleaner: 1000ms intended loop took 813477ms. The settings might not be optimal. (flushed=0 and evicted=0, during the time.)
2016-12-11T17:18:43.027126Z 0 [Note] InnoDB: page_cleaner: 1000ms intended loop took 2824646ms. The settings might not be optimal. (flushed=0 and evicted=0, during the time.)
2016-12-11T21:47:54.189105Z 0 [Note] InnoDB: page_cleaner: 1000ms intended loop took 592456ms. The settings might not be optimal. (flushed=0 and evicted=0, during the time.)
2016-12-11T22:10:17.210550Z 0 [Note] InnoDB: page_cleaner: 1000ms intended loop took 1309781ms. The settings might not be optimal. (flushed=0 and evicted=0, during the time.)
2016-12-11T23:11:02.043893Z 0 [Note] InnoDB: page_cleaner: 1000ms intended loop took 3596681ms. The settings might not be optimal. (flushed=0 and evicted=0, during the time.)
2016-12-11T23:24:54.163597Z 0 [Note] InnoDB: page_cleaner: 1000ms intended loop took 782633ms. The settings might not be optimal. (flushed=0 and evicted=0, during the time.)
2016-12-12T00:25:39.024180Z 0 [Note] InnoDB: page_cleaner: 1000ms intended loop took 3597668ms. The settings might not be optimal. (flushed=0 and evicted=0, during the time.)
2016-12-12T00:35:03.029112Z 0 [Note] InnoDB: page_cleaner: 1000ms intended loop took 514635ms. The settings might not be optimal. (flushed=0 and evicted=0, during the time.)
2016-12-12T01:07:31.173633Z 0 [Note] InnoDB: page_cleaner: 1000ms intended loop took 1900621ms. The settings might not be optimal. (flushed=0 and evicted=0, during the time.)
2016-12-12T01:10:46.030187Z 0 [Note] InnoDB: page_cleaner: 1000ms intended loop took 147678ms. The settings might not be optimal. (flushed=0 and evicted=0, during the time.)
2016-12-12T01:28:43.027567Z 0 [Note] InnoDB: page_cleaner: 1000ms intended loop took 1029630ms. The settings might not be optimal. (flushed=0 and evicted=0, during the time.)
2016-12-12T02:29:28.163850Z 0 [Note] InnoDB: page_cleaner: 1000ms intended loop took 3595664ms. The settings might not be optimal. (flushed=0 and evicted=0, during the time.)
2016-12-12T03:17:29.198796Z 0 [Note] InnoDB: page_cleaner: 1000ms intended loop took 2831638ms. The settings might not be optimal. (flushed=0 and evicted=0, during the time.)
2016-12-12T03:27:41.042627Z 0 [Note] InnoDB: page_cleaner: 1000ms intended loop took 564697ms. The settings might not be optimal. (flushed=0 and evicted=0, during the time.)
2016-12-12T03:29:27.172837Z 0 [Note] InnoDB: page_cleaner: 1000ms intended loop took 58662ms. The settings might not be optimal. (flushed=0 and evicted=0, during the time.)
2016-12-12T04:30:12.026414Z 0 [Note] InnoDB: page_cleaner: 1000ms intended loop took 3595651ms. The settings might not be optimal. (flushed=0 and evicted=0, during the time.)
2016-12-12T04:35:19.040164Z 0 [Note] InnoDB: page_cleaner: 1000ms intended loop took 257645ms. The settings might not be optimal. (flushed=0 and evicted=0, during the time.)
2016-12-12T05:24:10.022697Z 0 [Note] InnoDB: page_cleaner: 1000ms intended loop took 2883673ms. The settings might not be optimal. (flushed=0 and evicted=0, during the time.)
2016-12-12T05:43:00.028615Z 0 [Note] InnoDB: page_cleaner: 1000ms intended loop took 1060590ms. The settings might not be optimal. (flushed=0 and evicted=0, during the time.)
2016-12-12T06:43:49.185223Z 0 [Note] InnoDB: page_cleaner: 1000ms intended loop took 3596652ms. The settings might not be optimal. (flushed=0 and evicted=0, during the time.)
2016-12-12T06:48:27.033604Z 0 [Note] InnoDB: page_cleaner: 1000ms intended loop took 228658ms. The settings might not be optimal. (flushed=0 and evicted=0, during the time.)
2016-12-12T06:55:03.184509Z 0 [Note] InnoDB: page_cleaner: 1000ms intended loop took 257407ms. The settings might not be optimal. (flushed=0 and evicted=0, during the time.)
2016-12-12T07:55:49.022710Z 0 [Note] InnoDB: page_cleaner: 1000ms intended loop took 3598669ms. The settings might not be optimal. (flushed=0 and evicted=0, during the time.)
161212 12:40:38 mysqld_safe Starting mysqld daemon with databases from /usr/local/var/mysql
2016-12-12T11:40:39.000639Z 0 [Warning] TIMESTAMP with implicit DEFAULT value is deprecated. Please use --explicit_defaults_for_timestamp server option (see documentation for more details).
2016-12-12T11:40:39.001684Z 0 [Warning] 'NO_ZERO_DATE', 'NO_ZERO_IN_DATE' and 'ERROR_FOR_DIVISION_BY_ZERO' sql modes should be used with strict mode. They will be merged with strict mode in a future release.
2016-12-12T11:40:39.001726Z 0 [Warning] 'NO_AUTO_CREATE_USER' sql mode was not set.
2016-12-12T11:40:39.001942Z 0 [Warning] Insecure configuration for --secure-file-priv: Current value does not restrict location of generated files. Consider setting it to a valid, non-empty path.
2016-12-12T11:40:39.003193Z 0 [Note] /usr/local/Cellar/mysql/5.7.10/bin/mysqld (mysqld 5.7.10) starting as process 97910 ...
2016-12-12T11:40:39.012775Z 0 [Warning] Setting lower_case_table_names=2 because file system for /usr/local/var/mysql/ is case insensitive
2016-12-12T11:40:39.020465Z 0 [Note] InnoDB: Mutexes and rw_locks use GCC atomic builtins
2016-12-12T11:40:39.020530Z 0 [Note] InnoDB: Uses event mutexes
2016-12-12T11:40:39.020540Z 0 [Note] InnoDB: GCC builtin __atomic_thread_fence() is used for memory barrier
2016-12-12T11:40:39.020545Z 0 [Note] InnoDB: Compressed tables use zlib 1.2.3
2016-12-12T11:40:39.021852Z 0 [Note] InnoDB: Number of pools: 1
2016-12-12T11:40:39.023939Z 0 [Note] InnoDB: Using CPU crc32 instructions
2016-12-12T11:40:39.037980Z 0 [Note] InnoDB: Initializing buffer pool, total size = 128M, instances = 1, chunk size = 128M
2016-12-12T11:40:39.055856Z 0 [Note] InnoDB: Completed initialization of buffer pool
2016-12-12T11:40:39.079839Z 0 [Note] InnoDB: Highest supported file format is Barracuda.
2016-12-12T11:40:39.082943Z 0 [Note] InnoDB: Log scan progressed past the checkpoint lsn 2498891
2016-12-12T11:40:39.082997Z 0 [Note] InnoDB: Doing recovery: scanned up to log sequence number 2498900
2016-12-12T11:40:39.083372Z 0 [Note] InnoDB: Doing recovery: scanned up to log sequence number 2498900
2016-12-12T11:40:39.083404Z 0 [Note] InnoDB: Database was not shutdown normally!
2016-12-12T11:40:39.083412Z 0 [Note] InnoDB: Starting crash recovery.
2016-12-12T11:40:39.243369Z 0 [Note] InnoDB: Removed temporary tablespace data file: "ibtmp1"
2016-12-12T11:40:39.243411Z 0 [Note] InnoDB: Creating shared tablespace for temporary tables
2016-12-12T11:40:39.243762Z 0 [Note] InnoDB: Setting file './ibtmp1' size to 12 MB. Physically writing the file full; Please wait ...
2016-12-12T11:40:39.262976Z 0 [Note] InnoDB: File './ibtmp1' size is now 12 MB.
2016-12-12T11:40:39.264392Z 0 [Note] InnoDB: 96 redo rollback segment(s) found. 96 redo rollback segment(s) are active.
2016-12-12T11:40:39.264418Z 0 [Note] InnoDB: 32 non-redo rollback segment(s) are active.
2016-12-12T11:40:39.264631Z 0 [Note] InnoDB: Waiting for purge to start
2016-12-12T11:40:39.318411Z 0 [Note] InnoDB: 5.7.10 started; log sequence number 2498900
2016-12-12T11:40:39.319011Z 0 [Note] InnoDB: Loading buffer pool(s) from /usr/local/var/mysql/ib_buffer_pool
2016-12-12T11:40:39.319443Z 0 [Note] InnoDB: not started
2016-12-12T11:40:39.319804Z 0 [Note] Plugin 'FEDERATED' is disabled.
2016-12-12T11:40:39.330889Z 0 [Note] InnoDB: Buffer pool(s) load completed at 161212 12:40:39
2016-12-12T11:40:39.343948Z 0 [Note] Found ca.pem, server-cert.pem and server-key.pem in data directory. Trying to enable SSL support using them.
2016-12-12T11:40:39.343980Z 0 [Note] Skipping generation of SSL certificates as certificate files are present in data directory.
2016-12-12T11:40:39.346921Z 0 [Warning] CA certificate ca.pem is self signed.
2016-12-12T11:40:39.347342Z 0 [Note] Skipping generation of RSA key pair as key files are present in data directory.
2016-12-12T11:40:39.362897Z 0 [Note] Server hostname (bind-address): '*'; port: 3306
2016-12-12T11:40:39.363163Z 0 [Note] IPv6 is available.
2016-12-12T11:40:39.363202Z 0 [Note] - '::' resolves to '::';
2016-12-12T11:40:39.363224Z 0 [Note] Server socket created on IP: '::'.
2016-12-12T11:40:39.540292Z 0 [Note] Event Scheduler: Loaded 0 events
2016-12-12T11:40:39.540498Z 0 [Note] /usr/local/Cellar/mysql/5.7.10/bin/mysqld: ready for connections.
Version: '5.7.10' socket: '/tmp/mysql.sock' port: 3306 Homebrew
@@ -0,0 +1,223 @@
161209 14:18:50 [Warning] Using unique option prefix myisam-recover instead of myisam-recover-options is deprecated and will be removed in a future release. Please use the full name instead.
161209 14:18:50 [Note] Plugin 'FEDERATED' is disabled.
161209 14:18:50 InnoDB: The InnoDB memory heap is disabled
161209 14:18:50 InnoDB: Mutexes and rw_locks use GCC atomic builtins
161209 14:18:50 InnoDB: Compressed tables use zlib 1.2.3.4
161209 14:18:50 InnoDB: Initializing buffer pool, size = 128.0M
161209 14:18:50 InnoDB: Completed initialization of buffer pool
InnoDB: The first specified data file ./ibdata1 did not exist:
InnoDB: a new database to be created!
161209 14:18:50 InnoDB: Setting file ./ibdata1 size to 10 MB
InnoDB: Database physically writes the file full: wait...
161209 14:18:50 InnoDB: Log file ./ib_logfile0 did not exist: new to be created
InnoDB: Setting log file ./ib_logfile0 size to 5 MB
InnoDB: Database physically writes the file full: wait...
161209 14:18:50 InnoDB: Log file ./ib_logfile1 did not exist: new to be created
InnoDB: Setting log file ./ib_logfile1 size to 5 MB
InnoDB: Database physically writes the file full: wait...
InnoDB: Doublewrite buffer not found: creating new
InnoDB: Doublewrite buffer created
InnoDB: 127 rollback segment(s) active.
InnoDB: Creating foreign key constraint system tables
InnoDB: Foreign key constraint system tables created
161209 14:18:50 InnoDB: Waiting for the background threads to start
161209 14:18:51 InnoDB: 5.5.53 started; log sequence number 0
161209 14:18:51 InnoDB: Starting shutdown...
161209 14:18:52 InnoDB: Shutdown completed; log sequence number 1595675
161209 14:18:52 [Warning] Using unique option prefix myisam-recover instead of myisam-recover-options is deprecated and will be removed in a future release. Please use the full name instead.
161209 14:18:52 [Note] Plugin 'FEDERATED' is disabled.
161209 14:18:52 InnoDB: The InnoDB memory heap is disabled
161209 14:18:52 InnoDB: Mutexes and rw_locks use GCC atomic builtins
161209 14:18:52 InnoDB: Compressed tables use zlib 1.2.3.4
161209 14:18:52 InnoDB: Initializing buffer pool, size = 128.0M
161209 14:18:52 InnoDB: Completed initialization of buffer pool
161209 14:18:52 InnoDB: highest supported file format is Barracuda.
161209 14:18:52 InnoDB: Waiting for the background threads to start
161209 14:18:53 InnoDB: 5.5.53 started; log sequence number 1595675
ERROR: 1064 You have an error in your SQL syntax; check the manual that corresponds to your MySQL server version for the right syntax to use near 'ALTER TABLE user ADD column Show_view_priv enum('N','Y') CHARACTER SET utf8 NOT ' at line 1
161209 14:18:53 [ERROR] Aborting
161209 14:18:53 InnoDB: Starting shutdown...
161209 14:18:53 InnoDB: Shutdown completed; log sequence number 1595675
161209 14:18:53 [Note] /usr/sbin/mysqld: Shutdown complete
161209 14:18:53 [Warning] Using unique option prefix myisam-recover instead of myisam-recover-options is deprecated and will be removed in a future release. Please use the full name instead.
161209 14:18:53 [Note] Plugin 'FEDERATED' is disabled.
161209 14:18:53 InnoDB: The InnoDB memory heap is disabled
161209 14:18:53 InnoDB: Mutexes and rw_locks use GCC atomic builtins
161209 14:18:53 InnoDB: Compressed tables use zlib 1.2.3.4
161209 14:18:53 InnoDB: Initializing buffer pool, size = 128.0M
161209 14:18:53 InnoDB: Completed initialization of buffer pool
161209 14:18:53 InnoDB: highest supported file format is Barracuda.
161209 14:18:53 InnoDB: Waiting for the background threads to start
161209 14:18:54 InnoDB: 5.5.53 started; log sequence number 1595675
161209 14:18:54 InnoDB: Starting shutdown...
161209 14:18:56 [ERROR] Aborting
161209 14:18:56 InnoDB: Starting shutdown...
161209 14:18:56 InnoDB: Shutdown completed; log sequence number 1595675
161209 14:18:56 [Note] /usr/sbin/mysqld: Shutdown complete
161209 14:18:56 [Warning] Using unique option prefix myisam-recover instead of myisam-recover-options is deprecated and will be removed in a future release. Please use the full name instead.
161209 14:18:56 [Note] Plugin 'FEDERATED' is disabled.
161209 14:18:56 InnoDB: The InnoDB memory heap is disabled
161209 14:18:56 InnoDB: Mutexes and rw_locks use GCC atomic builtins
161209 14:18:56 InnoDB: Compressed tables use zlib 1.2.3.4
161209 14:18:56 InnoDB: Initializing buffer pool, size = 128.0M
161209 14:18:56 InnoDB: Completed initialization of buffer pool
161209 14:18:57 InnoDB: highest supported file format is Barracuda.
161209 14:18:57 InnoDB: Waiting for the background threads to start
161209 14:18:58 InnoDB: 5.5.53 started; log sequence number 1595675
161209 14:18:58 [Note] Server hostname (bind-address): '127.0.0.1'; port: 3306
161209 14:18:58 [Note] - '127.0.0.1' resolves to '127.0.0.1';
161209 14:18:58 [Note] Server socket created on IP: '127.0.0.1'.
161209 14:18:58 [Note] Event Scheduler: Loaded 0 events
161209 14:18:58 [Note] /usr/sbin/mysqld: ready for connections.
Version: '5.5.53-0ubuntu0.12.04.1' socket: '/var/run/mysqld/mysqld.sock' port: 3306 (Ubuntu)
161209 14:37:57 [Note] /usr/sbin/mysqld: Normal shutdown
161209 14:37:57 [Note] Event Scheduler: Purging the queue. 0 events
161209 14:37:57 InnoDB: Starting shutdown...
161209 14:37:57 InnoDB: Shutdown completed; log sequence number 1595685
161209 14:37:57 [Note] /usr/sbin/mysqld: Shutdown complete
161209 14:37:57 [Warning] Using unique option prefix myisam-recover instead of myisam-recover-options is deprecated and will be removed in a future release. Please use the full name instead.
161209 14:37:57 [Note] Plugin 'FEDERATED' is disabled.
161209 14:37:57 InnoDB: The InnoDB memory heap is disabled
161209 14:37:57 InnoDB: Mutexes and rw_locks use GCC atomic builtins
161209 14:37:57 InnoDB: Compressed tables use zlib 1.2.3.4
161209 14:37:57 InnoDB: Initializing buffer pool, size = 128.0M
161209 14:37:57 InnoDB: Completed initialization of buffer pool
161209 14:37:57 InnoDB: highest supported file format is Barracuda.
161209 14:37:57 InnoDB: Waiting for the background threads to start
161209 14:37:58 InnoDB: 5.5.53 started; log sequence number 1595685
161209 14:37:58 [Note] Server hostname (bind-address): '127.0.0.1'; port: 3306
161209 14:37:58 [Note] - '127.0.0.1' resolves to '127.0.0.1';
161209 14:37:58 [Note] Server socket created on IP: '127.0.0.1'.
161209 14:37:58 [Note] Event Scheduler: Loaded 0 events
161209 14:37:58 [Note] /usr/sbin/mysqld: ready for connections.
Version: '5.5.53-0ubuntu0.12.04.1-log' socket: '/var/run/mysqld/mysqld.sock' port: 3306 (Ubuntu)
vagrant@precise32:~$ cat /var/log/mysql.log | grep phisically
vagrant@precise32:~$ cat /var/log/mysql.log | grep physi
vagrant@precise32:~$ cat /var/log/mysql.log | physically
physically: command not found
vagrant@precise32:~$ cat /var/log/mysql.log | grep physically
vagrant@precise32:~$ less /var/log/mysql.
mysql.err mysql.log
vagrant@precise32:~$ less /var/log/mysql.err
vagrant@precise32:~$ less /var/log/mysql.log
vagrant@precise32:~$ less /var/log/mysql/
error.log mysql-slow.log
vagrant@precise32:~$ less /var/log/mysql/error.log
vagrant@precise32:~$ cat /var/log/mysql/error.log
161209 14:18:50 [Warning] Using unique option prefix myisam-recover instead of myisam-recover-options is deprecated and will be removed in a future release. Please use the full name instead.
161209 14:18:50 [Note] Plugin 'FEDERATED' is disabled.
161209 14:18:50 InnoDB: The InnoDB memory heap is disabled
161209 14:18:50 InnoDB: Mutexes and rw_locks use GCC atomic builtins
161209 14:18:50 InnoDB: Compressed tables use zlib 1.2.3.4
161209 14:18:50 InnoDB: Initializing buffer pool, size = 128.0M
161209 14:18:50 InnoDB: Completed initialization of buffer pool
InnoDB: The first specified data file ./ibdata1 did not exist:
InnoDB: a new database to be created!
161209 14:18:50 InnoDB: Setting file ./ibdata1 size to 10 MB
InnoDB: Database physically writes the file full: wait...
161209 14:18:50 InnoDB: Log file ./ib_logfile0 did not exist: new to be created
InnoDB: Setting log file ./ib_logfile0 size to 5 MB
InnoDB: Database physically writes the file full: wait...
161209 14:18:50 InnoDB: Log file ./ib_logfile1 did not exist: new to be created
InnoDB: Setting log file ./ib_logfile1 size to 5 MB
InnoDB: Database physically writes the file full: wait...
InnoDB: Doublewrite buffer not found: creating new
InnoDB: Doublewrite buffer created
InnoDB: 127 rollback segment(s) active.
InnoDB: Creating foreign key constraint system tables
InnoDB: Foreign key constraint system tables created
161209 14:18:50 InnoDB: Waiting for the background threads to start
161209 14:18:51 InnoDB: 5.5.53 started; log sequence number 0
161209 14:18:51 InnoDB: Starting shutdown...
161209 14:18:52 InnoDB: Shutdown completed; log sequence number 1595675
161209 14:18:52 [Warning] Using unique option prefix myisam-recover instead of myisam-recover-options is deprecated and will be removed in a future release. Please use the full name instead.
161209 14:18:52 [Note] Plugin 'FEDERATED' is disabled.
161209 14:18:52 InnoDB: The InnoDB memory heap is disabled
161209 14:18:52 InnoDB: Mutexes and rw_locks use GCC atomic builtins
161209 14:18:52 InnoDB: Compressed tables use zlib 1.2.3.4
161209 14:18:52 InnoDB: Initializing buffer pool, size = 128.0M
161209 14:18:52 InnoDB: Completed initialization of buffer pool
161209 14:18:52 InnoDB: highest supported file format is Barracuda.
161209 14:18:52 InnoDB: Waiting for the background threads to start
161209 14:18:53 InnoDB: 5.5.53 started; log sequence number 1595675
ERROR: 1064 You have an error in your SQL syntax; check the manual that corresponds to your MySQL server version for the right syntax to use near 'ALTER TABLE user ADD column Show_view_priv enum('N','Y') CHARACTER SET utf8 NOT ' at line 1
161209 14:18:53 [ERROR] Aborting
161209 14:18:53 InnoDB: Starting shutdown...
161209 14:18:53 InnoDB: Shutdown completed; log sequence number 1595675
161209 14:18:53 [Note] /usr/sbin/mysqld: Shutdown complete
161209 14:18:53 [Warning] Using unique option prefix myisam-recover instead of myisam-recover-options is deprecated and will be removed in a future release. Please use the full name instead.
161209 14:18:53 [Note] Plugin 'FEDERATED' is disabled.
161209 14:18:53 InnoDB: The InnoDB memory heap is disabled
161209 14:18:53 InnoDB: Mutexes and rw_locks use GCC atomic builtins
161209 14:18:53 InnoDB: Compressed tables use zlib 1.2.3.4
161209 14:18:53 InnoDB: Initializing buffer pool, size = 128.0M
161209 14:18:53 InnoDB: Completed initialization of buffer pool
161209 14:18:53 InnoDB: highest supported file format is Barracuda.
161209 14:18:53 InnoDB: Waiting for the background threads to start
161209 14:18:54 InnoDB: 5.5.53 started; log sequence number 1595675
161209 14:18:54 InnoDB: Starting shutdown...
161209 14:18:55 InnoDB: Shutdown completed; log sequence number 1595675
161209 14:18:55 [Warning] Using unique option prefix myisam-recover instead of myisam-recover-options is deprecated and will be removed in a future release. Please use the full name instead.
161209 14:18:55 [Note] Plugin 'FEDERATED' is disabled.
161209 14:18:55 InnoDB: The InnoDB memory heap is disabled
161209 14:18:55 InnoDB: Mutexes and rw_locks use GCC atomic builtins
161209 14:18:55 InnoDB: Compressed tables use zlib 1.2.3.4
161209 14:18:55 InnoDB: Initializing buffer pool, size = 128.0M
161209 14:18:55 InnoDB: Completed initialization of buffer pool
161209 14:18:55 InnoDB: highest supported file format is Barracuda.
161209 14:18:55 InnoDB: Waiting for the background threads to start
161209 14:18:56 InnoDB: 5.5.53 started; log sequence number 1595675
ERROR: 1050 Table 'plugin' already exists
161209 14:18:56 [ERROR] Aborting
161209 14:18:56 InnoDB: Starting shutdown...
161209 14:18:56 InnoDB: Shutdown completed; log sequence number 1595675
161209 14:18:56 [Note] /usr/sbin/mysqld: Shutdown complete
161209 14:18:56 [Warning] Using unique option prefix myisam-recover instead of myisam-recover-options is deprecated and will be removed in a future release. Please use the full name instead.
161209 14:18:56 [Note] Plugin 'FEDERATED' is disabled.
161209 14:18:56 InnoDB: The InnoDB memory heap is disabled
161209 14:18:56 InnoDB: Mutexes and rw_locks use GCC atomic builtins
161209 14:18:56 InnoDB: Compressed tables use zlib 1.2.3.4
161209 14:18:56 InnoDB: Initializing buffer pool, size = 128.0M
161209 14:18:56 InnoDB: Completed initialization of buffer pool
161209 14:18:57 InnoDB: highest supported file format is Barracuda.
161209 14:18:57 InnoDB: Waiting for the background threads to start
161209 14:18:58 InnoDB: 5.5.53 started; log sequence number 1595675
161209 14:18:58 [Note] Server hostname (bind-address): '127.0.0.1'; port: 3306
161209 14:18:58 [Note] - '127.0.0.1' resolves to '127.0.0.1';
161209 14:18:58 [Note] Server socket created on IP: '127.0.0.1'.
161209 14:18:58 [Note] Event Scheduler: Loaded 0 events
161209 14:18:58 [Note] /usr/sbin/mysqld: ready for connections.
Version: '5.5.53-0ubuntu0.12.04.1' socket: '/var/run/mysqld/mysqld.sock' port: 3306 (Ubuntu)
161209 14:37:57 [Note] /usr/sbin/mysqld: Normal shutdown
161209 14:37:57 [Note] Event Scheduler: Purging the queue. 0 events
161209 14:37:57 InnoDB: Starting shutdown...
161209 14:37:57 InnoDB: Shutdown completed; log sequence number 1595685
161209 14:37:57 [Note] /usr/sbin/mysqld: Shutdown complete
161209 14:37:57 [Warning] Using unique option prefix myisam-recover instead of myisam-recover-options is deprecated and will be removed in a future release. Please use the full name instead.
161209 14:37:57 [Note] Plugin 'FEDERATED' is disabled.
161209 14:37:57 InnoDB: The InnoDB memory heap is disabled
161209 14:37:57 InnoDB: Mutexes and rw_locks use GCC atomic builtins
161209 14:37:57 InnoDB: Compressed tables use zlib 1.2.3.4
161209 14:37:57 InnoDB: Initializing buffer pool, size = 128.0M
161209 14:37:57 InnoDB: Completed initialization of buffer pool
161209 14:37:57 InnoDB: highest supported file format is Barracuda.
161209 14:37:57 InnoDB: Waiting for the background threads to start
161209 14:37:58 InnoDB: 5.5.53 started; log sequence number 1595685
161209 14:37:58 [Note] Server hostname (bind-address): '127.0.0.1'; port: 3306
161209 14:37:58 [Note] - '127.0.0.1' resolves to '127.0.0.1';
161209 14:37:58 [Note] Server socket created on IP: '127.0.0.1'.
161209 14:37:58 [Note] Event Scheduler: Loaded 0 events
161209 14:37:58 [Note] /usr/sbin/mysqld: ready for connections.
Version: '5.5.53-0ubuntu0.12.04.1-log' socket: '/var/run/mysqld/mysqld.sock' port: 3306 (Ubuntu)
View File

@@ -0,0 +1,42 @@
- name: slowlog
type: group
description: >
Contains fields from the MySQL slow logs.
fields:
- name: user
description: >
The MySQL user that created the query.
- name: host
description: >
The host from where the user that created the query logged in.
- name: ip
description: >
The IP address from where the user that created the query logged in.
- name: query_time.sec
type: float
description: >
The total time the query took, in seconds, as a floating point number.
- name: lock_time.sec
type: float
description: >
The amount of time the query waited for the lock to be available. The
value is in seconds, as a floating point number.
- name: rows_sent
type: long
description: >
The number of rows returned by the query.
- name: rows_examined
type: long
description: >
The number of rows scanned by the query.
- name: timestamp
type: long
description: >
The unix timestamp taken from the `SET timestamp` query.
- name: query
description: >
The slow query.
- name: id
type: long
description: >
The connection ID for the query.
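
For orientation, a hand-built sketch (not actual module output) of roughly how a parsed slow-log entry could look once these fields are populated, based on the "select sleep(2)" entry in the test log further below:

# Illustrative only: approximate shape of a parsed slow-log event.
event = {
    "mysql": {
        "slowlog": {
            "user": "root",
            "host": "localhost",
            "query_time": {"sec": 2.000268},
            "lock_time": {"sec": 0.0},
            "rows_sent": 1,
            "rows_examined": 0,
            "timestamp": 1481294342,
            "query": "select sleep(2);",
        }
    }
}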
View File

@@ -0,0 +1,13 @@
- input_type: log
paths:
{%- for path in paths %}
- {{path}}
{%- endfor %}
exclude_files: [".gz$"]
multiline:
pattern: "^# User@Host: "
negate: true
match: after
fields:
source_type: mysql-slowlog
pipeline_id: {{beat.pipeline_id}}
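
The prospector config above is a template: the paths loop and beat.pipeline_id are filled in when the module is loaded. A minimal sketch of that rendering step, assuming jinja2 and hypothetical values for both variables:

from jinja2 import Template

# Hypothetical inputs; the real values come from the module manifest
# and from filebeat itself at load time.
paths = ["/var/log/mysql/mysql-slow.log*"]
beat = {"pipeline_id": "mysql-slowlog"}  # assumed id, illustration only

with open("config/slowlog.yml") as f:
    rendered = Template(f.read()).render(paths=paths, beat=beat)
print(rendered)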
View File

@@ -0,0 +1,39 @@
{
"description": "Pipeline for parsing MySQL slow logs.",
"processors": [{
"grok": {
"field": "message",
"patterns":[
"^# User@Host: %{USER:mysql.slowlog.user}(\\[[^\\]]+\\])? @ %{HOSTNAME:mysql.slowlog.host} \\[(IP:mysql.slowlog.ip)?\\](\\s*Id:\\s* %{NUMBER:mysql.slowlog.id})?\n# Query_time: %{NUMBER:mysql.slowlog.query_time.sec}\\s* Lock_time: %{NUMBER:mysql.slowlog.lock_time.sec}\\s* Rows_sent: %{NUMBER:mysql.slowlog.rows_sent}\\s* Rows_examined: %{NUMBER:mysql.slowlog.rows_examined}\n(SET timestamp=%{NUMBER:mysql.slowlog.timestamp};\n)?%{GREEDYMULTILINE:mysql.slowlog.query}"
],
"pattern_definitions" : {
"GREEDYMULTILINE" : "(.|\n)*"
},
"ignore_missing": true
}
}, {
"remove":{
"field": "message"
}
}, {
"date": {
"field": "mysql.slowlog.timestamp",
"target_field": "@timestamp",
"formats": ["UNIX"],
"ignore_failure": true
}
}, {
"gsub": {
"field": "mysql.slowlog.query",
"pattern": "\n# Time: [0-9]+ [0-9][0-9]:[0-9][0-9]:[0-9][0-9](\\.[0-9]+)?$",
"replacement": "",
"ignore_failure": true
}
}],
"on_failure" : [{
"set" : {
"field" : "error",
"value" : "{{ _ingest.on_failure_message }}"
}
}]
}
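
One way to exercise this pipeline before deploying it is the ingest simulate API. A sketch using the elasticsearch Python client, assuming a local 5.x node and feeding it one multiline entry taken from the slow-log test file further below:

import json
from elasticsearch import Elasticsearch

es = Elasticsearch()  # assumes Elasticsearch on localhost:9200

with open("ingest/pipeline.json") as f:
    pipeline = json.load(f)

# One slow-log entry, already joined by filebeat's multiline setting.
message = ("# User@Host: root[root] @ localhost []\n"
           "# Query_time: 2.000268  Lock_time: 0.000000"
           " Rows_sent: 1  Rows_examined: 0\n"
           "SET timestamp=1481294342;\n"
           "select sleep(2);")

result = es.ingest.simulate(
    body={"pipeline": pipeline,
          "docs": [{"_source": {"message": message}}]})
print(json.dumps(result, indent=2))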
View File

@@ -0,0 +1,15 @@
module_version: 1.0
vars:
paths:
default:
- /var/log/mysql/mysql-slow.log*
- /var/lib/mysql/{{builtin.hostname}}-slow.log
os.darwin:
- /usr/local/var/mysql/{{builtin.hostname}}-slow.log*
os.windows:
- "c:/programdata/MySQL/MySQL Server*/mysql-slow.log*"
ingest_pipeline: ingest/pipeline.json
prospectors:
- config/slowlog.yml
View File

@@ -0,0 +1,8 @@
/usr/local/Cellar/mysql/5.7.10/bin/mysqld, Version: 5.7.10 (Homebrew). started with:
Tcp port: 3306 Unix socket: /tmp/mysql.sock
Time Id Command Argument
# Time: 2016-12-12T11:54:16.601172Z
# User@Host: root[root] @ localhost [] Id: 2
# Query_time: 11.004467 Lock_time: 0.000000 Rows_sent: 1 Rows_examined: 0
SET timestamp=1481543656;
select sleep(11);
View File

@@ -0,0 +1,66 @@
/usr/sbin/mysqld, Version: 5.5.53-0ubuntu0.12.04.1-log ((Ubuntu)). started with:
Tcp port: 3306 Unix socket: /var/run/mysqld/mysqld.sock
Time Id Command Argument
# Time: 161209 14:37:59
# User@Host: debian-sys-maint[debian-sys-maint] @ localhost []
# Query_time: 0.000153 Lock_time: 0.000061 Rows_sent: 1 Rows_examined: 5
SET timestamp=1481294279;
SELECT count(*) FROM mysql.user WHERE user='root' and password='';
# User@Host: debian-sys-maint[debian-sys-maint] @ localhost []
# Query_time: 0.002456 Lock_time: 0.000095 Rows_sent: 31 Rows_examined: 81
SET timestamp=1481294279;
select concat('select count(*) into @discard from `',
TABLE_SCHEMA, '`.`', TABLE_NAME, '`')
from information_schema.TABLES where ENGINE='MyISAM';
# User@Host: debian-sys-maint[debian-sys-maint] @ localhost []
# Query_time: 0.006278 Lock_time: 0.000153 Rows_sent: 0 Rows_examined: 808
SET timestamp=1481294279;
select count(*) into @discard from `information_schema`.`COLUMNS`;
# User@Host: debian-sys-maint[debian-sys-maint] @ localhost []
# Query_time: 0.000262 Lock_time: 0.000204 Rows_sent: 0 Rows_examined: 0
SET timestamp=1481294279;
select count(*) into @discard from `information_schema`.`EVENTS`;
# User@Host: debian-sys-maint[debian-sys-maint] @ localhost []
# Query_time: 0.000323 Lock_time: 0.000241 Rows_sent: 0 Rows_examined: 0
SET timestamp=1481294279;
select count(*) into @discard from `information_schema`.`PARAMETERS`;
# User@Host: debian-sys-maint[debian-sys-maint] @ localhost []
# Query_time: 0.007084 Lock_time: 0.000148 Rows_sent: 0 Rows_examined: 81
SET timestamp=1481294279;
select count(*) into @discard from `information_schema`.`PARTITIONS`;
# User@Host: debian-sys-maint[debian-sys-maint] @ localhost []
# Query_time: 0.000277 Lock_time: 0.000135 Rows_sent: 0 Rows_examined: 23
SET timestamp=1481294279;
select count(*) into @discard from `information_schema`.`PLUGINS`;
# User@Host: debian-sys-maint[debian-sys-maint] @ localhost []
# Query_time: 0.000254 Lock_time: 0.000159 Rows_sent: 0 Rows_examined: 1
SET timestamp=1481294279;
select count(*) into @discard from `information_schema`.`PROCESSLIST`;
# User@Host: debian-sys-maint[debian-sys-maint] @ localhost []
# Query_time: 0.000297 Lock_time: 0.000229 Rows_sent: 0 Rows_examined: 0
SET timestamp=1481294279;
select count(*) into @discard from `information_schema`.`ROUTINES`;
# User@Host: debian-sys-maint[debian-sys-maint] @ localhost []
# Query_time: 0.001676 Lock_time: 0.000156 Rows_sent: 0 Rows_examined: 0
SET timestamp=1481294279;
select count(*) into @discard from `information_schema`.`TRIGGERS`;
# User@Host: debian-sys-maint[debian-sys-maint] @ localhost []
# Query_time: 0.008782 Lock_time: 0.001187 Rows_sent: 0 Rows_examined: 0
SET timestamp=1481294279;
select count(*) into @discard from `information_schema`.`VIEWS`;
# Time: 161209 14:39:02
# User@Host: root[root] @ localhost []
# Query_time: 2.000268 Lock_time: 0.000000 Rows_sent: 1 Rows_examined: 0
SET timestamp=1481294342;
select sleep(2);
# Time: 161209 14:39:23
# User@Host: root[root] @ localhost []
# Query_time: 0.000138 Lock_time: 0.000056 Rows_sent: 0 Rows_examined: 0
use mysql;
SET timestamp=1481294363;
select * from general_log;
# Time: 161209 14:39:40
# User@Host: root[root] @ localhost []
# Query_time: 0.000159 Lock_time: 0.000059 Rows_sent: 5 Rows_examined: 5
SET timestamp=1481294380;
select * from user;
View File

@@ -0,0 +1,10 @@
- key: nginx
title: "Nginx"
description: >
Module for parsing the Nginx log files.
fields:
- name: nginx
type: group
description: >
Fields from the Nginx log files.
fields:
View File

@@ -0,0 +1,13 @@
{
"hits": 0,
"timeRestore": false,
"description": "",
"title": "Filebeat Nginx Dashboard",
"uiStateJSON": "{\"P-4\":{\"vis\":{\"legendOpen\":true}},\"P-8\":{\"mapCenter\":[50.51342652633956,-0.17578125]}}",
"panelsJSON": "[{\"col\":9,\"id\":\"Errors-over-time\",\"panelIndex\":2,\"row\":4,\"size_x\":4,\"size_y\":3,\"type\":\"visualization\"},{\"col\":1,\"id\":\"Nginx-Access-Browsers\",\"panelIndex\":3,\"row\":10,\"size_x\":4,\"size_y\":4,\"type\":\"visualization\"},{\"col\":5,\"id\":\"Nginx-Access-OSes\",\"panelIndex\":4,\"row\":10,\"size_x\":4,\"size_y\":4,\"type\":\"visualization\"},{\"col\":1,\"id\":\"New-Visualization\",\"panelIndex\":5,\"row\":4,\"size_x\":8,\"size_y\":3,\"type\":\"visualization\"},{\"col\":1,\"id\":\"Nginx-Access-Response-codes-by-top-URLs\",\"panelIndex\":6,\"row\":7,\"size_x\":12,\"size_y\":3,\"type\":\"visualization\"},{\"col\":9,\"id\":\"Sent-sizes\",\"panelIndex\":7,\"row\":10,\"size_x\":4,\"size_y\":4,\"type\":\"visualization\"},{\"id\":\"Nginx-Access-Map\",\"type\":\"visualization\",\"panelIndex\":8,\"size_x\":12,\"size_y\":3,\"col\":1,\"row\":1}]",
"optionsJSON": "{\"darkTheme\":false}",
"version": 1,
"kibanaSavedObjectMeta": {
"searchSourceJSON": "{\"filter\":[{\"query\":{\"query_string\":{\"analyze_wildcard\":true,\"query\":\"*\"}}}]}"
}
}
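
In the 5.x stack Kibana keeps its saved objects in the .kibana index, so an export like the dashboard above can be loaded with a plain index request. A sketch with the elasticsearch Python client; the file name and document id are assumptions for illustration:

import json
from elasticsearch import Elasticsearch

es = Elasticsearch()  # assumes the cluster that backs Kibana

with open("filebeat-nginx-dashboard.json") as f:  # hypothetical path
    dashboard = json.load(f)

es.index(index=".kibana", doc_type="dashboard",
         id="Filebeat-Nginx-Dashboard",  # assumed object id
         body=dashboard)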
View File

@@ -0,0 +1,16 @@
{
"sort": [
"@timestamp",
"desc"
],
"hits": 0,
"description": "",
"title": "Filebeat Nginx module",
"version": 1,
"kibanaSavedObjectMeta": {
"searchSourceJSON": "{\"index\":\"filebeat-*\",\"query\":{\"query_string\":{\"query\":\"_exists_:nginx\",\"analyze_wildcard\":true}},\"filter\":[],\"highlight\":{\"pre_tags\":[\"@kibana-highlighted-field@\"],\"post_tags\":[\"@/kibana-highlighted-field@\"],\"fields\":{\"*\":{}},\"require_field_match\":false,\"fragment_size\":2147483647}}"
},
"columns": [
"_source"
]
}
View File

@@ -0,0 +1,10 @@
{
"visState": "{\n \"title\": \"Errors over time\",\n \"type\": \"area\",\n \"params\": {\n \"shareYAxis\": true,\n \"addTooltip\": true,\n \"addLegend\": true,\n \"legendPosition\": \"right\",\n \"smoothLines\": false,\n \"scale\": \"linear\",\n \"interpolate\": \"linear\",\n \"mode\": \"stacked\",\n \"times\": [],\n \"addTimeMarker\": false,\n \"defaultYExtents\": false,\n \"setYExtents\": false,\n \"yAxis\": {}\n },\n \"aggs\": [\n {\n \"id\": \"1\",\n \"enabled\": true,\n \"type\": \"count\",\n \"schema\": \"metric\",\n \"params\": {}\n },\n {\n \"id\": \"2\",\n \"enabled\": true,\n \"type\": \"date_histogram\",\n \"schema\": \"segment\",\n \"params\": {\n \"field\": \"@timestamp\",\n \"interval\": \"auto\",\n \"customInterval\": \"2h\",\n \"min_doc_count\": 1,\n \"extended_bounds\": {}\n }\n },\n {\n \"id\": \"3\",\n \"enabled\": true,\n \"type\": \"terms\",\n \"schema\": \"group\",\n \"params\": {\n \"field\": \"nginx.error.level\",\n \"size\": 5,\n \"order\": \"desc\",\n \"orderBy\": \"1\"\n }\n }\n ],\n \"listeners\": {}\n}",
"description": "",
"title": "Nginx Errors over time",
"uiStateJSON": "{}",
"version": 1,
"kibanaSavedObjectMeta": {
"searchSourceJSON": "{\n \"index\": \"filebeat-*\",\n \"query\": {\n \"query_string\": {\n \"query\": \"*\",\n \"analyze_wildcard\": true\n }\n },\n \"filter\": []\n}"
}
}
View File

@@ -0,0 +1,11 @@
{
"visState": "{\n \"title\": \"New Visualization\",\n \"type\": \"histogram\",\n \"params\": {\n \"shareYAxis\": true,\n \"addTooltip\": true,\n \"addLegend\": true,\n \"legendPosition\": \"right\",\n \"scale\": \"linear\",\n \"mode\": \"stacked\",\n \"times\": [],\n \"addTimeMarker\": false,\n \"defaultYExtents\": false,\n \"setYExtents\": false,\n \"yAxis\": {}\n },\n \"aggs\": [\n {\n \"id\": \"1\",\n \"enabled\": true,\n \"type\": \"count\",\n \"schema\": \"metric\",\n \"params\": {}\n },\n {\n \"id\": \"2\",\n \"enabled\": true,\n \"type\": \"date_histogram\",\n \"schema\": \"segment\",\n \"params\": {\n \"field\": \"@timestamp\",\n \"interval\": \"auto\",\n \"customInterval\": \"2h\",\n \"min_doc_count\": 1,\n \"extended_bounds\": {}\n }\n },\n {\n \"id\": \"3\",\n \"enabled\": true,\n \"type\": \"terms\",\n \"schema\": \"group\",\n \"params\": {\n \"field\": \"nginx.access.response_code\",\n \"size\": 5,\n \"order\": \"desc\",\n \"orderBy\": \"1\"\n }\n }\n ],\n \"listeners\": {}\n}",
"description": "",
"title": "Nginx Access over time",
"uiStateJSON": "{\n \"vis\": {\n \"colors\": {\n \"200\": \"#7EB26D\",\n \"404\": \"#614D93\"\n }\n }\n}",
"version": 1,
"savedSearchId": "Filebeat-Nginx-module",
"kibanaSavedObjectMeta": {
"searchSourceJSON": "{\n \"filter\": []\n}"
}
}
View File

@@ -0,0 +1,10 @@
{
"visState": "{\"title\":\"Nginx Access Browsers\",\"type\":\"pie\",\"params\":{\"shareYAxis\":true,\"addTooltip\":true,\"addLegend\":true,\"legendPosition\":\"bottom\",\"isDonut\":true},\"aggs\":[{\"id\":\"1\",\"enabled\":true,\"type\":\"count\",\"schema\":\"metric\",\"params\":{}},{\"id\":\"2\",\"enabled\":true,\"type\":\"terms\",\"schema\":\"segment\",\"params\":{\"field\":\"nginx.access.user_agent.name\",\"size\":5,\"order\":\"desc\",\"orderBy\":\"1\"}},{\"id\":\"3\",\"enabled\":true,\"type\":\"terms\",\"schema\":\"segment\",\"params\":{\"field\":\"nginx.access.user_agent.major\",\"size\":5,\"order\":\"desc\",\"orderBy\":\"1\"}}],\"listeners\":{}}",
"description": "",
"title": "Nginx Access Browsers",
"uiStateJSON": "{}",
"version": 1,
"kibanaSavedObjectMeta": {
"searchSourceJSON": "{\"index\":\"filebeat-*\",\"query\":{\"query_string\":{\"query\":\"*\",\"analyze_wildcard\":true}},\"filter\":[]}"
}
}
View File

@@ -0,0 +1,11 @@
{
"visState": "{\"aggs\":[{\"enabled\":true,\"id\":\"1\",\"params\":{},\"schema\":\"metric\",\"type\":\"count\"},{\"enabled\":true,\"id\":\"2\",\"params\":{\"autoPrecision\":true,\"field\":\"nginx.access.geoip.location\"},\"schema\":\"segment\",\"type\":\"geohash_grid\"}],\"listeners\":{},\"params\":{\"addTooltip\":true,\"heatBlur\":15,\"heatMaxZoom\":16,\"heatMinOpacity\":0.1,\"heatNormalizeData\":true,\"heatRadius\":25,\"isDesaturated\":true,\"legendPosition\":\"bottomright\",\"mapCenter\":[15,5],\"mapType\":\"Scaled Circle Markers\",\"mapZoom\":2,\"wms\":{\"enabled\":false,\"options\":{\"attribution\":\"Maps provided by USGS\",\"format\":\"image/png\",\"layers\":\"0\",\"styles\":\"\",\"transparent\":true,\"version\":\"1.3.0\"},\"url\":\"https://basemap.nationalmap.gov/arcgis/services/USGSTopo/MapServer/WMSServer\"}},\"title\":\"Nginx Access Map\",\"type\":\"tile_map\"}",
"description": "",
"title": "Nginx Access Map",
"uiStateJSON": "{\"mapCenter\":[12.039320557540572,-0.17578125]}",
"version": 1,
"savedSearchId": "Filebeat-Nginx-module",
"kibanaSavedObjectMeta": {
"searchSourceJSON": "{\"filter\":[]}"
}
}
View File

@@ -0,0 +1,10 @@
{
"visState": "{\"title\":\"Nginx Access OSes\",\"type\":\"pie\",\"params\":{\"shareYAxis\":true,\"addTooltip\":true,\"addLegend\":true,\"legendPosition\":\"bottom\",\"isDonut\":true},\"aggs\":[{\"id\":\"1\",\"enabled\":true,\"type\":\"count\",\"schema\":\"metric\",\"params\":{}},{\"id\":\"2\",\"enabled\":true,\"type\":\"terms\",\"schema\":\"segment\",\"params\":{\"field\":\"nginx.access.user_agent.os_name\",\"size\":5,\"order\":\"desc\",\"orderBy\":\"1\"}},{\"id\":\"3\",\"enabled\":true,\"type\":\"terms\",\"schema\":\"segment\",\"params\":{\"field\":\"nginx.access.user_agent.os_major\",\"size\":5,\"order\":\"desc\",\"orderBy\":\"1\"}}],\"listeners\":{}}",
"description": "",
"title": "Nginx Access OSes",
"uiStateJSON": "{}",
"version": 1,
"kibanaSavedObjectMeta": {
"searchSourceJSON": "{\"index\":\"filebeat-*\",\"query\":{\"query_string\":{\"query\":\"*\",\"analyze_wildcard\":true}},\"filter\":[]}"
}
}
View File

@@ -0,0 +1,10 @@
{
"visState": "{\"title\":\"Nginx Access Response codes by top URLs\",\"type\":\"pie\",\"params\":{\"shareYAxis\":true,\"addTooltip\":true,\"addLegend\":true,\"legendPosition\":\"right\",\"isDonut\":false},\"aggs\":[{\"id\":\"1\",\"enabled\":true,\"type\":\"count\",\"schema\":\"metric\",\"params\":{}},{\"id\":\"2\",\"enabled\":true,\"type\":\"terms\",\"schema\":\"split\",\"params\":{\"field\":\"nginx.access.url\",\"size\":5,\"order\":\"desc\",\"orderBy\":\"1\",\"row\":false}},{\"id\":\"3\",\"enabled\":true,\"type\":\"terms\",\"schema\":\"segment\",\"params\":{\"field\":\"nginx.access.response_code\",\"size\":5,\"order\":\"desc\",\"orderBy\":\"1\"}}],\"listeners\":{}}",
"description": "",
"title": "Nginx Access Response codes by top URLs",
"uiStateJSON": "{\"vis\":{\"colors\":{\"200\":\"#629E51\",\"404\":\"#0A50A1\"}}}",
"version": 1,
"kibanaSavedObjectMeta": {
"searchSourceJSON": "{\"index\":\"filebeat-*\",\"query\":{\"query_string\":{\"query\":\"*\",\"analyze_wildcard\":true}},\"filter\":[]}"
}
}
View File

@@ -0,0 +1,10 @@
{
"visState": "{\n \"title\": \"Sent sizes\",\n \"type\": \"line\",\n \"params\": {\n \"shareYAxis\": true,\n \"addTooltip\": true,\n \"addLegend\": true,\n \"legendPosition\": \"right\",\n \"showCircles\": true,\n \"smoothLines\": true,\n \"interpolate\": \"linear\",\n \"scale\": \"linear\",\n \"drawLinesBetweenPoints\": true,\n \"radiusRatio\": \"17\",\n \"times\": [],\n \"addTimeMarker\": false,\n \"defaultYExtents\": false,\n \"setYExtents\": false,\n \"yAxis\": {}\n },\n \"aggs\": [\n {\n \"id\": \"1\",\n \"enabled\": true,\n \"type\": \"sum\",\n \"schema\": \"metric\",\n \"params\": {\n \"field\": \"nginx.access.body_sent.bytes\",\n \"customLabel\": \"Data sent\"\n }\n },\n {\n \"id\": \"2\",\n \"enabled\": true,\n \"type\": \"date_histogram\",\n \"schema\": \"segment\",\n \"params\": {\n \"field\": \"@timestamp\",\n \"interval\": \"auto\",\n \"customInterval\": \"2h\",\n \"min_doc_count\": 1,\n \"extended_bounds\": {}\n }\n },\n {\n \"id\": \"3\",\n \"enabled\": true,\n \"type\": \"count\",\n \"schema\": \"radius\",\n \"params\": {}\n }\n ],\n \"listeners\": {}\n}",
"description": "",
"title": "Nginx Sent Byte Size",
"uiStateJSON": "{}",
"version": 1,
"kibanaSavedObjectMeta": {
"searchSourceJSON": "{\n \"filter\": [],\n \"index\": \"filebeat-*\",\n \"query\": {\n \"query_string\": {\n \"query\": \"_exists_:nginx.access\",\n \"analyze_wildcard\": true\n }\n },\n \"highlight\": {\n \"pre_tags\": [\n \"@kibana-highlighted-field@\"\n ],\n \"post_tags\": [\n \"@/kibana-highlighted-field@\"\n ],\n \"fields\": {\n \"*\": {}\n },\n \"require_field_match\": false,\n \"fragment_size\": 2147483647\n }\n}"
}
}
View File

@@ -0,0 +1,107 @@
- name: access
type: group
description: >
Contains fields for the Nginx access logs.
fields:
- name: remote_ip
type: keyword
description: >
Client IP address.
- name: user_name
type: keyword
description: >
The user name used when basic authentication is used.
- name: method
type: keyword
example: GET
description: >
The request HTTP method.
- name: url
type: keyword
description: >
The request HTTP URL.
- name: http_version
type: keyword
description: >
The HTTP version.
- name: response_code
type: long
description: >
The HTTP response code.
- name: body_sent.bytes
type: long
format: bytes
description: >
The number of bytes of the server response body.
- name: referrer
type: keyword
description: >
The HTTP referrer.
- name: agent
type: text
description: >
Contains the un-parsed user agent string. Only present if the user
agent Elasticsearch plugin is not available or not used.
- name: user_agent
type: group
description: >
Contains the parsed User agent field. Only present if the user
agent Elasticsearch plugin is available and used.
fields:
- name: device
type: keyword
description: >
The name of the physical device.
- name: major
type: long
description: >
The major version of the user agent.
- name: minor
type: long
description: >
The minor version of the user agent.
- name: patch
type: long
description: >
The patch version of the user agent.
- name: name
type: keyword
example: Chrome
description: >
The name of the user agent.
- name: os
type: keyword
description: >
The name of the operating system.
- name: os_major
type: long
description: >
The major version of the operating system.
- name: os_minor
type: long
description: >
The minor version of the operating system.
- name: os_name
type: keyword
description: >
The name of the operating system.
- name: geoip
type: group
description: >
Contains GeoIP information gathered based on the remote_ip field.
Only present if the GeoIP Elasticsearch plugin is available and
used.
fields:
- name: continent_name
type: keyword
description: >
The name of the continent.
- name: country_iso_code
type: keyword
description: >
Country ISO code.
- name: location
type: geo_point
description: >
The longitude and latitude.
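
Again for orientation only, a hand-built sketch of how the first line of the access-log test file further below could map onto these fields (the user_agent and geoip groups are omitted, since they depend on the Elasticsearch plugins):

# Illustrative only: approximate shape of a parsed access-log event.
event = {
    "nginx": {
        "access": {
            "remote_ip": "77.179.66.156",
            "user_name": "-",
            "method": "GET",
            "url": "/",
            "http_version": "1.1",
            "response_code": 200,
            "body_sent": {"bytes": 612},
            "referrer": "-",
        }
    }
}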
View File

@@ -0,0 +1,9 @@
- input_type: log
paths:
{%- for path in paths %}
- {{path}}
{%- endfor %}
exclude_files: [".gz$"]
fields:
source_type: nginx-access
pipeline_id: {{beat.pipeline_id}}
View File

@@ -0,0 +1,16 @@
{
"description": "pipeline for parsing Nginx logs that are JSON format",
"processors": [{
"user_agent": {
"field": "agent"
}
}, {
"remove": {
"field": "agent"
}
}, {
"geoip": {
"field": "remote_ip"
}
}]
}
View File

@@ -0,0 +1,37 @@
{
"description": "Pipeline for parsing Nginx logs. Requires no plugins",
"processors": [{
"grok": {
"field": "message",
"patterns":[
"%{IPORHOST:nginx.access.remote_ip} - %{DATA:nginx.access.user_name} \\[%{HTTPDATE:nginx.access.time}\\] \"%{WORD:nginx.access.method} %{DATA:nginx.access.url} HTTP/%{NUMBER:nginx.access.http_version}\" %{NUMBER:nginx.access.response_code} %{NUMBER:nginx.access.body_sent.bytes} \"%{DATA:nginx.access.referrer}\" \"%{DATA:nginx.access.agent}\""
],
"ignore_missing": true
}
},{
"remove":{
"field": "message"
}
}, {
"rename": {
"field": "@timestamp",
"target_field": "read_timestamp"
}
}, {
"date": {
"field": "nginx.access.time",
"target_field": "@timestamp",
"formats": ["dd/MMM/YYYY:H:m:s Z"]
}
}, {
"remove": {
"field": "nginx.access.time"
}
}],
"on_failure" : [{
"set" : {
"field" : "error",
"value" : "{{ _ingest.on_failure_message }}"
}
}]
}
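
The Joda pattern dd/MMM/YYYY:H:m:s Z handed to the date processor is the classic nginx time format. A quick Python sanity check of the equivalent parse, using a timestamp from the access-log test file further below:

from datetime import datetime

# Joda "dd/MMM/YYYY:H:m:s Z" roughly equals strptime "%d/%b/%Y:%H:%M:%S %z".
ts = datetime.strptime("25/Oct/2016:14:49:33 +0200", "%d/%b/%Y:%H:%M:%S %z")
print(ts.isoformat())  # 2016-10-25T14:49:33+02:00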
View File

@@ -0,0 +1,51 @@
{
"description": "Pipeline for parsing Nginx access logs. Requires the geoip and user_agent plugins.",
"processors": [{
"grok": {
"field": "message",
"patterns":[
"%{IPORHOST:nginx.access.remote_ip} - %{DATA:nginx.access.user_name} \\[%{HTTPDATE:nginx.access.time}\\] \"%{WORD:nginx.access.method} %{DATA:nginx.access.url} HTTP/%{NUMBER:nginx.access.http_version}\" %{NUMBER:nginx.access.response_code} %{NUMBER:nginx.access.body_sent.bytes} \"%{DATA:nginx.access.referrer}\" \"%{DATA:nginx.access.agent}\""
],
"ignore_missing": true
}
},{
"remove":{
"field": "message"
}
}, {
"rename": {
"field": "@timestamp",
"target_field": "read_timestamp"
}
}, {
"date": {
"field": "nginx.access.time",
"target_field": "@timestamp",
"formats": ["dd/MMM/YYYY:H:m:s Z"]
}
}, {
"remove": {
"field": "nginx.access.time"
}
}, {
"user_agent": {
"field": "nginx.access.agent",
"target_field": "nginx.access.user_agent"
}
}, {
"remove": {
"field": "nginx.access.agent"
}
}, {
"geoip": {
"field": "nginx.access.remote_ip",
"target_field": "nginx.access.geoip"
}
}],
"on_failure" : [{
"set" : {
"field" : "error",
"value" : "{{ _ingest.on_failure_message }}"
}
}]
}
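
This variant only works if the ingest-user-agent and ingest-geoip plugins are installed on every ingest node; otherwise the pipeline fails to load. A quick check with the elasticsearch Python client, assuming a local node:

from elasticsearch import Elasticsearch

es = Elasticsearch()
# One row per installed plugin and node; look for
# ingest-geoip and ingest-user-agent in the output.
print(es.cat.plugins(v=True))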
View File

@@ -0,0 +1,17 @@
module_version: 1.0
vars:
paths:
default:
- /var/log/nginx/access.log*
os.darwin:
- /usr/local/var/log/nginx/access.log*
os.windows:
- c:/programfiles/nginx/logs/access.log*
pipeline:
# options: with_plugins, no_plugins, json_with_plugins, json_no_plugins
default: with_plugins
ingest_pipeline: ingest/{{pipeline}}.json
prospectors:
- config/nginx-access.yml
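
The pipeline variable above selects which of the four pipeline definitions is loaded, because the ingest_pipeline path itself is templated. A sketch of that substitution, assuming jinja2:

from jinja2 import Template

# "with_plugins" is the manifest default; any of the four options works.
print(Template("ingest/{{pipeline}}.json").render(pipeline="no_plugins"))
# -> ingest/no_plugins.json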
View File

@@ -0,0 +1,12 @@
77.179.66.156 - - [25/Oct/2016:14:49:33 +0200] "GET / HTTP/1.1" 200 612 "-" "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/54.0.2840.59 Safari/537.36"
77.179.66.156 - - [25/Oct/2016:14:49:34 +0200] "GET /favicon.ico HTTP/1.1" 404 571 "http://localhost:8080/" "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/54.0.2840.59 Safari/537.36"
77.179.66.156 - - [25/Oct/2016:14:50:44 +0200] "GET /adsasd HTTP/1.1" 404 571 "-" "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/54.0.2840.59 Safari/537.36"
77.179.66.156 - - [07/Dec/2016:10:34:43 +0100] "GET / HTTP/1.1" 200 612 "-" "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/54.0.2840.98 Safari/537.36"
77.179.66.156 - - [07/Dec/2016:10:34:43 +0100] "GET /favicon.ico HTTP/1.1" 404 571 "http://localhost:8080/" "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/54.0.2840.98 Safari/537.36"
77.179.66.156 - - [07/Dec/2016:10:43:18 +0100] "GET /test HTTP/1.1" 404 571 "-" "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/54.0.2840.98 Safari/537.36"
77.179.66.156 - - [07/Dec/2016:10:43:21 +0100] "GET /test HTTP/1.1" 404 571 "-" "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/54.0.2840.98 Safari/537.36"
77.179.66.156 - - [07/Dec/2016:10:43:23 +0100] "GET /test1 HTTP/1.1" 404 571 "-" "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/54.0.2840.98 Safari/537.36"
127.0.0.1 - - [07/Dec/2016:11:04:37 +0100] "GET /test1 HTTP/1.1" 404 571 "-" "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/54.0.2840.98 Safari/537.36"
127.0.0.1 - - [07/Dec/2016:11:04:58 +0100] "GET / HTTP/1.1" 304 0 "-" "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.12; rv:49.0) Gecko/20100101 Firefox/49.0"
127.0.0.1 - - [07/Dec/2016:11:04:59 +0100] "GET / HTTP/1.1" 304 0 "-" "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.12; rv:49.0) Gecko/20100101 Firefox/49.0"
127.0.0.1 - - [07/Dec/2016:11:05:07 +0100] "GET /taga HTTP/1.1" 404 169 "-" "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.12; rv:49.0) Gecko/20100101 Firefox/49.0"
View File

@@ -0,0 +1,25 @@
- name: error
type: group
description: >
Contains fields for the Nginx error logs.
fields:
- name: level
type: keyword
description: >
Error level (e.g. error, critical).
- name: pid
type: long
description: >
Process identifier (PID).
- name: tid
type: long
description: >
Thread identifier.
- name: connection_id
type: long
description: >
Connection identifier.
- name: message
type: text
description: >
The error message.
View File

@@ -0,0 +1,10 @@
- input_type: log
paths:
{%- for path in paths %}
- {{path}}
{%- endfor %}
exclude_files: [".gz$"]
fields:
source_type: nginx-error
pipeline_id: {{beat.pipeline_id}}
View File

@@ -0,0 +1,37 @@
{
"description": "Pipeline for parsing the Nginx error logs",
"processors": [{
"grok": {
"field": "message",
"patterns": [
"%{DATA:nginx.error.time} \\[%{DATA:nginx.error.level}\\] %{NUMBER:nginx.error.pid}#%{NUMBER:nginx.error.tid}: (\\*%{NUMBER:nginx.error.connection_id} )?%{GREEDYDATA:nginx.error.message}"
],
"ignore_missing": true
}
},{
"remove":{
"field": "message"
}
}, {
"rename": {
"field": "@timestamp",
"target_field": "read_timestamp"
}
}, {
"date": {
"field": "nginx.error.time",
"target_field": "@timestamp",
"formats": ["YYYY/MM/dd H:m:s"]
}
}, {
"remove": {
"field": "nginx.error.time"
}
}],
"on_failure" : [{
"set" : {
"field" : "error",
"value" : "{{ _ingest.on_failure_message }}"
}
}]
}
View File

@@ -0,0 +1,14 @@
module_version: 1.0
vars:
paths:
default:
- /var/log/nginx/error.log*
os.darwin:
- /usr/local/var/log/nginx/error.log*
os.windows:
- c:/programfiles/nginx/logs/error.log*
ingest_pipeline: ingest/pipeline.json
prospectors:
- config/nginx-error.yml
View File

@@ -0,0 +1,2 @@
2016/10/25 14:49:34 [error] 54053#0: *1 open() "/usr/local/Cellar/nginx/1.10.2_1/html/favicon.ico" failed (2: No such file or directory), client: 127.0.0.1, server: localhost, request: "GET /favicon.ico HTTP/1.1", host: "localhost:8080", referrer: "http://localhost:8080/"
2016/10/25 14:50:44 [error] 54053#0: *3 open() "/usr/local/Cellar/nginx/1.10.2_1/html/adsasd" failed (2: No such file or directory), client: 127.0.0.1, server: localhost, request: "GET /adsasd HTTP/1.1", host: "localhost:8080"
View File

@@ -0,0 +1,10 @@
- key: syslog
title: "Syslog"
description: >
Module for parsing syslog files.
fields:
- name: syslog
type: group
description: >
Fields from the syslog files.
fields:
View File

@@ -0,0 +1,13 @@
{
"hits": 0,
"timeRestore": false,
"description": "",
"title": "Filebeat syslog dashboard",
"uiStateJSON": "{}",
"panelsJSON": "[{\"id\":\"Syslog-events-by-hostname\",\"type\":\"visualization\",\"panelIndex\":1,\"size_x\":8,\"size_y\":4,\"col\":1,\"row\":1},{\"id\":\"Syslog-hostnames-and-processes\",\"type\":\"visualization\",\"panelIndex\":2,\"size_x\":4,\"size_y\":4,\"col\":9,\"row\":1},{\"id\":\"Syslog-system-logs\",\"type\":\"search\",\"panelIndex\":3,\"size_x\":12,\"size_y\":7,\"col\":1,\"row\":5,\"columns\":[\"syslog.system.hostname\",\"syslog.system.program\",\"syslog.system.message\"],\"sort\":[\"@timestamp\",\"desc\"]}]",
"optionsJSON": "{\"darkTheme\":false}",
"version": 1,
"kibanaSavedObjectMeta": {
"searchSourceJSON": "{\"filter\":[{\"query\":{\"query_string\":{\"query\":\"*\",\"analyze_wildcard\":true}}}]}"
}
}
View File

@@ -0,0 +1,18 @@
{
"sort": [
"@timestamp",
"desc"
],
"hits": 0,
"description": "",
"title": "Syslog system logs",
"version": 1,
"kibanaSavedObjectMeta": {
"searchSourceJSON": "{\"index\":\"filebeat-*\",\"filter\":[],\"highlight\":{\"pre_tags\":[\"@kibana-highlighted-field@\"],\"post_tags\":[\"@/kibana-highlighted-field@\"],\"fields\":{\"*\":{}},\"require_field_match\":false,\"fragment_size\":2147483647},\"query\":{\"query_string\":{\"query\":\"*\",\"analyze_wildcard\":true}}}"
},
"columns": [
"syslog.system.hostname",
"syslog.system.program",
"syslog.system.message"
]
}
View File

@@ -0,0 +1,11 @@
{
"visState": "{\"title\":\"Syslog events by hostname\",\"type\":\"histogram\",\"params\":{\"shareYAxis\":true,\"addTooltip\":true,\"addLegend\":true,\"legendPosition\":\"right\",\"scale\":\"linear\",\"mode\":\"stacked\",\"times\":[],\"addTimeMarker\":false,\"defaultYExtents\":false,\"setYExtents\":false,\"yAxis\":{}},\"aggs\":[{\"id\":\"1\",\"enabled\":true,\"type\":\"count\",\"schema\":\"metric\",\"params\":{}},{\"id\":\"2\",\"enabled\":true,\"type\":\"date_histogram\",\"schema\":\"segment\",\"params\":{\"field\":\"@timestamp\",\"interval\":\"auto\",\"customInterval\":\"2h\",\"min_doc_count\":1,\"extended_bounds\":{}}},{\"id\":\"3\",\"enabled\":true,\"type\":\"terms\",\"schema\":\"group\",\"params\":{\"field\":\"syslog.system.hostname\",\"size\":5,\"order\":\"desc\",\"orderBy\":\"1\"}}],\"listeners\":{}}",
"description": "",
"title": "Syslog events by hostname",
"uiStateJSON": "{}",
"version": 1,
"savedSearchId": "Syslog-system-logs",
"kibanaSavedObjectMeta": {
"searchSourceJSON": "{\"filter\":[]}"
}
}
View File

@@ -0,0 +1,11 @@
{
"visState": "{\"title\":\"Syslog hostnames and processes\",\"type\":\"pie\",\"params\":{\"shareYAxis\":true,\"addTooltip\":true,\"addLegend\":true,\"legendPosition\":\"bottom\",\"isDonut\":true},\"aggs\":[{\"id\":\"1\",\"enabled\":true,\"type\":\"count\",\"schema\":\"metric\",\"params\":{}},{\"id\":\"2\",\"enabled\":true,\"type\":\"terms\",\"schema\":\"segment\",\"params\":{\"field\":\"syslog.system.hostname\",\"size\":5,\"order\":\"desc\",\"orderBy\":\"1\"}},{\"id\":\"3\",\"enabled\":true,\"type\":\"terms\",\"schema\":\"segment\",\"params\":{\"field\":\"syslog.system.program\",\"size\":5,\"order\":\"desc\",\"orderBy\":\"1\"}}],\"listeners\":{}}",
"description": "",
"title": "Syslog hostnames and processes",
"uiStateJSON": "{}",
"version": 1,
"savedSearchId": "Syslog-system-logs",
"kibanaSavedObjectMeta": {
"searchSourceJSON": "{\"filter\":[]}"
}
}
View File

@@ -0,0 +1,20 @@
- name: system
type: group
description: >
Contains fields from the syslog system logs.
fields:
- name: timestamp
description: >
The timestamp as read from the syslog message.
- name: hostname
description: >
The hostname as read from the syslog message.
- name: program
description: >
The process name as read from the syslog message.
- name: pid
description: >
The PID of the process that sent the syslog message.
- name: message
description: >
The message in the log line.
View File

@@ -0,0 +1,12 @@
- input_type: log
paths:
{%- for path in paths %}
- {{path}}
{%- endfor %}
exclude_files: [".gz$"]
multiline:
pattern: "^\\s"
match: after
fields:
source_type: syslog-system
pipeline_id: {{beat.pipeline_id}}
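
The multiline settings fold indented continuation lines (pattern ^\s, match after) into the preceding event. A rough Python equivalent of that grouping, with made-up sample lines, for intuition only:

import re

lines = [
    "Dec  9 14:18:50 precise32 kernel: first event",
    "    continuation of the first event",
    "Dec  9 14:18:51 precise32 sshd[42]: second event",
]

events, buf = [], []
for line in lines:
    if buf and re.match(r"^\s", line):
        buf.append(line)               # indented: same event as before
    else:
        if buf:
            events.append("\n".join(buf))
        buf = [line]                   # new event starts here
if buf:
    events.append("\n".join(buf))

print(events)  # two events; the first spans two physical lines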
View File

@@ -0,0 +1,39 @@
{
"description": "Pipeline for parsing Syslog messages.",
"processors": [
{
"grok": {
"field": "message",
"patterns": [
"%{SYSLOGTIMESTAMP:syslog.system.timestamp} %{SYSLOGHOST:syslog.system.hostname} %{DATA:syslog.system.program}(?:\\[%{POSINT:syslog.system.pid}\\])?: %{GREEDYMULTILINE:syslog.system.message}"
],
"pattern_definitions" : {
"GREEDYMULTILINE" : "(.|\n)*"
},
"ignore_missing": true
}
},
{
"remove": {
"field": "message"
}
},
{
"date": {
"field": "syslog.system.timestamp",
"target_field": "@timestamp",
"formats": [
"MMM d HH:mm:ss",
"MMM dd HH:mm:ss"
],
"ignore_failure": true
}
}
],
"on_failure" : [{
"set" : {
"field" : "error",
"value" : "{{ _ingest.on_failure_message }}"
}
}]
}
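
For intuition, a rough Python re equivalent of the grok expression above; the real SYSLOGTIMESTAMP, SYSLOGHOST and DATA definitions are more permissive, and the sample line is made up:

import re

SYSLOG_RE = re.compile(
    r"^(?P<timestamp>\w{3}\s+\d{1,2} \d{2}:\d{2}:\d{2}) "
    r"(?P<hostname>\S+) "
    r"(?P<program>[^\s\[:]+)(?:\[(?P<pid>\d+)\])?: "
    r"(?P<message>.*)$", re.S)

m = SYSLOG_RE.match("Dec  9 14:18:50 precise32 sshd[1234]: "
                    "Accepted publickey for vagrant")
print(m.groupdict())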
Some files were not shown because too many files have changed in this diff.