--- /dev/null
+# netdata python.d.plugin configuration for nginx log
+#
+# This file is in YAML format. Generally the format is:
+#
+# name: value
+#
+# There are 2 sections:
+# - global variables
+# - one or more JOBS
+#
+# JOBS allow you to collect values from multiple sources.
+# Each source will have its own set of charts.
+#
+# JOB parameters have to be indented (using spaces only, example below).
+
+# ----------------------------------------------------------------------
+# Global Variables
+# These variables set the defaults for all JOBs, however each JOB
+# may define its own, overriding the defaults.
+
+# update_every sets the default data collection frequency.
+# If unset, the python.d.plugin default is used.
+# update_every: 1
+
+# priority controls the order of charts at the netdata dashboard.
+# Lower numbers move the charts towards the top of the page.
+# If unset, the default for python.d.plugin is used.
+# priority: 60000
+
+# retries sets the number of retries to be made in case of failures.
+# If unset, the default for python.d.plugin is used.
+# Attempts to restore the service are made once every update_every
+# and only if the module has collected values in the past.
+# retries: 5
+
+# ----------------------------------------------------------------------
+# JOBS (data collection sources)
+#
+# The default JOBS share the same *name*. JOBS with the same name
+# are mutually exclusive. Only one of them will be allowed running at
+# any time. This allows autodetection to try several alternatives and
+# pick the one that works.
+#
+# Any number of jobs is supported.
+#
+# All python.d.plugin JOBS (for all its modules) support a set of
+# predefined parameters. These are:
+#
+# job_name:
+# name: myname # the JOB's name as it will appear at the
+# # dashboard (by default is the job_name)
+# # JOBs sharing a name are mutually exclusive
+# update_every: 1 # the JOB's data collection frequency
+# priority: 60000 # the JOB's order on the dashboard
+# retries: 5 # the JOB's number of restoration attempts
+#
+# In addition to the above, nginx_log also supports the following:
+#
+# path: 'PATH' # the path to nginx's access.log
+#
+
+# ----------------------------------------------------------------------
+# AUTO-DETECTION JOBS
+# only one of them will run (they have the same name)
+
+nginx_log:
+ name: 'local'
+ path: '/var/log/nginx/access.log'
+
+nginx_log2:
+ name: 'local'
+ path: '/var/log/nginx/nginx-access.log'
--- /dev/null
+# -*- coding: utf-8 -*-
+# Description: nginx log netdata python.d module
+# Author: Pawel Krupa (paulfantom)
+
+from base import LogService
+import re
+
+priority = 60000  # default chart ordering on the dashboard (lower = higher up)
+retries = 60  # default number of restoration attempts after collection failures
+# update_every = 3
+
+ORDER = ['codes']  # order in which charts are presented on the dashboard
+CHARTS = {
+    'codes': {  # single stacked chart of HTTP status-code classes per second
+        'options': [None, 'nginx status codes', 'requests/s', 'requests', 'nginx_log.codes', 'stacked'],
+        'lines': [
+            ["20X", None, "incremental"],
+            ["30X", None, "incremental"],
+            ["40X", None, "incremental"],
+            ["50X", None, "incremental"]
+        ]}
+}
+
+
+class Service(LogService):
+ def __init__(self, configuration=None, name=None):
+ LogService.__init__(self, configuration=configuration, name=name)
+ if len(self.log_path) == 0:
+ self.log_path = "/var/log/nginx/access.log"
+ self.order = ORDER
+ self.definitions = CHARTS
+ pattern = r'" ([0-9]{3}) ?'
+ #pattern = r'(?:" )([0-9][0-9][0-9]) ?'
+ self.regex = re.compile(pattern)
+
+ def _get_data(self):
+ """
+ Parse new log lines
+ :return: dict
+ """
+ data = {'20X': 0,
+ '30X': 0,
+ '40X': 0,
+ '50X': 0}
+ try:
+ raw = self._get_raw_data()
+ if raw is None:
+ return None
+ elif not raw:
+ return data
+ except (ValueError, AttributeError):
+ return None
+
+ regex = self.regex
+ for line in raw:
+ code = regex.search(line)
+ beginning = code.group(1)[0]
+
+ if beginning == '2':
+ data["20X"] += 1
+ elif beginning == '3':
+ data["30X"] += 1
+ elif beginning == '4':
+ data["40X"] += 1
+ elif beginning == '5':
+ data["50X"] += 1
+
+ return data
+