priority = 60000
retries = 60
-ORDER = ['response_codes', 'bandwidth', 'response_time', 'requests_per_url', 'http_method', 'requests_per_ipproto',
- 'clients', 'clients_all']
+ORDER = ['response_statuses', 'response_codes', 'bandwidth', 'response_time', 'requests_per_url', 'http_method',
+ 'requests_per_ipproto', 'clients', 'clients_all']
CHARTS = {
'response_codes': {
'options': [None, 'Response Codes', 'requests/s', 'responses', 'web_log.response_codes', 'stacked'],
'lines': [
['req_ipv4', 'ipv4', 'incremental', 1, 1],
['req_ipv6', 'ipv6', 'incremental', 1, 1]
+ ]},
+ 'response_statuses': {
+ 'options': [None, 'Response Statuses', 'requests/s', 'responses', 'web_log.response_statuses',
+ 'stacked'],
+ 'lines': [
+ ['successful_requests', 'successful', 'incremental', 1, 1],
+ ['redirects', None, 'incremental', 1, 1],
+ ['bad_requests', 'bad', 'incremental', 1, 1],
+ ['server_errors', None, 'incremental', 1, 1]
]}
}
# sorted list of unique IPs
self.unique_all_time = list()
# if there is no new logs this dict returned to netdata
- self.data = {'bytes_sent': 0, 'resp_length': 0, 'resp_time_min': 0,
- 'resp_time_max': 0, 'resp_time_avg': 0, 'unique_cur_ipv4': 0,
- 'unique_cur_ipv6': 0, '2xx': 0, '5xx': 0, '3xx': 0, '4xx': 0,
- '1xx': 0, '0xx': 0, 'unmatched': 0, 'req_ipv4': 0, 'req_ipv6': 0,
- 'unique_tot_ipv4': 0, 'unique_tot_ipv6': 0}
+ self.data = {'bytes_sent': 0, 'resp_length': 0, 'resp_time_min': 0, 'resp_time_max': 0,
+ 'resp_time_avg': 0, 'unique_cur_ipv4': 0, 'unique_cur_ipv6': 0, '2xx': 0,
+ '5xx': 0, '3xx': 0, '4xx': 0, '1xx': 0, '0xx': 0, 'unmatched': 0, 'req_ipv4': 0,
+ 'req_ipv6': 0, 'unique_tot_ipv4': 0, 'unique_tot_ipv6': 0, 'successful_requests': 0,
+ 'redirects': 0, 'bad_requests': 0, 'server_errors': 0}
def check(self):
if not self.log_path:
self.error('Can\'t parse %s' % self.log_path)
return False
- if regex_name.startswith('access_'):
+ if regex_name.startswith('acs_'):
self.create_access_charts(regex_name)
- if regex_name == 'access_default':
+ if regex_name == 'acs_default':
self.info('Not all data collected. You need to modify LogFormat.')
self._get_data = self._get_access_data
self.info('Used regex: %s' % regex_name)
"""
# REGEX: 1.IPv4 address 2.HTTP method 3. URL 4. Response code
# 5. Bytes sent 6. Response length 7. Response process time
- access_default = re.compile(r'([\da-f.:]+)'
- r' -.*?"([A-Z]+)'
- r' (.*?)"'
- r' ([1-9]\d{2})'
- r' (\d+)')
-
- access_apache_ext = re.compile(r'([\da-f.:]+)'
- r' -.*?"([A-Z]+)'
- r' (.*?)"'
- r' ([1-9]\d{2})'
- r' (\d+)'
- r' (\d+)'
- r' (\d+) ')
-
- access_nginx_ext = re.compile(r'([\da-f.:]+)'
- r' -.*?"([A-Z]+)'
- r' (.*?)"'
- r' ([1-9]\d{2})'
- r' (\d+)'
- r' (\d+)'
- r' ([\d.]+) ')
-
- regex_function = zip([access_apache_ext, access_nginx_ext, access_default],
- [lambda x: x, lambda x: x * 1000000, lambda x: x],
- ['access_apache_ext', 'access_nginx_ext', 'access_default'])
+ acs_default = re.compile(r'([\da-f.:]+)'
+ r' -.*?"([A-Z]+)'
+ r' (.*?)"'
+ r' ([1-9]\d{2})'
+ r' (\d+|-)')
+
+ acs_apache_ext_insert = re.compile(r'([\da-f.:]+)'
+ r' -.*?"([A-Z]+)'
+ r' (.*?)"'
+ r' ([1-9]\d{2})'
+ r' (\d+|-)'
+ r' (\d+)'
+ r' (\d+) ')
+
+ acs_apache_ext_append = re.compile(r'([\da-f.:]+)'
+ r' -.*?"([A-Z]+)'
+ r' (.*?)"'
+ r' ([1-9]\d{2})'
+ r' (\d+|-)'
+ r' .*?'
+ r' (\d+)'
+ r' (\d+)'
+ r'(?: |$)')
+
+ acs_nginx_ext_insert = re.compile(r'([\da-f.:]+)'
+ r' -.*?"([A-Z]+)'
+ r' (.*?)"'
+ r' ([1-9]\d{2})'
+ r' (\d+)'
+ r' (\d+)'
+ r' (\d\.\d+) ')
+
+ acs_nginx_ext_append = re.compile(r'([\da-f.:]+)'
+ r' -.*?"([A-Z]+)'
+ r' (.*?)"'
+ r' ([1-9]\d{2})'
+ r' (\d+)'
+ r' .*?'
+ r' (\d+)'
+ r' (\d\.\d+)')
+
+ r_regex = [acs_apache_ext_insert, acs_apache_ext_append, acs_nginx_ext_insert,
+ acs_nginx_ext_append, acs_default]
+ r_function = [lambda x: x, lambda x: x, lambda x: x * 1000000, lambda x: x * 1000000, lambda x: x]
+ r_name = ['acs_apache_ext_insert', 'acs_apache_ext_append', 'acs_nginx_ext_insert',
+ 'acs_nginx_ext_append', 'acs_default']
+ regex_function_name = zip(r_regex, r_function, r_name)
+
regex_name = None
- for regex, function, name in regex_function:
+ for regex, function, name in regex_function_name:
if regex.search(last_line):
self.regex = regex
self.resp_time_func = function
' web_log.http_method stacked 2 %s\n' % (job_name, self.update_every)
# Remove 'request_time' chart from ORDER if request_time not in logs
- if regex_name == 'access_default':
+ if regex_name == 'acs_default':
self.order.remove('response_time')
# Remove 'clients_all' chart from ORDER if specified in the configuration
if not self.all_time:
# detailed response code
if self.detailed_response_codes:
self._get_data_detailed_response_codes(match_dict['code'])
+ # response statuses
+ self._get_data_statuses(match_dict['code'])
# requests per url
if self.url_pattern:
self._get_data_per_url(match_dict['url'])
# requests per http method
self._get_data_http_method(match_dict['method'])
# bandwidth sent
- self.data['bytes_sent'] += int(match_dict['sent'])
+ self.data['bytes_sent'] += int(match_dict['sent'] if '-' not in match_dict['sent'] else 0)
# request processing time and bandwidth received
if match_dict['resp_length'] and match_dict['resp_time']:
self.data['resp_length'] += int(match_dict['resp_length'])
ip_address_counter['unique_cur_ip'] += 1
else:
self.data['unmatched'] += 1
+
# timings
if request_time:
self.data['resp_time_min'] += int(request_time[0])
if not match:
self.data['other_url'] += 1
+    def _get_data_statuses(self, code):
+        """
+        Bucket one response into the coarse 'response_statuses' chart counters.
+
+        Increments exactly one of self.data['successful_requests'] /
+        ['redirects'] / ['bad_requests'] / ['server_errors'] based on the
+        leading digit of the status code. Codes outside 1xx-5xx (e.g. '0xx'
+        unmatched lines) are deliberately left uncounted here.
+
+        :param code: str: response status code. Ex.: '202', '499'
+        :return:
+        """
+        # 2xx, 1xx and 304 (Not Modified) all count as 'successful':
+        # a 304 means the cached copy was valid, so it is not treated as a redirect.
+        if code[0] == '2' or code == '304' or code[0] == '1':
+            self.data['successful_requests'] += 1
+        # remaining 3xx are real redirects
+        elif code[0] == '3':
+            self.data['redirects'] += 1
+        # 4xx: client-side errors
+        elif code[0] == '4':
+            self.data['bad_requests'] += 1
+        # 5xx: server-side errors
+        elif code[0] == '5':
+            self.data['server_errors'] += 1
+
def address_not_in_pool(pool, address, pool_size):
"""