# -*- coding: utf-8 -*-
# Description: web log netdata python.d module

from base import LogService
import re
import bisect
from os import access, R_OK
from os.path import getsize
from collections import namedtuple
from copy import deepcopy
ORDER = ['response_statuses', 'response_codes', 'bandwidth', 'response_time', 'requests_per_url', 'http_method',
         'requests_per_ipproto', 'clients', 'clients_all']
CHARTS = {
    'response_codes': {
        'options': [None, 'Response Codes', 'requests/s', 'responses', 'web_log.response_codes', 'stacked'],
        'lines': [
            ['2xx', '2xx', 'incremental'],
            ['5xx', '5xx', 'incremental'],
            ['3xx', '3xx', 'incremental'],
            ['4xx', '4xx', 'incremental'],
            ['1xx', '1xx', 'incremental'],
            ['0xx', 'other', 'incremental'],
            ['unmatched', 'unmatched', 'incremental']
        ]},
    'bandwidth': {
        'options': [None, 'Bandwidth', 'KB/s', 'bandwidth', 'web_log.bandwidth', 'area'],
        'lines': [
            ['resp_length', 'received', 'incremental', 1, 1024],
            ['bytes_sent', 'sent', 'incremental', -1, 1024]
        ]},
    'response_time': {
        'options': [None, 'Processing Time', 'milliseconds', 'timings', 'web_log.response_time', 'area'],
        'lines': [
            ['resp_time_min', 'min', 'incremental', 1, 1000],
            ['resp_time_max', 'max', 'incremental', 1, 1000],
            ['resp_time_avg', 'avg', 'incremental', 1, 1000]
        ]},
    'clients': {
        'options': [None, 'Current Poll Unique Client IPs', 'unique ips', 'clients', 'web_log.clients', 'stacked'],
        'lines': [
            ['unique_cur_ipv4', 'ipv4', 'incremental', 1, 1],
            ['unique_cur_ipv6', 'ipv6', 'incremental', 1, 1]
        ]},
    'clients_all': {
        'options': [None, 'All Time Unique Client IPs', 'unique ips', 'clients', 'web_log.clients_all', 'stacked'],
        'lines': [
            ['unique_tot_ipv4', 'ipv4', 'absolute', 1, 1],
            ['unique_tot_ipv6', 'ipv6', 'absolute', 1, 1]
        ]},
    'http_method': {
        'options': [None, 'Requests Per HTTP Method', 'requests/s', 'http methods', 'web_log.http_method', 'stacked'],
        'lines': []},
    'requests_per_ipproto': {
        'options': [None, 'Requests Per IP Protocol', 'requests/s', 'ip protocols', 'web_log.requests_per_ipproto',
                    'stacked'],
        'lines': [
            ['req_ipv4', 'ipv4', 'incremental', 1, 1],
            ['req_ipv6', 'ipv6', 'incremental', 1, 1]
        ]},
    'response_statuses': {
        'options': [None, 'Response Statuses', 'requests/s', 'responses', 'web_log.response_statuses',
                    'stacked'],
        'lines': [
            ['successful_requests', 'success', 'incremental', 1, 1],
            ['server_errors', 'error', 'incremental', 1, 1],
            ['redirects', 'redirect', 'incremental', 1, 1],
            ['bad_requests', 'bad', 'incremental', 1, 1],
            ['other_requests', 'other', 'incremental', 1, 1]
        ]}
}
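# Every entry in 'lines' follows the python.d chart convention [dimension, name, algorithm, multiplier, divisor];
# e.g. ['bytes_sent', 'sent', 'incremental', -1, 1024] plots sent traffic as negative KB/s on the bandwidth chart.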

NAMED_URL_PATTERN = namedtuple('URL_PATTERN', ['description', 'pattern'])
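# NAMED_URL_PATTERN pairs a user supplied category name with its compiled regex, e.g. (illustration only):
# NAMED_URL_PATTERN(description='static_files', pattern=re.compile(r'\.(css|js|png)$'))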


class Service(LogService):
    def __init__(self, configuration=None, name=None):
        # self._get_data = None  # will be assigned in 'check' method.
        # self.order = None  # will be assigned in 'create_*_method' method.
        # self.definitions = None  # will be assigned in 'create_*_method' method.
        # self.detailed_chart = None  # will be assigned in 'create_*_method' method.
        # self.http_method_chart = None  # will be assigned in 'create_*_method' method.
        LogService.__init__(self, configuration=configuration, name=name)
        # Variables from module configuration file
        self.log_path = self.configuration.get('path')
        self.detailed_response_codes = self.configuration.get('detailed_response_codes', True)
        self.all_time = self.configuration.get('all_time', True)
        self.url_pattern = self.configuration.get('categories')  # dict
        self.custom_log_format = self.configuration.get('custom_log_format')  # dict
        self.unique_all_time = list()  # sorted list of unique IPs
        self.regex = None  # will be assigned in 'find_regex' or 'find_regex_custom' method
        self.resp_time_func = None  # will be assigned in 'find_regex' or 'find_regex_custom' method
        # if there are no new logs, this dict is returned to netdata unchanged
        self.data = {'bytes_sent': 0, 'resp_length': 0, 'resp_time_min': 0, 'resp_time_max': 0,
                     'resp_time_avg': 0, 'unique_cur_ipv4': 0, 'unique_cur_ipv6': 0, '2xx': 0,
                     '5xx': 0, '3xx': 0, '4xx': 0, '1xx': 0, '0xx': 0, 'unmatched': 0, 'req_ipv4': 0,
                     'req_ipv6': 0, 'unique_tot_ipv4': 0, 'unique_tot_ipv6': 0, 'successful_requests': 0,
                     'redirects': 0, 'bad_requests': 0, 'server_errors': 0, 'other_requests': 0}
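        # Note: self.data also doubles as the running accumulator: _get_access_data() updates it in place
        # on every poll and returns it, which is why most dimensions above are declared 'incremental' in CHARTS.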

    def check(self):
        if not self.log_path:
            self.error('log path is not specified')
            return False

        # log_path must be readable
        if not access(self.log_path, R_OK):
            self.error('%s is not readable or does not exist' % self.log_path)
            return False

        # log_path file should not be empty
        if not getsize(self.log_path):
            self.error('%s is empty' % self.log_path)
            return False

        # Read last line (or first if there is only one line)
        with open(self.log_path, 'rb') as logs:
            logs.seek(-2, 2)
            while logs.read(1) != b'\n':
                logs.seek(-2, 1)
                if logs.tell() == 0:
                    break
            last_line = logs.readline()

        try:
            last_line = last_line.decode()
        except UnicodeDecodeError:
            try:
                last_line = last_line.decode(encoding='utf-8')
            except (TypeError, UnicodeDecodeError) as error:
                self.error(str(error))
                return False

        # Custom_log_format is preferable
        if self.custom_log_format:
            match_dict, log_name, error = self.find_regex_custom(last_line)
        else:
            match_dict, log_name, error = self.find_regex(last_line)

        if match_dict is None:
            self.error(str(error))
            return False

        if not (self.regex and self.resp_time_func):
            self.error('That can not happen, but it happened. "regex" or "resp_time_func" is None')
            return False

        if log_name == 'web_access':
            self.create_access_charts(match_dict)  # Create charts
            self._get_data = self._get_access_data
            self.info('Collected data: %s' % list(match_dict.keys()))
            return True

        # It's not an access log. Not used at the moment
        return False

    def find_regex_custom(self, last_line):
        """
        :param last_line: str: literally last line from log file
        :return: tuple where:
                 [0]: dict or None: match_dict or None
                 [1]: str or None: log_name or None
                 [2]: str: error description

        We are here only if "custom_log_format" is in the configuration. We need to make sure that:
        1. "custom_log_format" is a dict
        2. "pattern" is in "custom_log_format" and is a <str> instance
        3. if "time_multiplier" is in "custom_log_format" it must be an <int> instance

        If all parameters are ok we need to make sure that:
        1. Pattern search succeeds
        2. Pattern search result contains named subgroups (?P<subgroup_name>) (= "match_dict")

        If pattern search succeeds we need to make sure that:
        1. All mandatory keys ['address', 'code', 'bytes_sent', 'method', 'url'] are in "match_dict"

        If this is True we need to make sure that:
        1. All mandatory key values from "match_dict" have the correct format
           ("code" is an integer, "method" is an uppercase word, etc)

        If non-mandatory keys are in "match_dict" we need to make sure that:
        1. All non-mandatory key values from "match_dict" ['resp_length', 'resp_time'] have the correct format
           ("resp_length" is an integer or "-", "resp_time" is an integer or a float)
        """
        try:
            self.custom_log_format.keys()
        except AttributeError:
            return None, None, 'Custom log: "custom_log_format" is not a <dict>'

        pattern = self.custom_log_format.get('pattern')
        if not (pattern and isinstance(pattern, str)):
            return None, None, 'Custom log: "pattern" option is not specified or type is not <str>'

        resp_time_func = self.custom_log_format.get('time_multiplier') or 0

        if not isinstance(resp_time_func, int):
            return None, None, 'Custom log: "time_multiplier" is not an integer'

        regex = re.compile(pattern)
        match = regex.search(last_line)
        if match:
            match_dict = match.groupdict() or None
        else:
            return None, None, 'Custom log: pattern search FAILED'

        if match_dict is None:
            return None, None, 'Custom log: search OK but contains no named subgroups' \
                               ' (you need to use ?P<subgroup_name>)'

        basic_values = {'address', 'method', 'url', 'code', 'bytes_sent'} - set(match_dict)
        if basic_values:
            return None, None, 'Custom log: search OK but some mandatory keys (%s) are missing' % list(basic_values)

        if not re.search(r'[\da-f.:]+', match_dict['address']):
            return None, None, 'Custom log: can\'t parse "address": %s' % match_dict['address']
        if not re.search(r'[1-9]\d{2}', match_dict['code']):
            return None, None, 'Custom log: can\'t parse "code": %s' % match_dict['code']
        if not re.search(r'[A-Z]+', match_dict['method']):
            return None, None, 'Custom log: can\'t parse "method": %s' % match_dict['method']
        if not re.search(r'\d+|-', match_dict['bytes_sent']):
            return None, None, 'Custom log: can\'t parse "bytes_sent": %s' % match_dict['bytes_sent']

        if 'resp_length' in match_dict:
            if not re.search(r'\d+', match_dict['resp_length']):
                return None, None, 'Custom log: can\'t parse "resp_length": %s' % match_dict['resp_length']

        if 'resp_time' in match_dict:
            if not re.search(r'[\d.]+', match_dict['resp_time']):
                return None, None, 'Custom log: can\'t parse "resp_time": %s' % match_dict['resp_time']
            if '.' in match_dict['resp_time']:
                self.resp_time_func = lambda time: time * (resp_time_func or 1000000)
            else:
                self.resp_time_func = lambda time: time * (resp_time_func or 1)
        else:
            # no response time in the log format: keep the value as is, so that check() still passes
            self.resp_time_func = lambda time: time

        self.regex = regex
        return match_dict, 'web_access', 'Custom log: we are fine'
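    # An illustrative (not default) "custom_log_format" job configuration this method expects:
    #   custom_log_format:
    #     pattern: '(?P<address>[\da-f.:]+) .+? "(?P<method>[A-Z]+) (?P<url>.+?)" (?P<code>[1-9]\d{2}) (?P<bytes_sent>\d+|-)'
    #     time_multiplier: 1000000
    # "time_multiplier" is only needed when the logged response time is not already in microseconds.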

    def find_regex(self, last_line):
        """
        :param last_line: str: literally last line from log file
        :return: tuple where:
                 [0]: dict or None: match_dict or None
                 [1]: str or None: log_name or None
                 [2]: str: error description

        We need to find an appropriate pattern for the current log file.
        All the logic is to do a regex search through the string for all predefined patterns
        until we find something or fail.
        """
        # REGEX: 1.IPv4 address 2.HTTP method 3.URL 4.Response code
        # 5.Bytes sent 6.Response length 7.Response process time
        acs_default = re.compile(r'(?P<address>[\da-f.:]+)'
                                 r' -.*?"(?P<method>[A-Z]+)'
                                 r' (?P<url>.*?)"'
                                 r' (?P<code>[1-9]\d{2})'
                                 r' (?P<bytes_sent>\d+|-)')

        acs_apache_ext_insert = re.compile(r'(?P<address>[\da-f.:]+)'
                                           r' -.*?"(?P<method>[A-Z]+)'
                                           r' (?P<url>.*?)"'
                                           r' (?P<code>[1-9]\d{2})'
                                           r' (?P<bytes_sent>\d+|-)'
                                           r' (?P<resp_length>\d+)'
                                           r' (?P<resp_time>\d+) ')

        acs_apache_ext_append = re.compile(r'(?P<address>[\da-f.:]+)'
                                           r' -.*?"(?P<method>[A-Z]+)'
                                           r' (?P<url>.*?)"'
                                           r' (?P<code>[1-9]\d{2})'
                                           r' (?P<bytes_sent>\d+|-)'
                                           r' .*?'
                                           r' (?P<resp_length>\d+)'
                                           r' (?P<resp_time>\d+)'
                                           r'(?: |$)')

        acs_nginx_ext_insert = re.compile(r'(?P<address>[\da-f.:]+)'
                                          r' -.*?"(?P<method>[A-Z]+)'
                                          r' (?P<url>.*?)"'
                                          r' (?P<code>[1-9]\d{2})'
                                          r' (?P<bytes_sent>\d+)'
                                          r' (?P<resp_length>\d+)'
                                          r' (?P<resp_time>\d\.\d+) ')

        acs_nginx_ext_append = re.compile(r'(?P<address>[\da-f.:]+)'
                                          r' -.*?"(?P<method>[A-Z]+)'
                                          r' (?P<url>.*?)"'
                                          r' (?P<code>[1-9]\d{2})'
                                          r' (?P<bytes_sent>\d+)'
                                          r' .*?'
                                          r' (?P<resp_length>\d+)'
                                          r' (?P<resp_time>\d\.\d+)')
        def func_usec(time):
            return time

        def func_sec(time):
            return time * 1000000

        r_regex = [acs_apache_ext_insert, acs_apache_ext_append, acs_nginx_ext_insert,
                   acs_nginx_ext_append, acs_default]
        r_function = [func_usec, func_usec, func_sec, func_sec, func_usec]
        regex_function = zip(r_regex, r_function)

        match_dict = None
        for regex, function in regex_function:
            match = regex.search(last_line)
            if match:
                self.regex = regex
                self.resp_time_func = function
                match_dict = match.groupdict()
                break

        return match_dict or None, 'web_access', 'Unknown log format. The plugin can still work for you.' \
                                                 ' Read about the "custom_log_format" feature in the conf file'
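    # The "insert"/"append" variants above appear to describe where the extra response length / response time
    # fields sit relative to the common log fields; apache (%D) logs response time as integer microseconds, so
    # func_usec keeps it as is, while nginx ($request_time) logs seconds, so func_sec converts it to microseconds.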

    def create_access_charts(self, match_dict):
        """
        :param match_dict: dict: regex.search.groupdict(). Ex. {'address': '127.0.0.1', 'code': '200', 'method': 'GET'}
        :return:
        Create additional charts depending on the 'match_dict' keys and configuration file options
        1. 'response_time' chart is removed if there is no 'resp_time' in match_dict.
        2. Other stuff is just remove/add chart depending on yes/no in conf
        """
        def find_job_name(override_name, name):
            """
            :param override_name: str: 'name' var from configuration file
            :param name: str: 'job_name' from configuration file
            :return: str: new job name
            We need this for dynamic charts. Actually the same logic as in python.d.plugin.
            """
            add_to_name = override_name or name
            if add_to_name:
                return '_'.join(['web_log', re.sub(r'\s+', '_', add_to_name)])
            return 'web_log'

        self.order = ORDER[:]
        self.definitions = deepcopy(CHARTS)

        job_name = find_job_name(self.override_name, self.name)
        self.detailed_chart = 'CHART %s.detailed_response_codes ""' \
                              ' "Detailed Response Codes" requests/s responses' \
                              ' web_log.detailed_response_codes stacked 1 %s\n' % (job_name, self.update_every)
        self.http_method_chart = 'CHART %s.http_method' \
                                 ' "" "Requests Per HTTP Method" requests/s "http methods"' \
                                 ' web_log.http_method stacked 2 %s\n' % (job_name, self.update_every)
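        # detailed_chart / http_method_chart keep raw "CHART ..." lines of the netdata external plugin API;
        # add_new_dimension() appends "DIMENSION ..." lines to them and prints the result, so the chart is
        # rebuilt on the fly when a previously unseen response code or HTTP method shows up.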

        # Remove 'response_time' chart from ORDER if there is no 'resp_time' in match_dict
        if 'resp_time' not in match_dict:
            self.order.remove('response_time')
        # Remove 'clients_all' chart from ORDER if specified in the configuration
        if not self.all_time:
            self.order.remove('clients_all')
        # Add 'detailed_response_codes' chart if specified in the configuration
        if self.detailed_response_codes:
            self.order.append('detailed_response_codes')
            self.definitions['detailed_response_codes'] = {'options': [None, 'Detailed Response Codes', 'requests/s',
                                                                       'responses', 'web_log.detailed_response_codes',
                                                                       'stacked'],
                                                           'lines': []}
        # Add 'requests_per_url' chart if specified in the configuration
        if self.url_pattern:
            self.url_pattern = [NAMED_URL_PATTERN(description=k, pattern=re.compile(v)) for k, v
                                in self.url_pattern.items()]
            self.definitions['requests_per_url'] = {'options': [None, 'Requests Per Url', 'requests/s',
                                                                'urls', 'web_log.requests_per_url', 'stacked'],
                                                    'lines': [['other_url', 'other', 'incremental']]}
            for elem in self.url_pattern:
                self.definitions['requests_per_url']['lines'].append([elem.description, elem.description,
                                                                      'incremental'])
                self.data.update({elem.description: 0})
            self.data.update({'other_url': 0})
        else:
            self.order.remove('requests_per_url')
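        # Illustrative 'categories' job option (names and patterns are examples, not module defaults):
        #   categories:
        #     cacheable: '\.(css|js|png|jpe?g|gif|ico)$'
        #     api: '^/api/'
        # Every key becomes a dimension of the 'requests_per_url' chart; URLs matching none of the patterns
        # are counted as 'other_url'.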

    def add_new_dimension(self, dimension, line_list, chart_string, key):
        """
        :param dimension: str: response status code. Ex.: '202', '499'
        :param line_list: list: Ex.: ['202', '202', 'incremental']
        :param chart_string: str: current string we need to pass to netdata to rebuild the chart
        :param key: str: CHARTS dict key (chart name). Ex.: 'response_time'
        :return: str: new chart string = previous + new dimensions
        """
        self.data.update({dimension: 0})
        # SET method checks if dim is in _dimensions
        self._dimensions.append(dimension)
        # UPDATE method does SET only if dim is in definitions
        self.definitions[key]['lines'].append(line_list)
        chart = chart_string
        chart += "%s %s\n" % ('DIMENSION', ' '.join(line_list))
        print(chart)
        return chart
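    # Example call (hypothetical values): add_new_dimension('404', ['404', '404', 'incremental'],
    # self.detailed_chart, 'detailed_response_codes') prints the stored CHART line plus
    # "DIMENSION 404 404 incremental" so netdata picks the new dimension up immediately.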

    def _get_access_data(self):
        """
        Parse the new log lines
        :return: dict OR None
        None if _get_raw_data method fails.
        In all other cases - dict.
        """
        raw = self._get_raw_data()
        if raw is None:
            return None

        request_time, unique_current = list(), list()
        request_counter = {'count': 0, 'sum': 0}
        ip_address_counter = {'unique_cur_ip': 0}
        for line in raw:
            match = self.regex.search(line)
            if match:
                match_dict = match.groupdict()
                try:
                    code = ''.join([match_dict['code'][0], 'xx'])
                    self.data[code] += 1
                except KeyError:
                    self.data['0xx'] += 1
                # detailed response code
                if self.detailed_response_codes:
                    self._get_data_detailed_response_codes(match_dict['code'])
                # response statuses
                self._get_data_statuses(match_dict['code'])
                # requests per url
                if self.url_pattern:
                    self._get_data_per_url(match_dict['url'])
                # requests per http method
                self._get_data_http_method(match_dict['method'])
                # bandwidth sent
                self.data['bytes_sent'] += int(match_dict['bytes_sent'] if '-' not in match_dict['bytes_sent'] else 0)
                # request processing time and bandwidth received
                if 'resp_length' in match_dict:
                    self.data['resp_length'] += int(match_dict['resp_length'])
                if 'resp_time' in match_dict:
                    resp_time = self.resp_time_func(float(match_dict['resp_time']))
                    bisect.insort_left(request_time, resp_time)
                    request_counter['count'] += 1
                    request_counter['sum'] += resp_time
                # requests per ip proto
                proto = 'ipv4' if '.' in match_dict['address'] else 'ipv6'
                self.data['req_' + proto] += 1
                # unique clients ips
                if address_not_in_pool(self.unique_all_time, match_dict['address'],
                                       self.data['unique_tot_ipv4'] + self.data['unique_tot_ipv6']):
                    self.data['unique_tot_' + proto] += 1
                if address_not_in_pool(unique_current, match_dict['address'], ip_address_counter['unique_cur_ip']):
                    self.data['unique_cur_' + proto] += 1
                    ip_address_counter['unique_cur_ip'] += 1
            else:
                self.data['unmatched'] += 1

        # timings
        if request_time:
            self.data['resp_time_min'] += int(request_time[0])
            self.data['resp_time_avg'] += int(round(float(request_counter['sum']) / request_counter['count']))
            self.data['resp_time_max'] += int(request_time[-1])

        return self.data

    def _get_data_detailed_response_codes(self, code):
        """
        :param code: str: CODE from parsed line. Ex.: '202', '499'
        :return:
        Calls add_new_dimension method if the value is found for the first time
        """
        if code not in self.data:
            chart_string_copy = self.detailed_chart
            self.detailed_chart = self.add_new_dimension(code, [code, code, 'incremental'],
                                                         chart_string_copy, 'detailed_response_codes')
        self.data[code] += 1

    def _get_data_http_method(self, method):
        """
        :param method: str: METHOD from parsed line. Ex.: 'GET', 'POST'
        :return:
        Calls add_new_dimension method if the value is found for the first time
        """
        if method not in self.data:
            chart_string_copy = self.http_method_chart
            self.http_method_chart = self.add_new_dimension(method, [method, method, 'incremental'],
                                                            chart_string_copy, 'http_method')
        self.data[method] += 1

    def _get_data_per_url(self, url):
        """
        :param url: str: URL from parsed line
        :return:
        Scan through the string looking for the first user defined pattern that produces a match
        """
        match = None
        for elem in self.url_pattern:
            if elem.pattern.search(url):
                self.data[elem.description] += 1
                match = True
                break
        if not match:
            self.data['other_url'] += 1

    def _get_data_statuses(self, code):
        """
        :param code: str: response status code. Ex.: '202', '499'
        :return:
        """
        code_class = code[0]
        if code_class == '2' or code == '304' or code_class == '1':
            self.data['successful_requests'] += 1
        elif code_class == '3':
            self.data['redirects'] += 1
        elif code_class == '4':
            self.data['bad_requests'] += 1
        elif code_class == '5':
            self.data['server_errors'] += 1
        else:
            self.data['other_requests'] += 1


def address_not_in_pool(pool, address, pool_size):
    """
    :param pool: list of ip addresses
    :param address: ip address
    :param pool_size: current size of pool
    :return: True if address not in pool. False if address is in pool
    """
    index = bisect.bisect_left(pool, address)
    if index < pool_size:
        if pool[index] == address:
            return False
        bisect.insort_left(pool, address)
        return True
    bisect.insort_left(pool, address)
    return True
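# Keeping the pool sorted lets bisect do an O(log n) probe before the insert, e.g. (addresses are illustrative):
#   pool = ['10.0.0.1', '10.0.0.3']
#   address_not_in_pool(pool, '10.0.0.2', 2)  # -> True, pool becomes ['10.0.0.1', '10.0.0.2', '10.0.0.3']
#   address_not_in_pool(pool, '10.0.0.2', 3)  # -> False, pool is unchanged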