# python.d.plugin prelude: verify the interpreter and required libraries,
# then define the module-loading constants.
import os    # FIX: used later (os.path.isdir/os.listdir) but never imported here
import sys   # FIX: used immediately below but never imported in this chunk
import time

try:
    assert sys.version_info >= (3, 1)
    import importlib.machinery
except AssertionError:
    sys.stderr.write('python.d.plugin: Not supported python version. Needed python >= 3.1\n')
    sys.stdout.write('DISABLE\n')
    sys.exit(1)
try:
    import yaml
except ImportError:
    sys.stderr.write('python.d.plugin: Cannot find yaml library\n')
    sys.stdout.write('DISABLE\n')
    sys.exit(1)

# suffix every chart module file name must carry
MODULE_EXTENSION = ".chart.py"
# per-job defaults used when neither the config file nor the module sets them
BASE_CONFIG = {'update_every': 10,
               'priority': 12345,
               'retries': 0}
class PythonCharts(object):
    # NOTE(review): truncated diff hunk — the `def __init__(self, ...)` line
    # with its first parameters is missing from this chunk; the two lines
    # below are the tail of that signature. Lines prefixed with '-'/'+' are
    # the patch's old/new sides. Kept verbatim: cannot be safely rewritten
    # without the elided context.
    modules_configs='../conf.d/',
    modules_disabled=[]):
        self.first_run = True
-        self.default_priority = 90000
        # set configuration directory
        self.configs = modules_configs
        # load modules
-        modules = self._load_modules(modules_path,modules)
-
-        # check if loaded modules are on disabled modules list
-        self.modules = [ m for m in modules if m.__name__ not in modules_disabled ]
-
+        loaded_modules = self._load_modules(modules_path,modules, modules_disabled)
+
        # load configuration files
-        self._load_configs()
+        configured_modules = self._load_configs(loaded_modules)
-        # set timetable dict (last execution, next execution and frequency)
-        # set priorities
-        self.timetable = {}
-        freq = 1
-        for m in self.modules:
-            try:
-                m.priority = int(m.priority)
-            except (AttributeError,ValueError):
-                m.priority = self.default_priority
-
-            if interval is None:
+        # good economy and prosperity:
+        self.jobs = self._create_jobs(configured_modules)
+        if DEBUG_FLAG and interval is not None:
+            for job in self.jobs:
+                job._create_timetable(interval)
+
+
+ def _create_jobs(self,modules):
+ # module store a definition of Service class
+ # module store configuration in module.config
+ # configs are list of dicts or a dict
+ # one dict is one service
+ # iterate over list of modules and inside one loop iterate over configs
+ jobs = []
+ for module in modules:
+ for name in module.config:
+ # register a new job
+ conf = module.config[name]
try:
- freq = int(m.update_every)
- except (AttributeError, ValueError):
- freq = 1
-
- now = time.time()
- self.timetable[m.__name__] = {'last' : now,
- 'next' : now - (now % freq) + freq,
- 'freq' : freq}
+ job = module.Service(configuration=conf, name=name)
+ except Exception as e:
+ debug(module.__name__ +
+ ": Couldn't start job named " +
+ str(name) +
+ ": " +
+ str(e))
+ return None
+ else:
+ # set execution_name (needed to plot run time graphs)
+ job.execution_name = module.__name__
+ if name is not None:
+ job.execution_name += "_" + name
+ jobs.append(job)
+
+ return [j for j in jobs if j is not None]
- def _import_plugin(self, path, name=None):
+ def _import_module(self, path, name=None):
# try to import module using only its path
if name is None:
name = path.split('/')[-1]
- if name[-9:] != ".chart.py":
+ if name[-len(MODULE_EXTENSION):] != MODULE_EXTENSION:
return None
- name = name[:-9]
+ name = name[:-len(MODULE_EXTENSION)]
try:
return importlib.machinery.SourceFileLoader(name, path).load_module()
except Exception as e:
debug(str(e))
return None
- def _load_modules(self, path, modules):
+ def _load_modules(self, path, modules, disabled):
# check if plugin directory exists
if not os.path.isdir(path):
debug("cannot find charts directory ", path)
loaded = []
if len(modules) > 0:
for m in modules:
- mod = self._import_plugin(path + m + ".chart.py")
+ if m in disabled:
+ continue
+ mod = self._import_module(path + m + MODULE_EXTENSION)
if mod is not None:
loaded.append(mod)
else:
# scan directory specified in path and load all modules from there
names = os.listdir(path)
for mod in names:
- m = self._import_plugin(path + mod)
+ if mod.strip(MODULE_EXTENSION) in disabled:
+ debug("disabling:",mod.strip(MODULE_EXTENSION))
+ continue
+ m = self._import_module(path + mod)
if m is not None:
debug("loading chart: '" + path + mod + "'")
loaded.append(m)
return loaded
- def _load_configs(self):
- # function modifies every loaded module in self.modules
- for m in self.modules:
- configfile = self.configs + m.__name__ + ".conf"
+ def _load_configs(self,modules):
+ # function loads configuration files to modules
+ for mod in modules:
+ configfile = self.configs + mod.__name__ + ".conf"
if os.path.isfile(configfile):
debug("loading chart options: '" + configfile + "'")
- for k, v in read_config(configfile).items():
- try:
- setattr(m, k, v)
- except AttributeError:
- self._disable_module(m,"misbehaving having bad configuration")
+ try:
+ setattr(mod,
+ 'config',
+ self._parse_config(mod,read_config(configfile)))
+ except Exception as e:
+ debug("something went wrong while loading configuration",e)
else:
- debug(m.__name__ +
+ debug(mod.__name__ +
": configuration file '" +
configfile +
"' not found. Using defaults.")
+ return modules
+
+ def _parse_config(self,module,config):
+ # get default values
+ defaults = {}
+ for key in BASE_CONFIG:
+ try:
+ # get defaults from module config
+ defaults[key] = int(config.pop(key))
+ except (KeyError,ValueError):
+ try:
+ # get defaults from module source code
+ defaults[key] = getattr(module, key)
+ except (KeyError,ValueError):
+ # if above failed, get defaults from global dict
+ defaults[key] = BASE_CONFIG[key]
+
+ # check if there are dict in config dict
+ many_jobs = False
+ for name in config:
+ if type(config[name]) is dict:
+ many_jobs = True
+ break
+
+ # assign variables needed by supervisor to every job configuration
+ if many_jobs:
+ for name in config:
+ for key in defaults:
+ if key not in config[name]:
+ config[name][key] = defaults[key]
+ # if only one job is needed, values doesn't have to be in dict (in YAML)
+ else:
+ config = {None: config.copy()}
+ config[None].update(defaults)
+
+ # return dictionary of jobs where every job has BASE_CONFIG variables
+ return config
- def _disable_module(self, mod, reason=None):
- # modifies self.modules
- self.modules.remove(mod)
- del self.timetable[mod.__name__]
+ def _stop(self, job, reason=None): #FIXME test if Service has __name__
+ # modifies self.jobs
+ self.jobs.remove(job)
if reason is None:
return
elif reason[:3] == "no ":
debug("chart '" +
- mod.__name__,
+ job.execution_name,
"' does not seem to have " +
reason[3:] +
"() function. Disabling it.")
elif reason[:7] == "failed ":
debug("chart '" +
- mod.__name__ + "' " +
+ job.execution_name + "' " +
reason[7:] +
"() function reports failure.")
elif reason[:13] == "configuration":
- debug(mod.__name__,
+ debug(job.execution_name,
"configuration file '" +
self.configs +
- mod.__name__ +
+ job.execution_name +
".conf' not found. Using defaults.")
elif reason[:11] == "misbehaving":
- debug(mod.__name__, "is "+reason)
+ debug(job.execution_name, "is "+reason)
def check(self):
- # try to execute check() on every loaded module
- for mod in self.modules:
+ # try to execute check() on every job
+ for job in self.jobs:
try:
- if not mod.check():
- self._disable_module(mod, "failed check")
+ if not job.check():
+ self._stop(job, "failed check")
except AttributeError:
- self._disable_module(mod, "no check")
+ self._stop(job, "no check")
except (UnboundLocalError, Exception) as e:
- self._disable_module(mod, "misbehaving. Reason: " + str(e))
+ self._stop(job, "misbehaving. Reason: " + str(e))
def create(self):
- # try to execute create() on every loaded module
- for mod in self.modules:
+ # try to execute create() on every job
+ for job in self.jobs:
try:
- if not mod.create():
- self._disable_module(mod, "failed create")
+ if not job.create():
+ self._stop(job, "failed create")
else:
- chart = mod.__name__
+ chart = job.execution_name
sys.stdout.write(
"CHART netdata.plugin_pythond_" +
chart +
" '' 'Execution time for " +
chart +
" plugin' 'milliseconds / run' python.d netdata.plugin_python area 145000 " +
- str(self.timetable[mod.__name__]['freq']) +
+ str(job.timetable['freq']) +
'\n')
sys.stdout.write("DIMENSION run_time 'run time' absolute 1 1\n\n")
sys.stdout.flush()
except AttributeError:
- self._disable_module(mod, "no create")
+ self._stop(job, "no create")
except (UnboundLocalError, Exception) as e:
- self._disable_module(mod, "misbehaving. Reason: " + str(e))
+ self._stop(job, "misbehaving. Reason: " + str(e))
- def _update_module(self, mod):
- # try to execute update() on every module and draw run time graph
+ def _update_job(self, job):
+ # try to execute update() on every job and draw run time graph
t_start = time.time()
- # check if it is time to execute module update() function
- if self.timetable[mod.__name__]['next'] > t_start:
+ # check if it is time to execute job update() function
+ if job.timetable['next'] > t_start:
return
try:
if self.first_run:
since_last = 0
else:
- since_last = int((t_start - self.timetable[mod.__name__]['last']) * 1000000)
- if not mod.update(since_last):
- self._disable_module(mod, "update failed")
+ since_last = int((t_start - job.timetable['last']) * 1000000)
+ if not job.update(since_last):
+ self._stop(job, "update failed")
return
except AttributeError:
- self._disable_module(mod, "no update")
+ self._stop(job, "no update")
return
except (UnboundLocalError, Exception) as e:
- self._disable_module(mod, "misbehaving. Reason: " + str(e))
+ self._stop(job, "misbehaving. Reason: " + str(e))
return
t_end = time.time()
- self.timetable[mod.__name__]['next'] = t_end - (t_end % self.timetable[mod.__name__]['freq']) + self.timetable[mod.__name__]['freq']
+ job.timetable['next'] = t_end - (t_end % job.timetable['freq']) + job.timetable['freq']
# draw performance graph
if self.first_run:
dt = 0
else:
- dt = int((t_end - self.timetable[mod.__name__]['last']) * 1000000)
- sys.stdout.write("BEGIN netdata.plugin_pythond_"+mod.__name__+" "+str(since_last)+'\n')
+ dt = int((t_end - job.timetable['last']) * 1000000)
+ sys.stdout.write("BEGIN netdata.plugin_pythond_"+job.execution_name+" "+str(since_last)+'\n')
sys.stdout.write("SET run_time = " + str(int((t_end - t_start) * 1000)) + '\n')
sys.stdout.write("END\n")
sys.stdout.flush()
- self.timetable[mod.__name__]['last'] = t_start
+ job.timetable['last'] = t_start
self.first_run = False
    def update(self):
        # NOTE(review): truncated diff hunk — the body following the
        # `if len(next_runs) == 0:` guard is elided from this chunk, and the
        # '-'/'+' lines are the patch's old/new sides. Kept verbatim.
        while True:
            t_begin = time.time()
            next_runs = []
-            for mod in self.modules:
-                self._update_module(mod)
+            for job in self.jobs:
+                self._update_job(job)
                try:
-                    next_runs.append(self.timetable[mod.__name__]['next'])
+                    next_runs.append(job.timetable['next'])
                except KeyError:
                    pass
            if len(next_runs) == 0:
def read_config(path):
    """Parse the YAML file at `path`; return its content or None on error.

    FileNotFoundError deliberately propagates — callers catch it and fall
    back to defaults.
    """
    try:
        with open(path, 'r') as stream:
            # NOTE(review): yaml.load without an explicit Loader can construct
            # arbitrary objects; config files are local here, but consider
            # yaml.safe_load — confirm before changing behavior
            config = yaml.load(stream)
    except IsADirectoryError:
        debug(str(path), "is a directory")
        return None
    except yaml.YAMLError as e:
        debug(str(path), "is malformed:", e)
        return None
    return config
def debug(*args):
    # NOTE(review): truncated diff hunk — debug()'s body and the surrounding
    # main-script scaffolding are partially elided; '-'/'+' lines are the
    # patch's old/new sides, kept verbatim.
    try:
        conf = read_config(configfile)
        try:
-            if str(conf['enable']) == "no":
+            if str(conf['enable']) is False:
            # BUG(review): str(...) can never be the singleton False, so the
            # '+' condition above is always false; the intent was most likely
            # `conf['enable'] is False` — confirm against the YAML schema
                debug("disabled in configuration file")
                sys.stdout.write("DISABLE\n")
                sys.exit(1)
            pass
        try:
            modules_conf = conf['plugins_config_dir']
-        except (KeyError):
+        except KeyError:
            modules_conf = config_dir + "python.d/" # default configuration directory
        try:
            modules_dir = conf['plugins_dir']
-        except (KeyError):
+        except KeyError:
            modules_dir = main_dir.replace("plugins.d", "python.d")
        try:
-            interval = int(conf['interval'])
-        except (KeyError, TypeError):
+            interval = conf['interval']
+        except KeyError:
            pass # use default interval from NETDATA_UPDATE_EVERY
        try:
-            DEBUG_FLAG = bool(conf['debug'])
-        except (KeyError, TypeError):
+            DEBUG_FLAG = conf['debug']
+        except KeyError:
            pass
        for k, v in conf.items():
            if k in ("plugins_config_dir", "plugins_dir", "interval", "debug"):
                continue
-            if v == 'no':
+            if v is False:
                disabled.append(k)
    except FileNotFoundError:
        modules_conf = config_dir
# Description: MySQL netdata python.d plugin
# Author: Pawel Krupa (paulfantom)

NAME = "mysql.chart.py"
import sys

# import 3rd party library to handle MySQL communication
try:
    import MySQLdb
    # https://github.com/PyMySQL/mysqlclient-python
    sys.stderr.write(NAME + ": using MySQLdb\n")
except ImportError:
    try:
        import pymysql as MySQLdb
        # https://github.com/PyMySQL/PyMySQL
        sys.stderr.write(NAME + ": using pymysql\n")
    except ImportError:
        sys.stderr.write(NAME + ": You need to install MySQLdb or PyMySQL module to use mysql.chart.py plugin\n")
        # FIX: bare raise re-raises the active ImportError with its traceback
        # instead of raising a fresh, context-free ImportError
        raise

from base import BaseService
# default configuration (overridden by python.d.plugin)
# FIXME change password
config = {
    'local': {
        'user': 'root',
        'password': '',
        'socket': '/var/run/mysqld/mysqld.sock',
        'update_every': 3,
        'retries': 4,
        'priority': 100
    }
}

# default module values (can be overridden per job in `config`)
update_every = 3
priority = 90000
retries = 7

# query executed on MySQL server
QUERY = "SHOW GLOBAL STATUS"

# charts order (can be overridden if you want less charts, or different order)
ORDER = ['net',
         'queries',
         'handlers',
         'binlog_stmt_cache',
         'connection_errors']
# charts definitions in format:
# CHARTS = {
#    'chart_name_in_netdata': (
#        "parameters defining chart (passed to CHART statement)",
#        [ # dimensions (lines) definitions
#            ("dimension_name", "dimension parameters (passed to DIMENSION statement)", "additional parameter (optional)")
#        ])
#    }

CHARTS = {
    'net': (
        "'' 'mysql Bandwidth' 'kilobits/s' bandwidth mysql.net area",
        [
            ("Bytes_received", "in incremental 8 1024"),
            ("Bytes_sent", "out incremental -8 1024")
        ]),
    'queries': (
        "'' 'mysql Queries' 'queries/s' queries mysql.queries line",
        [
            ("Queries", "queries incremental 1 1"),
            ("Questions", "questions incremental 1 1"),
            ("Slow_queries", "slow_queries incremental -1 1")
        ]),
    'handlers': (
        "'' 'mysql Handlers' 'handlers/s' handlers mysql.handlers line",
        [
            ("Handler_commit", "commit incremental 1 1"),
            ("Handler_delete", "delete incremental 1 1"),
            ("Handler_prepare", "prepare incremental 1 1"),
            ("Handler_savepoint_rollback", "savepoint_rollback incremental 1 1"),
            ("Handler_update", "update incremental 1 1"),
            ("Handler_write", "write incremental 1 1")
        ]),
    'table_locks': (
        "'' 'mysql Tables Locks' 'locks/s' locks mysql.table_locks line",
        [
            ("Table_locks_immediate", "immediate incremental 1 1"),
            ("Table_locks_waited", "waited incremental -1 1")
        ]),
    'join_issues': (
        "'' 'mysql Select Join Issues' 'joins/s' issues mysql.join_issues line",
        [
            ("Select_full_join", "full_join incremental 1 1"),
            ("Select_full_range_join", "full_range_join incremental 1 1"),
            ("Select_range", "range incremental 1 1"),
            ("Select_range_check", "range_check incremental 1 1"),
            ("Select_scan", "scan incremental 1 1"),
        ]),
    # NOTE(review): chart family "mysql.sort.issues" breaks the underscore
    # naming convention used by every other chart — confirm before renaming
    'sort_issues': (
        "'' 'mysql Sort Issues' 'issues/s' issues mysql.sort.issues line",
        [
            ("Sort_merge_passes", "merge_passes incremental 1 1"),
            ("Sort_range", "range incremental 1 1"),
            ("Sort_scan", "scan incremental 1 1"),
        ]),
    'tmp': (
        "'' 'mysql Tmp Operations' 'counter' temporaries mysql.tmp line",
        [
            ("Created_tmp_disk_tables", "disk_tables incremental 1 1"),
            ("Created_tmp_files", "files incremental 1 1"),
            ("Created_tmp_tables", "tables incremental 1 1"),
        ]),
    'connections': (
        "'' 'mysql Connections' 'connections/s' connections mysql.connections line",
        [
            ("Connections", "all incremental 1 1"),
            ("Aborted_connects", "aborted incremental 1 1"),
        ]),
    'binlog_cache': (
        "'' 'mysql Binlog Cache' 'transactions/s' binlog mysql.binlog_cache line",
        [
            ("Binlog_cache_disk_use", "disk incremental 1 1"),
            ("Binlog_cache_use", "all incremental 1 1"),
        ]),
    'threads': (
        "'' 'mysql Threads' 'threads' threads mysql.threads line",
        [
            ("Threads_connected", "connected absolute 1 1"),
            ("Threads_created", "created incremental 1 1"),
            ("Threads_cached", "cached absolute -1 1"),
            ("Threads_running", "running absolute 1 1"),
        ]),
    'thread_cache_misses': (
        "'' 'mysql Threads Cache Misses' 'misses' threads mysql.thread_cache_misses area",
        [
            ("Thread_cache_misses", "misses misses absolute 1 100"),
        ]),
    'innodb_io': (
        "'' 'mysql InnoDB I/O Bandwidth' 'kilobytes/s' innodb mysql.innodb_io area",
        [
            ("Innodb_data_read", "read incremental 1 1024"),
            ("Innodb_data_written", "write incremental -1 1024"),
        ]),
    'innodb_io_ops': (
        "'' 'mysql InnoDB I/O Operations' 'operations/s' innodb mysql.innodb_io_ops line",
        [
            ("Innodb_data_reads", "reads incremental 1 1"),
            ("Innodb_data_writes", "writes incremental -1 1"),
            ("Innodb_data_fsyncs", "fsyncs incremental 1 1"),
        ]),
    'innodb_io_pending_ops': (
        "'' 'mysql InnoDB Pending I/O Operations' 'operations' innodb mysql.innodb_io_pending_ops line",
        [
            ("Innodb_data_pending_reads", "reads absolute 1 1"),
            ("Innodb_data_pending_writes", "writes absolute -1 1"),
            ("Innodb_data_pending_fsyncs", "fsyncs absolute 1 1"),
        ]),
    'innodb_log': (
        "'' 'mysql InnoDB Log Operations' 'operations/s' innodb mysql.innodb_log line",
        [
            ("Innodb_log_waits", "waits incremental 1 1"),
            ("Innodb_log_write_requests", "write_requests incremental -1 1"),
            # FIX: the dimension name ("writes") was missing, so the status
            # variable could never match a DIMENSION line
            ("Innodb_log_writes", "writes incremental -1 1"),
        ]),
    'innodb_os_log': (
        "'' 'mysql InnoDB OS Log Operations' 'operations' innodb mysql.innodb_os_log line",
        [
            ("Innodb_os_log_fsyncs", "fsyncs incremental 1 1"),
            ("Innodb_os_log_pending_fsyncs", "pending_fsyncs absolute 1 1"),
            ("Innodb_os_log_pending_writes", "pending_writes absolute -1 1"),
        ]),
    'innodb_os_log_io': (
        "'' 'mysql InnoDB OS Log Bandwidth' 'kilobytes/s' innodb mysql.innodb_os_log_io area",
        [
            ("Innodb_os_log_written", "write incremental -1 1024"),
        ]),
    'innodb_cur_row_lock': (
        "'' 'mysql InnoDB Current Row Locks' 'operations' innodb mysql.innodb_cur_row_lock area",
        [
            ("Innodb_row_lock_current_waits", "current_waits absolute 1 1"),
        ]),
    'innodb_rows': (
        "'' 'mysql InnoDB Row Operations' 'operations/s' innodb mysql.innodb_rows area",
        [
            # FIX: dimension labels were rotated against their status
            # variables (inserted was labelled "read", read was "deleted",
            # etc.); each label now matches its variable
            ("Innodb_rows_inserted", "inserted incremental 1 1"),
            ("Innodb_rows_read", "read incremental -1 1"),
            ("Innodb_rows_updated", "updated incremental 1 1"),
            ("Innodb_rows_deleted", "deleted incremental -1 1"),
        ]),
    'innodb_buffer_pool_pages': (
        "'' 'mysql InnoDB Buffer Pool Pages' 'pages' innodb mysql.innodb_buffer_pool_pages line",
        [
            ("Innodb_buffer_pool_pages_data", "data absolute 1 1"),
            ("Innodb_buffer_pool_pages_dirty", "dirty absolute -1 1"),
            ("Innodb_buffer_pool_pages_free", "free absolute 1 1"),
            ("Innodb_buffer_pool_pages_flushed", "flushed incremental -1 1"),
            ("Innodb_buffer_pool_pages_misc", "misc absolute -1 1"),
            ("Innodb_buffer_pool_pages_total", "total absolute 1 1"),
        ]),
    'innodb_buffer_pool_bytes': (
        "'' 'mysql InnoDB Buffer Pool Bytes' 'MB' innodb mysql.innodb_buffer_pool_bytes area",
        [
            # NOTE(review): chart is labelled 'MB' but no divisor is given,
            # so raw bytes are plotted — confirm intended scaling
            ("Innodb_buffer_pool_bytes_data", "data absolute 1"),
            ("Innodb_buffer_pool_bytes_dirty", "dirty absolute -1"),
        ]),
    'innodb_buffer_pool_read_ahead': (
        "'' 'mysql InnoDB Buffer Pool Read Ahead' 'operations/s' innodb mysql.innodb_buffer_pool_read_ahead area",
        [
            ("Innodb_buffer_pool_read_ahead", "all incremental 1 1"),
            ("Innodb_buffer_pool_read_ahead_evicted", "evicted incremental -1 1"),
            ("Innodb_buffer_pool_read_ahead_rnd", "random incremental 1 1"),
        ]),
    'innodb_buffer_pool_reqs': (
        "'' 'mysql InnoDB Buffer Pool Requests' 'requests/s' innodb mysql.innodb_buffer_pool_reqs area",
        [
            ("Innodb_buffer_pool_read_requests", "reads incremental 1 1"),
            ("Innodb_buffer_pool_write_requests", "writes incremental -1 1"),
        ]),
    'innodb_buffer_pool_ops': (
        "'' 'mysql InnoDB Buffer Pool Operations' 'operations/s' innodb mysql.innodb_buffer_pool_ops area",
        [
            ("Innodb_buffer_pool_reads", "'disk reads' incremental 1 1"),
            ("Innodb_buffer_pool_wait_free", "'wait free' incremental -1 1"),
        ]),
    'qcache_ops': (
        "'' 'mysql QCache Operations' 'queries/s' qcache mysql.qcache_ops line",
        [
            ("Qcache_hits", "hits incremental 1 1"),
            ("Qcache_lowmem_prunes", "'lowmem prunes' incremental -1 1"),
            ("Qcache_inserts", "inserts incremental 1 1"),
            ("Qcache_not_cached", "'not cached' incremental -1 1"),
        ]),
    'qcache': (
        "'' 'mysql QCache Queries in Cache' 'queries' qcache mysql.qcache line",
        [
            ("Qcache_queries_in_cache", "queries absolute 1 1"),
        ]),
    'qcache_freemem': (
        "'' 'mysql QCache Free Memory' 'MB' qcache mysql.qcache_freemem area",
        [
            ("Qcache_free_memory", "free absolute 1"),
        ]),
    'qcache_memblocks': (
        "'' 'mysql QCache Memory Blocks' 'blocks' qcache mysql.qcache_memblocks line",
        [
            ("Qcache_free_blocks", "free absolute 1"),
            ("Qcache_total_blocks", "total absolute 1 1"),
        ]),
    'key_blocks': (
        "'' 'mysql MyISAM Key Cache Blocks' 'blocks' myisam mysql.key_blocks line",
        [
            ("Key_blocks_unused", "unused absolute 1 1"),
            ("Key_blocks_used", "used absolute -1 1"),
            ("Key_blocks_not_flushed", "'not flushed' absolute 1 1"),
        ]),
    'key_requests': (
        "'' 'mysql MyISAM Key Cache Requests' 'requests/s' myisam mysql.key_requests area",
        [
            ("Key_read_requests", "reads incremental 1 1"),
            ("Key_write_requests", "writes incremental -1 1"),
        ]),
    'key_disk_ops': (
        "'' 'mysql MyISAM Key Cache Disk Operations' 'operations/s' myisam mysql.key_disk_ops area",
        [
            ("Key_reads", "reads incremental 1 1"),
            ("Key_writes", "writes incremental -1 1"),
        ]),
    'files': (
        "'' 'mysql Open Files' 'files' files mysql.files line",
        [
            ("Open_files", "files absolute 1 1"),
        ]),
    'files_rate': (
        "'' 'mysql Opened Files Rate' 'files/s' files mysql.files_rate line",
        [
            ("Opened_files", "files incremental 1 1"),
        ]),
    'binlog_stmt_cache': (
        "'' 'mysql Binlog Statement Cache' 'statements/s' binlog mysql.binlog_stmt_cache line",
        [
            ("Binlog_stmt_cache_disk_use", "disk incremental 1 1"),
            ("Binlog_stmt_cache_use", "all incremental 1 1"),
        ]),
    'connection_errors': (
        "'' 'mysql Connection Errors' 'connections/s' connections mysql.connection_errors line",
        [
            ("Connection_errors_accept", "accept incremental 1 1"),
            ("Connection_errors_internal", "internal incremental 1 1"),
            ("Connection_errors_max_connections", "max incremental 1 1"),
            ("Connection_errors_peer_address", "peer_addr incremental 1 1"),
            ("Connection_errors_select", "select incremental 1 1"),
            ("Connection_errors_tcpwrap", "tcpwrap incremental 1 1")
        ])
}
# NOTE(review): old-side diff residue — the '-' lines below are the pre-patch
# module-level globals and get_data() that this patch removes (replaced by
# Service._connect/_get_data). Kept verbatim; not part of the new file.
-mysql_def = {}
-valid = []
-connections = {}
-def get_data(config):
-    global connections
-    try:
-        cnx = connections[config['name']]
-    except KeyError as e:
-        stderr.write(NAME + ": reconnecting\n")
-        cnx = MySQLdb.connect(user=config['user'],
-                              passwd=config['password'],
-                              read_default_file=config['my.cnf'],
-                              unix_socket=config['socket'],
-                              host=config['host'],
-                              port=config['port'],
-                              connect_timeout=int(update_every))
-        connections[config['name']] = cnx
-    try:
-        with cnx.cursor() as cursor:
-            cursor.execute(QUERY)
-            raw_data = cursor.fetchall()
-    except Exception as e:
-        stderr.write(NAME + ": cannot execute query." + str(e) + "\n")
-        cnx.close()
-        del connections[config['name']]
-        return None
-
-    return dict(raw_data)
class Service(BaseService):
    """One MySQL server monitored as a python.d job."""

    def __init__(self, configuration=None, name=None):
        super().__init__(configuration=configuration)
        # name must be set before _parse_config, which falls back on it
        self.name = name
        self.configuration = self._parse_config(configuration)
        # lazily (re)opened MySQL connection
        self.connection = None
        # chart name -> list of status-variable names, filled by create()
        self.defs = {}
-
# NOTE(review): old-side diff residue — the '-' lines below are the pre-patch
# module-level check() removed by this patch (its logic moved into
# Service._parse_config). Kept verbatim; not part of the new file.
-def check():
-    # TODO what are the default credentials
-    global valid, config
-    if type(config) is str:
-        from json import loads
-        cfg = loads(config.replace("'",'"').replace('\n',' '))
-        config = cfg
-    for i in range(len(config)):
-        if 'name' not in config[i]:
-            config[i]['name'] = "srv_"+str(i)
-        if 'user' not in config[i]:
-            config[i]['user'] = 'root'
-        if 'password' not in config[i]:
-            config[i]['password'] = ''
-        if 'my.cnf' in config[i]:
-            config[i]['socket'] = ''
-            config[i]['host'] = ''
-            config[i]['port'] = 0
-        elif 'socket' in config[i]:
-            config[i]['my.cnf'] = ''
-            config[i]['host'] = ''
-            config[i]['port'] = 0
-        elif 'host' in config[i]:
-            config[i]['my.cnf'] = ''
-            config[i]['socket'] = ''
-            if 'port' in config[i]:
-                config[i]['port'] = int(config[i]['port'])
+ def _parse_config(self,configuration):
+ # parse configuration to collect data from mysql server
+ if self.name is None:
+ self.name = 'local'
+ if 'user' not in configuration:
+ configuration['user'] = 'root'
+ if 'password' not in configuration:
+ configuration['password'] = ''
+ if 'my.cnf' in configuration:
+ configuration['socket'] = ''
+ configuration['host'] = ''
+ configuration['port'] = 0
+ elif 'socket' in configuration:
+ configuration['my.cnf'] = ''
+ configuration['host'] = ''
+ configuration['port'] = 0
+ elif 'host' in configuration:
+ configuration['my.cnf'] = ''
+ configuration['socket'] = ''
+ if 'port' in configuration:
+ configuration['port'] = int(configuration['port'])
else:
- config[i]['port'] = 3306
+ configuration['port'] = 3306
+
+ return configuration
- for srv in config:
+ def _connect(self):
try:
- cnx = MySQLdb.connect(user=srv['user'],
- passwd=srv['password'],
- read_default_file=srv['my.cnf'],
- unix_socket=srv['socket'],
- host=srv['host'],
- port=srv['port'],
- connect_timeout=int(update_every))
- cnx.close()
+ self.connection = MySQLdb.connect(user=self.configuration['user'],
+ passwd=self.configuration['password'],
+ read_default_file=self.configuration['my.cnf'],
+ unix_socket=self.configuration['socket'],
+ host=self.configuration['host'],
+ port=self.configuration['port'],
+ connect_timeout=self.configuration['update_every'])
except Exception as e:
- stderr.write(NAME + " has problem connecting to server: "+str(e).replace("\n"," ")+"\n")
- config.remove(srv)
-
- if len(config) == 0:
- return False
- return True
-
+ self.error(NAME + " has problem connecting to server:", e)
+ raise RuntimeError #stop creating module, need to catch it in supervisor
+
+ def _get_data(self):
+ if self.connection is None:
+ self._connect()
+ try:
+ with self.connection.cursor() as cursor:
+ cursor.execute(QUERY)
+ raw_data = cursor.fetchall()
+ except Exception as e:
+ self.error(NAME + ": cannot execute query.", e)
+ self.connection.close()
+ self.connection = None
+ return None
+
+ return dict(raw_data)
-def create():
- global config, mysql_def
- for name in ORDER:
- mysql_def[name] = []
- for line in CHARTS[name][1]:
- mysql_def[name].append(line[0])
+ def check(self):
+ try:
+ self.connection = self._connect()
+ return True
+ except RuntimeError:
+ self.connection = None
+ return False
- idx = 0
- for srv in config:
- data = get_data(srv)
+    def create(self):
+        for name in ORDER:
+            self.defs[name] = []
+            for line in CHARTS[name][1]:
+                self.defs[name].append(line[0])
+
+        idx = 0
+        data = self._get_data()
        # NOTE(review): truncated diff hunk — the body of the
        # `for line in CHARTS[name][1]:` loop below (which presumably builds
        # `content` from `data`) is elided from this chunk; '-'/'+' lines are
        # the patch's old/new sides, kept verbatim.
        for name in ORDER:
            header = "CHART mysql_" + \
-                  str(srv['name']) + "." + \
+                  str(self.name) + "." + \
                  name + " " + \
                  CHARTS[name][0] + " " + \
-                  str(priority + idx) + " " + \
-                  str(update_every)
+                  str(self.priority + idx) + " " + \
+                  str(self.update_every)
            content = ""
            # check if server has this datapoint
            for line in CHARTS[name][1]:
            print(header)
            print(content)
            idx += 1
+
+        if idx == 0:
+            return False
+        return True
-        if idx == 0:
-            return False
-        return True
-
-
# NOTE(review): truncated diff hunk — mixes the removed module-level
# update(interval) ('-' side) with the new Service.update method ('+' side);
# the body of the inner `try:` (which presumably accumulates `lines` from
# data[d] with a KeyError guard) is elided. Kept verbatim.
-def update(interval):
-    global config
-    for srv in config:
-        data = get_data(srv)
+    def update(self,interval):
+        data = self._get_data()
        if data is None:
-            config.remove(srv)
-            # TODO notify user about problems with server
-            continue
+            return False
        try:
            data['Thread cache misses'] = int( int(data['Threads_created']) * 10000 / int(data['Connections']))
        except Exception:
            pass
-        for chart, dimensions in mysql_def.items():
-            header = "BEGIN mysql_" + str(srv['name']) + "." + chart + " " + str(interval) + '\n'
+        for chart, dimensions in self.defs.items():
+            header = "BEGIN mysql_" + str(self.name) + "." + chart + " " + str(interval) + '\n'
            lines = ""
            for d in dimensions:
                try:
                    pass
            if len(lines) > 0:
                print(header + lines + "END")
-
-        if len(config) == 0:
-            return False
-        return True
+
+        return True