class PythonCharts(object):
+ """
+ Main class used to control every python module.
+ """
def __init__(self,
interval=None,
modules=None,
modules_path='../python.d/',
modules_configs='../conf.d/',
modules_disabled=None):
+ """
+ :param interval: int
+ :param modules: list
+ :param modules_path: str
+ :param modules_configs: str
+ :param modules_disabled: list
+ """
if modules is None:
modules = []
configured_modules = self._load_configs(loaded_modules)
# good economy and prosperity:
- self.jobs = self._create_jobs(configured_modules)
+ self.jobs = self._create_jobs(configured_modules) # type: list
if DEBUG_FLAG and interval is not None:
for job in self.jobs:
job.create_timetable(interval)
@staticmethod
def _import_module(path, name=None):
- # try to import module using only its path
+ """
+ Try to import module using only its path.
+ :param path: str
+ :param name: str
+ :return: object
+ """
+
if name is None:
name = path.split('/')[-1]
if name[-len(MODULE_EXTENSION):] != MODULE_EXTENSION:
return importlib.machinery.SourceFileLoader(name, path).load_module()
else:
return imp.load_source(name, path)
- # return importlib.import_module(path, name)
except Exception as e:
debug(str(e))
return None
def _load_modules(self, path, modules, disabled):
+ """
+ Load modules from the 'modules' list, or dynamically load every file from 'path' (only .chart.py files).
+ :param path: str
+ :param modules: list
+ :param disabled: list
+ :return: list
+ """
+
# check if plugin directory exists
if not os.path.isdir(path):
debug("cannot find charts directory ", path)
return loaded
def _load_configs(self, modules):
- # function loads configuration files to modules
+ """
+ Append configuration in list named `config` to every module.
+ For multi-job modules `config` list is created in _parse_config,
+ otherwise it is created here based on BASE_CONFIG prototype with None as identifier.
+ :param modules: list
+ :return: list
+ """
+
for mod in modules:
configfile = self.configs + mod.__name__ + ".conf"
if os.path.isfile(configfile):
@staticmethod
def _parse_config(module, config):
+ """
+ Parse configuration file or extract configuration from module file.
+ Example of returned dictionary:
+ config = {'name': {
+ 'update_every': 2,
+ 'retries': 3,
+ 'priority': 30000,
+ 'other_val': 123}}
+ :param module: object
+ :param config: dict
+ :return: dict
+ """
+
# get default values
defaults = {}
for key in BASE_CONFIG:
@staticmethod
def _create_jobs(modules):
- # module store a definition of Service class
- # module store configuration in module.config
- # configs are list of dicts or a dict
- # one dict is one service
- # iterate over list of modules and inside one loop iterate over configs
+ """
+ Create jobs based on module.config dictionary and module.Service class definition.
+ :param modules: list
+ :return: list
+ """
jobs = []
for module in modules:
for name in module.config:
if name is not None:
job.execution_name += "_" + name
jobs.append(job)
+
return [j for j in jobs if j is not None]
def _stop(self, job, reason=None):
- # modifies self.jobs
+ """
+ Stop specified job and remove it from the self.jobs list.
+ Also notifies user about job failure if DEBUG_FLAG is set.
+ :param job: object
+ :param reason: str
+ """
self.jobs.remove(job)
if reason is None:
return
debug(job.execution_name, "is " + reason)
def check(self):
- # try to execute check() on every job
+ """
+ Tries to execute check() on every job.
+ This cannot fail, thus it is catching every exception.
+ If job.check() fails, the job is stopped.
+ """
for job in self.jobs:
try:
if not job.check():
self._stop(job, "misbehaving. Reason: " + str(e))
def create(self):
- # try to execute create() on every job
+ """
+ Tries to execute create() on every job.
+ This cannot fail, thus it is catching every exception.
+ If job.create() fails, the job is stopped.
+ This is also creating job run time chart.
+ """
for job in self.jobs:
try:
if not job.create():
self._stop(job, "misbehaving. Reason: " + str(e))
def _update_job(self, job):
- # try to execute update() on every job and draw run time graph
+ """
+ Tries to execute update() on specified job.
+ This cannot fail, thus it is catching every exception.
+ If job.update() returns False, number of retries is decremented. If there are no more retries, job is stopped.
+ Job is also stopped if it throws an exception.
+ This is also updating job run time chart.
+ :param job: object
+ """
t_start = time.time()
# check if it is time to execute job update() function
if job.timetable['next'] > t_start:
else:
since_last = int((t_start - job.timetable['last']) * 1000000)
if not job.update(since_last):
- self._stop(job, "update failed")
+ if job.retries <= 0:
+ self._stop(job, "update failed")
+ else:
+ job.retries -= 1
+ job.timetable['next'] += job.timetable['freq']
return
except AttributeError:
self._stop(job, "no update")
self.first_run = False
def update(self):
- # run updates (this will stay forever and ever and ever forever and ever it'll be the one...)
+ """
+ Tries to execute update() on every job by using _update_job()
+ This will stay forever and ever and ever forever and ever it'll be the one...
+ """
self.first_run = True
while True:
next_runs = []
def read_config(path):
+ """
+ Read YAML configuration from the specified file.
+ :param path: str
+ :return: dict
+ """
try:
with open(path, 'r') as stream:
config = yaml.load(stream)
def debug(*args):
+ """
+ Print message on stderr.
+ """
if not DEBUG_FLAG:
return
sys.stderr.write(PROGRAM + ":")
def parse_cmdline(directory, *commands):
+ """
+ Parse parameters from command line.
+ :param directory: str
+ :param commands: list of str
+ :return: dict
+ """
global DEBUG_FLAG
interval = None
# if __name__ == '__main__':
def run():
+ """
+ Main program.
+ """
global PROGRAM, DEBUG_FLAG
PROGRAM = sys.argv[0].split('/')[-1].split('.plugin')[0]
# parse env variables
# Author: Pawel Krupa (paulfantom)
import sys
+
NAME = "mysql.chart.py"
# import 3rd party library to handle MySQL communication
try:
import MySQLdb
+
# https://github.com/PyMySQL/mysqlclient-python
sys.stderr.write(NAME + ": using MySQLdb\n")
except ImportError:
try:
import pymysql as MySQLdb
+
# https://github.com/PyMySQL/PyMySQL
sys.stderr.write(NAME + ": using pymysql\n")
except ImportError:
# default configuration (overridden by python.d.plugin)
config = {
'local': {
- 'user' : 'root',
- 'password' : '',
- 'socket' : '/var/run/mysqld/mysqld.sock',
- 'update_every' : 3,
- 'retries' : 4,
- 'priority' : 100
+ 'user': 'root',
+ 'password': '',
+ 'socket': '/var/run/mysqld/mysqld.sock',
+ 'update_every': 3,
+ 'retries': 4,
+ 'priority': 100
}
}
# }
CHARTS = {
- 'net' : (
+ 'net': (
"'' 'mysql Bandwidth' 'kilobits/s' bandwidth mysql.net area",
[
("Bytes_received", "in incremental 8 1024"),
- ("Bytes_sent", "out incremental -8 1024")
+ ("Bytes_sent", "out incremental -8 1024")
]),
- 'queries' : (
+ 'queries': (
"'' 'mysql Queries' 'queries/s' queries mysql.queries line",
[
- ("Queries", "queries incremental 1 1"),
- ("Questions", "questions incremental 1 1"),
+ ("Queries", "queries incremental 1 1"),
+ ("Questions", "questions incremental 1 1"),
("Slow_queries", "slow_queries incremental -1 1")
]),
- 'handlers' : (
+ 'handlers': (
"'' 'mysql Handlers' 'handlers/s' handlers mysql.handlers line",
[
("Handler_commit", "commit incremental 1 1"),
("Handler_update", "update incremental 1 1"),
("Handler_write", "write incremental 1 1")
]),
- 'table_locks' : (
+ 'table_locks': (
"'' 'mysql Tables Locks' 'locks/s' locks mysql.table_locks line",
[
("Table_locks_immediate", "immediate incremental 1 1"),
("Table_locks_waited", "waited incremental -1 1")
]),
- 'join_issues' : (
+ 'join_issues': (
"'' 'mysql Select Join Issues' 'joins/s' issues mysql.join_issues line",
[
("Select_full_join", "full_join incremental 1 1"),
("Select_range_check", "range_check incremental 1 1"),
("Select_scan", "scan incremental 1 1"),
]),
- 'sort_issues' : (
+ 'sort_issues': (
"'' 'mysql Sort Issues' 'issues/s' issues mysql.sort.issues line",
[
("Sort_merge_passes", "merge_passes incremental 1 1"),
("Sort_range", "range incremental 1 1"),
("Sort_scan", "scan incremental 1 1"),
]),
- 'tmp' : (
+ 'tmp': (
"'' 'mysql Tmp Operations' 'counter' temporaries mysql.tmp line",
[
("Created_tmp_disk_tables", "disk_tables incremental 1 1"),
("Created_tmp_files", "files incremental 1 1"),
("Created_tmp_tables", "tables incremental 1 1"),
]),
- 'connections' : (
+ 'connections': (
"'' 'mysql Connections' 'connections/s' connections mysql.connections line",
[
("Connections", "all incremental 1 1"),
("Aborted_connects", "aborted incremental 1 1"),
]),
- 'binlog_cache' : (
+ 'binlog_cache': (
"'' 'mysql Binlog Cache' 'transactions/s' binlog mysql.binlog_cache line",
[
("Binlog_cache_disk_use", "disk incremental 1 1"),
("Binlog_cache_use", "all incremental 1 1"),
]),
- 'threads' : (
+ 'threads': (
"'' 'mysql Threads' 'threads' threads mysql.threads line",
[
("Threads_connected", "connected absolute 1 1"),
("Threads_cached", "cached absolute -1 1"),
("Threads_running", "running absolute 1 1"),
]),
- 'thread_cache_misses' : (
+ 'thread_cache_misses': (
"'' 'mysql Threads Cache Misses' 'misses' threads mysql.thread_cache_misses area",
[
("Thread_cache_misses", "misses misses absolute 1 100"),
]),
- 'innodb_io' : (
+ 'innodb_io': (
"'' 'mysql InnoDB I/O Bandwidth' 'kilobytes/s' innodb mysql.innodb_io area",
[
("Innodb_data_read", "read incremental 1 1024"),
("Innodb_data_written", "write incremental -1 1024"),
]),
- 'innodb_io_ops' : (
+ 'innodb_io_ops': (
"'' 'mysql InnoDB I/O Operations' 'operations/s' innodb mysql.innodb_io_ops line",
[
("Innodb_data_reads", "reads incremental 1 1"),
("Innodb_data_writes", "writes incremental -1 1"),
("Innodb_data_fsyncs", "fsyncs incremental 1 1"),
]),
- 'innodb_io_pending_ops' : (
+ 'innodb_io_pending_ops': (
"'' 'mysql InnoDB Pending I/O Operations' 'operations' innodb mysql.innodb_io_pending_ops line",
[
("Innodb_data_pending_reads", "reads absolute 1 1"),
("Innodb_data_pending_writes", "writes absolute -1 1"),
("Innodb_data_pending_fsyncs", "fsyncs absolute 1 1"),
]),
- 'innodb_log' : (
+ 'innodb_log': (
"'' 'mysql InnoDB Log Operations' 'operations/s' innodb mysql.innodb_log line",
[
("Innodb_log_waits", "waits incremental 1 1"),
("Innodb_log_write_requests", "write_requests incremental -1 1"),
("Innodb_log_writes", "incremental -1 1"),
]),
- 'innodb_os_log' : (
+ 'innodb_os_log': (
"'' 'mysql InnoDB OS Log Operations' 'operations' innodb mysql.innodb_os_log line",
[
("Innodb_os_log_fsyncs", "fsyncs incremental 1 1"),
("Innodb_os_log_pending_fsyncs", "pending_fsyncs absolute 1 1"),
("Innodb_os_log_pending_writes", "pending_writes absolute -1 1"),
]),
- 'innodb_os_log_io' : (
+ 'innodb_os_log_io': (
"'' 'mysql InnoDB OS Log Bandwidth' 'kilobytes/s' innodb mysql.innodb_os_log_io area",
[
("Innodb_os_log_written", "write incremental -1 1024"),
]),
- 'innodb_cur_row_lock' : (
+ 'innodb_cur_row_lock': (
"'' 'mysql InnoDB Current Row Locks' 'operations' innodb mysql.innodb_cur_row_lock area",
[
("Innodb_row_lock_current_waits", "current_waits absolute 1 1"),
]),
- 'innodb_rows' : (
+ 'innodb_rows': (
"'' 'mysql InnoDB Row Operations' 'operations/s' innodb mysql.innodb_rows area",
[
("Innodb_rows_inserted", "read incremental 1 1"),
("Innodb_rows_updated", "inserted incremental 1 1"),
("Innodb_rows_deleted", "updated incremental -1 1"),
]),
- 'innodb_buffer_pool_pages' : (
+ 'innodb_buffer_pool_pages': (
"'' 'mysql InnoDB Buffer Pool Pages' 'pages' innodb mysql.innodb_buffer_pool_pages line",
[
("Innodb_buffer_pool_pages_data", "data absolute 1 1"),
("Innodb_buffer_pool_pages_misc", "misc absolute -1 1"),
("Innodb_buffer_pool_pages_total", "total absolute 1 1"),
]),
- 'innodb_buffer_pool_bytes' : (
+ 'innodb_buffer_pool_bytes': (
"'' 'mysql InnoDB Buffer Pool Bytes' 'MB' innodb mysql.innodb_buffer_pool_bytes area",
[
("Innodb_buffer_pool_bytes_data", "data absolute 1"),
("Innodb_buffer_pool_bytes_dirty", "dirty absolute -1"),
]),
- 'innodb_buffer_pool_read_ahead' : (
+ 'innodb_buffer_pool_read_ahead': (
"'' 'mysql InnoDB Buffer Pool Read Ahead' 'operations/s' innodb mysql.innodb_buffer_pool_read_ahead area",
[
("Innodb_buffer_pool_read_ahead", "all incremental 1 1"),
("Innodb_buffer_pool_read_ahead_evicted", "evicted incremental -1 1"),
("Innodb_buffer_pool_read_ahead_rnd", "random incremental 1 1"),
]),
- 'innodb_buffer_pool_reqs' : (
+ 'innodb_buffer_pool_reqs': (
"'' 'mysql InnoDB Buffer Pool Requests' 'requests/s' innodb mysql.innodb_buffer_pool_reqs area",
[
("Innodb_buffer_pool_read_requests", "reads incremental 1 1"),
("Innodb_buffer_pool_write_requests", "writes incremental -1 1"),
]),
- 'innodb_buffer_pool_ops' : (
+ 'innodb_buffer_pool_ops': (
"'' 'mysql InnoDB Buffer Pool Operations' 'operations/s' innodb mysql.innodb_buffer_pool_ops area",
[
("Innodb_buffer_pool_reads", "'disk reads' incremental 1 1"),
("Innodb_buffer_pool_wait_free", "'wait free' incremental -1 1"),
]),
- 'qcache_ops' : (
+ 'qcache_ops': (
"'' 'mysql QCache Operations' 'queries/s' qcache mysql.qcache_ops line",
[
("Qcache_hits", "hits incremental 1 1"),
("Qcache_inserts", "inserts incremental 1 1"),
("Qcache_not_cached", "'not cached' incremental -1 1"),
]),
- 'qcache' : (
+ 'qcache': (
"'' 'mysql QCache Queries in Cache' 'queries' qcache mysql.qcache line",
[
("Qcache_queries_in_cache", "queries absolute 1 1"),
]),
- 'qcache_freemem' : (
+ 'qcache_freemem': (
"'' 'mysql QCache Free Memory' 'MB' qcache mysql.qcache_freemem area",
[
("Qcache_free_memory", "free absolute 1"),
]),
- 'qcache_memblocks' : (
+ 'qcache_memblocks': (
"'' 'mysql QCache Memory Blocks' 'blocks' qcache mysql.qcache_memblocks line",
[
("Qcache_free_blocks", "free absolute 1"),
("Qcache_total_blocks", "total absolute 1 1"),
]),
- 'key_blocks' : (
+ 'key_blocks': (
"'' 'mysql MyISAM Key Cache Blocks' 'blocks' myisam mysql.key_blocks line",
[
("Key_blocks_unused", "unused absolute 1 1"),
("Key_blocks_used", "used absolute -1 1"),
("Key_blocks_not_flushed", "'not flushed' absolute 1 1"),
]),
- 'key_requests' : (
+ 'key_requests': (
"'' 'mysql MyISAM Key Cache Requests' 'requests/s' myisam mysql.key_requests area",
[
("Key_read_requests", "reads incremental 1 1"),
("Key_write_requests", "writes incremental -1 1"),
]),
- 'key_disk_ops' : (
+ 'key_disk_ops': (
"'' 'mysql MyISAM Key Cache Disk Operations' 'operations/s' myisam mysql.key_disk_ops area",
[
("Key_reads", "reads incremental 1 1"),
("Key_writes", "writes incremental -1 1"),
]),
- 'files' : (
+ 'files': (
"'' 'mysql Open Files' 'files' files mysql.files line",
[
("Open_files", "files absolute 1 1"),
]),
- 'files_rate' : (
+ 'files_rate': (
"'' 'mysql Opened Files Rate' 'files/s' files mysql.files_rate line",
[
("Opened_files", "files incremental 1 1"),
]),
- 'binlog_stmt_cache' : (
+ 'binlog_stmt_cache': (
"'' 'mysql Binlog Statement Cache' 'statements/s' binlog mysql.binlog_stmt_cache line",
[
("Binlog_stmt_cache_disk_use", "disk incremental 1 1"),
("Binlog_stmt_cache_use", "all incremental 1 1"),
]),
- 'connection_errors' : (
+ 'connection_errors': (
"'' 'mysql Connection Errors' 'connections/s' connections mysql.connection_errors line",
[
("Connection_errors_accept", "accept incremental 1 1"),
class Service(BaseService):
- def __init__(self,configuration=None,name=None):
+ def __init__(self, configuration=None, name=None):
super(self.__class__, self).__init__(configuration=configuration)
self.name = name
self.configuration = self._parse_config(configuration)
self.connection = None
self.defs = {}
- def _parse_config(self,configuration):
+ def _parse_config(self, configuration):
# parse configuration to collect data from mysql server
if self.name is None:
self.name = 'local'
connect_timeout=self.configuration['update_every'])
except Exception as e:
self.error(NAME + " has problem connecting to server:", e)
- raise RuntimeError # stop creating module, need to catch it in supervisor
+ raise RuntimeError
def _get_data(self):
if self.connection is None:
- self._connect()
+ try:
+ self._connect()
+ except RuntimeError:
+ return None
try:
with self.connection.cursor() as cursor:
cursor.execute(QUERY)
# check if server has this data point
for line in CHARTS[name][1]:
if line[0] in data:
- content += "DIMENSION " + line[0] + " " + line[1] + "\n"
+ content += "DIMENSION " + line[0] + " " + line[1] + "\n"
if len(content) > 0:
print(header)
print(content)