# Source: mirror of https://github.com/netdata/netdata.git
# (synced 2025-05-06 10:10:11 +00:00)
# NOTE(review): the mirror listing's commit-message history and file-size
# statistics were extraction residue, not part of this module, and were removed.
# -*- coding: utf-8 -*-
# Description: PHP-FPM netdata python.d module
# Author: Pawel Krupa (paulfantom)
# Author: Ilya Mashchenko (l2isbad)
# SPDX-License-Identifier: GPL-3.0-or-later

import json
import re
from bases.FrameworkServices.UrlService import UrlService
# default module values (can be overridden per job in `config`)
# update_every = 2
priority = 60000  # dashboard ordering priority of this module's charts
retries = 60  # failed data collections tolerated before the job is disabled
# default job configuration (overridden by python.d.plugin)
# config = {'local': {
#     'update_every': update_every,
#     'retries': retries,
#     'priority': priority,
#     'url': 'http://localhost/status?full&json'
# }}
# charts order (can be overridden if you want less charts, or different order)
# Pool-level status fields: (status-page key, netdata dimension id).
# These feed the 'connections', 'requests' and 'performance' charts.
POOL_INFO = [
    ('active processes', 'active'),
    ('max active processes', 'maxActive'),
    ('idle processes', 'idle'),
    ('accepted conn', 'requests'),
    ('max children reached', 'reached'),
    ('slow requests', 'slow')
]
# Per-process status fields: (status-page key, dimension-id suffix).
# Each suffix is combined with a CALC prefix, e.g. 'min' + 'ReqDur' -> 'minReqDur'.
# Only present when the status URL requests the full JSON report.
PER_PROCESS_INFO = [
    ('request duration', 'ReqDur'),
    ('last request cpu', 'ReqCpu'),
    ('last request memory', 'ReqMem')
]
|
|
def average(collection):
    """Return the arithmetic mean of *collection*, or 0.0 when it is empty."""
    # Start the sum at 0.0 so the division always happens in float
    # (keeps the result identical under integer division semantics).
    total = sum(collection, 0.0)
    return total / len(collection) if collection else 0.0
# (dimension-id prefix, aggregation function) pairs used to reduce the
# per-process values into the min/max/avg dimensions of the charts below.
CALC = [
    ('min', min),
    ('max', max),
    ('avg', average)
]
# charts order (can be overridden if you want less charts, or different order)
ORDER = ['connections', 'requests', 'performance', 'request_duration', 'request_cpu', 'request_mem']
# Chart definitions, keyed by the chart ids listed in ORDER.
# Each 'lines' entry is [dimension_id, name, algorithm, multiplier, divisor];
# trailing fields default when omitted.
CHARTS = {
    'connections': {
        'options': [None, 'PHP-FPM Active Connections', 'connections', 'active connections', 'phpfpm.connections',
                    'line'],
        'lines': [
            ['active'],
            ['maxActive', 'max active'],
            ['idle']
        ]
    },
    'requests': {
        'options': [None, 'PHP-FPM Requests', 'requests/s', 'requests', 'phpfpm.requests', 'line'],
        'lines': [
            ['requests', None, 'incremental']
        ]
    },
    'performance': {
        'options': [None, 'PHP-FPM Performance', 'status', 'performance', 'phpfpm.performance', 'line'],
        'lines': [
            ['reached', 'max children reached'],
            ['slow', 'slow requests']
        ]
    },
    'request_duration': {
        # divisor 1000: presumably the source reports microseconds — TODO confirm
        'options': [None, 'PHP-FPM Request Duration', 'milliseconds', 'request duration', 'phpfpm.request_duration',
                    'line'],
        'lines': [
            ['minReqDur', 'min', 'absolute', 1, 1000],
            ['maxReqDur', 'max', 'absolute', 1, 1000],
            ['avgReqDur', 'avg', 'absolute', 1, 1000]
        ]
    },
    'request_cpu': {
        'options': [None, 'PHP-FPM Request CPU', 'percent', 'request CPU', 'phpfpm.request_cpu', 'line'],
        'lines': [
            ['minReqCpu', 'min'],
            ['maxReqCpu', 'max'],
            ['avgReqCpu', 'avg']
        ]
    },
    'request_mem': {
        # divisor 1024: presumably the source reports bytes — TODO confirm
        'options': [None, 'PHP-FPM Request Memory', 'kilobytes', 'request memory', 'phpfpm.request_mem', 'line'],
        'lines': [
            ['minReqMem', 'min', 'absolute', 1, 1024],
            ['maxReqMem', 'max', 'absolute', 1, 1024],
            ['avgReqMem', 'avg', 'absolute', 1, 1024]
        ]
    }
}
class Service(UrlService):
    """PHP-FPM collector: scrapes the FPM status page (plain text or JSON)."""

    def __init__(self, configuration=None, name=None):
        UrlService.__init__(self, configuration=configuration, name=name)
        self.url = self.configuration.get('url', 'http://localhost/status?full&json')
        self.order = ORDER
        self.definitions = CHARTS
        # Matches "metric name: 123" pairs on the plain-text status page.
        self.regex = re.compile(r'([a-z][a-z ]+): ([\d.]+)')
        # Whether the status page will answer in JSON / with per-process details.
        self.json = '&json' in self.url or '?json' in self.url
        self.json_full = self.url.endswith(('?full&json', '?json&full'))
        # All min/max/avg per-process dimensions zeroed — the defaults reported
        # when every process is in a running (non-Idle) state.
        self.if_all_processes_running = {
            c_name + p_name: 0
            for c_name, _ in CALC
            for _, p_name in PER_PROCESS_INFO
        }

    def _get_data(self):
        """
        Format data received from http request
        :return: dict of dimension-id -> value, or None when nothing was collected
        """
        raw = self._get_raw_data()
        if not raw:
            return None

        raw_json = parse_raw_data_(is_json=self.json, regex=self.regex, raw_data=raw)

        # Per pool info: active connections, requests and performance charts
        to_netdata = fetch_data_(raw_data=raw_json, metrics_list=POOL_INFO)

        # Per process info: duration, cpu and memory charts (min, max, avg)
        if self.json_full:
            p_info = dict()
            to_netdata.update(self.if_all_processes_running)  # If all processes are in running state
            # Metrics are always 0 if the process is not in Idle state because calculation is done
            # when the request processing has terminated.
            # .get() guards against unparseable JSON (parse_raw_data_ returns {})
            # or a payload without a 'processes' key — previously a KeyError.
            for process in raw_json.get('processes', []):
                if process['state'] != 'Idle':
                    continue
                p_info.update(fetch_data_(raw_data=process, metrics_list=PER_PROCESS_INFO, pid=str(process['pid'])))

            if p_info:
                for _, p_name in PER_PROCESS_INFO:
                    for c_name, func in CALC:
                        to_netdata[c_name + p_name] = func([v for k, v in p_info.items() if p_name in k])

        return to_netdata or None
def fetch_data_(raw_data, metrics_list, pid=''):
    """
    Pick the requested metrics out of a parsed status payload.

    :param raw_data: dict of status-page fields
    :param metrics_list: list of (source_key, dimension_id) tuples
    :param pid: str suffix appended to every dimension id (per-process data)
    :return: dict mapping dimension_id + pid -> float value
    """
    return {
        dim + pid: float(raw_data[key])
        for key, dim in metrics_list
        if key in raw_data
    }
def parse_raw_data_(is_json, regex, raw_data):
    """
    Turn the raw status-page payload into a dict.

    :param is_json: bool — payload is JSON rather than plain text
    :param regex: compiled regular expression used for the plain-text format
    :param raw_data: str payload fetched from the status page
    :return: dict (empty when the JSON payload is unparseable)
    """
    if not is_json:
        # Collapse all whitespace so the regex scans one normalized line.
        flattened = ' '.join(raw_data.split())
        return dict(regex.findall(flattened))
    try:
        return json.loads(raw_data)
    except ValueError:
        return dict()