
Remove the overview section from cloud notif. integrations ()

Co-authored-by: ilyam8 <ilya@netdata.cloud>
Fotis Voutsas 2024-10-14 11:23:50 +03:00 committed by GitHub
parent fc38335b40
commit 0ab7826301
7 changed files with 338 additions and 153 deletions

Changed file: the cloud notification integrations metadata.yaml

@ -11,9 +11,6 @@
- mobile-app
- phone
- personal-notifications
overview:
notification_description: "You can configure notification delivery to the Netdata Mobile Application from the Netdata Cloud UI."
notification_limitations: ""
setup:
description: |
### Prerequisites
@ -44,9 +41,6 @@
keywords:
- discord
- community
overview:
notification_description: "You can configure notification delivery to your Discord server from the Netdata Cloud UI."
notification_limitations: ""
setup:
description: |
### Prerequisites
@ -85,9 +79,6 @@
icon_filename: "pagerduty.png"
keywords:
- pagerduty
overview:
notification_description: "You can configure notification delivery to PagerDuty from the Netdata Cloud UI."
notification_limitations: ""
setup:
description: |
### Prerequisites
@ -127,9 +118,6 @@
icon_filename: "slack.png"
keywords:
- slack
overview:
notification_description: "You can configure notification delivery to Slack from the Netdata Cloud UI."
notification_limitations: ""
setup:
description: |
### Prerequisites
@ -175,9 +163,6 @@
keywords:
- opsgenie
- atlassian
overview:
notification_description: "You can configure notification delivery to Opsgenie from the Netdata Cloud UI."
notification_limitations: ""
setup:
description: |
### Prerequisites
@ -215,9 +200,6 @@
icon_filename: "mattermost.png"
keywords:
- mattermost
overview:
notification_description: "You can configure notification delivery to Mattermost from the Netdata Cloud UI."
notification_limitations: ""
setup:
description: |
### Prerequisites
@ -260,9 +242,6 @@
icon_filename: "rocketchat.png"
keywords:
- rocketchat
overview:
notification_description: "You can configure notification delivery to RocketChat from the Netdata Cloud UI."
notification_limitations: ""
setup:
description: |
### Prerequisites
@ -306,9 +285,6 @@
icon_filename: "awssns.png"
keywords:
- awssns
overview:
notification_description: "You can configure notification delivery to AWS SNS from the Netdata Cloud UI."
notification_limitations: ""
setup:
description: |
### Prerequisites
@ -351,9 +327,6 @@
keywords:
- microsoft
- teams
overview:
notification_description: "You can configure notification delivery to a Microsoft Teams channel from the Netdata Cloud UI."
notification_limitations: ""
setup:
description: |
### Prerequisites
@ -396,9 +369,6 @@
icon_filename: "telegram.svg"
keywords:
- Telegram
overview:
notification_description: "You can configure notification delivery to Telegram from the Netdata Cloud UI."
notification_limitations: ""
setup:
description: |
### Prerequisites
@ -441,9 +411,6 @@
icon_filename: "splunk-black.svg"
keywords:
- Splunk
overview:
notification_description: "You can configure notification delivery to Splunk from the Netdata Cloud UI."
notification_limitations: ""
setup:
description: |
### Prerequisites
@ -479,9 +446,6 @@
- VictorOps
- Splunk
- On-Call
overview:
notification_description: "You can configure notification delivery to Splunk On-Call/VictorOps from the Netdata Cloud UI."
notification_limitations: ""
setup:
description: |
### Prerequisites
@ -515,9 +479,6 @@
keywords:
- generic webhooks
- webhooks
overview:
notification_description: "You can configure notification delivery to a webhook using a predefined schema from the Netdata Cloud UI."
notification_limitations: ""
setup:
description: |
### Prerequisites
@ -765,9 +726,6 @@
icon_filename: "ilert.svg"
keywords:
- ilert
overview:
notification_description: "You can configure notification delivery to ilert from the Netdata Cloud UI."
notification_limitations: ""
setup:
description: |
### Prerequisites
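
Taken together, the hunks above remove the per-method overview: block (its notification_description and notification_limitations fields) from every cloud notification entry, while keywords, icon and setup metadata stay untouched. A minimal sketch of what one parsed entry looks like after the change, written as a Python dict with illustrative values (Discord is used as an example; the icon filename and setup text are abridged guesses, not copied from the file):

entry = {
    "meta": {
        "name": "Discord",               # illustrative
        "icon_filename": "discord.png",  # assumed filename, not shown in this diff
    },
    "keywords": ["discord", "community"],
    "setup": {
        "description": "### Prerequisites\n...",  # setup text is kept as-is
    },
    # other fields (id, troubleshooting, ...) omitted for brevity; the point is that
    # there is no "overview" key any more; the docs generator now builds the
    # name + icon header itself (see create_overview below).
}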

Changed file: the Python script that generates documentation pages from integrations.js

@ -1,7 +1,7 @@
import json
import re
import shutil
from pathlib import Path
import re
# Dictionary responsible for making the symbolic links at the end of the script's run.
symlink_dict = {}
@ -29,6 +29,7 @@ def cleanup():
if "integrations" in str(element) and not "metadata.yaml" in str(element):
shutil.rmtree(element)
def generate_category_from_name(category_fragment, category_array):
"""
Takes a category ID in split form ("." as delimiter) and the array of categories, and returns the proper category name that Learn expects.
@ -46,7 +47,7 @@ def generate_category_from_name(category_fragment, category_array):
try:
# print("equals")
# print(fragment, category_fragment[i+1])
dummy_id = dummy_id + "." + category_fragment[i+1]
dummy_id = dummy_id + "." + category_fragment[i + 1]
# print(dummy_id)
except IndexError:
return category_name.split("/", 1)[1]
@ -79,10 +80,10 @@ def add_custom_edit_url(markdown_string, meta_yaml_link, sidebar_label_string, m
if mode == 'default':
path_to_md_file = f'{meta_yaml_link.replace("/metadata.yaml", "")}/integrations/{clean_string(sidebar_label_string)}'
elif mode == 'cloud-notifications':
elif mode == 'cloud-notification':
path_to_md_file = meta_yaml_link.replace("metadata.yaml", f'integrations/{clean_string(sidebar_label_string)}')
elif mode == 'agent-notifications':
elif mode == 'agent-notification':
path_to_md_file = meta_yaml_link.replace("metadata.yaml", "README")
elif mode == 'cloud-authentication':
@ -122,23 +123,29 @@ def read_integrations_js(path_to_file):
print("Exception", e)
def create_overview(integration, filename):
def create_overview(integration, filename, overview_key_name="overview"):
# empty overview_key_name to have only image on overview
if not overview_key_name:
return f"""# {integration['meta']['name']}
split = re.split(r'(#.*\n)', integration['overview'], 1)
<img src="https://netdata.cloud/img/{filename}" width="150"/>
"""
split = re.split(r'(#.*\n)', integration[overview_key_name], 1)
first_overview_part = split[1]
rest_overview_part = split[2]
if len(filename) > 0:
return f"""{first_overview_part}
if not filename:
return f"""{first_overview_part}{rest_overview_part}
"""
return f"""{first_overview_part}
<img src="https://netdata.cloud/img/{filename}" width="150"/>
{rest_overview_part}
"""
else:
return f"""{first_overview_part}{rest_overview_part}
"""
def build_readme_from_integration(integration, mode=''):
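
For reference, a rough usage sketch of the reworked create_overview above, using made-up inputs (none of these values come from the repository's data):

integration = {
    "meta": {"name": "Discord"},                                   # hypothetical entry
    "overview": "# Discord\n\nSend Netdata alerts to Discord.\n",  # hypothetical overview text
}

create_overview(integration, "discord.png", "")
# -> just "# Discord" plus the <img .../discord.png .../> tag; this is the branch
#    cloud notifications take now that they no longer carry an overview key

create_overview(integration, "discord.png")
# -> the first overview heading, then the image tag, then the rest of the overview text

create_overview(integration, "")
# -> the overview text unchanged, with no image inserted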
@ -150,7 +157,8 @@ def build_readme_from_integration(integration, mode=''):
meta_yaml = integration['edit_link'].replace("blob", "edit")
sidebar_label = integration['meta']['monitored_instance']['name']
learn_rel_path = generate_category_from_name(
integration['meta']['monitored_instance']['categories'][0].split("."), categories).replace("Data Collection", "Collecting Metrics")
integration['meta']['monitored_instance']['categories'][0].split("."), categories).replace(
"Data Collection", "Collecting Metrics")
most_popular = integration['meta']['most_popular']
# build the markdown string
@ -221,7 +229,7 @@ endmeta-->
print("Exception in exporter md construction", e, integration['id'])
# NOTIFICATIONS
elif mode == 'notification':
elif mode == 'agent-notification':
try:
# initiate the variables for the notification method
meta_yaml = integration['edit_link'].replace("blob", "edit")
@ -238,7 +246,7 @@ learn_rel_path: "{learn_rel_path.replace("notifications", "Alerts & Notification
message: "DO NOT EDIT THIS FILE DIRECTLY, IT IS GENERATED BY THE NOTIFICATION'S metadata.yaml FILE"
endmeta-->
{create_overview(integration, integration['meta']['icon_filename'])}"""
{create_overview(integration, integration['meta']['icon_filename'], "overview")}"""
if integration['setup']:
md += f"""
@ -252,7 +260,39 @@ endmeta-->
except Exception as e:
print("Exception in notification md construction", e, integration['id'])
elif mode == 'cloud-notification':
try:
# initiate the variables for the notification method
meta_yaml = integration['edit_link'].replace("blob", "edit")
sidebar_label = integration['meta']['name']
learn_rel_path = generate_category_from_name(integration['meta']['categories'][0].split("."), categories)
# build the markdown string
md = \
f"""<!--startmeta
meta_yaml: "{meta_yaml}"
sidebar_label: "{sidebar_label}"
learn_status: "Published"
learn_rel_path: "{learn_rel_path.replace("notifications", "Alerts & Notifications/Notifications")}"
message: "DO NOT EDIT THIS FILE DIRECTLY, IT IS GENERATED BY THE NOTIFICATION'S metadata.yaml FILE"
endmeta-->
{create_overview(integration, integration['meta']['icon_filename'], "")}"""
if integration['setup']:
md += f"""
{integration['setup']}
"""
if integration['troubleshooting']:
md += f"""
{integration['troubleshooting']}
"""
except Exception as e:
print("Exception in notification md construction", e, integration['id'])
# AUTHENTICATIONS
elif mode == 'authentication':
if True:
@ -339,27 +379,35 @@ def write_to_file(path, md, meta_yaml, sidebar_label, community, mode='default')
except KeyError:
# We don't need to print something here.
pass
elif mode == 'notification':
elif mode == 'cloud-notification':
if "cloud-notifications" in path:
# for cloud notifications we generate them near their metadata.yaml
name = clean_string(integration['meta']['name'])
# for cloud notifications we generate them near their metadata.yaml
name = clean_string(integration['meta']['name'])
if not Path(f'{path}/integrations').exists():
Path(f'{path}/integrations').mkdir()
if not Path(f'{path}/integrations').exists():
Path(f'{path}/integrations').mkdir()
# proper_edit_name = meta_yaml.replace(
# "metadata.yaml", f'integrations/{clean_string(sidebar_label)}.md\"')
# proper_edit_name = meta_yaml.replace(
# "metadata.yaml", f'integrations/{clean_string(sidebar_label)}.md\"')
md = add_custom_edit_url(md, meta_yaml, sidebar_label, mode='cloud-notifications')
md = add_custom_edit_url(md, meta_yaml, sidebar_label, mode='cloud-notification')
finalpath = f'{path}/integrations/{name}.md'
else:
# add custom_edit_url as the md file, so we can have uniqueness in the ingest script
# afterwards the ingest will replace this metadata with meta_yaml
md = add_custom_edit_url(md, meta_yaml, sidebar_label, mode='agent-notifications')
finalpath = f'{path}/integrations/{name}.md'
finalpath = f'{path}/README.md'
try:
clean_and_write(
md,
Path(finalpath)
)
except FileNotFoundError as e:
print("Exception in writing to file", e)
elif mode == 'agent-notification':
# add custom_edit_url as the md file, so we can have uniqueness in the ingest script
# afterwards the ingest will replace this metadata with meta_yaml
md = add_custom_edit_url(md, meta_yaml, sidebar_label, mode='agent-notification')
finalpath = f'{path}/README.md'
try:
clean_and_write(
@ -383,7 +431,7 @@ def write_to_file(path, md, meta_yaml, sidebar_label, community, mode='default')
md = add_custom_edit_url(md, meta_yaml, sidebar_label, mode='cloud-authentication')
finalpath = f'{path}/integrations/{name}.md'
try:
clean_and_write(
md,
@ -422,7 +470,6 @@ cleanup()
categories, integrations = read_integrations_js('integrations/integrations.js')
# Iterate through every integration
for integration in integrations:
@ -442,20 +489,25 @@ for integration in integrations:
path = build_path(meta_yaml)
write_to_file(path, md, meta_yaml, sidebar_label, community)
# kind of specific if clause, so we can avoid running excessive code in the go repo
elif integration['integration_type'] == "notification":
elif integration['integration_type'] == "agent_notification":
meta_yaml, sidebar_label, learn_rel_path, md, community = build_readme_from_integration(
integration, mode='notification')
integration, mode='agent-notification')
path = build_path(meta_yaml)
write_to_file(path, md, meta_yaml, sidebar_label, community, mode='notification')
write_to_file(path, md, meta_yaml, sidebar_label, community, mode='agent-notification')
elif integration['integration_type'] == "cloud_notification":
meta_yaml, sidebar_label, learn_rel_path, md, community = build_readme_from_integration(
integration, mode='cloud-notification')
path = build_path(meta_yaml)
write_to_file(path, md, meta_yaml, sidebar_label, community, mode='cloud-notification')
elif integration['integration_type'] == "authentication":
meta_yaml, sidebar_label, learn_rel_path, md, community = build_readme_from_integration(
integration, mode='authentication')
path = build_path(meta_yaml)
write_to_file(path, md, meta_yaml, sidebar_label, community, mode='authentication')
write_to_file(path, md, meta_yaml, sidebar_label, community, mode='authentication')
make_symlinks(symlink_dict)
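
In short, the former single 'notification' type is split in two, and each kind of page now lands in a different place. A condensed summary of the dispatch above as a Python mapping (the output paths paraphrase the write_to_file branches; <dir> and <name> are placeholders, not literal values):

# integration_type        ->  build/write mode       ->  generated file
NOTIFICATION_DISPATCH = {
    "agent_notification": ("agent-notification", "<notification module dir>/README.md"),
    "cloud_notification": ("cloud-notification", "<metadata.yaml dir>/integrations/<name>.md"),
}
# previously both kinds shared integration_type "notification" and mode "notification"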

Changed file: the Python script that loads, validates, and renders the integrations metadata

@ -4,7 +4,6 @@ import json
import os
import re
import sys
from copy import deepcopy
from pathlib import Path
@ -40,8 +39,11 @@ EXPORTER_SOURCES = [
(AGENT_REPO, REPO_PATH / 'src' / 'exporting', True),
]
NOTIFICATION_SOURCES = [
AGENT_NOTIFICATION_SOURCES = [
(AGENT_REPO, REPO_PATH / 'src' / 'health' / 'notifications', True),
]
CLOUD_NOTIFICATION_SOURCES = [
(AGENT_REPO, INTEGRATIONS_PATH / 'cloud-notifications' / 'metadata.yaml', False),
]
@ -64,12 +66,17 @@ EXPORTER_RENDER_KEYS = [
'troubleshooting',
]
NOTIFICATION_RENDER_KEYS = [
AGENT_NOTIFICATION_RENDER_KEYS = [
'overview',
'setup',
'troubleshooting',
]
CLOUD_NOTIFICATION_RENDER_KEYS = [
'setup',
'troubleshooting',
]
AUTHENTICATION_RENDER_KEYS = [
'overview',
'setup',
@ -85,18 +92,18 @@ DEBUG = os.environ.get('DEBUG', False)
def debug(msg):
if GITHUB_ACTIONS:
print(f':debug:{ msg }')
print(f':debug:{msg}')
elif DEBUG:
print(f'>>> { msg }')
print(f'>>> {msg}')
else:
pass
def warn(msg, path):
if GITHUB_ACTIONS:
print(f':warning file={ path }:{ msg }')
print(f':warning file={path}:{msg}')
else:
print(f'!!! WARNING:{ path }:{ msg }')
print(f'!!! WARNING:{path}:{msg}')
def retrieve_from_filesystem(uri):
@ -122,8 +129,13 @@ EXPORTER_VALIDATOR = Draft7Validator(
registry=registry,
)
NOTIFICATION_VALIDATOR = Draft7Validator(
{'$ref': './notification.json#'},
AGENT_NOTIFICATION_VALIDATOR = Draft7Validator(
{'$ref': './agent_notification.json#'},
registry=registry,
)
CLOUD_NOTIFICATION_VALIDATOR = Draft7Validator(
{'$ref': './cloud_notification.json#'},
registry=registry,
)
@ -209,19 +221,19 @@ def load_yaml(src):
yaml = YAML(typ='safe')
if not src.is_file():
warn(f'{ src } is not a file.', src)
warn(f'{src} is not a file.', src)
return False
try:
contents = src.read_text()
except (IOError, OSError):
warn(f'Failed to read { src }.', src)
warn(f'Failed to read {src}.', src)
return False
try:
data = yaml.load(contents)
except YAMLError:
warn(f'Failed to parse { src } as YAML.', src)
warn(f'Failed to parse {src} as YAML.', src)
return False
return data
@ -236,7 +248,7 @@ def load_categories():
try:
CATEGORY_VALIDATOR.validate(categories)
except ValidationError:
warn(f'Failed to validate { CATEGORIES_FILE } against the schema.', CATEGORIES_FILE)
warn(f'Failed to validate {CATEGORIES_FILE} against the schema.', CATEGORIES_FILE)
sys.exit(1)
return categories
@ -248,7 +260,7 @@ def load_collectors():
entries = get_collector_metadata_entries()
for repo, path in entries:
debug(f'Loading { path }.')
debug(f'Loading {path}.')
data = load_yaml(path)
if not data:
@ -257,7 +269,7 @@ def load_collectors():
try:
COLLECTOR_VALIDATOR.validate(data)
except ValidationError:
warn(f'Failed to validate { path } against the schema.', path)
warn(f'Failed to validate {path} against the schema.', path)
continue
for idx, item in enumerate(data['modules']):
@ -273,7 +285,7 @@ def load_collectors():
def _load_deploy_file(file, repo):
ret = []
debug(f'Loading { file }.')
debug(f'Loading {file}.')
data = load_yaml(file)
if not data:
@ -282,7 +294,7 @@ def _load_deploy_file(file, repo):
try:
DEPLOY_VALIDATOR.validate(data)
except ValidationError:
warn(f'Failed to validate { file } against the schema.', file)
warn(f'Failed to validate {file} against the schema.', file)
return []
for idx, item in enumerate(data):
@ -309,7 +321,7 @@ def load_deploy():
def _load_exporter_file(file, repo):
debug(f'Loading { file }.')
debug(f'Loading {file}.')
data = load_yaml(file)
if not data:
@ -318,7 +330,7 @@ def _load_exporter_file(file, repo):
try:
EXPORTER_VALIDATOR.validate(data)
except ValidationError:
warn(f'Failed to validate { file } against the schema.', file)
warn(f'Failed to validate {file} against the schema.', file)
return []
if 'id' in data:
@ -354,21 +366,21 @@ def load_exporters():
return ret
def _load_notification_file(file, repo):
debug(f'Loading { file }.')
def _load_agent_notification_file(file, repo):
debug(f'Loading {file}.')
data = load_yaml(file)
if not data:
return []
try:
NOTIFICATION_VALIDATOR.validate(data)
AGENT_NOTIFICATION_VALIDATOR.validate(data)
except ValidationError:
warn(f'Failed to validate { file } against the schema.', file)
warn(f'Failed to validate {file} against the schema.', file)
return []
if 'id' in data:
data['integration_type'] = 'notification'
data['integration_type'] = 'agent_notification'
data['_src_path'] = file
data['_repo'] = repo
data['_index'] = 0
@ -378,7 +390,7 @@ def _load_notification_file(file, repo):
ret = []
for idx, item in enumerate(data):
item['integration_type'] = 'notification'
item['integration_type'] = 'agent_notification'
item['_src_path'] = file
item['_repo'] = repo
item['_index'] = idx
@ -387,20 +399,67 @@ def _load_notification_file(file, repo):
return ret
def load_notifications():
def _load_cloud_notification_file(file, repo):
debug(f'Loading {file}.')
data = load_yaml(file)
if not data:
return []
try:
CLOUD_NOTIFICATION_VALIDATOR.validate(data)
except ValidationError:
warn(f'Failed to validate {file} against the schema.', file)
return []
if 'id' in data:
data['integration_type'] = 'cloud_notification'
data['_src_path'] = file
data['_repo'] = repo
data['_index'] = 0
return [data]
else:
ret = []
for idx, item in enumerate(data):
item['integration_type'] = 'cloud_notification'
item['_src_path'] = file
item['_repo'] = repo
item['_index'] = idx
ret.append(item)
return ret
def load_agent_notifications():
ret = []
for repo, path, match in NOTIFICATION_SOURCES:
for repo, path, match in AGENT_NOTIFICATION_SOURCES:
if match and path.exists() and path.is_dir():
for file in path.glob(METADATA_PATTERN):
ret.extend(_load_notification_file(file, repo))
ret.extend(_load_agent_notification_file(file, repo))
elif not match and path.exists() and path.is_file():
ret.extend(_load_notification_file(path, repo))
ret.extend(_load_agent_notification_file(path, repo))
return ret
def load_cloud_notifications():
ret = []
for repo, path, match in CLOUD_NOTIFICATION_SOURCES:
if match and path.exists() and path.is_dir():
for file in path.glob(METADATA_PATTERN):
ret.extend(_load_cloud_notification_file(file, repo))
elif not match and path.exists() and path.is_file():
ret.extend(_load_cloud_notification_file(path, repo))
return ret
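
For illustration, one entry returned by the new load_cloud_notifications() would look roughly like this (field values are assumptions; the real ones come from the single metadata.yaml listed in CLOUD_NOTIFICATION_SOURCES):

from pathlib import Path

example_entry = {
    "integration_type": "cloud_notification",  # tag added by _load_cloud_notification_file
    "_src_path": Path("integrations/cloud-notifications/metadata.yaml"),  # per CLOUD_NOTIFICATION_SOURCES
    "_repo": "netdata/netdata",                # assumed value of AGENT_REPO
    "_index": 0,                               # position within the YAML list
    # plus everything from the YAML entry itself: id, meta, keywords, setup, troubleshooting, ...
}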
def _load_authentication_file(file, repo):
debug(f'Loading { file }.')
debug(f'Loading {file}.')
data = load_yaml(file)
if not data:
@ -409,7 +468,7 @@ def _load_authentication_file(file, repo):
try:
AUTHENTICATION_VALIDATOR.validate(data)
except ValidationError:
warn(f'Failed to validate { file } against the schema.', file)
warn(f'Failed to validate {file} against the schema.', file)
return []
if 'id' in data:
@ -453,13 +512,13 @@ def make_id(meta):
else:
instance_name = '000_unknown'
return f'{ meta["plugin_name"] }-{ meta["module_name"] }-{ instance_name }'
return f'{meta["plugin_name"]}-{meta["module_name"]}-{instance_name}'
def make_edit_link(item):
item_path = item['_src_path'].relative_to(REPO_PATH)
return f'https://github.com/{ item["_repo"] }/blob/master/{ item_path }'
return f'https://github.com/{item["_repo"]}/blob/master/{item_path}'
def sort_integrations(integrations):
@ -474,7 +533,9 @@ def dedupe_integrations(integrations, ids):
for i in integrations:
if ids.get(i['id'], False):
first_path, first_index = ids[i['id']]
warn(f'Duplicate integration ID found at { i["_src_path"] } index { i["_index"] } (original definition at { first_path } index { first_index }), ignoring that integration.', i['_src_path'])
warn(
f'Duplicate integration ID found at {i["_src_path"]} index {i["_index"]} (original definition at {first_path} index {first_index}), ignoring that integration.',
i['_src_path'])
else:
tmp_integrations.append(i)
ids[i['id']] = (i['_src_path'], i['_index'])
@ -504,7 +565,7 @@ def render_collectors(categories, collectors, ids):
idmap = {i['id']: i for i in collectors}
for item in collectors:
debug(f'Processing { item["id"] }.')
debug(f'Processing {item["id"]}.')
item['edit_link'] = make_edit_link(item)
@ -516,7 +577,7 @@ def render_collectors(categories, collectors, ids):
res_id = make_id(res)
if res_id not in idmap.keys():
warn(f'Could not find related integration { res_id }, ignoring it.', item['_src_path'])
warn(f'Could not find related integration {res_id}, ignoring it.', item['_src_path'])
continue
related.append({
@ -532,17 +593,19 @@ def render_collectors(categories, collectors, ids):
actual_cats = item_cats & valid_cats
if bogus_cats:
warn(f'Ignoring invalid categories: { ", ".join(bogus_cats) }', item["_src_path"])
warn(f'Ignoring invalid categories: {", ".join(bogus_cats)}', item["_src_path"])
if not item_cats:
item['meta']['monitored_instance']['categories'] = list(default_cats)
warn(f'{ item["id"] } does not list any caregories, adding it to: { default_cats }', item["_src_path"])
warn(f'{item["id"]} does not list any caregories, adding it to: {default_cats}', item["_src_path"])
else:
item['meta']['monitored_instance']['categories'] = [x for x in item['meta']['monitored_instance']['categories'] if x in list(actual_cats)]
item['meta']['monitored_instance']['categories'] = [x for x in
item['meta']['monitored_instance']['categories'] if
x in list(actual_cats)]
for scope in item['metrics']['scopes']:
if scope['name'] == 'global':
scope['name'] = f'{ item["meta"]["monitored_instance"]["name"] } instance'
scope['name'] = f'{item["meta"]["monitored_instance"]["name"]} instance'
for cfg_example in item['setup']['configuration']['examples']['list']:
if 'folding' not in cfg_example:
@ -552,7 +615,7 @@ def render_collectors(categories, collectors, ids):
for key in COLLECTOR_RENDER_KEYS:
if key in item.keys():
template = get_jinja_env().get_template(f'{ key }.md')
template = get_jinja_env().get_template(f'{key}.md')
data = template.render(entry=item, related=related, clean=False)
clean_data = template.render(entry=item, related=related, clean=True)
@ -589,7 +652,7 @@ def render_deploy(distros, categories, deploy, ids):
template = get_jinja_env().get_template('platform_info.md')
for item in deploy:
debug(f'Processing { item["id"] }.')
debug(f'Processing {item["id"]}.')
item['edit_link'] = make_edit_link(item)
clean_item = deepcopy(item)
@ -646,7 +709,7 @@ def render_exporters(categories, exporters, ids):
for key in EXPORTER_RENDER_KEYS:
if key in item.keys():
template = get_jinja_env().get_template(f'{ key }.md')
template = get_jinja_env().get_template(f'{key}.md')
data = template.render(entry=item, clean=False)
clean_data = template.render(entry=item, clean=True)
@ -670,7 +733,7 @@ def render_exporters(categories, exporters, ids):
return exporters, clean_exporters, ids
def render_notifications(categories, notifications, ids):
def render_agent_notifications(categories, notifications, ids):
debug('Sorting notifications.')
sort_integrations(notifications)
@ -686,9 +749,52 @@ def render_notifications(categories, notifications, ids):
clean_item = deepcopy(item)
for key in NOTIFICATION_RENDER_KEYS:
for key in AGENT_NOTIFICATION_RENDER_KEYS:
if key in item.keys():
template = get_jinja_env().get_template(f'{ key }.md')
template = get_jinja_env().get_template(f'{key}.md')
data = template.render(entry=item, clean=False)
clean_data = template.render(entry=item, clean=True)
if 'variables' in item['meta']:
template = get_jinja_env().from_string(data)
data = template.render(variables=item['meta']['variables'], clean=False)
template = get_jinja_env().from_string(clean_data)
clean_data = template.render(variables=item['meta']['variables'], clean=True)
else:
data = ''
clean_data = ''
item[key] = data
clean_item[key] = clean_data
for k in ['_src_path', '_repo', '_index']:
del item[k], clean_item[k]
clean_notifications.append(clean_item)
return notifications, clean_notifications, ids
def render_cloud_notifications(categories, notifications, ids):
debug('Sorting notifications.')
sort_integrations(notifications)
debug('Checking notification ids.')
notifications, ids = dedupe_integrations(notifications, ids)
clean_notifications = []
for item in notifications:
item['edit_link'] = make_edit_link(item)
clean_item = deepcopy(item)
for key in CLOUD_NOTIFICATION_RENDER_KEYS:
if key in item.keys():
template = get_jinja_env().get_template(f'{key}.md')
data = template.render(entry=item, clean=False)
clean_data = template.render(entry=item, clean=True)
@ -729,9 +835,9 @@ def render_authentications(categories, authentications, ids):
clean_item = deepcopy(item)
for key in AUTHENTICATION_RENDER_KEYS:
if key in item.keys():
template = get_jinja_env().get_template(f'{ key }.md')
template = get_jinja_env().get_template(f'{key}.md')
data = template.render(entry=item, clean=False)
clean_data = template.render(entry=item, clean=True)
@ -746,7 +852,7 @@ def render_authentications(categories, authentications, ids):
item[key] = data
clean_item[key] = clean_data
for k in ['_src_path', '_repo', '_index']:
del item[k], clean_item[k]
@ -777,20 +883,23 @@ def main():
collectors = load_collectors()
deploy = load_deploy()
exporters = load_exporters()
notifications = load_notifications()
agent_notifications = load_agent_notifications()
cloud_notifications = load_cloud_notifications()
authentications = load_authentications()
collectors, clean_collectors, ids = render_collectors(categories, collectors, dict())
deploy, clean_deploy, ids = render_deploy(distros, categories, deploy, ids)
exporters, clean_exporters, ids = render_exporters(categories, exporters, ids)
notifications, clean_notifications, ids = render_notifications(categories, notifications, ids)
agent_notifications, clean_agent_notifications, ids = render_agent_notifications(categories, agent_notifications,
ids)
cloud_notifications, clean_cloud_notifications, ids = render_cloud_notifications(categories, cloud_notifications,
ids)
authentications, clean_authentications, ids = render_authentications(categories, authentications, ids)
integrations = collectors + deploy + exporters + notifications + authentications
integrations = collectors + deploy + exporters + agent_notifications + cloud_notifications + authentications
render_integrations(categories, integrations)
clean_integrations = clean_collectors + clean_deploy + clean_exporters + clean_notifications + clean_authentications
clean_integrations = clean_collectors + clean_deploy + clean_exporters + clean_agent_notifications + clean_cloud_notifications + clean_authentications
render_json(categories, clean_integrations)

Changed file: the notification JSON schema (global_setup block reformatted)

@ -46,20 +46,20 @@
]
},
"global_setup": {
"type": "object",
"description": "Flags that show which global setup sections are relevant for this notification method.",
"properties": {
"severity_filtering": {
"type": "boolean"
},
"http_proxy": {
"type": "boolean"
}
"type": "object",
"description": "Flags that show which global setup sections are relevant for this notification method.",
"properties": {
"severity_filtering": {
"type": "boolean"
},
"required": [
"severity_filtering",
"http_proxy"
]
"http_proxy": {
"type": "boolean"
}
},
"required": [
"severity_filtering",
"http_proxy"
]
},
"setup": {
"oneOf": [
@ -84,4 +84,4 @@
]
}
}
}
}

New file: the cloud notification JSON schema (cloud_notification.json, referenced by CLOUD_NOTIFICATION_VALIDATOR above)

@ -0,0 +1,68 @@
{
"$schema": "http://json-schema.org/draft-07/schema#",
"title": "Netdata notification mechanism metadata.",
"oneOf": [
{
"$ref": "#/$defs/entry"
},
{
"type": "array",
"minLength": 1,
"items": {
"$ref": "#/$defs/entry"
}
}
],
"$defs": {
"entry": {
"type": "object",
"description": "Data for a single notification method.",
"properties": {
"id": {
"$ref": "./shared.json#/$defs/id"
},
"meta": {
"$ref": "./shared.json#/$defs/instance"
},
"keywords": {
"$ref": "./shared.json#/$defs/keywords"
},
"global_setup": {
"type": "object",
"description": "Flags that show which global setup sections are relevant for this notification method.",
"properties": {
"severity_filtering": {
"type": "boolean"
},
"http_proxy": {
"type": "boolean"
}
},
"required": [
"severity_filtering",
"http_proxy"
]
},
"setup": {
"oneOf": [
{
"$ref": "./shared.json#/$defs/short_setup"
},
{
"$ref": "./shared.json#/$defs/full_setup"
}
]
},
"troubleshooting": {
"$ref": "./shared.json#/$defs/troubleshooting"
}
},
"required": [
"id",
"meta",
"keywords",
"setup"
]
}
}
}
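
Compared with the agent notification schema, the new cloud schema has no overview property, and only id, meta, keywords and setup are required. A minimal Python dict that would satisfy those required fields (a sketch; the exact shapes of meta and setup are defined in shared.json, so the values below are assumptions):

minimal_cloud_notification = {
    "id": "notify-cloud-example",                                  # hypothetical id
    "meta": {"name": "Example", "icon_filename": "example.png"},   # shape assumed from shared.json
    "keywords": ["example"],
    "setup": "### Prerequisites\n...",                             # assuming the short_setup variant
    # optional: global_setup, troubleshooting
}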

Changed file: the overview Jinja template

@ -2,7 +2,7 @@
[% include 'overview/collector.md' %]
[% elif entry.integration_type == 'exporter' %]
[% include 'overview/exporter.md' %]
[% elif entry.integration_type == 'notification' %]
[% elif entry.integration_type == 'agent_notification' %]
[% include 'overview/notification.md' %]
[% elif entry.integration_type == 'authentication' %]
[% include 'overview/authentication.md' %]

Changed file: the troubleshooting Jinja template

@ -85,13 +85,12 @@ docker logs netdata 2>&1 | grep [[ entry.meta.module_name ]]
[% endif %]
[% endif %]
[% elif entry.integration_type == 'notification' %]
[% if 'cloud-notifications' in entry._src_path|string %]
[% elif entry.integration_type == 'cloud_notification' %]
[% if entry.troubleshooting.problems.list %]
## Troubleshooting
[% endif %]
[% else %]
[% elif entry.integration_type == 'agent_notification' %]
## Troubleshooting
### Test Notification
@ -114,7 +113,6 @@ export NETDATA_ALARM_NOTIFY_DEBUG=1
Note that this will test _all_ alert mechanisms for the selected role.
[% endif %]
[% elif entry.integration_type == 'exporter' %]
[% if entry.troubleshooting.problems.list %]
## Troubleshooting
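
The troubleshooting template now branches on the integration type instead of checking whether 'cloud-notifications' appears in the entry's _src_path. The same logic paraphrased in Python (a sketch only; the collector and exporter branches of the real template are omitted):

def troubleshooting_heading(entry):
    if entry["integration_type"] == "cloud_notification":
        # cloud methods: emit the heading only when known problems are listed
        return "## Troubleshooting" if entry["troubleshooting"]["problems"]["list"] else ""
    if entry["integration_type"] == "agent_notification":
        # agent methods: always emit the heading plus the test-notification walkthrough
        return "## Troubleshooting\n\n### Test Notification\n..."
    return ""  # other integration types are handled by the remaining template branches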