mirror of
https://github.com/netdata/netdata.git
synced 2025-04-17 11:12:42 +00:00
Initial tooling for Integrations Documentation (#15893)
Co-authored-by: Austin S. Hemmelgarn <austin@netdata.cloud> Co-authored-by: Tasos Katsoulas <12612986+tkatsoulas@users.noreply.github.com>
This commit is contained in:
parent
5f8b6a08be
commit
9c6c5d42a9
2 changed files with 339 additions and 0 deletions
63
.github/workflows/generate-integrations-docs.yml
vendored
Normal file
63
.github/workflows/generate-integrations-docs.yml
vendored
Normal file
|
@ -0,0 +1,63 @@
|
|||
---
# CI workflow used to generate documentation from integrations/integrations.js.
name: Generate Integrations Documentation
on:
  push:
    # Only regenerate docs when the integrations catalog itself changes on master.
    branches:
      - master
    paths:
      - 'integrations/integrations.js'
  # Allow maintainers to trigger a regeneration manually from the Actions tab.
  workflow_dispatch: null
concurrency: # This keeps multiple instances of the job from running concurrently for the same ref.
  group: generate-integrations-docs-${{ github.ref }}
  cancel-in-progress: true
jobs:
  generate-integrations-documentation:
    name: Generate Integrations Documentation
    runs-on: ubuntu-latest
    # Skip on forks so PRs are only opened against the canonical repo.
    if: github.repository == 'netdata/netdata'
    steps:
      - name: Checkout Agent
        id: checkout-agent
        uses: actions/checkout@v4
        with:
          fetch-depth: 1
          submodules: recursive
      # Runs the generator script; it writes the markdown files in-tree.
      - name: Generate Integrations Documentation
        id: generate
        run: |
          python3 integrations/gen_docs_integrations.py
      # Opens (or updates) a PR with whatever files the generator changed.
      - name: Create PR
        id: create-pr
        uses: peter-evans/create-pull-request@v5
        with:
          token: ${{ secrets.NETDATABOT_GITHUB_TOKEN }}
          commit-message: Generate Integrations Documentation
          branch: integrations-docs
          title: Integrations Documentation
          body: |
            Generate Documentation from `integrations/integrations.js` based on the latest code.

            This PR was auto-generated by
            `.github/workflows/generate-integrations-docs.yml`.
      # Posts to Slack only when a step failed on master in the canonical repo.
      - name: Failure Notification
        uses: rtCamp/action-slack-notify@v2
        env:
          SLACK_COLOR: 'danger'
          SLACK_FOOTER: ''
          SLACK_ICON_EMOJI: ':github-actions:'
          SLACK_TITLE: 'Integrations Documentation generation failed:'
          SLACK_USERNAME: 'GitHub Actions'
          SLACK_MESSAGE: |-
            ${{ github.repository }}: Failed to create PR generating documentation from integrations.js
            Checkout Agent: ${{ steps.checkout-agent.outcome }}
            Generate Integrations: ${{ steps.generate.outcome }}
            Create PR: ${{ steps.create-pr.outcome }}
          SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_URL }}
        if: >-
          ${{
            failure()
            && startsWith(github.ref, 'refs/heads/master')
            && github.repository == 'netdata/netdata'
          }}
|
276
integrations/gen_docs_integrations.py
Normal file
276
integrations/gen_docs_integrations.py
Normal file
|
@ -0,0 +1,276 @@
|
|||
import json
|
||||
import os
|
||||
|
||||
# Dictionary responsible for making the symbolic links at the end of the script's run.
# Maps a collector/exporter directory path -> relative path of the single generated
# markdown file inside its `integrations/` subfolder; consumed by the final loop,
# which replaces each directory's README.md with a symlink to that file.
symlink_dict = {}
|
||||
|
||||
|
||||
def generate_category_from_name(category_fragment, category_array):
    """
    Resolve a dot-delimited category ID (already split into its fragments)
    against the nested category tree, returning the "/"-joined human-readable
    category path that Learn expects.

    Walks one tree level per fragment: at each level it looks up the
    accumulated ID among the current children, appends that category's display
    name, then descends into its ``children``. When the final fragment has
    been consumed, the accumulated name (minus the leading separator) is
    returned. If a level never matches, the function falls through and
    implicitly returns ``None``.
    """
    accumulated_name = ""
    current_id = category_fragment[0]
    last_index = len(category_fragment) - 1

    for position in range(len(category_fragment)):
        for node in category_array:
            if current_id != node['id']:
                continue
            accumulated_name += "/" + node["name"]
            # Final fragment consumed: strip the leading "/" and return.
            if position >= last_index:
                return accumulated_name.split("/", 1)[1]
            # Extend the ID with the next fragment and descend a level.
            current_id += "." + category_fragment[position + 1]
            category_array = node['children']
            break
|
||||
|
||||
|
||||
def clean_and_write(md, txt):
    """
    Convert the custom ``{% details %}`` shortcode markers in *md* into plain
    HTML ``<details>``/``<summary>`` tags, then write the converted buffer to
    the already-open file object *txt*.
    """
    # Ordered shortcode -> HTML substitutions; order matters because the
    # summary opener must be rewritten before its closing `" %}` marker.
    substitutions = (
        ("{% details summary=\"", "<details><summary>"),
        ("\" %}", "</summary>\n"),
        ("{% /details %}", "</details>\n"),
    )
    for marker, html in substitutions:
        md = md.replace(marker, html)

    txt.write(md)
|
||||
|
||||
|
||||
# Open integrations/integrations.js and extract the dictionaries
with open('integrations/integrations.js') as dataFile:
    data = dataFile.read()

# The JS file is expected to contain exactly:
#   export const categories = <json>export const integrations = <json>
# so slicing on those two markers yields the raw JSON payloads.
# NOTE(review): json.loads assumes both payloads are strict JSON (no JS-only
# syntax such as trailing commas) — confirm against the generator of this file.
categories_str = data.split("export const categories = ")[1].split("export const integrations = ")[0]
integrations_str = data.split("export const categories = ")[1].split("export const integrations = ")[1]

categories = json.loads(categories_str)
integrations = json.loads(integrations_str)
|
||||
|
||||
# NOTE(review): `i` is incremented each iteration but never read — likely a
# leftover debugging counter.
i = 0
# Iterate through every integration
for integration in integrations:
    i += 1
    if integration['integration_type'] == "collector":

        try:
            # initiate the variables for the collector
            # edit_link points at the source metadata.yaml; switch "blob" to
            # "edit" so the generated front matter links to the editable view.
            meta_yaml = integration['edit_link'].replace("blob", "edit")
            sidebar_label = integration['meta']['monitored_instance']['name']
            learn_rel_path = generate_category_from_name(
                integration['meta']['monitored_instance']['categories'][0].split("."), categories)
            # build the markdown string
            md = \
                f"""<!--startmeta
meta_yaml: "{meta_yaml}"
sidebar_label: "{sidebar_label}"
learn_status: "Published"
learn_rel_path: "{learn_rel_path}"
message: "DO NOT EDIT THIS FILE DIRECTLY, IT IS GENERATED BY THE COLLECTOR'S metadata.yaml FILE"
endmeta-->

{integration['overview']}
"""

            # Optional sections: only appended when the integration carries
            # non-empty content for them.
            if integration['metrics']:
                md += f"""
{integration['metrics']}
"""

            if integration['alerts']:
                md += f"""
{integration['alerts']}
"""

            if integration['setup']:
                md += f"""
{integration['setup']}
"""

            if integration['troubleshooting']:
                md += f"""
{integration['troubleshooting']}
"""

            # Derive the in-repo directory of the collector from the edit URL,
            # e.g. ".../netdata/<repo>/edit/master/<dir>/metadata.yaml" -> "<dir>".
            path = meta_yaml.replace("https://github.com/netdata/", "") \
                .split("/", 1)[1] \
                .replace("edit/master/", "") \
                .replace("/metadata.yaml", "")

            # Only if the path exists, this caters for running the same script on both the go and netdata repos.
            if os.path.exists(path):
                try:
                    if not os.path.exists(f'{path}/integrations'):
                        os.mkdir(f'{path}/integrations')

                    with open(f'{path}/integrations/{sidebar_label.lower().replace(" ", "_").replace("/", "-")}.md', 'w+') as txt:
                        # add custom_edit_url as the md file, so we can have uniqueness in the ingest script
                        # afterwards the ingest will replace this metadata with meta_yaml
                        md = md.replace(
                            "<!--startmeta", f'<!--startmeta\ncustom_edit_url: \"{meta_yaml.replace("/metadata.yaml", "")}/integrations/{sidebar_label.lower().replace(" ", "_").replace("/", "-")}.md\"')

                        clean_and_write(md, txt)
                except Exception as e:
                    print("Error in writing to the collector file", e, integration['id'])

                # If we only created one file inside a collector, add the entry to the symlink_dict, so we can make the link
                if len(os.listdir(f'{path}/integrations')) == 1:
                    symlink_dict.update(
                        {path: f'integrations/{sidebar_label.lower().replace(" ", "_").replace("/", "-")}.md'})
                else:
                    # More than one file means no unambiguous README target;
                    # drop any earlier entry for this path.
                    try:
                        symlink_dict.pop(path)
                    except KeyError:
                        # We don't need to print something here.
                        pass

        except Exception as e:
            print("Exception in collector md construction", e, integration['id'])

    # kind of specific if clause, so we can avoid running excessive code in the go repo
    elif integration['integration_type'] == "exporter" and "go.d.plugin" not in os.getcwd():
        try:
            # initiate the variables for the exporter
            meta_yaml = integration['edit_link'].replace("blob", "edit")
            sidebar_label = integration['meta']['name']
            # NOTE(review): learn_rel_path is computed here but the front
            # matter below hardcodes "Exporting" — confirm this is intentional.
            learn_rel_path = generate_category_from_name(integration['meta']['categories'][0].split("."), categories)
            # build the markdown string
            md = \
                f"""<!--startmeta
meta_yaml: "{meta_yaml}"
sidebar_label: "{sidebar_label}"
learn_status: "Published"
learn_rel_path: "Exporting"
message: "DO NOT EDIT THIS FILE DIRECTLY, IT IS GENERATED BY THE EXPORTER'S metadata.yaml FILE"
endmeta-->

{integration['overview']}
"""

            if integration['setup']:
                md += f"""
{integration['setup']}
"""

            if integration['troubleshooting']:
                md += f"""
{integration['troubleshooting']}
"""

            # Same URL -> directory derivation as the collector branch above.
            path = meta_yaml.replace("https://github.com/netdata/", "") \
                .split("/", 1)[1] \
                .replace("edit/master/", "") \
                .replace("/metadata.yaml", "")

            if os.path.exists(path):
                try:
                    if not os.path.exists(f'{path}/integrations'):
                        os.mkdir(f'{path}/integrations')

                    with open(f'{path}/integrations/{sidebar_label.lower().replace(" ", "_").replace("/", "-")}.md', 'w+') as txt:
                        # add custom_edit_url as the md file, so we can have uniqueness in the ingest script
                        # afterwards the ingest will replace this metadata with meta_yaml
                        md = md.replace(
                            "<!--startmeta", f'<!--startmeta\ncustom_edit_url: \"{meta_yaml.replace("/metadata.yaml", "")}/integrations/{sidebar_label.lower().replace(" ", "_").replace("/", "-")}.md\"')

                        clean_and_write(md, txt)
                except Exception as e:
                    print("Error in writing to the file", e, integration['id'])

                # If we only created one file inside a collector, add the entry to the symlink_dict, so we can make the link
                if len(os.listdir(f'{path}/integrations')) == 1:
                    symlink_dict.update(
                        {path: f'integrations/{sidebar_label.lower().replace(" ", "_").replace("/", "-")}.md'})
                else:
                    try:
                        symlink_dict.pop(path)
                    except KeyError:
                        # We don't need to print something here.
                        pass
        except Exception as e:
            print("Exception in exporter md construction", e, integration['id'])

    # kind of specific if clause, so we can avoid running excessive code in the go repo
    elif integration['integration_type'] == "notification" and "go.d.plugin" not in os.getcwd():
        try:
            # initiate the variables for the notification method
            meta_yaml = integration['edit_link'].replace("blob", "edit")
            sidebar_label = integration['meta']['name']
            learn_rel_path = generate_category_from_name(integration['meta']['categories'][0].split("."), categories)
            # build the markdown string
            md = \
                f"""<!--startmeta
meta_yaml: "{meta_yaml}"
sidebar_label: "{sidebar_label}"
learn_status: "Published"
learn_rel_path: "{learn_rel_path.replace("notifications", "Alerting/Notifications")}"
message: "DO NOT EDIT THIS FILE DIRECTLY, IT IS GENERATED BY THE NOTIFICATION'S metadata.yaml FILE"
endmeta-->

{integration['overview']}
"""

            if integration['setup']:
                md += f"""
{integration['setup']}
"""

            if integration['troubleshooting']:
                md += f"""
{integration['troubleshooting']}
"""

            path = meta_yaml.replace("https://github.com/netdata/", "") \
                .split("/", 1)[1] \
                .replace("edit/master/", "") \
                .replace("/metadata.yaml", "")

            if "cloud-notifications" in path:
                # for cloud notifications we generate them near their metadata.yaml
                name = integration['meta']['name'].lower().replace(" ", "_")
                if not os.path.exists(f'{path}/integrations'):
                    os.mkdir(f'{path}/integrations')

                proper_edit_name = meta_yaml.replace(
                    "metadata.yaml", f'integrations/{sidebar_label.lower().replace(" ", "_").replace("/", "-")}.md\"')

                md = md.replace("<!--startmeta", f'<!--startmeta\ncustom_edit_url: \"{proper_edit_name}')

                finalpath = f'{path}/integrations/{name}.md'
            else:
                # add custom_edit_url as the md file, so we can have uniqueness in the ingest script
                # afterwards the ingest will replace this metadata with meta_yaml
                md = md.replace("<!--startmeta",
                                f'<!--startmeta\ncustom_edit_url: \"{meta_yaml.replace("metadata.yaml", "README.md")}')
                finalpath = f'{path}/README.md'
            try:
                with open(finalpath, 'w') as txt:
                    clean_and_write(md, txt)
            except Exception as e:
                print("Exception in notification md construction", e, integration['id'])

        except Exception as e:
            print("Exception in for loop", e, "\n", integration)
|
||||
|
||||
# Final pass: for every directory that ended up with exactly one generated
# integration file, replace its README.md with a symlink to that file.
for element in symlink_dict:
    # Remove the README to prevent it being a normal file
    # NOTE(review): os.remove raises if README.md is absent — presumably every
    # tracked directory is guaranteed to have one; confirm.
    os.remove(f'{element}/README.md')
    # and then make a symlink to the actual markdown
    os.symlink(symlink_dict[element], f'{element}/README.md')

    with open(f'{element}/{symlink_dict[element]}', 'r') as txt:
        md = txt.read()

    # This preserves the custom_edit_url for most files as it was,
    # so the existing links don't break, this is vital for link replacement afterwards
    with open(f'{element}/{symlink_dict[element]}', 'w+') as txt:
        md = md.replace(f'{element}/{symlink_dict[element]}', f'{element}/README.md')
        txt.write(md)
|
Loading…
Add table
Reference in a new issue