mirror of
https://github.com/netdata/netdata.git
synced 2025-04-17 03:02:41 +00:00
Make collectors/COLLECTORS.md have its list autogenerated from integrations.js (#15995)
This commit is contained in:
parent
9bf49b4e1c
commit
5ff2ec1a29
4 changed files with 1165 additions and 625 deletions
.github/workflows
collectors
integrations
63
.github/workflows/generate-integrations-docs.yml
vendored
63
.github/workflows/generate-integrations-docs.yml
vendored
|
@@ -1,63 +0,0 @@
|
|||
---
|
||||
# CI workflow used to generate documentation from integrations/integrations.js.
|
||||
|
||||
name: Generate Integrations Documentation
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- master
|
||||
paths:
|
||||
- 'integrations/integrations.js'
|
||||
workflow_dispatch: null
|
||||
concurrency: # This keeps multiple instances of the job from running concurrently for the same ref.
|
||||
group: generate-integrations-docs-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
jobs:
|
||||
generate-integrations-documentation:
|
||||
name: Generate Integrations Documentation
|
||||
runs-on: ubuntu-latest
|
||||
if: github.repository == 'netdata/netdata'
|
||||
steps:
|
||||
- name: Checkout Agent
|
||||
id: checkout-agent
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 1
|
||||
submodules: recursive
|
||||
- name: Generate Integrations Documentation
|
||||
id: generate
|
||||
run: |
|
||||
python3 integrations/gen_docs_integrations.py
|
||||
- name: Create PR
|
||||
id: create-pr
|
||||
uses: peter-evans/create-pull-request@v5
|
||||
with:
|
||||
token: ${{ secrets.NETDATABOT_GITHUB_TOKEN }}
|
||||
commit-message: Generate Integrations Documentation
|
||||
branch: integrations-docs
|
||||
title: Integrations Documentation
|
||||
body: |
|
||||
Generate Documentation from `integrations/integrations.js` based on the latest code.
|
||||
|
||||
This PR was auto-generated by
|
||||
`.github/workflows/generate-integrations-docs.yml`.
|
||||
- name: Failure Notification
|
||||
uses: rtCamp/action-slack-notify@v2
|
||||
env:
|
||||
SLACK_COLOR: 'danger'
|
||||
SLACK_FOOTER: ''
|
||||
SLACK_ICON_EMOJI: ':github-actions:'
|
||||
SLACK_TITLE: 'Integrations Documentation generation failed:'
|
||||
SLACK_USERNAME: 'GitHub Actions'
|
||||
SLACK_MESSAGE: |-
|
||||
${{ github.repository }}: Failed to create PR generating documentation from integrations.js
|
||||
Checkout Agent: ${{ steps.checkout-agent.outcome }}
|
||||
Generate Integrations: ${{ steps.generate.outcome }}
|
||||
Create PR: ${{ steps.create-pr.outcome }}
|
||||
SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_URL }}
|
||||
if: >-
|
||||
${{
|
||||
failure()
|
||||
&& startsWith(github.ref, 'refs/heads/master')
|
||||
&& github.repository == 'netdata/netdata'
|
||||
}}
|
15
.github/workflows/generate-integrations.yml
vendored
15
.github/workflows/generate-integrations.yml
vendored
|
@@ -1,6 +1,5 @@
|
|||
---
|
||||
# CI workflow used to regenerate `integrations/integrations.js` when
|
||||
# relevant source files are changed.
|
||||
# CI workflow used to regenerate `integrations/integrations.js` and accompanying documentation when relevant source files are changed.
|
||||
name: Generate Integrations
|
||||
on:
|
||||
push:
|
||||
|
@@ -55,6 +54,14 @@ jobs:
|
|||
run: |
|
||||
source ./virtualenv/bin/activate
|
||||
python3 integrations/gen_integrations.py
|
||||
- name: Generate Integrations Documentation
|
||||
id: generate-integrations-documentation
|
||||
run: |
|
||||
python3 integrations/gen_docs_integrations.py
|
||||
- name: Generate collectors/COLLECTORS.md
|
||||
id: generate-collectors-md
|
||||
run: |
|
||||
python3 integrations/gen_doc_collector_page.py
|
||||
- name: Clean Up Temporary Data
|
||||
id: clean
|
||||
run: rm -rf go.d.plugin virtualenv
|
||||
|
@@ -67,7 +74,7 @@ jobs:
|
|||
branch: integrations-regen
|
||||
title: Regenerate integrations.js
|
||||
body: |
|
||||
Regenerate `integrations/integrations.js` based on the
|
||||
Regenerate `integrations/integrations.js`, and documentation based on the
|
||||
latest code.
|
||||
|
||||
This PR was auto-generated by
|
||||
|
@@ -87,6 +94,8 @@ jobs:
|
|||
Checkout Go: ${{ steps.checkout-go.outcome }}
|
||||
Prepare Dependencies: ${{ steps.prep-deps.outcome }}
|
||||
Generate Integrations: ${{ steps.generate.outcome }}
|
||||
Generate Integrations Documentation: ${{ steps.generate-integrations-documentation.outcome }}
|
||||
Generate collectors/COLLECTORS.md: ${{ steps.generate-collectors-md.outcome }}
|
||||
Clean Up Temporary Data: ${{ steps.clean.outcome }}
|
||||
Create PR: ${{ steps.create-pr.outcome }}
|
||||
SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_URL }}
|
||||
|
|
File diff suppressed because it is too large
Load diff
67
integrations/gen_doc_collector_page.py
Normal file
67
integrations/gen_doc_collector_page.py
Normal file
|
@@ -0,0 +1,67 @@
|
|||
"""
|
||||
This script reads the integrations/integrations.js file and generates the list of data collection integrations inside collectors/COLLECTORS.md, with proper links that Learn can replace into Learn links.
|
||||
"""
|
||||
|
||||
import json
|
||||
import pathlib
|
||||
|
||||
# Open integrations/integrations.js and extract the dictionaries
|
||||
with open('integrations/integrations.js') as dataFile:
|
||||
data = dataFile.read()
|
||||
|
||||
categories_str = data.split("export const categories = ")[1].split("export const integrations = ")[0]
|
||||
integrations_str = data.split("export const categories = ")[1].split("export const integrations = ")[1]
|
||||
|
||||
categories = json.loads(categories_str)
|
||||
integrations = json.loads(integrations_str)
|
||||
|
||||
cat_dict = {}
|
||||
data_col_cat = {}
|
||||
|
||||
|
||||
def recursive(categories):
    """Depth-first walk over a category tree.

    Records ``id -> name`` for every node (and all of its descendants) in the
    module-level ``data_col_cat`` mapping.
    """
    for node in categories:
        data_col_cat[node['id']] = node['name']
        children = node['children']
        if children:
            recursive(children)


# Flatten the subtree under categories[1] — presumably the data-collection
# branch of the category tree (TODO confirm against integrations.js) — so
# category ids can be mapped to display names when rendering the page.
recursive(categories[1]['children'])
|
||||
|
||||
|
||||
def construct_dict(array, integration):
    """Register *integration* under every category id listed in *array*.

    Appends to the module-level ``cat_dict``, creating each per-category
    list on first use.
    """
    for category_id in array:
        cat_dict.setdefault(category_id, []).append(integration)


# Accumulated markdown for the autogenerated section of COLLECTORS.md.
md = ""

# Bucket every collector-type integration under each of its categories;
# other integration types (exporters, notifications, ...) are skipped.
for integration in integrations:
    if integration['integration_type'] == "collector":
        monitored_instance = integration['meta']['monitored_instance']
        construct_dict(monitored_instance['categories'], integration)
|
||||
|
||||
|
||||
# Render one markdown section per category, each holding a sorted bullet
# list of links to the per-integration documentation pages.
# NOTE: the loop variable is deliberately NOT called `integrations` — the
# original code shadowed (and clobbered) the module-level list of the same
# name.
for category_id, category_integrations in sorted(cat_dict.items()):
    # Nesting depth of the dotted category id ("a.b.c" -> 3) drives the
    # markdown heading level (one extra '#' is prepended below).
    heading = '#' * len(category_id.split('.'))

    # Direct lookup instead of the original linear scan over data_col_cat.
    # The scan left `name` holding the previous category's value (or unbound
    # on the first iteration) whenever category_id was missing; fall back to
    # the raw id instead so every section gets a deterministic title.
    category_name = data_col_cat.get(category_id, category_id)

    md += f'#{heading} {category_name}\n\n'
    names = []
    for integration in category_integrations:
        name = integration['meta']['monitored_instance']['name']
        # Each integration's doc lives next to its metadata.yaml, under
        # integrations/<slugified name>.md in the source tree.
        slug = name.lower().replace(" ", "_").replace("/", "-").replace("(", "").replace(")", "")
        link = integration['edit_link'].replace("metadata.yaml", "") + "integrations/" + slug + ".md"
        names.append(f"[{name}]({link})")
    for integration_name in sorted(names):
        md += "- " + integration_name + "\n\n"


# Keep everything COLLECTORS.md contains before the section marker (the
# hand-written preamble) and replace the rest with the freshly generated list.
outfile = pathlib.Path("./collectors/COLLECTORS.md")
output = outfile.read_text().split("## Available Data Collection Integrations")[0]
output += ("## Available Data Collection Integrations\n"
           "<!-- AUTOGENERATED PART BY integrations/gen_doc_collector_page.py SCRIPT, DO NOT EDIT MANUALLY -->\n") + md
outfile.write_text(output.rstrip('\n') + "\n")
|
Loading…
Add table
Reference in a new issue