# Introduced in PR #276: "Fix grafana product full name"
---
# Validates partner server JSON definitions on every PR to main:
# 1. each file conforms to the shared JSON schema,
# 2. server 'name' values are unique across files,
# 3. 'securitySchemes' keys are unique across files.
name: Validate servers JSON against schema

on:
  pull_request:
    branches:
      - main  # Only PRs targeting main branch

jobs:
  validate-json:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4

      - uses: actions/setup-python@v5
        with:
          python-version: '3.12'

      - run: pip install --no-cache-dir check-jsonschema

      # Every server definition must conform to the shared schema.
      - name: Validate JSON files schema
        run: |
          check-jsonschema \
            --verbose \
            --schemafile partners/server-schema.json \
            partners/servers/*.json

      # The Python scripts below are fed through a quoted heredoc
      # (<<'EOF') instead of `python3 -c "..."`, so quotes, $ and
      # backticks in the script need no shell escaping.
      - name: Validate server names are unique
        run: |
          python3 - <<'EOF'
          import json
          import sys
          from pathlib import Path
          from collections import defaultdict

          # Collect all names and their source files
          names_to_files = defaultdict(list)
          for json_file in Path('partners/servers').glob('*.json'):
              try:
                  with open(json_file) as f:
                      data = json.load(f)
                  if 'name' in data:
                      names_to_files[data['name']].append(str(json_file))
              except json.JSONDecodeError as e:
                  print(f'Error parsing {json_file}: {e}')
                  sys.exit(1)
              except Exception as e:
                  print(f'Error reading {json_file}: {e}')
                  sys.exit(1)

          # Check for duplicates
          duplicates_found = False
          for name, files in names_to_files.items():
              if len(files) > 1:
                  original_file = files[0]
                  duplicate_files = files[1:]
                  print(f'Error: Duplicate server name "{name}" found. Please ensure all server names are unique across all server JSON files.')
                  print(f' Original: {original_file}')
                  for dup_file in duplicate_files:
                      print(f' Duplicate: {dup_file}')
                  duplicates_found = True
          if duplicates_found:
              sys.exit(1)
          print(f'All {len(names_to_files)} server names are unique across JSON files')
          EOF

      - name: Validate security schema keys are unique
        run: |
          python3 - <<'EOF'
          import json
          import sys
          from pathlib import Path
          from collections import defaultdict

          # Collect all security scheme keys and their source files
          keys_to_files = defaultdict(list)
          for json_file in Path('partners/servers').glob('*.json'):
              try:
                  with open(json_file) as f:
                      data = json.load(f)
                  if 'securitySchemes' in data and data['securitySchemes']:
                      for key in data['securitySchemes'].keys():
                          keys_to_files[key].append(str(json_file))
              except json.JSONDecodeError as e:
                  print(f'Error parsing {json_file}: {e}')
                  sys.exit(1)
              except Exception as e:
                  print(f'Error reading {json_file}: {e}')
                  sys.exit(1)

          # Check for duplicates
          duplicates_found = False
          for key, files in keys_to_files.items():
              if len(files) > 1:
                  original_file = files[0]
                  duplicate_files = files[1:]
                  print(f'Error: Duplicate security scheme key "{key}" found. Please ensure all security scheme keys are unique across all server JSON files.')
                  print(f' Original: {original_file}')
                  for dup_file in duplicate_files:
                      print(f' Duplicate: {dup_file}')
                  duplicates_found = True
          if duplicates_found:
              sys.exit(1)
          print(f'All {len(keys_to_files)} security scheme keys are unique across JSON files')
          EOF