A new workflow for syncing readme.md files to the wiki

Hamidreza 2025-04-30 18:34:53 +03:30
parent edfbdb22e9
commit 7a4a33ef37
3 changed files with 114 additions and 89 deletions

.github/scripts/sync-wiki.py (184 changes, vendored, Normal file → Executable file)

@@ -2,106 +2,120 @@
 import os
 import re
-from bs4 import BeautifulSoup
-import os.path
+import yaml
+import sys
 
-# Configuration: map of source README files to destination wiki pages
-FILE_MAPPINGS = {
-    'benchmarks/rotatingDrum/readme.md': 'Performance-of-phasicFlow.md',
-    # Add more mappings as needed
-}
+# Constants
+REPO_URL = "https://github.com/PhasicFlow/phasicFlow"
+REPO_PATH = os.path.join(os.environ.get("GITHUB_WORKSPACE", ""), "repo")
+WIKI_PATH = os.path.join(os.environ.get("GITHUB_WORKSPACE", ""), "wiki")
+MAPPING_FILE = os.path.join(REPO_PATH, ".github/workflows/markdownList.yml")
 
-def convert_relative_links(content, source_path, repo_name):
-    """Convert relative links to absolute GitHub links"""
-    repo_base_url = f"https://github.com/{repo_name}/blob/main/"
-
-    # Find the directory of the source file to correctly resolve relative paths
-    source_dir = os.path.dirname(source_path)
-    if source_dir:
-        source_dir += '/'
-
-    # Convert Markdown links: [text](link)
-    def replace_md_link(match):
+def load_mapping():
+    """Load the markdown to wiki page mapping file."""
+    try:
+        with open(MAPPING_FILE, 'r') as f:
+            data = yaml.safe_load(f)
+        return data.get('mappings', [])
+    except Exception as e:
+        print(f"Error loading mapping file: {e}")
+        return []
+
+def convert_relative_links(content, source_path):
+    """Convert relative links in markdown content to absolute URLs."""
+    # Find markdown links with regex pattern [text](url)
+    pattern = r'\[([^\]]+)\]\(([^)]+)\)'
+
+    def replace_link(match):
         link_text = match.group(1)
-        link_path = match.group(2)
+        link_url = match.group(2)
 
-        # Skip links that are already absolute
-        if link_path.startswith(('http://', 'https://', '#')):
-            return f"[{link_text}]({link_path})"
+        # Skip if already absolute URL or anchor
+        if link_url.startswith(('http://', 'https://', '#', 'mailto:')):
+            return match.group(0)
 
-        # Convert relative path to absolute
-        if link_path.startswith('./'):
-            link_path = link_path[2:]
-        elif link_path.startswith('../'):
-            # Need to resolve the path based on source_dir
-            path_parts = source_dir.strip('/').split('/')
-            current_path = link_path
-            while current_path.startswith('../'):
-                path_parts.pop()
-                current_path = current_path[3:]
-            new_path = '/'.join(path_parts) + '/' + current_path if path_parts else current_path
-            return f"[{link_text}]({repo_base_url}{new_path})"
-
-        absolute_path = f"{source_dir}{link_path}" if not link_path.startswith('/') else link_path[1:]
-        return f"[{link_text}]({repo_base_url}{absolute_path})"
-
-    content = re.sub(r'\[([^\]]+)\]\(([^)]+)\)', replace_md_link, content)
-
-    # Preserve HTML img tags with their style attributes and fix src paths
-    soup = BeautifulSoup(content, 'html.parser')
-    for img in soup.find_all('img'):
-        if img.get('src') and not img['src'].startswith(('http://', 'https://')):
-            src = img['src']
-            if src.startswith('./'):
-                src = src[2:]
-            if not src.startswith('/'):
-                src = f"{source_dir}{src}"
-            else:
-                src = src[1:]  # Remove leading slash
-            img['src'] = f"{repo_base_url}{src}"
-
-    # Convert the modified HTML back to string, but only if we found any img tags
-    if soup.find_all('img'):
-        # Extract just the modified HTML parts and reinsert them
-        for img in soup.find_all('img'):
-            img_str = str(img)
-            # Find the original img tag in content and replace it
-            original_img_pattern = re.compile(r'<img[^>]*src=["\']([^"\']*)["\'][^>]*>')
-            for match in original_img_pattern.finditer(content):
-                orig_src = match.group(1)
-                if orig_src in img['src'] or img['src'].endswith(orig_src):
-                    content = content.replace(match.group(0), img_str)
-
-    return content
+        # Get the directory of the source file
+        source_dir = os.path.dirname(source_path)
+
+        # Create absolute path from repository root
+        if link_url.startswith('/'):
+            # If link starts with /, it's already relative to repo root
+            abs_path = link_url
+        else:
+            # Otherwise, it's relative to the file location
+            abs_path = os.path.normpath(os.path.join(source_dir, link_url))
+            if not abs_path.startswith('/'):
+                abs_path = '/' + abs_path
+
+        # Convert to GitHub URL
+        github_url = f"{REPO_URL}/blob/main{abs_path}"
+        return f"[{link_text}]({github_url})"
+
+    # Replace all links
+    return re.sub(pattern, replace_link, content)
+
+def process_file(source_file, target_wiki_page):
+    """Process a markdown file and copy its contents to a wiki page."""
+    source_path = os.path.join(REPO_PATH, source_file)
+    target_path = os.path.join(WIKI_PATH, f"{target_wiki_page}.md")
+
+    print(f"Processing {source_path} -> {target_path}")
+
+    try:
+        # Check if source exists
+        if not os.path.exists(source_path):
+            print(f"Source file not found: {source_path}")
+            return False
+
+        # Read source content
+        with open(source_path, 'r') as f:
+            content = f.read()
+
+        # Convert relative links
+        content = convert_relative_links(content, source_file)
+
+        # Write to wiki page
+        with open(target_path, 'w') as f:
+            f.write(content)
+
+        return True
+    except Exception as e:
+        print(f"Error processing {source_file}: {e}")
+        return False
 
 def main():
-    repo_name = os.environ.get('GITHUB_REPOSITORY', 'PhasicFlow/phasicFlow')
-    repo_path = os.path.join(os.environ.get('GITHUB_WORKSPACE', '.'), 'repo')
-    wiki_path = os.path.join(os.environ.get('GITHUB_WORKSPACE', '.'), 'wiki')
-
-    for source_rel_path, dest_wiki_page in FILE_MAPPINGS.items():
-        source_path = os.path.join(repo_path, source_rel_path)
-        dest_path = os.path.join(wiki_path, dest_wiki_page)
-
-        print(f"Processing: {source_path} -> {dest_path}")
-
-        if not os.path.exists(source_path):
-            print(f"Warning: Source file {source_path} does not exist, skipping.")
-            continue
-
-        # Read the source file
-        with open(source_path, 'r', encoding='utf-8') as f:
-            content = f.read()
-
-        # Convert relative links to absolute
-        content = convert_relative_links(content, source_rel_path, repo_name)
-
-        # Write the modified content to the destination
-        with open(dest_path, 'w', encoding='utf-8') as f:
-            f.write(content)
-
-        print(f"Successfully synced {source_path} to {dest_path}")
+    # Check if wiki directory exists
+    if not os.path.exists(WIKI_PATH):
+        print(f"Wiki path not found: {WIKI_PATH}")
+        sys.exit(1)
+
+    # Load mapping
+    mappings = load_mapping()
+    if not mappings:
+        print("No mappings found in the mapping file")
+        sys.exit(1)
+
+    print(f"Found {len(mappings)} mappings to process")
+
+    # Process each mapping
+    success_count = 0
+    for mapping in mappings:
+        source = mapping.get('source')
+        target = mapping.get('target')
+
+        if not source or not target:
+            print(f"Invalid mapping: {mapping}")
+            continue
+
+        if process_file(source, target):
+            success_count += 1
+
+    print(f"Successfully processed {success_count} of {len(mappings)} files")
+
+    # Exit with error if any file failed
+    if success_count < len(mappings):
+        sys.exit(1)
 
 if __name__ == "__main__":
     main()
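
A quick way to sanity-check the new link rewriting is to load the script with importlib (the hyphen in sync-wiki.py rules out a plain import) and feed convert_relative_links a sample string. A minimal sketch, run from the repository root with pyyaml installed (the module imports yaml at load time); the docs/setup.md path is made up for illustration:

import importlib.util

spec = importlib.util.spec_from_file_location("sync_wiki", ".github/scripts/sync-wiki.py")
sync_wiki = importlib.util.module_from_spec(spec)
spec.loader.exec_module(sync_wiki)  # safe: main() only runs under the __main__ guard

md = "See the [setup guide](./docs/setup.md) and jump to [top](#top)."
print(sync_wiki.convert_relative_links(md, "benchmarks/rotatingDrum/readme.md"))
# See the [setup guide](https://github.com/PhasicFlow/phasicFlow/blob/main/benchmarks/rotatingDrum/docs/setup.md)
# and jump to [top](#top).  (the '#top' anchor is passed through untouched)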

.github/workflows/markdownList.yml (9 additions, vendored, new file)

@@ -0,0 +1,9 @@
+# This file maps source markdown files to their target wiki pages
+# format:
+#   - source: path/to/markdown/file.md
+#     target: Wiki-Page-Name
+
+mappings:
+  - source: benchmarks/rotatingDrum/readme.md
+    target: Performance-of-phasicFlow
+# Add more mappings as needed
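
For reference, yaml.safe_load parses this file into a plain list of dicts, which is exactly the shape load_mapping() returns and main() iterates over. A minimal sketch:

import yaml

text = """
mappings:
  - source: benchmarks/rotatingDrum/readme.md
    target: Performance-of-phasicFlow
"""
print(yaml.safe_load(text)["mappings"])
# [{'source': 'benchmarks/rotatingDrum/readme.md', 'target': 'Performance-of-phasicFlow'}]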

.github/workflows/sync-wiki.yml

@@ -5,8 +5,10 @@ on:
     branches:
       - main
     paths:
-      - 'benchmarks/*/readme.md'
+      - '**/*.md'
       - '.github/workflows/sync-wiki.yml'
+      - '.github/workflows/markdownList.yml'
+      - '.github/scripts/sync-wiki.py'
   workflow_dispatch:
 
 jobs:
@@ -27,12 +29,12 @@ jobs:
       - name: Set up Python
         uses: actions/setup-python@v4
         with:
-          python-version: '3.x'
+          python-version: '3.10'
 
       - name: Install dependencies
-        run: pip install beautifulsoup4
+        run: pip install pyyaml
 
-      - name: Sync specific README files to Wiki
+      - name: Sync markdown files to Wiki
         run: |
           python $GITHUB_WORKSPACE/repo/.github/scripts/sync-wiki.py
         env:
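
The script can also be exercised outside CI by reproducing the workspace layout the workflow expects, with the repository and its wiki checked out as repo/ and wiki/ (the REPO_PATH and WIKI_PATH constants assume exactly this). A minimal sketch, with /tmp/wiki-sync as an assumed scratch directory:

import os
import subprocess

ws = "/tmp/wiki-sync"  # stand-in for $GITHUB_WORKSPACE; assumed layout:
#   /tmp/wiki-sync/repo  (clone of PhasicFlow/phasicFlow)
#   /tmp/wiki-sync/wiki  (clone of the project wiki)
subprocess.run(
    ["python", os.path.join(ws, "repo/.github/scripts/sync-wiki.py")],
    env=dict(os.environ, GITHUB_WORKSPACE=ws),
    check=True,  # raises if the script exits non-zero, i.e. any mapping failed
)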