name: Sync README files to Wiki

on:
  push:
    branches: [ main ]
    paths:
      - '**/README.md'
      - '**/readme.md'

permissions:
  contents: write
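  # contents: write also lets the job's GITHUB_TOKEN push to the
  # <repository>.wiki.git remote that is cloned below.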

jobs:
  sync-wiki:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
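        # Checks out the main repository so the README files and the images
        # they reference are available in the workspace.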

      - name: Configure Git for Wiki
        run: |
          git config --global user.name "GitHub Actions"
          git config --global user.email "actions@github.com"

      - name: Clone Wiki Repository
        run: git clone https://${{ github.actor }}:${{ secrets.GITHUB_TOKEN }}@github.com/${{ github.repository }}.wiki.git ./wiki
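        # Note: the .wiki.git repository only exists once the wiki has been
        # created (it needs at least one page); otherwise this clone fails.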

      - name: Copy README files to Wiki
        run: |
          # Special mappings - map specific README files to specific wiki pages
          declare -A special_mappings
          special_mappings["benchmarks/rotatingDrum/readme.md"]="Performance-of-phasicFlow.md"
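          # Further mappings can be added in the same way, e.g. (hypothetical):
          # special_mappings["tutorials/README.md"]="Tutorials.md"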

          # Create an images directory in the wiki if it doesn't exist
          mkdir -p ./wiki/images
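          # Images committed under images/ in the wiki repository can then be
          # referenced from wiki pages with relative images/<filename> links.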

          # Process mapped files
          for rel_path in "${!special_mappings[@]}"; do
            if [ -f "./$rel_path" ]; then
              wiki_page="${special_mappings[$rel_path]}"
              echo "Processing special mapping: $rel_path -> $wiki_page"

              # Get the base directory of the README file
              base_dir=$(dirname "./$rel_path")

              # Read content of the README file
              content=$(cat "./$rel_path")

              # Find Markdown image references with grep and rewrite their paths
              echo "Processing Markdown image references..."
              for img_ref in $(grep -oE '!\[[^]]*\]\([^)]*\)' "./$rel_path" | sed -E 's/!\[[^]]*\]\(([^)]*)\)/\1/'); do
                # Skip URLs
                if [[ $img_ref == http* ]]; then
                  continue
                fi

                # Resolve the image path
                if [[ $img_ref == /* ]]; then
                  # Absolute path within repository
                  abs_img_path="./$img_ref"
                else
                  # Relative path to the README
                  abs_img_path="$base_dir/$img_ref"
                fi

                # Extract just the filename
                img_filename=$(basename "$img_ref")
                wiki_img_path="images/$img_filename"

                # Copy the image to wiki repository if it exists
                if [ -f "$abs_img_path" ]; then
                  echo "Copying image: $abs_img_path -> ./wiki/$wiki_img_path"
                  cp -v "$abs_img_path" "./wiki/$wiki_img_path" || echo "Error copying image"

                  # Escape characters that are special in a sed pattern
                  escaped_img_path=$(printf '%s\n' "$img_ref" | sed 's/[][\.*^$]/\\&/g')

                  # Point the Markdown reference at the copied file in the wiki
                  content=$(printf '%s\n' "$content" | sed "s|!\[\([^]]*\)\]($escaped_img_path)|![\1]($wiki_img_path)|g")
                  echo "Replaced image reference: $img_ref → $wiki_img_path"
                else
                  echo "Warning: Image file not found: $abs_img_path"
                  # Add more debug info
                  echo "Current directory: $(pwd)"
                  echo "Files in $base_dir:"
                  ls -la "$base_dir"
                fi
              done

              # Process HTML img tags separately - preserving all attributes
              echo "Processing HTML image references..."

              # Work on a temporary copy of the content
              temp_file=$(mktemp)
              echo "$content" > "$temp_file"

              # Rewrite the HTML image paths with a small perl script
              perl_script=$(mktemp)
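              # The heredoc below is written at this run block's base indentation
              # so that, after YAML strips the block indentation, the PERLSCRIPT
              # terminator starts at column 0 and closes the heredoc.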
              # Write the perl script via a quoted heredoc so the shell expands nothing
              cat > "$perl_script" << 'PERLSCRIPT'
          #!/usr/bin/perl
          use strict;
          use warnings;

          my $content = do { local $/; <STDIN> };

          # Rewrite the src attribute of local <img> tags to point at images/,
          # leaving every other attribute of the tag untouched
          $content =~ s{(<img\s[^>]*?src=")([^"]+)(")}{
              my ($prefix, $src, $suffix) = ($1, $2, $3);
              if ($src =~ /^http/) {
                  # Skip URLs
                  $prefix . $src . $suffix;
              } else {
                  # Get just the filename
                  (my $filename = $src) =~ s{.*/}{};
                  $prefix . "images/" . $filename . $suffix;
              }
          }ge;

          print $content;
          PERLSCRIPT

              # Process content with the perl script
              perl "$perl_script" < "$temp_file" > "${temp_file}.new"
              content=$(cat "${temp_file}.new")

              # Clean up temporary files
              rm "$temp_file" "${temp_file}.new" "$perl_script"

              # Now copy all the images referenced in HTML tags
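              # (this assumes src values are double-quoted and contain no spaces)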
              for img_src in $(grep -o '<img [^>]*src="[^"]*"' "./$rel_path" | sed -E 's/.*src="([^"]*)".*/\1/'); do
                # Skip URLs
                if [[ $img_src == http* ]]; then
                  continue
                fi

                # Determine the absolute path of the image
                if [[ $img_src == /* ]]; then
                  abs_img_path="./$img_src"
                else
                  abs_img_path="$base_dir/$img_src"
                fi

                # Extract just the filename
                img_filename=$(basename "$img_src")
                wiki_img_path="images/$img_filename"

                # Copy the image to wiki repository if it exists
                if [ -f "$abs_img_path" ]; then
                  echo "Copying image: $abs_img_path -> ./wiki/$wiki_img_path"
                  cp -v "$abs_img_path" "./wiki/$wiki_img_path" || echo "Error copying image"
                else
                  echo "Warning: HTML image file not found: $abs_img_path"
                fi
              done

              # Debug output
              echo "Wiki page content preview (first 100 chars): ${content:0:100}"

              # Replace the wiki page with the updated content rather than appending
              mkdir -p "$(dirname "./wiki/$wiki_page")"  # Ensure directory exists
              printf '# %s\n\n%s\n' "$(basename "$wiki_page" .md)" "$content" > "./wiki/$wiki_page"
              echo "Updated wiki page: $wiki_page"
            fi
          done

      - name: Commit and Push to Wiki
        working-directory: ./wiki
        run: |
          echo "Files changed in wiki repository:"
          git status
          git add .
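          # Commit only when something actually changed: diff-index exits
          # non-zero if the index/working tree differs from HEAD.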
          git diff-index --quiet HEAD || git commit -m "Sync README files from main repository"
          git push || { echo "Push failed, retrying with more details..."; git push --verbose; }