Compare commits

...

9 Commits

3 changed files with 464 additions and 74 deletions

View File

@@ -45,15 +45,17 @@ on:
required: false
jobs:
serialize:
common:
name: Wait for other jobs
# since the build cache, test-build cache, and test-run cache are all involved, out-of-order execution (if serialization were applied per job) would wreak all sorts of havoc
# so we serialize on the entire workflow
concurrency:
group: build-number-increment # serialize for build num cache access
group: serialize-workflow
strategy:
fail-fast: false
runs-on: 'macos-15'
steps:
- run: echo "No other contending jobs are running now...Build is ready to start"
- run: echo "No other contending jobs are running now..."
- name: Set short commit hash
id: commit-id
run: |
@@ -66,7 +68,7 @@ jobs:
build:
name: Build SideStore - ${{ inputs.release_tag }}
needs: serialize
needs: common
strategy:
fail-fast: false
matrix:
@@ -88,6 +90,7 @@ jobs:
uses: actions/checkout@v4
with:
submodules: recursive
fetch-depth: 0
- name: Install dependencies - ldid & xcbeautify
run: |
@@ -157,7 +160,7 @@ jobs:
build_num=$(echo "${{ steps.version.outputs.version }}" | sed -E 's/.*\.([0-9]+)\+.*/\1/')
# Combine them into the final output
MARKETING_VERSION="${version}-${date}.${build_num}+${{ needs.serialize.outputs.short-commit }}"
MARKETING_VERSION="${version}-${date}.${build_num}+${{ needs.common.outputs.short-commit }}"
echo "MARKETING_VERSION=$MARKETING_VERSION" >> $GITHUB_ENV
echo "MARKETING_VERSION=$MARKETING_VERSION" >> $GITHUB_OUTPUT
@@ -177,11 +180,11 @@ jobs:
- name: (Build) Cache Build
uses: irgaly/xcode-cache@v1
with:
key: xcode-cache-deriveddata-build-${{ github.sha }}
restore-keys: xcode-cache-deriveddata-build-
swiftpm-cache-key: xcode-cache-sourcedata-build-${{ github.sha }}
key: xcode-cache-deriveddata-build-${{ github.ref_name }}-${{ github.sha }}
restore-keys: xcode-cache-deriveddata-build-${{ github.ref_name }}-
swiftpm-cache-key: xcode-cache-sourcedata-build-${{ github.ref_name }}-${{ github.sha }}
swiftpm-cache-restore-keys: |
xcode-cache-sourcedata-build-
xcode-cache-sourcedata-build-${{ github.ref_name }}-
- name: (Build) Restore Pods from Cache (Exact match)
id: pods-restore
@@ -191,7 +194,7 @@ jobs:
./Podfile.lock
./Pods/
./AltStore.xcworkspace/
key: pods-cache-build-${{ hashFiles('Podfile') }}
key: pods-cache-build-${{ github.ref_name }}-${{ hashFiles('Podfile') }}
# restore-keys: | # commented out to strictly check cache for this particular podfile
# pods-cache-
@@ -204,7 +207,7 @@ jobs:
./Podfile.lock
./Pods/
./AltStore.xcworkspace/
key: pods-cache-build-
key: pods-cache-build-${{ github.ref_name }}-
- name: (Build) Install CocoaPods
@@ -219,7 +222,7 @@ jobs:
./Podfile.lock
./Pods/
./AltStore.xcworkspace/
key: pods-cache-build-${{ hashFiles('Podfile') }}
key: pods-cache-build-${{ github.ref_name }}-${{ hashFiles('Podfile') }}
- name: (Build) Clean previous build artifacts
# using 'tee' to intercept stdout and log for detailed build-log
@@ -322,21 +325,70 @@ jobs:
name: SideStore-${{ steps.version.outputs.version }}-dSYMs.zip
path: SideStore.dSYMs.zip
- name: Zip beta-beta-build-num & update_apps.py
run: |
zip -r -9 ./beta-build-num.zip ./SideStore/beta-build-num update_apps.py
- name: Upload beta-build-num artifact
- name: Keep rolling the build numbers for each successful build
if: ${{ inputs.is_beta }}
run: |
pushd SideStore/beta-build-num/
echo "Configure Git user (committer details)"
git config user.name "GitHub Actions"
git config user.email "github-actions@github.com"
echo "Adding files to commit"
git add --verbose build_number.txt
git commit -m " - updated for ${{ inputs.release_tag }} - ${{ needs.common.outputs.short-commit }} deployment" || echo "No changes to commit"
echo "Pushing to remote repo"
git push --verbose
popd
- name: Get last successful commit
id: get_last_commit
run: |
# Try to get the last successful workflow run commit
LAST_SUCCESS_SHA=$(gh run list --branch "${{ github.ref_name }}" --status success --json headSha --jq '.[0].headSha')
echo "LAST_SUCCESS_SHA=$LAST_SUCCESS_SHA" >> $GITHUB_OUTPUT
echo "LAST_SUCCESS_SHA=$LAST_SUCCESS_SHA" >> $GITHUB_ENV
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Create release notes
run: |
LAST_SUCCESS_SHA=${{ steps.get_last_commit.outputs.LAST_SUCCESS_SHA}}
echo "Last successful commit SHA: $LAST_SUCCESS_SHA"
FROM_COMMIT=$LAST_SUCCESS_SHA
# Check if we got a valid SHA
if [ -z "$LAST_SUCCESS_SHA" ] || [ "$LAST_SUCCESS_SHA" = "null" ]; then
echo "No successful run found, using initial commit of branch"
# Get the first commit of the branch (initial commit)
FROM_COMMIT=$(git rev-list --max-parents=0 HEAD)
fi
python3 update_release_notes.py $FROM_COMMIT ${{ inputs.release_tag }} ${{ github.ref_name }}
# cat release-notes.md
- name: Upload release-notes.md
uses: actions/upload-artifact@v4
with:
name: beta-build-num-${{ steps.version.outputs.version }}.zip
path: beta-build-num.zip
name: release-notes-${{ needs.common.outputs.short-commit }}.md
path: release-notes.md
- name: Upload update_release_notes.py
uses: actions/upload-artifact@v4
with:
name: update_release_notes-${{ needs.common.outputs.short-commit }}.py
path: update_release_notes.py
- name: Upload update_apps.py
uses: actions/upload-artifact@v4
with:
name: update_apps-${{ needs.common.outputs.short-commit }}.py
path: update_apps.py
tests-build:
name: Tests-Build SideStore - ${{ inputs.release_tag }}
needs: serialize
needs: common
strategy:
fail-fast: false
matrix:
@@ -363,11 +415,11 @@ jobs:
- name: (Tests-Build) Cache Build
uses: irgaly/xcode-cache@v1
with:
key: xcode-cache-deriveddata-test-${{ github.sha }}
restore-keys: xcode-cache-deriveddata-test-
swiftpm-cache-key: xcode-cache-sourcedata-test-${{ github.sha }}
key: xcode-cache-deriveddata-test-${{ github.ref_name }}-${{ github.sha }}
restore-keys: xcode-cache-deriveddata-test-${{ github.ref_name }}-
swiftpm-cache-key: xcode-cache-sourcedata-test-${{ github.ref_name }}-${{ github.sha }}
swiftpm-cache-restore-keys: |
xcode-cache-sourcedata-test-
xcode-cache-sourcedata-test-${{ github.ref_name }}-
- name: (Tests-Build) Restore Pods from Cache (Exact match)
id: pods-restore
@@ -377,7 +429,7 @@ jobs:
./Podfile.lock
./Pods/
./AltStore.xcworkspace/
key: pods-cache-test-${{ hashFiles('Podfile') }}
key: pods-cache-test-${{ github.ref_name }}-${{ hashFiles('Podfile') }}
- name: (Tests-Build) Restore Pods from Cache (Last Available)
if: ${{ steps.pods-restore.outputs.cache-hit != 'true' }}
@@ -388,7 +440,7 @@ jobs:
./Podfile.lock
./Pods/
./AltStore.xcworkspace/
key: pods-cache-test-
key: pods-cache-test-${{ github.ref_name }}-
- name: (Tests-Build) Install CocoaPods
run: pod install
@@ -401,7 +453,7 @@ jobs:
./Podfile.lock
./Pods/
./AltStore.xcworkspace/
key: pods-cache-test-${{ hashFiles('Podfile') }}
key: pods-cache-test-${{ github.ref_name }}-${{ hashFiles('Podfile') }}
- name: (Tests-Build) Clean previous build artifacts
run: |
@@ -467,12 +519,12 @@ jobs:
if: always() && steps.encrypt-test-log.outputs.encrypted == 'true'
uses: actions/upload-artifact@v4
with:
name: encrypted-tests-build-logs-${{ needs.serialize.outputs.short-commit }}.zip
name: encrypted-tests-build-logs-${{ needs.common.outputs.short-commit }}.zip
path: encrypted-tests-build-logs.zip
tests-run:
name: Tests-Run SideStore - ${{ inputs.release_tag }}
needs: [serialize, tests-build]
needs: [common, tests-build]
strategy:
fail-fast: false
matrix:
@@ -487,11 +539,10 @@ jobs:
with:
submodules: recursive
- name: Boot Simulator for testing
- name: Boot Simulator async(nohup) for testing
run: |
mkdir -p build/logs
make -B boot-sim-async | tee -a build/logs/tests-run.log
exit ${PIPESTATUS[0]}
nohup make -B boot-sim-async </dev/null >> build/logs/tests-run.log 2>&1 &
- name: Setup Xcode
uses: maxim-lobanov/setup-xcode@v1.6.0
@@ -501,11 +552,11 @@ jobs:
- name: (Tests-Run) Cache Build
uses: irgaly/xcode-cache@v1
with:
key: xcode-cache-deriveddata-test-${{ github.sha }}
restore-keys: xcode-cache-deriveddata-test-
swiftpm-cache-key: xcode-cache-sourcedata-test-${{ github.sha }}
key: xcode-cache-deriveddata-test-${{ github.ref_name }}-${{ github.sha }}
restore-keys: xcode-cache-deriveddata-test-${{ github.ref_name }}-
swiftpm-cache-key: xcode-cache-sourcedata-test-${{ github.ref_name }}-${{ github.sha }}
swiftpm-cache-restore-keys: |
xcode-cache-sourcedata-test-
xcode-cache-sourcedata-test-${{ github.ref_name }}-
- name: (Tests-Run) Restore Pods from Cache (Exact match)
id: pods-restore
@@ -515,7 +566,7 @@ jobs:
./Podfile.lock
./Pods/
./AltStore.xcworkspace/
key: pods-cache-test-${{ hashFiles('Podfile') }}
key: pods-cache-test-${{ github.ref_name }}-${{ hashFiles('Podfile') }}
- name: (Tests-Run) Restore Pods from Cache (Last Available)
if: ${{ steps.pods-restore.outputs.cache-hit != 'true' }}
@@ -526,7 +577,7 @@ jobs:
./Podfile.lock
./Pods/
./AltStore.xcworkspace/
key: pods-cache-test-
key: pods-cache-test-${{ github.ref_name }}-
- name: (Tests-Run) Install CocoaPods
run: pod install
@@ -539,7 +590,7 @@ jobs:
./Podfile.lock
./Pods/
./AltStore.xcworkspace/
key: pods-cache-test-${{ hashFiles('Podfile') }}
key: pods-cache-test-${{ github.ref_name }}-${{ hashFiles('Podfile') }}
- name: (Tests-Run) Clean previous build artifacts
run: |
@@ -625,7 +676,7 @@ jobs:
if: always() && steps.encrypt-test-log.outputs.encrypted == 'true'
uses: actions/upload-artifact@v4
with:
name: encrypted-tests-run-logs-${{ needs.serialize.outputs.short-commit }}.zip
name: encrypted-tests-run-logs-${{ needs.common.outputs.short-commit }}.zip
path: encrypted-tests-run-logs.zip
- name: Print tests-recording.log contents (if exists)
@@ -655,7 +706,7 @@ jobs:
if: ${{ always() && steps.check-recording.outputs.found == 'true' }}
uses: actions/upload-artifact@v4
with:
name: tests-recording-${{ needs.serialize.outputs.short-commit }}.mp4
name: tests-recording-${{ needs.common.outputs.short-commit }}.mp4
path: tests-recording.mp4
- name: Zip test-results
@@ -664,14 +715,14 @@ jobs:
- name: Upload Test Artifacts
uses: actions/upload-artifact@v4
with:
name: test-results-${{ needs.serialize.outputs.short-commit }}.zip
name: test-results-${{ needs.common.outputs.short-commit }}.zip
path: test-results.zip
deploy:
name: Deploy SideStore - ${{ inputs.release_tag }}
runs-on: macos-15
# needs: [serialize, build]
needs: [serialize, build, tests-build, tests-run]
# needs: [common, build]
needs: [common, build, tests-build, tests-run]
steps:
- name: Download IPA artifact
uses: actions/download-artifact@v4
@@ -691,32 +742,45 @@ jobs:
- name: Download encrypted-tests-build-logs artifact
uses: actions/download-artifact@v4
with:
name: encrypted-tests-build-logs-${{ needs.serialize.outputs.short-commit }}.zip
name: encrypted-tests-build-logs-${{ needs.common.outputs.short-commit }}.zip
- name: Download encrypted-tests-run-logs artifact
uses: actions/download-artifact@v4
with:
name: encrypted-tests-run-logs-${{ needs.serialize.outputs.short-commit }}.zip
name: encrypted-tests-run-logs-${{ needs.common.outputs.short-commit }}.zip
- name: Download tests-recording artifact
uses: actions/download-artifact@v4
with:
name: tests-recording-${{ needs.serialize.outputs.short-commit }}.mp4
name: tests-recording-${{ needs.common.outputs.short-commit }}.mp4
- name: Download test-results artifact
uses: actions/download-artifact@v4
with:
name: test-results-${{ needs.serialize.outputs.short-commit }}.zip
name: test-results-${{ needs.common.outputs.short-commit }}.zip
- name: Download beta-build-num artifact
if: ${{ inputs.is_beta }}
- name: Download release-notes.md
uses: actions/download-artifact@v4
with:
name: beta-build-num-${{ needs.build.outputs.version }}.zip
- name: Un-Zip beta-beta-build-num & update_apps.py
run: |
unzip beta-build-num.zip -d .
name: release-notes-${{ needs.common.outputs.short-commit }}.md
- name: Download update_release_notes.py
uses: actions/download-artifact@v4
with:
name: update_release_notes-${{ needs.common.outputs.short-commit }}.py
- name: Download update_apps.py
uses: actions/download-artifact@v4
with:
name: update_apps-${{ needs.common.outputs.short-commit }}.py
- name: Read release notes
id: release_notes
run: |
CONTENT=$(python3 update_release_notes.py --retrieve ${{ inputs.release_tag }})
echo "content<<EOF" >> $GITHUB_OUTPUT
echo "$CONTENT" >> $GITHUB_OUTPUT
echo "EOF" >> $GITHUB_OUTPUT
- name: List files before upload
run: |
@@ -754,22 +818,7 @@ jobs:
Commit SHA: `${{ github.sha }}`
Version: `${{ needs.build.outputs.version }}`
- name: Publish to SideStore/beta-build-num
if: ${{ inputs.is_beta }}
run: |
pushd SideStore/beta-build-num/
echo "Configure Git user (committer details)"
git config user.name "GitHub Actions"
git config user.email "github-actions@github.com"
echo "Adding files to commit"
git add --verbose build_number.txt
git commit -m " - updated for ${{ inputs.release_tag }} - ${{ needs.serialize.outputs.short-commit }} deployment" || echo "No changes to commit"
echo "Pushing to remote repo"
git push --verbose
popd
${{ steps.release_notes.outputs.content }}
- name: Get formatted date
run: |
@@ -801,8 +850,11 @@ jobs:
LOCALIZED_DESCRIPTION=$(cat <<EOF
This is release for:
- version: "${{ needs.build.outputs.version }}"
- revision: "${{ needs.serialize.outputs.short-commit }}"
- revision: "${{ needs.common.outputs.short-commit }}"
- timestamp: "${{ steps.date.outputs.date }}"
Release Notes:
${{ steps.release_notes.outputs.content }}
EOF
)
@@ -851,7 +903,7 @@ jobs:
# Commit changes and push using SSH
git add --verbose ./_includes/source.json
git commit -m " - updated for ${{ needs.serialize.outputs.short-commit }} deployment" || echo "No changes to commit"
git commit -m " - updated for ${{ needs.common.outputs.short-commit }} deployment" || echo "No changes to commit"
git push --verbose
popd

View File

@@ -242,6 +242,7 @@
<key>public.filename-extension</key>
<array>
<string>mobiledevicepairing</string>
<string>mobiledevicepair</string>
</array>
</dict>
</dict>

337
update_release_notes.py Normal file
View File

@@ -0,0 +1,337 @@
#!/usr/bin/env python3
import subprocess
import sys
import os
import re
# Commit authors to exclude from contributor lists (junk values that are not real names).
IGNORED_AUTHORS = [
    "=", # probably someone used an equalTo ?! # anyway we are ignoring it!
]
# Markdown heading level that introduces a per-release-tag section (e.g. "### 1.2.3").
TAG_MARKER = "###"
# Markdown heading level for sub-sections inside a tag (e.g. "#### What's Changed").
HEADER_MARKER = "####"
def run_command(cmd):
    """Execute `cmd` through the shell and return its stdout, whitespace-stripped.

    Raises subprocess.CalledProcessError when the command exits non-zero
    (same contract as subprocess.check_output). stderr is left attached to
    the parent process, matching the original behavior.
    """
    completed = subprocess.run(
        cmd, shell=True, check=True, stdout=subprocess.PIPE, text=True
    )
    return completed.stdout.strip()
def get_head_commit():
    """Return the full SHA of the currently checked-out commit (HEAD)."""
    return run_command("git rev-parse HEAD")
def get_commit_messages(last_successful, current="HEAD"):
    """Return the subject lines of commits in `last_successful..current`.

    Newest-first, one entry per commit; an empty range yields an empty list.
    """
    output = run_command(f"git log {last_successful}..{current} --pretty=format:%s")
    return output.splitlines() if output else []
def get_authors_in_range(commit_range, fmt="%an"):
    """Return the set of commit authors in the given commit range.

    commit_range: any `git log` revision expression (e.g. "A..B" or a single rev).
    fmt: git pretty-format placeholder for the author field (default "%an").

    Whitespace-only lines are discarded and names in IGNORED_AUTHORS are
    filtered out. Returns an empty set for an empty range.
    """
    output = run_command(f"git log {commit_range} --pretty=format:{fmt}")
    if not output:
        return set()
    # Set comprehension directly; the original built a set, re-wrapped it in
    # set(), and subtracted — the extra conversions were redundant.
    authors = {line.strip() for line in output.splitlines() if line.strip()}
    return authors - set(IGNORED_AUTHORS)
def get_first_commit_of_repo():
    """Return the repository's root commit SHA.

    `git rev-list --max-parents=0 HEAD` can list several root commits (e.g.
    after an unrelated-history merge); only the first one printed is returned,
    as in the original implementation.
    """
    roots = run_command("git rev-list --max-parents=0 HEAD").splitlines()
    return roots[0]
def get_branch():
    """Best-effort base commit for the current branch.

    Resolves the default remote branch via `origin/HEAD` and returns the
    merge-base of HEAD with it. If any step fails (no remote, unset
    origin/HEAD, ...), falls back to the repository's first commit.
    """
    try:
        default_branch = run_command(
            "git rev-parse --abbrev-ref origin/HEAD"
        ).split("/")[-1]
        return run_command(f"git merge-base HEAD origin/{default_branch}")
    except Exception:
        # Deliberately broad catch: any failure simply means "use the root commit".
        return get_first_commit_of_repo()
def get_repo_url():
    """Return the 'origin' remote URL normalized to an https:// web URL.

    SSH-style URLs ("git@host:owner/repo.git") become "https://host/owner/repo";
    a trailing ".git" is dropped in all cases.
    """
    url = run_command("git config --get remote.origin.url")
    if url.startswith("git@"):
        # BUG FIX: only the first ':' (the host/path separator) may become '/'.
        # The original did `.replace("git@", "https://").replace(":", "/")`,
        # which also replaced the ':' inside the just-inserted "https://",
        # producing a broken "https///host/owner/repo".
        url = "https://" + url[len("git@"):].replace(":", "/", 1)
    if url.endswith(".git"):
        url = url[:-4]
    return url
def format_contributor(author):
    """Render an author name as a GitHub-style @mention.

    Names already starting with '@' pass through unchanged; otherwise the
    first whitespace-separated token is used, e.g. "Jane Doe" -> "@Jane".
    """
    return author if author.startswith("@") else "@" + author.split()[0]
def format_commit_message(msg):
    """Normalize one commit subject into a markdown bullet ("- <text>").

    Leading whitespace is dropped, and any existing leading '-' (plus the
    whitespace around it) is removed first so bullets are never doubled.
    Trailing whitespace in the subject is preserved, as before.
    """
    text = msg.lstrip()
    if text.startswith("-"):
        text = text[1:].strip()
    return "- " + text
def generate_release_notes(last_successful, tag, branch):
    """Build the markdown release-notes section for `tag`.

    last_successful: SHA of the last successfully built commit (range start).
    tag: release tag used as the "### <tag>" section heading.
    branch: retained for interface compatibility; no longer used to detect
        new contributors (see BUG FIX note below).

    Returns the new section text: a "What's Changed" commit list, an optional
    "New Contributors" list, and a "Full Changelog" compare link.
    """
    current_commit = get_head_commit()
    messages = get_commit_messages(last_successful, current_commit)

    # Tag header + What's Changed (always present)
    new_section = f"{TAG_MARKER} {tag}\n"
    new_section += f"{HEADER_MARKER} What's Changed\n"
    if not messages or last_successful == current_commit:
        new_section += "- Nothing...\n"
    else:
        for msg in messages:
            new_section += f"{format_commit_message(msg)}\n"

    # BUG FIX: previous authors must come from history *up to* last_successful.
    # The original compared against `git log <branch>`, which (when `branch`
    # is the branch being built) already contains the new commits, so the set
    # difference below was always empty and new contributors were never listed.
    all_previous_authors = get_authors_in_range(last_successful)
    recent_authors = get_authors_in_range(f"{last_successful}..{current_commit}")
    new_contributors = recent_authors - all_previous_authors
    if new_contributors:
        new_section += f"\n{HEADER_MARKER} New Contributors\n"
        for author in sorted(new_contributors):
            new_section += f"- {format_contributor(author)} made their first contribution\n"

    # Full Changelog link (only when there are changes)
    if messages and last_successful != current_commit:
        repo_url = get_repo_url()
        changelog_link = f"{repo_url}/compare/{last_successful}...{current_commit}"
        new_section += f"\n{HEADER_MARKER} Full Changelog: [{last_successful[:8]}...{current_commit[:8]}]({changelog_link})\n"
    return new_section
def update_release_md(existing_content, new_section, tag):
    """
    Splice `new_section` (the notes for `tag`) into the existing release-notes
    markdown and return the combined text.

    Update input based on rules:
    1. If tag exists, update it
    2. Special tags (alpha, beta, nightly) stay at the top in that order
    3. Numbered tags follow special tags
    4. Remove duplicate tags
    5. Insert new numbered tags at the top of the numbered section

    Internals: `existing_content` is split with a capturing re.split on
    "### ..." heading lines, so headings land at odd indices of `sections`
    and each heading's body at the following even index.
    """
    tag_lower = tag.lower()
    is_special_tag = tag_lower in ["alpha", "beta", "nightly"]
    # Parse the existing content into sections
    if not existing_content:
        return new_section
    # Split the content into sections by headers
    pattern = fr'(^{TAG_MARKER} .*$)'
    sections = re.split(pattern, existing_content, flags=re.MULTILINE)
    # Create a list to store the processed content
    processed_sections = []
    # Track special tag positions and whether tag was found
    special_tags_map = {"alpha": False, "beta": False, "nightly": False}
    last_special_index = -1
    tag_found = False
    numbered_tag_index = -1
    i = 0
    while i < len(sections):
        # Check if this is a header
        if i % 2 == 1: # Headers are at odd indices
            header = sections[i]
            # NOTE(review): `content` is fetched only to pair with the header;
            # it is never read — the body is appended on the next iteration.
            content = sections[i+1] if i+1 < len(sections) else ""
            # NOTE(review): header[3:] assumes TAG_MARKER is exactly 3 chars
            # ("###") — confirm if the marker ever changes.
            current_tag = header[3:].strip().lower()
            # Check for special tags to track their positions
            if current_tag in special_tags_map:
                special_tags_map[current_tag] = True
                last_special_index = len(processed_sections)
            # Check if this is the first numbered tag
            elif re.match(r'^[0-9]+\.[0-9]+(\.[0-9]+)?$', current_tag) and numbered_tag_index == -1:
                numbered_tag_index = len(processed_sections)
            # If this is the tag we're updating, mark it but don't add yet
            if current_tag == tag_lower:
                if not tag_found: # Replace the first occurrence
                    tag_found = True
                    i += 2 # Skip the content
                    continue
                else: # Skip duplicate occurrences
                    i += 2
                    continue
        # Add the current section
        processed_sections.append(sections[i])
        i += 1
    # Determine where to insert the new section
    if tag_found:
        # We need to determine the insertion point
        if is_special_tag:
            # For special tags, insert after last special tag or at beginning
            # NOTE(review): `desired_index` is computed but never used below.
            desired_index = -1
            for pos, t in enumerate(["alpha", "beta", "nightly"]):
                if t == tag_lower:
                    desired_index = pos
            # Find position to insert
            insert_pos = 0
            for pos, t in enumerate(["alpha", "beta", "nightly"]):
                if t == tag_lower:
                    break
                if special_tags_map[t]:
                    # NOTE(review): .index() requires the stored header to be
                    # exactly "### <tag>" (lowercase, no trailing text) — a
                    # "### Beta" heading would raise ValueError; confirm inputs.
                    insert_pos = processed_sections.index(f"{TAG_MARKER} {t}")
                    insert_pos += 2 # Move past the header and content
            # Insert at the determined position
            processed_sections.insert(insert_pos, new_section)
            if insert_pos > 0 and not processed_sections[insert_pos-1].endswith('\n\n'):
                processed_sections.insert(insert_pos, '\n\n')
        else:
            # For numbered tags, insert after special tags but before other numbered tags
            insert_pos = 0
            if last_special_index >= 0:
                # Insert after the last special tag
                insert_pos = last_special_index + 2 # +2 to skip header and content
            processed_sections.insert(insert_pos, new_section)
            if insert_pos > 0 and not processed_sections[insert_pos-1].endswith('\n\n'):
                processed_sections.insert(insert_pos, '\n\n')
    else:
        # Tag doesn't exist yet, determine insertion point
        if is_special_tag:
            # For special tags, maintain alpha, beta, nightly order
            special_tags = ["alpha", "beta", "nightly"]
            insert_pos = 0
            # NOTE(review): this loop reuses `i` from the while-loop above;
            # harmless here, but shadowing to watch during future edits.
            for i, t in enumerate(special_tags):
                if t == tag_lower:
                    # Check if preceding special tags exist
                    for prev_tag in special_tags[:i]:
                        if special_tags_map[prev_tag]:
                            # Find the position after this tag
                            prev_index = processed_sections.index(f"{TAG_MARKER} {prev_tag}")
                            insert_pos = prev_index + 2 # Skip header and content
            processed_sections.insert(insert_pos, new_section)
            if insert_pos > 0 and not processed_sections[insert_pos-1].endswith('\n\n'):
                processed_sections.insert(insert_pos, '\n\n')
        else:
            # For numbered tags, insert after special tags but before other numbered tags
            insert_pos = 0
            if last_special_index >= 0:
                # Insert after the last special tag
                insert_pos = last_special_index + 2 # +2 to skip header and content
            processed_sections.insert(insert_pos, new_section)
            if insert_pos > 0 and not processed_sections[insert_pos-1].endswith('\n\n'):
                processed_sections.insert(insert_pos, '\n\n')
    # Combine sections ensuring proper spacing
    result = ""
    for i, section in enumerate(processed_sections):
        if i > 0 and section.startswith(f"{TAG_MARKER} "):
            # Ensure single blank line before headers
            if not result.endswith("\n\n"):
                result = result.rstrip("\n") + "\n\n"
        result += section
    return result.rstrip() + "\n"
def retrieve_tag_content(tag, file_path):
    """Return the stripped body of the "### <tag>" section in file_path.

    The heading match is case-insensitive and must occupy a whole line.
    Returns "" when the file does not exist or the tag is not found.
    """
    if not os.path.exists(file_path):
        return ""
    with open(file_path, "r") as f:
        content = f.read()

    header_re = re.compile(
        fr'^{TAG_MARKER} ' + re.escape(tag) + r'$',
        re.MULTILINE | re.IGNORECASE,
    )
    match = header_re.search(content)
    if match is None:
        return ""

    # Body begins just past the heading line (skip its trailing newline).
    start = match.end()
    if start < len(content) and content[start] == "\n":
        start += 1

    # Body ends at the next tag heading, or at end-of-file.
    following = re.search(fr'^{TAG_MARKER} ', content[start:], re.MULTILINE)
    if following:
        return content[start:start + following.start()].strip()
    return content[start:].strip()
def main():
    """CLI entry point.

    Two modes:
      python release.py <last_successful_commit> [tag] [branch]
          generate/update release-notes.md and print the new section
      python release.py --retrieve <tagname>
          print the stored notes for an existing tag
    """
    release_file = "release-notes.md"
    args = sys.argv[1:]

    if not args:
        print("Usage: python release.py <last_successful_commit> [tag] [branch]")
        print(" or: python release.py --retrieve <tagname>")
        sys.exit(1)

    # Retrieval mode: look up and print an existing tag's section.
    if args[0] == "--retrieve":
        if len(args) < 2:
            print("Error: Missing tag name after --retrieve")
            sys.exit(1)
        tag_content = retrieve_tag_content(args[1], file_path=release_file)
        if tag_content:
            print(tag_content)
        else:
            print(f"Tag '{args[1]}' not found in '{release_file}'")
        return

    # Generation mode: missing tag/branch fall back to HEAD / env / merge-base.
    last_successful = args[0]
    tag = args[1] if len(args) > 1 else get_head_commit()
    branch = args[2] if len(args) > 2 else (os.environ.get("GITHUB_REF") or get_branch())

    new_section = generate_release_notes(last_successful, tag, branch)

    existing_content = ""
    if os.path.exists(release_file):
        with open(release_file, "r") as f:
            existing_content = f.read()

    with open(release_file, "w") as f:
        f.write(update_release_md(existing_content, new_section, tag))

    # Echo the freshly generated section for CI logs.
    print(new_section)


if __name__ == "__main__":
    main()