CI: full rewrite - moved logic into ci.py and kept workflow scripts mostly dummy

This commit is contained in:
mahee96
2026-02-23 17:06:31 +05:30
parent 8fc38300d5
commit 97ee0b2dac
18 changed files with 1103 additions and 656 deletions

View File

@@ -1,28 +1,47 @@
name: Alpha SideStore build
name: Alpha SideStore Build
on:
push:
branches:
- develop-alpha
branches: [develop-alpha]
# cancel duplicate run if from same branch
concurrency:
group: ${{ github.ref }}
cancel-in-progress: true
jobs:
Reusable-build:
uses: ./.github/workflows/reusable-sidestore-build.yml
with:
# bundle_id: "com.SideStore.SideStore.Alpha"
bundle_id: "com.SideStore.SideStore"
# bundle_id_suffix: ".Alpha"
is_beta: true
publish: ${{ vars.PUBLISH_ALPHA_UPDATES == 'true' }}
is_shared_build_num: false
release_tag: "alpha"
release_name: "Alpha"
upstream_tag: "nightly"
upstream_name: "Nightly"
secrets:
CROSS_REPO_PUSH_KEY: ${{ secrets.CROSS_REPO_PUSH_KEY }}
BUILD_LOG_ZIP_PASSWORD: ${{ secrets.BUILD_LOG_ZIP_PASSWORD }}
build:
runs-on: macos-15
steps:
- uses: actions/checkout@v4
with:
submodules: recursive
fetch-depth: 0
- run: brew install ldid xcbeautify
- name: Shared
id: shared
run: python3 scripts/ci/workflow.py shared
- name: Beta bump
env:
RELEASE_CHANNEL: alpha
run: python3 scripts/ci/workflow.py bump-beta
- name: Version
id: version
run: python3 scripts/ci/workflow.py version
- name: Build
run: python3 scripts/ci/workflow.py build
- name: Encrypt logs
env:
BUILD_LOG_ZIP_PASSWORD: ${{ secrets.BUILD_LOG_ZIP_PASSWORD }}
run: python3 scripts/ci/workflow.py encrypt-build
- uses: actions/upload-artifact@v4
with:
name: SideStore-${{ steps.version.outputs.version }}.ipa
path: SideStore.ipa

View File

@@ -1,82 +1,138 @@
name: Nightly SideStore Build
on:
push:
branches:
- develop
branches: [develop]
schedule:
- cron: '0 0 * * *' # Runs every night at midnight UTC
workflow_dispatch: # Allows manual trigger
- cron: '0 0 * * *'
workflow_dispatch:
# cancel duplicate run if from same branch
concurrency:
group: ${{ github.ref }}
cancel-in-progress: true
jobs:
check-changes:
if: github.event_name == 'schedule'
runs-on: ubuntu-latest
outputs:
has_changes: ${{ steps.check.outputs.has_changes }}
build:
runs-on: macos-26
steps:
- name: Checkout repository
uses: actions/checkout@v4
- uses: actions/checkout@v4
with:
fetch-depth: 0 # Ensure full history
submodules: recursive
fetch-depth: 0
- name: Get last successful workflow run
id: get_last_success
- run: brew install ldid xcbeautify
- name: Restore Xcode/SwiftPM Cache (Exact match)
id: xcode-cache-restore
uses: actions/cache/restore@v3
with:
path: |
~/Library/Developer/Xcode/DerivedData
~/Library/Caches/org.swift.swiftpm
key: xcode-cache-build-${{ github.ref_name }}-${{ github.sha }}
- name: Restore Xcode/SwiftPM Cache (Last Available)
uses: actions/cache/restore@v3
with:
path: |
~/Library/Developer/Xcode/DerivedData
~/Library/Caches/org.swift.swiftpm
key: xcode-cache-build-${{ github.ref_name }}-
# --------------------------------------------------
# runtime env setup
# --------------------------------------------------
- name: Short Commit SHA
run: |
LAST_SUCCESS=$(gh run list --workflow "Nightly SideStore Build" --json createdAt,conclusion \
--jq '[.[] | select(.conclusion=="success")][0].createdAt' || echo "")
echo "Last successful run: $LAST_SUCCESS"
echo "last_success=$LAST_SUCCESS" >> $GITHUB_ENV
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
echo "SHORT_COMMIT=$(python3 scripts/ci/workflow.py commid-id)" >> $GITHUB_ENV
- name: Check for new commits since last successful build
id: check
- name: Version
run: |
if [ -n "$LAST_SUCCESS" ]; then
NEW_COMMITS=$(git rev-list --count --since="$LAST_SUCCESS" origin/develop)
COMMIT_LOG=$(git log --since="$LAST_SUCCESS" --pretty=format:"%h %s" origin/develop)
else
NEW_COMMITS=1
COMMIT_LOG=$(git log -n 10 --pretty=format:"%h %s" origin/develop) # Show last 10 commits if no history
fi
echo "VERSION=$(python3 scripts/ci/workflow.py version)" >> $GITHUB_ENV
echo "Has changes: $NEW_COMMITS"
echo "New commits since last successful build:"
echo "$COMMIT_LOG"
# --------------------------------------------------
# build and test
# --------------------------------------------------
if [ "$NEW_COMMITS" -gt 0 ]; then
echo "has_changes=true" >> $GITHUB_OUTPUT
else
echo "has_changes=false" >> $GITHUB_OUTPUT
fi
- name: Clean previous build artifacts
if: contains(github.event.head_commit.message, '[--clean-build]')
run: |
python3 scripts/ci/workflow.py clean
python3 scripts/ci/workflow.py clean-derived-data
python3 scripts/ci/workflow.py clean-spm-cache
- name: Build
run: python3 scripts/ci/workflow.py build
- name: Tests Build
if: ${{ vars.ENABLE_TESTS == '1' && vars.ENABLE_TESTS_BUILD == '1' }}
run: python3 scripts/ci/workflow.py tests-build
- name: Save Xcode & SwiftPM Cache
if: ${{ steps.xcode-cache-restore.outputs.cache-hit != 'true' }}
uses: actions/cache/save@v3
with:
path: |
~/Library/Developer/Xcode/DerivedData
~/Library/Caches/org.swift.swiftpm
key: xcode-cache-build-${{ github.ref_name }}-${{ github.sha }}
- name: Tests Run
if: ${{ vars.ENABLE_TESTS == '1' && vars.ENABLE_TESTS_RUN == '1' }}
run: python3 scripts/ci/workflow.py tests-run
- name: Encrypt build logs
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
LAST_SUCCESS: ${{ env.last_success }}
BUILD_LOG_ZIP_PASSWORD: ${{ secrets.BUILD_LOG_ZIP_PASSWORD }}
run: python3 scripts/ci/workflow.py encrypt-build
Reusable-build:
if: |
always() &&
(github.event_name == 'push' ||
(github.event_name == 'schedule' && needs.check-changes.result == 'success' && needs.check-changes.outputs.has_changes == 'true'))
needs: check-changes
uses: ./.github/workflows/reusable-sidestore-build.yml
with:
# bundle_id: "com.SideStore.SideStore.Nightly"
bundle_id: "com.SideStore.SideStore"
# bundle_id_suffix: ".Nightly"
is_beta: true
publish: ${{ vars.PUBLISH_NIGHTLY_UPDATES == 'true' }}
is_shared_build_num: false
release_tag: "nightly"
release_name: "Nightly"
upstream_tag: "0.5.10"
upstream_name: "Stable"
secrets:
CROSS_REPO_PUSH_KEY: ${{ secrets.CROSS_REPO_PUSH_KEY }}
BUILD_LOG_ZIP_PASSWORD: ${{ secrets.BUILD_LOG_ZIP_PASSWORD }}
- name: Encrypt tests-build logs
if: ${{ vars.ENABLE_TESTS == '1' && vars.ENABLE_TESTS_BUILD == '1' }}
env:
BUILD_LOG_ZIP_PASSWORD: ${{ secrets.BUILD_LOG_ZIP_PASSWORD }}
run: python3 scripts/ci/workflow.py encrypt-tests-build
- name: Encrypt tests-run logs
if: ${{ vars.ENABLE_TESTS == '1' && vars.ENABLE_TESTS_RUN == '1' }}
env:
BUILD_LOG_ZIP_PASSWORD: ${{ secrets.BUILD_LOG_ZIP_PASSWORD }}
run: python3 scripts/ci/workflow.py encrypt-tests-run
# --------------------------------------------------
# artifacts
# --------------------------------------------------
- uses: actions/upload-artifact@v4
with:
name: encrypted-build-logs-${{ env.VERSION }}.zip
path: encrypted-build-logs.zip
- uses: actions/upload-artifact@v4
if: ${{ vars.ENABLE_TESTS == '1' && vars.ENABLE_TESTS_BUILD == '1' }}
with:
name: encrypted-tests-build-logs-${{ env.SHORT_COMMIT }}.zip
path: encrypted-tests-build-logs.zip
- uses: actions/upload-artifact@v4
if: ${{ vars.ENABLE_TESTS == '1' && vars.ENABLE_TESTS_RUN == '1' }}
with:
name: encrypted-tests-run-logs-${{ env.SHORT_COMMIT }}.zip
path: encrypted-tests-run-logs.zip
- uses: actions/upload-artifact@v4
with:
name: SideStore-${{ env.VERSION }}.ipa
path: SideStore.ipa
- uses: actions/upload-artifact@v4
with:
name: SideStore-${{ env.VERSION }}-dSYMs.zip
path: SideStore.dSYMs.zip
# --------------------------------------------------
- name: Deploy
run: |
python3 scripts/ci/workflow.py deploy nightly $SHORT_COMMIT

28
.github/workflows/obsolete/alpha.yml vendored Normal file
View File

@@ -0,0 +1,28 @@
name: Alpha SideStore build
on:
push:
branches:
- develop-alpha
# cancel duplicate run if from same branch
concurrency:
group: ${{ github.ref }}
cancel-in-progress: true
jobs:
Reusable-build:
uses: ./.github/workflows/reusable-sidestore-build.yml
with:
# bundle_id: "com.SideStore.SideStore.Alpha"
bundle_id: "com.SideStore.SideStore"
# bundle_id_suffix: ".Alpha"
is_beta: true
publish: ${{ vars.PUBLISH_ALPHA_UPDATES == 'true' }}
is_shared_build_num: false
release_tag: "alpha"
release_name: "Alpha"
upstream_tag: "nightly"
upstream_name: "Nightly"
secrets:
CROSS_REPO_PUSH_KEY: ${{ secrets.CROSS_REPO_PUSH_KEY }}
BUILD_LOG_ZIP_PASSWORD: ${{ secrets.BUILD_LOG_ZIP_PASSWORD }}

82
.github/workflows/obsolete/nightly.yml vendored Normal file
View File

@@ -0,0 +1,82 @@
name: Nightly SideStore Build
on:
push:
branches:
- develop
schedule:
- cron: '0 0 * * *' # Runs every night at midnight UTC
workflow_dispatch: # Allows manual trigger
# cancel duplicate run if from same branch
concurrency:
group: ${{ github.ref }}
cancel-in-progress: true
jobs:
check-changes:
if: github.event_name == 'schedule'
runs-on: ubuntu-latest
outputs:
has_changes: ${{ steps.check.outputs.has_changes }}
steps:
- name: Checkout repository
uses: actions/checkout@v4
with:
fetch-depth: 0 # Ensure full history
- name: Get last successful workflow run
id: get_last_success
run: |
LAST_SUCCESS=$(gh run list --workflow "Nightly SideStore Build" --json createdAt,conclusion \
--jq '[.[] | select(.conclusion=="success")][0].createdAt' || echo "")
echo "Last successful run: $LAST_SUCCESS"
echo "last_success=$LAST_SUCCESS" >> $GITHUB_ENV
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Check for new commits since last successful build
id: check
run: |
if [ -n "$LAST_SUCCESS" ]; then
NEW_COMMITS=$(git rev-list --count --since="$LAST_SUCCESS" origin/develop)
COMMIT_LOG=$(git log --since="$LAST_SUCCESS" --pretty=format:"%h %s" origin/develop)
else
NEW_COMMITS=1
COMMIT_LOG=$(git log -n 10 --pretty=format:"%h %s" origin/develop) # Show last 10 commits if no history
fi
echo "Has changes: $NEW_COMMITS"
echo "New commits since last successful build:"
echo "$COMMIT_LOG"
if [ "$NEW_COMMITS" -gt 0 ]; then
echo "has_changes=true" >> $GITHUB_OUTPUT
else
echo "has_changes=false" >> $GITHUB_OUTPUT
fi
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
LAST_SUCCESS: ${{ env.last_success }}
Reusable-build:
if: |
always() &&
(github.event_name == 'push' ||
(github.event_name == 'schedule' && needs.check-changes.result == 'success' && needs.check-changes.outputs.has_changes == 'true'))
needs: check-changes
uses: ./.github/workflows/reusable-sidestore-build.yml
with:
# bundle_id: "com.SideStore.SideStore.Nightly"
bundle_id: "com.SideStore.SideStore"
# bundle_id_suffix: ".Nightly"
is_beta: true
publish: ${{ vars.PUBLISH_NIGHTLY_UPDATES == 'true' }}
is_shared_build_num: false
release_tag: "nightly"
release_name: "Nightly"
upstream_tag: "0.5.10"
upstream_name: "Stable"
secrets:
CROSS_REPO_PUSH_KEY: ${{ secrets.CROSS_REPO_PUSH_KEY }}
BUILD_LOG_ZIP_PASSWORD: ${{ secrets.BUILD_LOG_ZIP_PASSWORD }}

View File

@@ -0,0 +1,258 @@
#!/usr/bin/env python3
import subprocess
import sys
import os
import re
from pathlib import Path
IGNORED_AUTHORS = []
TAG_MARKER = "###"
HEADER_MARKER = "####"
# ----------------------------------------------------------
# helpers
# ----------------------------------------------------------
def run(cmd: str) -> str:
    """Execute `cmd` in a shell and return its stdout, stripped.

    Raises subprocess.CalledProcessError on a nonzero exit status.
    """
    return subprocess.check_output(cmd, shell=True, text=True).strip()


def head_commit():
    """Full SHA of the current HEAD commit."""
    return run("git rev-parse HEAD")


def first_commit():
    """SHA of the repository's root (initial) commit."""
    # `--max-parents=0` can list several roots (e.g. merged histories);
    # take the first one reported.
    return run("git rev-list --max-parents=0 HEAD").splitlines()[0]
def repo_url():
    """Return the HTTPS web URL of the `origin` remote.

    Converts an SSH remote ``git@host:owner/repo.git`` into
    ``https://host/owner/repo``; an HTTPS remote only has its ``.git``
    suffix stripped.
    """
    url = run("git config --get remote.origin.url")
    if url.startswith("git@"):
        # Replace only the single host:path separator. The previous
        # blanket .replace(":", "/") also mangled the colon inside the
        # freshly prepended "https://", producing "https///host/...".
        url = "https://" + url[len("git@"):].replace(":", "/", 1)
    return url.removesuffix(".git")
def commit_messages(start, end="HEAD"):
    # Subject lines of commits in the range (start, end]; [] for an
    # empty range.
    try:
        out = run(f"git log {start}..{end} --pretty=format:%s")
        return out.splitlines() if out else []
    except subprocess.CalledProcessError:
        # `start` unresolvable (e.g. shallow clone or garbage-collected
        # SHA): fall back to the last 5 commits.
        # NOTE(review): this fallback itself raises when HEAD has fewer
        # than 5 ancestors — confirm the CI checkout depth guarantees it.
        fallback = run("git rev-parse HEAD~5")
        return run(f"git log {fallback}..{end} --pretty=format:%s").splitlines()
def authors(range_expr, fmt="%an"):
    # Distinct, non-empty author names (git pretty format `fmt`) found
    # in `range_expr`, minus anyone listed in IGNORED_AUTHORS.
    # Returns an empty set when git rejects the range.
    try:
        out = run(f"git log {range_expr} --pretty=format:{fmt}")
        result = {a.strip() for a in out.splitlines() if a.strip()}
        return result - set(IGNORED_AUTHORS)
    except subprocess.CalledProcessError:
        return set()
def branch_base():
    # Merge-base of HEAD with the remote default branch; falls back to
    # the repository's first commit when origin/HEAD is not configured
    # (common in fresh CI clones).
    try:
        default_ref = run("git rev-parse --abbrev-ref origin/HEAD")
        default_branch = default_ref.split("/")[-1]
        return run(f"git merge-base HEAD origin/{default_branch}")
    except Exception:
        return first_commit()
def fmt_msg(msg):
    """Normalize a commit subject line into a markdown bullet item."""
    text = msg.lstrip()
    # Strip a pre-existing leading dash so bullets are not doubled.
    if text.startswith("-"):
        text = text[1:].strip()
    return f"- {text}"
def fmt_author(author):
    """Render an author name as a GitHub-style @handle (first word only)."""
    if author.startswith("@"):
        return author
    return f"@{author.split()[0]}"
# ----------------------------------------------------------
# release note generation
# ----------------------------------------------------------
def generate_release_notes(last_successful, tag, branch):
    """Build a markdown section for `tag` covering commits since `last_successful`.

    The section contains a "What's Changed" list of commit subjects, a
    "New Contributors" list (authors of the range absent from `branch`'s
    history), and a full-changelog compare link. Returns the section text.
    """
    current = head_commit()
    messages = commit_messages(last_successful, current)
    section = f"{TAG_MARKER} {tag}\n"
    section += f"{HEADER_MARKER} What's Changed\n"
    # Empty or degenerate range → placeholder body.
    if not messages or last_successful == current:
        section += "- Nothing...\n"
    else:
        for m in messages:
            section += f"{fmt_msg(m)}\n"
    # New contributors = authors of this range minus authors already
    # present anywhere in `branch`'s history.
    prev_authors = authors(branch)
    new_authors = authors(f"{last_successful}..{current}") - prev_authors
    if new_authors:
        section += f"\n{HEADER_MARKER} New Contributors\n"
        for a in sorted(new_authors):
            section += f"- {fmt_author(a)} made their first contribution\n"
    if messages and last_successful != current:
        url = repo_url()
        section += (
            f"\n{HEADER_MARKER} Full Changelog: "
            f"[{last_successful[:8]}...{current[:8]}]"
            f"({url}/compare/{last_successful}...{current})\n"
        )
    return section
# ----------------------------------------------------------
# markdown update
# ----------------------------------------------------------
def update_release_md(existing, new_section, tag):
    """Insert or replace the `tag` section in the release-notes markdown.

    Pre-release ("special") sections are kept in the fixed order
    alpha → beta → nightly near the top; any existing section with the
    same tag is dropped and replaced by `new_section`.
    """
    if not existing:
        return new_section
    tag_lower = tag.lower()
    is_special = tag_lower in {"alpha", "beta", "nightly"}
    # Split on "### <tag>" heading lines; re.split with a capture group
    # yields [text, heading, text, heading, …] — odd indices are headings.
    pattern = fr"(^{TAG_MARKER} .*$)"
    parts = re.split(pattern, existing, flags=re.MULTILINE)
    processed = []
    special_seen = {"alpha": False, "beta": False, "nightly": False}
    last_special_idx = -1
    i = 0
    while i < len(parts):
        if i % 2 == 1:
            header = parts[i]
            # header[3:] assumes TAG_MARKER is exactly 3 characters ("###").
            name = header[3:].strip().lower()
            if name in special_seen:
                special_seen[name] = True
                last_special_idx = len(processed)
            if name == tag_lower:
                # Drop the old section for this tag: skip the heading
                # and its body in one step.
                i += 2
                continue
        processed.append(parts[i])
        i += 1
    insert_pos = 0
    if is_special:
        # Insert after the last special section that precedes this tag
        # in the canonical order.
        order = ["alpha", "beta", "nightly"]
        for t in order:
            if t == tag_lower:
                break
            if special_seen[t]:
                # NOTE(review): index() expects the stored heading to be
                # exactly "### <tag>" in lowercase — a heading like
                # "### Alpha" would raise ValueError. Confirm the file
                # always stores lowercase special tags.
                idx = processed.index(f"{TAG_MARKER} {t}")
                insert_pos = idx + 2
    elif last_special_idx >= 0:
        # Versioned (non-special) sections land right below the specials.
        insert_pos = last_special_idx + 2
    processed.insert(insert_pos, new_section)
    # Re-join, guaranteeing exactly one blank line before each heading.
    result = ""
    for part in processed:
        if part.startswith(f"{TAG_MARKER} ") and not result.endswith("\n\n"):
            result = result.rstrip("\n") + "\n\n"
        result += part
    return result.rstrip() + "\n"
# ----------------------------------------------------------
# retrieval
# ----------------------------------------------------------
def retrieve_tag(tag, file_path):
    """Extract the body of the release-notes section titled `tag`.

    Finds a line exactly matching "### <tag>" (case-insensitive) in
    `file_path` and returns everything up to the next "### " heading,
    stripped. Returns "" when the file or the section is absent.
    """
    if not file_path.exists():
        return ""
    text = file_path.read_text()
    heading = re.search(
        fr"^{TAG_MARKER} {re.escape(tag)}$",
        text,
        re.MULTILINE | re.IGNORECASE,
    )
    if heading is None:
        return ""
    begin = heading.end()
    # Step past the newline terminating the heading line itself.
    if begin < len(text) and text[begin] == "\n":
        begin += 1
    following = re.search(fr"^{TAG_MARKER} ", text[begin:], re.MULTILINE)
    stop = begin + following.start() if following else len(text)
    return text[begin:stop].strip()
# ----------------------------------------------------------
# entrypoint
# ----------------------------------------------------------
def main():
    """CLI entry point with two modes.

    Generation: <last_successful> [tag] [branch] — build a new section
    and merge it into release-notes.md.
    Retrieval: --retrieve <tag> — print an existing section.
    Both honor an optional --output-dir for where release-notes.md lives.
    """
    args = sys.argv[1:]
    if not args:
        sys.exit(
            "Usage:\n"
            " generate_release_notes.py <last_successful> [tag] [branch] [--output-dir DIR]\n"
            " generate_release_notes.py --retrieve <tag> [--output-dir DIR]"
        )
    # parse optional output dir
    output_dir = Path.cwd()
    if "--output-dir" in args:
        idx = args.index("--output-dir")
        try:
            output_dir = Path(args[idx + 1]).resolve()
        except IndexError:
            sys.exit("Missing value for --output-dir")
        # Remove the flag and its value so positional parsing below
        # only ever sees positional arguments.
        del args[idx:idx + 2]
    output_dir.mkdir(parents=True, exist_ok=True)
    release_file = output_dir / "release-notes.md"
    # retrieval mode
    if args[0] == "--retrieve":
        if len(args) < 2:
            sys.exit("Missing tag after --retrieve")
        print(retrieve_tag(args[1], release_file))
        return
    # generation mode
    last_successful = args[0]
    # Tag defaults to the full HEAD SHA; branch comes from GITHUB_REF
    # in CI, otherwise the merge-base with the default branch.
    tag = args[1] if len(args) > 1 else head_commit()
    branch = args[2] if len(args) > 2 else (
        os.environ.get("GITHUB_REF") or branch_base()
    )
    new_section = generate_release_notes(last_successful, tag, branch)
    existing = (
        release_file.read_text()
        if release_file.exists()
        else ""
    )
    updated = update_release_md(existing, new_section, tag)
    release_file.write_text(updated)
    # Echo only the freshly generated section for CI logs.
    print(new_section)
if __name__ == "__main__":
main()

View File

@@ -0,0 +1,159 @@
#!/usr/bin/env python3
import datetime
import hashlib
import json
import subprocess
from pathlib import Path
import argparse
# ----------------------------------------------------------
# helpers
# ----------------------------------------------------------
def sh(cmd: str, cwd: Path) -> str:
    """Run `cmd` in a shell at `cwd`; return stripped stdout.

    Raises subprocess.CalledProcessError on a nonzero exit status.
    """
    raw = subprocess.check_output(cmd, shell=True, cwd=cwd)
    return raw.decode().strip()
def file_size(path: Path) -> int:
    """Byte size of `path`; exits via SystemExit when the file is absent."""
    if path.exists():
        return path.stat().st_size
    raise SystemExit(f"Missing file: {path}")
def sha256(path: Path) -> str:
    """Hex SHA-256 digest of `path`, streamed in 1 MiB chunks."""
    digest = hashlib.sha256()
    with open(path, "rb") as fh:
        for chunk in iter(lambda: fh.read(1024 * 1024), b""):
            digest.update(chunk)
    return digest.hexdigest()
# ----------------------------------------------------------
# entry
# ----------------------------------------------------------
def main():
    """Generate source_metadata.json describing one IPA release.

    Refreshes release-notes.md via generate_release_notes.py, then
    records the IPA's size and SHA-256 together with release info into
    <output-dir>/source_metadata.json.
    """
    p = argparse.ArgumentParser()
    p.add_argument(
        "--repo-root",
        required=True,
        help="Repo used for git history + release notes",
    )
    p.add_argument(
        "--ipa",
        required=True,
        help="Path to IPA file",
    )
    p.add_argument(
        "--output-dir",
        required=True,
        help="Output Directory where source_metadata.json is written",
    )
    p.add_argument(
        "--release-notes-dir",
        required=True,
        help="Output Directory where release-notes.md is generated/read",
    )
    p.add_argument("--release-tag", required=True)
    p.add_argument("--version", required=True)
    p.add_argument("--marketing-version", required=True)
    p.add_argument("--short-commit", required=True)
    p.add_argument("--release-channel", required=True)
    p.add_argument("--bundle-id", required=True)
    p.add_argument("--is-beta", action="store_true")
    args = p.parse_args()
    repo_root = Path(args.repo_root).resolve()
    ipa_path = Path(args.ipa).resolve()
    out_dir = Path(args.output_dir).resolve()
    notes_dir = Path(args.release_notes_dir).resolve()
    if not repo_root.is_dir():
        raise SystemExit(f"Invalid repo root: {repo_root}")
    if not ipa_path.is_file():
        raise SystemExit(f"Invalid IPA path: {ipa_path}")
    notes_dir.mkdir(parents=True, exist_ok=True)
    out_dir.mkdir(parents=True, exist_ok=True)
    out_file = out_dir / "source_metadata.json"
    # ------------------------------------------------------
    # ensure release notes exist
    # ------------------------------------------------------
    # Runs from repo_root so git history and the helper script resolve
    # relative to that checkout.
    print("Generating release notes…")
    sh(
        (
            "python3 generate_release_notes.py "
            f"{args.short_commit} {args.release_tag} "
            f"--output-dir \"{notes_dir}\""
        ),
        cwd=repo_root,
    )
    # ------------------------------------------------------
    # retrieve release notes
    # ------------------------------------------------------
    notes = sh(
        (
            "python3 generate_release_notes.py "
            f"--retrieve {args.release_tag} "
            f"--output-dir \"{notes_dir}\""
        ),
        cwd=repo_root,
    )
    # ------------------------------------------------------
    # compute metadata
    # ------------------------------------------------------
    # datetime.UTC requires Python 3.11+.
    now = datetime.datetime.now(datetime.UTC)
    formatted = now.strftime("%Y-%m-%dT%H:%M:%SZ")
    human = now.strftime("%c")
    localized_description = f"""
This is release for:
- version: "{args.version}"
- revision: "{args.short_commit}"
- timestamp: "{human}"
Release Notes:
{notes}
""".strip()
    metadata = {
        "is_beta": bool(args.is_beta),
        "bundle_identifier": args.bundle_id,
        "version_ipa": args.marketing_version,
        "version_date": formatted,
        "release_channel": args.release_channel.lower(),
        "size": file_size(ipa_path),
        "sha256": sha256(ipa_path),
        "download_url": (
            "https://github.com/SideStore/SideStore/releases/download/"
            f"{args.release_tag}/SideStore.ipa"
        ),
        "localized_description": localized_description,
    }
    with open(out_file, "w", encoding="utf-8") as f:
        json.dump(metadata, f, indent=2, ensure_ascii=False)
    print(f"Wrote {out_file}")
if __name__ == "__main__":
main()

View File

@@ -0,0 +1,180 @@
#!/usr/bin/env python3
import json
import sys
from pathlib import Path
'''
metadata.json template
{
"version_ipa": "0.0.0",
"version_date": "2000-12-18T00:00:00Z",
"is_beta": true,
"release_channel": "alpha",
"size": 0,
"sha256": "",
"localized_description": "Invalid Update",
"download_url": "https://github.com/SideStore/SideStore/releases/download/0.0.0/SideStore.ipa",
"bundle_identifier": "com.SideStore.SideStore"
}
'''
# ----------------------------------------------------------
# args
# ----------------------------------------------------------
# Usage: update_apps.py <metadata.json> <source.json>
if len(sys.argv) < 3:
    print("Usage: python3 update_apps.py <metadata.json> <source.json>")
    sys.exit(1)
metadata_file = Path(sys.argv[1])
source_file = Path(sys.argv[2])
# ----------------------------------------------------------
# load metadata
# ----------------------------------------------------------
if not metadata_file.exists():
    print(f"Missing metadata file: {metadata_file}")
    sys.exit(1)
with open(metadata_file, "r", encoding="utf-8") as f:
    meta = json.load(f)
# Individual fields of source_metadata.json (see the template in the
# module docstring above).
VERSION_IPA = meta.get("version_ipa")
VERSION_DATE = meta.get("version_date")
IS_BETA = meta.get("is_beta")
RELEASE_CHANNEL = meta.get("release_channel")
SIZE = meta.get("size")
SHA256 = meta.get("sha256")
LOCALIZED_DESCRIPTION = meta.get("localized_description")
DOWNLOAD_URL = meta.get("download_url")
BUNDLE_IDENTIFIER = meta.get("bundle_identifier")
# Echo all inputs for CI log debugging.
print(" ====> Required parameter list <====")
print("Bundle Identifier:", BUNDLE_IDENTIFIER)
print("Version:", VERSION_IPA)
print("Version Date:", VERSION_DATE)
print("IsBeta:", IS_BETA)
print("ReleaseChannel:", RELEASE_CHANNEL)
print("Size:", SIZE)
print("Sha256:", SHA256)
print("Localized Description:", LOCALIZED_DESCRIPTION)
print("Download URL:", DOWNLOAD_URL)
# ----------------------------------------------------------
# validation
# ----------------------------------------------------------
# IS_BETA is intentionally not required (absent/False is acceptable).
# NOTE(review): `not SIZE` also rejects a literal 0 — presumably fine
# for an IPA, but confirm 0 is never a legitimate size.
if (
    not BUNDLE_IDENTIFIER
    or not VERSION_IPA
    or not VERSION_DATE
    or not RELEASE_CHANNEL
    or not SIZE
    or not SHA256
    or not LOCALIZED_DESCRIPTION
    or not DOWNLOAD_URL
):
    print("One or more required metadata fields missing")
    sys.exit(1)
SIZE = int(SIZE)
RELEASE_CHANNEL = RELEASE_CHANNEL.lower()
# ----------------------------------------------------------
# load or create source.json
# ----------------------------------------------------------
if source_file.exists():
    with open(source_file, "r", encoding="utf-8") as f:
        data = json.load(f)
else:
    print("source.json missing — creating minimal structure")
    data = {
        "version": 2,
        "apps": []
    }
# Only source schema v2+ is supported; a missing version defaults to 1
# and is rejected.
if int(data.get("version", 1)) < 2:
    print("Only v2 and above are supported")
    sys.exit(1)
# ----------------------------------------------------------
# ensure app entry exists
# ----------------------------------------------------------
apps = data.setdefault("apps", [])
# Locate this bundle's entry; create a stub entry when absent.
app = next(
    (a for a in apps if a.get("bundleIdentifier") == BUNDLE_IDENTIFIER),
    None
)
if app is None:
    print("App entry missing — creating new app entry")
    app = {
        "bundleIdentifier": BUNDLE_IDENTIFIER,
        "releaseChannels": []
    }
    apps.append(app)
# ----------------------------------------------------------
# update logic
# ----------------------------------------------------------
# For the stable channel the top-level app fields mirror the release.
if RELEASE_CHANNEL == "stable":
    app.update({
        "version": VERSION_IPA,
        "versionDate": VERSION_DATE,
        "size": SIZE,
        "sha256": SHA256,
        "localizedDescription": LOCALIZED_DESCRIPTION,
        "downloadURL": DOWNLOAD_URL,
    })
channels = app.setdefault("releaseChannels", [])
new_version = {
    "version": VERSION_IPA,
    "date": VERSION_DATE,
    "localizedDescription": LOCALIZED_DESCRIPTION,
    "downloadURL": DOWNLOAD_URL,
    "size": SIZE,
    "sha256": SHA256,
}
tracks = [t for t in channels if t.get("track") == RELEASE_CHANNEL]
if len(tracks) > 1:
    print(f"Multiple tracks named {RELEASE_CHANNEL}")
    sys.exit(1)
if not tracks:
    channels.insert(0, {
        "track": RELEASE_CHANNEL,
        "releases": [new_version],
    })
else:
    # Guard against an existing track whose "releases" list is missing
    # or empty: indexing releases[0] directly raised IndexError before.
    releases = tracks[0].setdefault("releases", [])
    if releases:
        releases[0] = new_version
    else:
        releases.append(new_version)
# ----------------------------------------------------------
# save
# ----------------------------------------------------------
# Print the full updated document for CI logs, then write it back.
print("\nUpdated Sources File:\n")
print(json.dumps(data, indent=2, ensure_ascii=False))
with open(source_file, "w", encoding="utf-8") as f:
    json.dump(data, f, indent=2, ensure_ascii=False)
print("JSON successfully updated.")

238
scripts/ci/workflow.py Normal file
View File

@@ -0,0 +1,238 @@
#!/usr/bin/env python3
import os
import sys
import subprocess
import datetime
from pathlib import Path
# REPO ROOT relative to script dir
ROOT = Path(__file__).resolve().parents[2]
# ----------------------------------------------------------
# helpers
# ----------------------------------------------------------
def run(cmd, check=True):
    """Echo `cmd`, then execute it in a shell from the repo root.

    check=True propagates subprocess.CalledProcessError on failure;
    a trailing blank line keeps CI logs readable.
    """
    print(f"$ {cmd}", flush=True)
    subprocess.run(cmd, shell=True, cwd=ROOT, check=check)
    print("", flush=True)
def getenv(name, default=""):
    """Environment lookup with an empty-string default."""
    value = os.environ.get(name)
    return default if value is None else value
# ----------------------------------------------------------
# SHARED
# ----------------------------------------------------------
def short_commit():
    """Return the abbreviated SHA of HEAD in the repo root."""
    out = subprocess.check_output(
        "git rev-parse --short HEAD", shell=True, cwd=ROOT
    )
    return out.decode().strip()
# ----------------------------------------------------------
# VERSION BUMP
# ----------------------------------------------------------
def bump_beta():
    """Append a beta suffix (-<channel>.<date>.<n>+<sha>) to MARKETING_VERSION.

    The counter is persisted in build_number.txt as "<date>,<n>" and
    resets to 1 whenever the UTC date changes; within a day it
    increments. Requires Python 3.11+ (datetime.UTC).
    """
    date = datetime.datetime.now(datetime.UTC).strftime("%Y.%m.%d")
    release_channel = getenv("RELEASE_CHANNEL", "beta")
    build_file = ROOT / "build_number.txt"
    short = subprocess.check_output(
        "git rev-parse --short HEAD",
        shell=True,
        cwd=ROOT
    ).decode().strip()

    def write(num):
        # BSD (macOS) sed: -i '' edits Build.xcconfig in place,
        # appending the suffix to the MARKETING_VERSION line.
        run(
            f"""sed -e "/MARKETING_VERSION = .*/s/$/-{release_channel}.{date}.{num}+{short}/" -i '' {ROOT}/Build.xcconfig"""
        )
        build_file.write_text(f"{date},{num}")

    if not build_file.exists():
        write(1)
        return
    # The file stores the date precisely so the counter can restart each
    # day; the previous code ignored it and incremented forever.
    last_date, last_num = build_file.read_text().strip().split(",")
    write(int(last_num) + 1 if last_date == date else 1)
# ----------------------------------------------------------
# VERSION EXTRACTION
# ----------------------------------------------------------
def extract_version():
    """Read MARKETING_VERSION out of Build.xcconfig via grep + sed."""
    raw = subprocess.check_output(
        "grep MARKETING_VERSION Build.xcconfig | sed -e 's/MARKETING_VERSION = //g'",
        shell=True,
        cwd=ROOT,
    )
    return raw.decode().strip()
# ----------------------------------------------------------
# CLEAN
# ----------------------------------------------------------
def clean():
    """Remove build outputs via the Makefile."""
    run("make clean")


def clean_derived_data():
    """Wipe Xcode's DerivedData cache (best effort, never fails the job)."""
    run("rm -rf ~/Library/Developer/Xcode/DerivedData/*", check=False)


def clean_spm_cache():
    """Wipe the SwiftPM cache directory (best effort, never fails the job)."""
    run("rm -rf ~/Library/Caches/org.swift.swiftpm/*", check=False)
# ----------------------------------------------------------
# BUILD
# ----------------------------------------------------------
def build():
    """Clean-build the app, fakesign it, and package the IPA + dSYMs."""
    run("make clean")
    run("rm -rf ~/Library/Developer/Xcode/DerivedData/*", check=False)
    run("mkdir -p build/logs")
    run(
        "set -o pipefail && "
        "NSUnbufferedIO=YES make -B build "
        "2>&1 | tee -a build/logs/build.log | xcbeautify --renderer github-actions"
    )
    # pipefail so a make failure is not masked by tee's zero exit status
    # (previously only the main build line had it); 2>&1 keeps stderr in
    # the log as well.
    run("set -o pipefail && make fakesign 2>&1 | tee -a build/logs/build.log")
    run("set -o pipefail && make ipa 2>&1 | tee -a build/logs/build.log")
    run("zip -r -9 ./SideStore.dSYMs.zip ./SideStore.xcarchive/dSYMs")
# ----------------------------------------------------------
# TESTS BUILD
# ----------------------------------------------------------
def tests_build():
    """Build the test targets, logging through tee + xcbeautify."""
    run("mkdir -p build/logs")
    # pipefail so a failing make is not masked by the tee/xcbeautify
    # pipeline's exit status (matches the main build() step).
    run(
        "set -o pipefail && "
        "NSUnbufferedIO=YES make -B build-tests "
        "2>&1 | tee -a build/logs/tests-build.log | xcbeautify --renderer github-actions"
    )
# ----------------------------------------------------------
# TESTS RUN
# ----------------------------------------------------------
def tests_run():
    """Boot the simulator in the background, run tests, zip results."""
    run("mkdir -p build/logs")
    # Detached background boot; the next step polls for readiness.
    run("nohup make -B boot-sim-async </dev/null >> build/logs/tests-run.log 2>&1 &")
    # pipefail so a failing make is not masked by tee's exit status.
    run("set -o pipefail && make -B sim-boot-check 2>&1 | tee -a build/logs/tests-run.log")
    run("set -o pipefail && make run-tests 2>&1 | tee -a build/logs/tests-run.log")
    run("zip -r -9 ./test-results.zip ./build/tests")
# ----------------------------------------------------------
# LOG ENCRYPTION
# ----------------------------------------------------------
def encrypt_logs(name):
    """Zip build/logs into ../../<name>.zip, password-protected.

    The password comes from BUILD_LOG_ZIP_PASSWORD.
    TODO(review): the "12345" fallback makes the encryption decorative
    when the secret is unset — consider failing instead.
    """
    pwd = getenv("BUILD_LOG_ZIP_PASSWORD", "12345")
    # Pass the password through the environment and let the shell expand
    # it: interpolating it into the command string leaked the secret via
    # run()'s `print(f"$ {cmd}")` echo into the CI log, and shell
    # metacharacters in the password could break (or inject into) the
    # command line.
    os.environ["BUILD_LOG_ZIP_PASSWORD"] = pwd
    run(
        f'cd build/logs && zip -e -P "$BUILD_LOG_ZIP_PASSWORD" ../../{name}.zip *'
    )
# ----------------------------------------------------------
# RELEASE NOTES
# ----------------------------------------------------------
def release_notes(tag):
    """Generate release notes for `tag` via the helper script."""
    run(f"python3 generate_release_notes.py {tag}")
# ----------------------------------------------------------
# PUBLISH SOURCE.JSON
# ----------------------------------------------------------
def publish_apps(release_tag, short_commit):
    """Regenerate source metadata and push the updated source.json to the
    apps-v2.json checkout under Dependencies/.

    Each run() call spawns a fresh shell with cwd=ROOT, so directory
    changes and git config must be chained into ONE command — the old
    sequence of separate run("pushd …") / run("git …") / run("popd")
    calls silently ran every git command in ROOT instead of the
    apps-v2.json repository.
    """
    repo = ROOT / "Dependencies/apps-v2.json"
    if not repo.exists():
        raise SystemExit("Dependencies/apps-v2.json repo missing")
    # generate metadata + release notes
    run(
        f"python3 generate_source_metadata.py "
        f"--release-tag {release_tag} "
        f"--short-commit {short_commit}"
    )
    # update source.json using generated metadata, then commit and push
    # from inside the apps-v2.json checkout; `|| true` keeps an empty
    # commit (no changes) from failing the pipeline.
    run(
        "cd Dependencies/apps-v2.json && "
        "git config user.name 'GitHub Actions' && "
        "git config user.email 'github-actions@github.com' && "
        "python3 ../../scripts/update_source_metadata.py './_includes/source.json' && "
        "git add --verbose ./_includes/source.json && "
        f"(git commit -m ' - updated for {short_commit} deployment' || true) && "
        "git push --verbose"
    )
# ----------------------------------------------------------
# ENTRYPOINT
# ----------------------------------------------------------
# Command table: name -> (handler, positional-arg count, usage hint).
COMMANDS = {
    # "commid-id" is kept for backward compatibility with workflow YAML
    # that already invokes the misspelled name; "commit-id" is the
    # correctly spelled alias for new callers.
    "commid-id"          : (short_commit, 0, ""),
    "commit-id"          : (short_commit, 0, ""),
    "bump-beta"          : (bump_beta, 0, ""),
    "version"            : (extract_version, 0, ""),
    "clean"              : (clean, 0, ""),
    "clean-derived-data" : (clean_derived_data, 0, ""),
    "clean-spm-cache"    : (clean_spm_cache, 0, ""),
    "build"              : (build, 0, ""),
    "tests-build"        : (tests_build, 0, ""),
    "tests-run"          : (tests_run, 0, ""),
    "encrypt-build"      : (lambda: encrypt_logs("encrypted-build-logs"), 0, ""),
    "encrypt-tests-build": (lambda: encrypt_logs("encrypted-tests-build-logs"), 0, ""),
    "encrypt-tests-run"  : (lambda: encrypt_logs("encrypted-tests-run-logs"), 0, ""),
    "release-notes"      : (release_notes, 1, "<tag>"),
    "deploy"             : (publish_apps, 2, "<release_tag> <short_commit>"),
}
def main():
    """CLI dispatcher for workflow.py.

    Handlers that return a value (e.g. commit-id, version) now have it
    printed to stdout — the workflow YAML captures these commands via
    $(python3 scripts/ci/workflow.py …), which previously received
    nothing because return values were discarded.
    """
    def usage():
        lines = ["Available commands:"]
        for name, (_, argc, arg_usage) in COMMANDS.items():
            suffix = f" {arg_usage}" if arg_usage else ""
            lines.append(f" - {name}{suffix}")
        return "\n".join(lines)
    if len(sys.argv) < 2:
        raise SystemExit(usage())
    cmd = sys.argv[1]
    if cmd not in COMMANDS:
        raise SystemExit(
            f"Unknown command '{cmd}'.\n\n{usage()}"
        )
    func, argc, arg_usage = COMMANDS[cmd]
    if len(sys.argv) - 2 < argc:
        suffix = f" {arg_usage}" if arg_usage else ""
        raise SystemExit(f"Usage: workflow.py {cmd}{suffix}")
    args = sys.argv[2:2 + argc]
    result = func(*args) if argc else func()
    # Surface the handler's return value so shell callers can capture it.
    if result is not None:
        print(result)
if __name__ == "__main__":
main()

View File

@@ -1,192 +0,0 @@
#!/usr/bin/env python3
# Updates a SideStore source JSON with metadata for a freshly built IPA.
# All release parameters arrive via environment variables; the JSON file
# to edit is the single CLI argument.
import os
import json
import sys

# SIDESTORE_BUNDLE_ID = "com.SideStore.SideStore"

# Release metadata supplied by the CI workflow environment (strings or None).
VERSION_IPA = os.getenv("VERSION_IPA")
VERSION_DATE = os.getenv("VERSION_DATE")
IS_BETA = os.getenv("IS_BETA")
RELEASE_CHANNEL = os.getenv("RELEASE_CHANNEL")
SIZE = os.getenv("SIZE")
SHA256 = os.getenv("SHA256")
LOCALIZED_DESCRIPTION = os.getenv("LOCALIZED_DESCRIPTION")
DOWNLOAD_URL = os.getenv("DOWNLOAD_URL")
# BUNDLE_IDENTIFIER = os.getenv("BUNDLE_IDENTIFIER", SIDESTORE_BUNDLE_ID)
BUNDLE_IDENTIFIER = os.getenv("BUNDLE_IDENTIFIER")

# Uncomment to debug/test by simulating dummy input locally
# VERSION_IPA = os.getenv("VERSION_IPA", "0.0.0")
# VERSION_DATE = os.getenv("VERSION_DATE", "2000-12-18T00:00:00Z")
# IS_BETA = os.getenv("IS_BETA", True)
# RELEASE_CHANNEL = os.getenv("RELEASE_CHANNEL", "alpha")
# SIZE = int(os.getenv("SIZE", "0"))  # Convert to integer
# SHA256 = os.getenv("SHA256", "")
# LOCALIZED_DESCRIPTION = os.getenv("LOCALIZED_DESCRIPTION", "Invalid Update")
# DOWNLOAD_URL = os.getenv("DOWNLOAD_URL", "https://github.com/SideStore/SideStore/releases/download/0.0.0/SideStore.ipa")

# The input JSON file path is mandatory.
if len(sys.argv) < 2:
    # NOTE(review): the script name in this message may not match the actual
    # filename on disk — verify against the repository layout.
    print("Usage: python3 update_apps.py <input_file>")
    sys.exit(1)

input_file = sys.argv[1]
print(f"Input File: {input_file}")

# Echo the inputs so CI logs show exactly what was used.
print(" ====> Required parameter list <====")
print("Bundle Identifier:", BUNDLE_IDENTIFIER)
print("Version:", VERSION_IPA)
print("Version Date:", VERSION_DATE)
print("IsBeta:", IS_BETA)
print("ReleaseChannel:", RELEASE_CHANNEL)
print("Size:", SIZE)
print("Sha256:", SHA256)
print("Localized Description:", LOCALIZED_DESCRIPTION)
print("Download URL:", DOWNLOAD_URL)

if IS_BETA is None:
    print("Setting IS_BETA = False since no value was provided")
    IS_BETA = False

# Normalize IS_BETA to a real bool. Previously only the truthy case was
# handled, so an env value like "false" remained a (truthy) non-empty string.
IS_BETA = str(IS_BETA).lower() in ["true", "1", "yes"]
# Load the JSON source document; abort with a readable error on any failure.
try:
    with open(input_file, "r") as file:
        data = json.load(file)
except Exception as e:
    print(f"Error reading the input file: {e}")
    sys.exit(1)

# Every one of these must be present and non-empty in the environment.
required_values = (
    BUNDLE_IDENTIFIER,
    VERSION_IPA,
    VERSION_DATE,
    RELEASE_CHANNEL,
    SIZE,
    SHA256,
    LOCALIZED_DESCRIPTION,
    DOWNLOAD_URL,
)
if not all(required_values):
    print("One or more required parameter(s) were not defined as environment variable(s)")
    sys.exit(1)

# SIZE arrives as a string from the environment; convert to integer.
SIZE = int(SIZE)
# ---------------------------------------------------------------------------
# Apply the new release to the v2 source JSON:
#   - for the matching bundle identifier, refresh app-level metadata on
#     "stable" releases, then replace (or create) the top entry of the
#     matching release-channel track.
# (The superseded v1 single-"versions"-array handling that used to live here
# as commented-out code has been removed; see git history.)
# ---------------------------------------------------------------------------
updated = False

# Track names are matched after lowercasing.
RELEASE_CHANNEL = RELEASE_CHANNEL.lower()

version = data.get("version", 1)
if int(version) < 2:
    print("Only v2 and above are supported for direct updates to sources.json on push")
    sys.exit(1)

for app in data.get("apps", []):
    if app.get("bundleIdentifier") != BUNDLE_IDENTIFIER:
        continue

    if RELEASE_CHANNEL == "stable":
        # Stable releases also refresh the app-level metadata for the store front page.
        app.update({
            "version": VERSION_IPA,
            "versionDate": VERSION_DATE,
            "size": SIZE,
            "sha256": SHA256,
            "localizedDescription": LOCALIZED_DESCRIPTION,
            "downloadURL": DOWNLOAD_URL,
        })

    # Ensure the releaseChannels array exists on the app entry.
    channels = app.get("releaseChannels", [])
    if not channels:
        app["releaseChannels"] = channels

    # The release entry to publish for this build.
    new_version = {
        "version": VERSION_IPA,
        "date": VERSION_DATE,
        "localizedDescription": LOCALIZED_DESCRIPTION,
        "downloadURL": DOWNLOAD_URL,
        "size": SIZE,
        "sha256": SHA256,
    }

    tracks = [track for track in channels if track.get("track") == RELEASE_CHANNEL]
    if len(tracks) > 1:
        # fixed: message previously contained shell-style "${...}" inside an
        # f-string, rendering a stray "$" before the value
        print(f"Multiple tracks with same `track` name = {RELEASE_CHANNEL} are not allowed!")
        sys.exit(1)

    if not tracks:
        # No entries in this release channel yet, so create the track.
        track = {
            "track": RELEASE_CHANNEL,
            "releases": [new_version],
        }
        channels.insert(0, track)
    else:
        track = tracks[0]  # first result is the selected track
        releases = track.setdefault("releases", [])
        if releases:
            releases[0] = new_version  # replace the top entry
        else:
            # fixed: previously indexed [0] unconditionally, raising
            # IndexError when the track existed with an empty releases list
            releases.append(new_version)

    updated = True
    break

if not updated:
    print("No app with the specified bundle identifier found.")
    sys.exit(1)

# Save the updated JSON to the input file.
try:
    print("\nUpdated Sources File:\n")
    print(json.dumps(data, indent=2, ensure_ascii=False))
    with open(input_file, "w", encoding="utf-8") as file:
        json.dump(data, file, indent=2, ensure_ascii=False)
    print("JSON successfully updated.")
except Exception as e:
    print(f"Error writing to the file: {e}")
    sys.exit(1)

View File

@@ -1,381 +0,0 @@
#!/usr/bin/env python3
import subprocess
import sys
import os
import re
# Commit authors to exclude from changelog/contributor computations (e.g. bots).
IGNORED_AUTHORS = [
]
# Markdown heading prefixes used in release-notes.md:
# "###" opens a tag section, "####" opens a sub-heading within it.
TAG_MARKER = "###"
HEADER_MARKER = "####"
def run_command(cmd):
    """Execute `cmd` through the shell and return its stdout with surrounding whitespace removed."""
    output = subprocess.check_output(cmd, shell=True, text=True)
    return output.strip()
def get_head_commit():
    """SHA of the current HEAD commit (via `git rev-parse HEAD`)."""
    return run_command("git rev-parse HEAD")
def get_commit_messages(last_successful, current="HEAD"):
    """One-line commit subjects in the range (last_successful, current], newest first."""
    log_output = run_command(f"git log {last_successful}..{current} --pretty=format:%s")
    return log_output.splitlines() if log_output else []
def get_authors_in_range(commit_range, fmt="%an"):
    """Distinct commit authors in `commit_range` (formatted per `fmt`), minus IGNORED_AUTHORS."""
    log_output = run_command(f"git log {commit_range} --pretty=format:{fmt}")
    if not log_output:
        return set()
    authors = {line.strip() for line in log_output.splitlines() if line.strip()}
    return authors - set(IGNORED_AUTHORS)
def get_first_commit_of_repo():
    """SHA of the repository's root commit (first listed when several roots exist)."""
    roots = run_command("git rev-list --max-parents=0 HEAD")
    return roots.splitlines()[0]
def get_branch():
    """
    Best-effort branch base: the merge-base of HEAD with the default remote
    branch. Falls back to the repo's first commit when origin/HEAD is unset.
    """
    try:
        head_ref = run_command("git rev-parse --abbrev-ref origin/HEAD")
        default_branch = head_ref.rsplit('/', 1)[-1]
        return run_command(f"git merge-base HEAD origin/{default_branch}")
    except Exception:
        return get_first_commit_of_repo()
def get_repo_url():
    """
    Return the 'origin' remote URL normalized to an https:// web URL.

    SSH-style URLs ("git@host:owner/repo.git") become "https://host/owner/repo";
    a trailing ".git" is stripped in all cases.
    """
    url = run_command("git config --get remote.origin.url")
    if url.startswith("git@"):
        # Convert only the single host/path separator colon. The previous code
        # replaced every ":" AFTER prepending "https://", which corrupted the
        # scheme it had just added ("https///host/...").
        url = "https://" + url[len("git@"):].replace(":", "/", 1)
    if url.endswith(".git"):
        url = url[:-len(".git")]
    return url
def format_contributor(author):
    """
    Render an author as a GitHub-style handle: names already beginning with
    '@' pass through unchanged; otherwise '@' is prefixed to the first
    whitespace-separated token of the name.
    """
    if author.startswith('@'):
        return author
    first_token = author.split()[0]
    return '@' + first_token
def format_commit_message(msg):
    """Normalize a commit subject into a '- ' bullet line for the release notes."""
    body = msg.lstrip()
    # An existing leading '-' is dropped (with its padding) to avoid '- - msg'.
    body = body[1:].strip() if body.startswith("-") else body
    return "- " + body
# NOTE(review): a superseded duplicate of generate_release_notes() lived here
# with its "def" line commented out but its entire body left behind, so the
# body's statements (referencing the undefined names `last_successful`, `tag`
# and `branch`) were no longer inside any function. Removed; the maintained
# implementation — which adds the invalid-revision-range fallback — follows
# immediately below.
def generate_release_notes(last_successful, tag, branch):
    """
    Build the markdown release-notes section for `tag`.

    Sections produced:
      - "### <tag>" header
      - "#### What's Changed": commit subjects in (last_successful, HEAD]
      - "#### New Contributors": authors seen in the range but not on `branch`
        (only when non-empty)
      - "#### Full Changelog": compare link (only when there are changes)
    """
    current_commit = get_head_commit()
    fallback_depth = 5  # commits to include when the stored range is invalid
    try:
        # Try to get commit messages using the provided last_successful commit.
        messages = get_commit_messages(last_successful, current_commit)
    except subprocess.CalledProcessError:
        # The recorded last-successful commit may no longer exist (e.g. after
        # a force push); fall back to the last `fallback_depth` commits.
        # fixed: the message previously claimed 10 commits while the code
        # used HEAD~5 — the text now reflects the actual depth.
        print(f"\nInvalid revision range error, using last {fallback_depth} commits as fallback.\n")
        last_successful = run_command(f"git rev-parse HEAD~{fallback_depth}")
        messages = get_commit_messages(last_successful, current_commit)

    # Start with the tag header.
    new_section = f"{TAG_MARKER} {tag}\n"

    # What's Changed section (always present).
    new_section += f"{HEADER_MARKER} What's Changed\n"
    if not messages or last_successful == current_commit:
        new_section += "- Nothing...\n"
    else:
        for msg in messages:
            new_section += f"{format_commit_message(msg)}\n"

    # New Contributors section (only if there are new contributors).
    all_previous_authors = get_authors_in_range(branch)
    recent_authors = get_authors_in_range(f"{last_successful}..{current_commit}")
    new_contributors = recent_authors - all_previous_authors
    if new_contributors:
        new_section += f"\n{HEADER_MARKER} New Contributors\n"
        for author in sorted(new_contributors):
            new_section += f"- {format_contributor(author)} made their first contribution\n"

    # Full Changelog section (only if there are changes).
    if messages and last_successful != current_commit:
        repo_url = get_repo_url()
        changelog_link = f"{repo_url}/compare/{last_successful}...{current_commit}"
        new_section += f"\n{HEADER_MARKER} Full Changelog: [{last_successful[:8]}...{current_commit[:8]}]({changelog_link})\n"
    return new_section
def update_release_md(existing_content, new_section, tag):
    """
    Update input based on rules:
    1. If tag exists, update it
    2. Special tags (alpha, beta, nightly) stay at the top in that order
    3. Numbered tags follow special tags
    4. Remove duplicate tags
    5. Insert new numbered tags at the top of the numbered section
    """
    tag_lower = tag.lower()
    is_special_tag = tag_lower in ["alpha", "beta", "nightly"]
    # Parse the existing content into sections
    if not existing_content:
        return new_section
    # Split the content into sections by headers. Because the pattern is a
    # capture group, re.split keeps the headers: they land at odd indices,
    # with the body that follows each header at the next even index.
    pattern = fr'(^{TAG_MARKER} .*$)'
    sections = re.split(pattern, existing_content, flags=re.MULTILINE)
    # Create a list to store the processed content
    processed_sections = []
    # Track special tag positions and whether tag was found
    special_tags_map = {"alpha": False, "beta": False, "nightly": False}
    last_special_index = -1
    tag_found = False
    numbered_tag_index = -1
    i = 0
    while i < len(sections):
        # Check if this is a header
        if i % 2 == 1:  # Headers are at odd indices
            header = sections[i]
            # NOTE(review): `content` is computed but never read in this loop.
            content = sections[i+1] if i+1 < len(sections) else ""
            # Strip the "### " prefix and lowercase for comparisons.
            current_tag = header[3:].strip().lower()
            # Check for special tags to track their positions
            if current_tag in special_tags_map:
                special_tags_map[current_tag] = True
                last_special_index = len(processed_sections)
            # Check if this is the first numbered tag
            elif re.match(r'^[0-9]+\.[0-9]+(\.[0-9]+)?$', current_tag) and numbered_tag_index == -1:
                numbered_tag_index = len(processed_sections)
            # If this is the tag we're updating, mark it but don't add yet
            if current_tag == tag_lower:
                if not tag_found:  # Replace the first occurrence
                    tag_found = True
                    i += 2  # Skip the content
                    continue
                else:  # Skip duplicate occurrences
                    i += 2
                    continue
        # Add the current section
        processed_sections.append(sections[i])
        i += 1
    # Determine where to insert the new section
    if tag_found:
        # We need to determine the insertion point
        if is_special_tag:
            # For special tags, insert after last special tag or at beginning
            # NOTE(review): `desired_index` is computed but never used below.
            desired_index = -1
            for pos, t in enumerate(["alpha", "beta", "nightly"]):
                if t == tag_lower:
                    desired_index = pos
            # Find position to insert
            insert_pos = 0
            for pos, t in enumerate(["alpha", "beta", "nightly"]):
                if t == tag_lower:
                    break
                if special_tags_map[t]:
                    # NOTE(review): headers kept in `processed_sections` retain
                    # their original case/suffix ("### Alpha"), while this looks
                    # up the lowercase name — .index() may raise ValueError when
                    # the stored header differs. Confirm against real files.
                    insert_pos = processed_sections.index(f"{TAG_MARKER} {t}")
                    insert_pos += 2  # Move past the header and content
            # Insert at the determined position
            processed_sections.insert(insert_pos, new_section)
            if insert_pos > 0 and not processed_sections[insert_pos-1].endswith('\n\n'):
                processed_sections.insert(insert_pos, '\n\n')
        else:
            # For numbered tags, insert after special tags but before other numbered tags
            insert_pos = 0
            if last_special_index >= 0:
                # Insert after the last special tag
                insert_pos = last_special_index + 2  # +2 to skip header and content
            processed_sections.insert(insert_pos, new_section)
            if insert_pos > 0 and not processed_sections[insert_pos-1].endswith('\n\n'):
                processed_sections.insert(insert_pos, '\n\n')
    else:
        # Tag doesn't exist yet, determine insertion point
        if is_special_tag:
            # For special tags, maintain alpha, beta, nightly order
            special_tags = ["alpha", "beta", "nightly"]
            insert_pos = 0
            for i, t in enumerate(special_tags):
                if t == tag_lower:
                    # Check if preceding special tags exist
                    for prev_tag in special_tags[:i]:
                        if special_tags_map[prev_tag]:
                            # Find the position after this tag
                            # NOTE(review): same case-sensitive .index() caveat as above.
                            prev_index = processed_sections.index(f"{TAG_MARKER} {prev_tag}")
                            insert_pos = prev_index + 2  # Skip header and content
            processed_sections.insert(insert_pos, new_section)
            if insert_pos > 0 and not processed_sections[insert_pos-1].endswith('\n\n'):
                processed_sections.insert(insert_pos, '\n\n')
        else:
            # For numbered tags, insert after special tags but before other numbered tags
            insert_pos = 0
            if last_special_index >= 0:
                # Insert after the last special tag
                insert_pos = last_special_index + 2  # +2 to skip header and content
            processed_sections.insert(insert_pos, new_section)
            if insert_pos > 0 and not processed_sections[insert_pos-1].endswith('\n\n'):
                processed_sections.insert(insert_pos, '\n\n')
    # Combine sections ensuring proper spacing
    result = ""
    for i, section in enumerate(processed_sections):
        if i > 0 and section.startswith(f"{TAG_MARKER} "):
            # Ensure single blank line before headers
            if not result.endswith("\n\n"):
                result = result.rstrip("\n") + "\n\n"
        result += section
    return result.rstrip() + "\n"
def retrieve_tag_content(tag, file_path):
    """
    Return the body of the "### <tag>" section in `file_path`, matched
    case-insensitively and stripped of surrounding whitespace. Returns ""
    when the file does not exist or the tag is not present.
    """
    if not os.path.exists(file_path):
        return ""
    with open(file_path, "r") as f:
        content = f.read()

    # Locate the tag's header line (case-insensitive, whole line).
    header_re = re.compile(fr'^{TAG_MARKER} ' + re.escape(tag) + r'$', re.MULTILINE | re.IGNORECASE)
    match = header_re.search(content)
    if not match:
        return ""

    # Body starts just past the header (and its trailing newline, if any).
    start_pos = match.end()
    if start_pos < len(content) and content[start_pos] == "\n":
        start_pos += 1

    # Body ends at the next tag header, or at end-of-file for the last tag.
    next_header = re.search(fr'^{TAG_MARKER} ', content[start_pos:], re.MULTILINE)
    end_pos = start_pos + next_header.start() if next_header else len(content)
    return content[start_pos:end_pos].strip()
def main():
    """
    CLI entry point for release-notes.md maintenance.

    Usage:
        python release.py <last_successful_commit> [tag] [branch]  # generate + merge notes
        python release.py --retrieve <tagname>                     # print one tag's section
    """
    release_file = "release-notes.md"
    args = sys.argv[1:]
    if not args:
        print("Usage: python release.py <last_successful_commit> [tag] [branch]")
        print(" or: python release.py --retrieve <tagname>")
        sys.exit(1)

    # Retrieval mode: print an existing tag's section and exit.
    if args[0] == "--retrieve":
        if len(args) < 2:
            print("Error: Missing tag name after --retrieve")
            sys.exit(1)
        tag_content = retrieve_tag_content(args[1], file_path=release_file)
        if tag_content:
            print(tag_content)
        else:
            print(f"Tag '{args[1]}' not found in '{release_file}'")
        return

    # Generation mode: optional tag/branch default to HEAD and the CI ref
    # (or the computed branch base) respectively.
    last_successful = args[0]
    tag = args[1] if len(args) > 1 else get_head_commit()
    branch = args[2] if len(args) > 2 else (os.environ.get("GITHUB_REF") or get_branch())

    new_section = generate_release_notes(last_successful, tag, branch)

    existing_content = ""
    if os.path.exists(release_file):
        with open(release_file, "r") as f:
            existing_content = f.read()

    merged = update_release_md(existing_content, new_section, tag)
    with open(release_file, "w") as f:
        f.write(merged)

    # Echo the freshly generated section for display in CI logs.
    print(new_section)


if __name__ == "__main__":
    main()