mirror of
https://github.com/SideStore/SideStore.git
synced 2026-03-27 21:05:39 +01:00
CI: full rewrite - moved logic into ci.py and kept workflow scripts mostly dummy
This commit is contained in:
255
scripts/ci.py
Normal file
255
scripts/ci.py
Normal file
@@ -0,0 +1,255 @@
|
||||
#!/usr/bin/env python3
|
||||
import os
|
||||
import sys
|
||||
import subprocess
|
||||
import datetime
|
||||
from pathlib import Path
|
||||
|
||||
ROOT = Path(__file__).resolve().parents[1]
|
||||
|
||||
|
||||
# ----------------------------------------------------------
|
||||
# helpers
|
||||
# ----------------------------------------------------------
|
||||
|
||||
def run(cmd, check=True):
    """Echo *cmd* and execute it through the shell from the repo root.

    With check=True (default) a non-zero exit status raises
    subprocess.CalledProcessError and fails the CI step.
    """
    print("$ " + str(cmd), flush=True)
    subprocess.run(cmd, shell=True, cwd=ROOT, check=check)
|
||||
|
||||
|
||||
def output(name, value):
    """Print a ``name=value`` pair and append it to $GITHUB_OUTPUT when set.

    This is the GitHub Actions step-output mechanism; outside Actions
    (no GITHUB_OUTPUT in the environment) it only prints.
    """
    line = f"{name}={value}"
    print(line)
    path = os.environ.get("GITHUB_OUTPUT")
    if not path:
        return
    with open(path, "a") as handle:
        handle.write(line + "\n")
|
||||
|
||||
|
||||
def getenv(name, default=""):
    """Environment-variable lookup with an empty-string fallback."""
    if name in os.environ:
        return os.environ[name]
    return default
|
||||
|
||||
|
||||
# ----------------------------------------------------------
|
||||
# SHARED
|
||||
# ----------------------------------------------------------
|
||||
|
||||
def short_commit():
    """Publish the abbreviated HEAD SHA as the `short_commit` step output."""
    raw = subprocess.check_output(
        "git rev-parse --short HEAD", shell=True, cwd=ROOT
    )
    sha = raw.decode().strip()
    output("short_commit", sha)
    return sha
|
||||
|
||||
|
||||
# ----------------------------------------------------------
|
||||
# VERSION BUMP
|
||||
# ----------------------------------------------------------
|
||||
|
||||
def bump_beta():
    """Append a `-<channel>.<date>.<n>+<sha>` pre-release suffix to the
    MARKETING_VERSION line in Build.xcconfig.

    The running build counter is persisted in build_number.txt as
    "<date>,<n>"; each invocation rewrites the file with n incremented.
    """
    # NOTE(review): utcnow() is deprecated since Python 3.12;
    # datetime.datetime.now(datetime.UTC) is the modern spelling.
    date = datetime.datetime.utcnow().strftime("%Y.%m.%d")
    release_channel = getenv("RELEASE_CHANNEL", "beta")

    build_file = ROOT / "build_number.txt"
    # NOTE(review): `xcconfig` is unused — the sed command below hard-codes
    # the "Build.xcconfig" filename instead.
    xcconfig = ROOT / "Build.xcconfig"

    # abbreviated HEAD SHA, used as build metadata (the +... part)
    short = subprocess.check_output(
        "git rev-parse --short HEAD",
        shell=True,
        cwd=ROOT
    ).decode().strip()

    def write(num):
        # BSD/macOS sed: -i '' edits in place with no backup suffix.
        # Appends the suffix to the end of the MARKETING_VERSION line.
        run(
            f"""sed -e "/MARKETING_VERSION = .*/s/$/-{release_channel}.{date}.{num}+{short}/" -i '' Build.xcconfig"""
        )
        # persist "<date>,<num>" for the next run
        build_file.write_text(f"{date},{num}")

    # first-ever build: start the counter at 1
    if not build_file.exists():
        write(1)
        return

    # NOTE(review): the stored date is recorded but never compared, so the
    # counter keeps incrementing across days rather than resetting —
    # presumably intentional; confirm if a per-day reset was wanted.
    last = build_file.read_text().strip().split(",")[1]
    write(int(last) + 1)
|
||||
|
||||
|
||||
# ----------------------------------------------------------
|
||||
# VERSION EXTRACTION
|
||||
# ----------------------------------------------------------
|
||||
|
||||
def extract_version():
    """Read MARKETING_VERSION out of Build.xcconfig and publish it as the
    `version` step output."""
    cmd = "grep MARKETING_VERSION Build.xcconfig | sed -e 's/MARKETING_VERSION = //g'"
    raw = subprocess.check_output(cmd, shell=True, cwd=ROOT)
    version = raw.decode().strip()
    output("version", version)
    return version
|
||||
|
||||
|
||||
# ----------------------------------------------------------
|
||||
# CLEAN
|
||||
# ----------------------------------------------------------
|
||||
def clean():
    """Run `make clean`, then wipe Xcode's DerivedData."""
    run("make clean")
    clean_derived_data()
|
||||
|
||||
def clean_derived_data():
    """Best-effort removal of Xcode's DerivedData (never fails the job)."""
    run("rm -rf ~/Library/Developer/Xcode/DerivedData/*", check=False)
|
||||
|
||||
# ----------------------------------------------------------
|
||||
# BUILD
|
||||
# ----------------------------------------------------------
|
||||
|
||||
def build():
    """Build the app, fakesign it, package the IPA and zip the dSYMs.

    All output is appended to build/logs/build.log; xcbeautify renders
    xcodebuild output as GitHub Actions annotations.
    """
    # same steps as the standalone `clean` command (previously duplicated inline)
    clean()
    run("mkdir -p build/logs")

    run(
        "set -o pipefail && "
        "NSUnbufferedIO=YES make -B build "
        "2>&1 | tee -a build/logs/build.log | xcbeautify --renderer github-actions"
    )

    # pipefail so a failing make is not masked by tee's zero exit status
    run("set -o pipefail && make fakesign | tee -a build/logs/build.log")
    run("set -o pipefail && make ipa | tee -a build/logs/build.log")

    # archive debug symbols for crash symbolication
    run("zip -r -9 ./SideStore.dSYMs.zip ./SideStore.xcarchive/dSYMs")
|
||||
|
||||
|
||||
# ----------------------------------------------------------
|
||||
# TESTS BUILD
|
||||
# ----------------------------------------------------------
|
||||
|
||||
def tests_build():
    """Build the test targets, logging to build/logs/tests-build.log."""
    run("mkdir -p build/logs")
    # pipefail (as in build()) so a make failure is not masked by the
    # tee/xcbeautify pipeline's exit status
    run(
        "set -o pipefail && "
        "NSUnbufferedIO=YES make -B build-tests "
        "2>&1 | tee -a build/logs/tests-build.log | xcbeautify --renderer github-actions"
    )
|
||||
|
||||
|
||||
# ----------------------------------------------------------
|
||||
# TESTS RUN
|
||||
# ----------------------------------------------------------
|
||||
|
||||
def tests_run():
    """Boot the simulator, run the test suite and zip the results."""
    run("mkdir -p build/logs")
    # detached: start booting the simulator in the background and return
    run("nohup make -B boot-sim-async </dev/null >> build/logs/tests-run.log 2>&1 &")

    # pipefail so a failing make step is not masked by tee's exit status
    run("set -o pipefail && make -B sim-boot-check | tee -a build/logs/tests-run.log")

    run("set -o pipefail && make run-tests 2>&1 | tee -a build/logs/tests-run.log")

    run("zip -r -9 ./test-results.zip ./build/tests")
|
||||
|
||||
|
||||
# ----------------------------------------------------------
|
||||
# LOG ENCRYPTION
|
||||
# ----------------------------------------------------------
|
||||
|
||||
def encrypt_logs(name):
    """Zip build/logs into <name>.zip, encrypted with BUILD_LOG_ZIP_PASSWORD.

    Falls back to a well-known default password when the secret is unset,
    so the logs are then only obfuscated, not protected.
    """
    import shlex  # local import: only needed here

    pwd = getenv("BUILD_LOG_ZIP_PASSWORD", "12345")
    # quote the password so shell metacharacters in the secret cannot
    # break (or inject into) the command line
    run(f"cd build/logs && zip -e -P {shlex.quote(pwd)} ../../{name}.zip *")
|
||||
|
||||
|
||||
# ----------------------------------------------------------
|
||||
# RELEASE NOTES
|
||||
# ----------------------------------------------------------
|
||||
|
||||
def release_notes(tag):
    """Regenerate release notes for *tag* covering commits since the last
    successful CI run on this branch."""
    # last commit that passed CI on this branch, per the GitHub CLI
    last = subprocess.check_output(
        "gh run list --branch $(git branch --show-current) "
        "--status success --json headSha --jq '.[0].headSha'",
        shell=True,
        cwd=ROOT
    ).decode().strip()

    if not last or last == "null":
        # no successful run yet: fall back to the repository's root commit
        last = subprocess.check_output(
            "git rev-list --max-parents=0 HEAD",
            shell=True,
            cwd=ROOT
        ).decode().strip()

    # the helper lives under scripts/ and run() executes from ROOT —
    # matching how publish_apps invokes scripts/update_apps.py
    run(f"python3 scripts/update_release_notes.py {last} {tag}")
|
||||
|
||||
|
||||
# ----------------------------------------------------------
|
||||
# PUBLISH SOURCE.JSON
|
||||
# ----------------------------------------------------------
|
||||
|
||||
def publish_apps(short_commit):
    """Regenerate source.json, then commit and push it (best effort).

    Every step uses check=False: publishing must never fail the workflow.
    """
    run("git config user.name 'GitHub Actions'", check=False)
    run("git config user.email 'github-actions@github.com'", check=False)

    run("python3 scripts/update_apps.py './_includes/source.json'", check=False)

    run("git add ./_includes/source.json", check=False)
    # `|| true` keeps the step green when there is nothing to commit
    run(
        f"git commit -m ' - updated for {short_commit} deployment' || true",
        check=False
    )
    run("git push", check=False)
|
||||
|
||||
|
||||
# ----------------------------------------------------------
|
||||
# ENTRYPOINT
|
||||
# ----------------------------------------------------------
|
||||
|
||||
def main():
    """Dispatch the sub-command given on the command line.

    Exits with a usage message instead of an IndexError when no command
    is supplied.
    """
    if len(sys.argv) < 2:
        raise SystemExit("usage: ci.py <command> [args...]")
    cmd = sys.argv[1]

    # zero-argument commands
    dispatch = {
        "commit-id": short_commit,
        "commid-id": short_commit,  # historical typo kept so existing workflows don't break
        "bump-beta": bump_beta,
        "version": extract_version,
        "clean": clean,
        "cleanDerivedData": clean_derived_data,
        "build": build,
        "tests-build": tests_build,
        "tests-run": tests_run,
        "encrypt-build": lambda: encrypt_logs("encrypted-build-logs"),
        "encrypt-tests-build": lambda: encrypt_logs("encrypted-tests-build-logs"),
        "encrypt-tests-run": lambda: encrypt_logs("encrypted-tests-run-logs"),
    }

    if cmd in dispatch:
        dispatch[cmd]()
    elif cmd == "release-notes":
        release_notes(sys.argv[2])
    elif cmd == "publish":
        publish_apps(sys.argv[2])
    else:
        raise SystemExit(f"Unknown command {cmd}")
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
192
scripts/update_apps.py
Executable file
192
scripts/update_apps.py
Executable file
@@ -0,0 +1,192 @@
|
||||
#!/usr/bin/env python3
"""Update a SideStore v2 sources JSON file in place with a new release.

All release metadata is supplied through environment variables; the path
of the JSON file to rewrite is the single CLI argument.  Exits non-zero
on any missing parameter or malformed input.
"""

import os
import json
import sys

# SIDESTORE_BUNDLE_ID = "com.SideStore.SideStore"

# Release metadata, taken from the environment (all default to None).
VERSION_IPA = os.getenv("VERSION_IPA")
VERSION_DATE = os.getenv("VERSION_DATE")
IS_BETA = os.getenv("IS_BETA")
RELEASE_CHANNEL = os.getenv("RELEASE_CHANNEL")
SIZE = os.getenv("SIZE")
SHA256 = os.getenv("SHA256")
LOCALIZED_DESCRIPTION = os.getenv("LOCALIZED_DESCRIPTION")
DOWNLOAD_URL = os.getenv("DOWNLOAD_URL")
# BUNDLE_IDENTIFIER = os.getenv("BUNDLE_IDENTIFIER", SIDESTORE_BUNDLE_ID)
BUNDLE_IDENTIFIER = os.getenv("BUNDLE_IDENTIFIER")

# Uncomment to debug/test by simulating dummy input locally
# VERSION_IPA = os.getenv("VERSION_IPA", "0.0.0")
# VERSION_DATE = os.getenv("VERSION_DATE", "2000-12-18T00:00:00Z")
# IS_BETA = os.getenv("IS_BETA", True)
# RELEASE_CHANNEL = os.getenv("RELEASE_CHANNEL", "alpha")
# SIZE = int(os.getenv("SIZE", "0"))  # Convert to integer
# SHA256 = os.getenv("SHA256", "")
# LOCALIZED_DESCRIPTION = os.getenv("LOCALIZED_DESCRIPTION", "Invalid Update")
# DOWNLOAD_URL = os.getenv("DOWNLOAD_URL", "https://github.com/SideStore/SideStore/releases/download/0.0.0/SideStore.ipa")

# Check if input file is provided
if len(sys.argv) < 2:
    print("Usage: python3 update_apps.py <input_file>")
    sys.exit(1)

input_file = sys.argv[1]
print(f"Input File: {input_file}")

# Echo every parameter for workflow-log debugging
print(" ====> Required parameter list <====")
print("Bundle Identifier:", BUNDLE_IDENTIFIER)
print("Version:", VERSION_IPA)
print("Version Date:", VERSION_DATE)
print("IsBeta:", IS_BETA)
print("ReleaseChannel:", RELEASE_CHANNEL)
print("Size:", SIZE)
print("Sha256:", SHA256)
print("Localized Description:", LOCALIZED_DESCRIPTION)
print("Download URL:", DOWNLOAD_URL)

# IS_BETA is optional: default to False, then normalize truthy strings.
if IS_BETA is None:
    print("Setting IS_BETA = False since no value was provided")
    IS_BETA = False

if str(IS_BETA).lower() in ["true", "1", "yes"]:
    IS_BETA = True

# Read the input JSON file
try:
    with open(input_file, "r") as file:
        data = json.load(file)
except Exception as e:
    print(f"Error reading the input file: {e}")
    sys.exit(1)

# All remaining parameters are mandatory (IS_BETA already defaulted above).
if (not BUNDLE_IDENTIFIER or
    not VERSION_IPA or
    not VERSION_DATE or
    not RELEASE_CHANNEL or
    not SIZE or
    not SHA256 or
    not LOCALIZED_DESCRIPTION or
    not DOWNLOAD_URL):
    print("One or more required parameter(s) were not defined as environment variable(s)")
    sys.exit(1)

# Convert to integer (raises ValueError on garbage, which is acceptable here)
SIZE = int(SIZE)

# Process the JSON data
updated = False

# Legacy v1-format update logic, kept for reference:
# apps = data.get("apps", [])
# appsToUpdate = [app for app in apps if app.get("bundleIdentifier") == BUNDLE_IDENTIFIER]
# if len(appsToUpdate) == 0:
#     print("No app with the specified bundle identifier found.")
#     sys.exit(1)

# if len(appsToUpdate) > 1:
#     print(f"Multiple apps with same `bundleIdentifier` = ${BUNDLE_IDENTIFIER} are not allowed!")
#     sys.exit(1)

# app = appsToUpdate[0]
# # Update app-level metadata for store front page
# app.update({
#     "beta": IS_BETA,
# })

# versions = app.get("versions", [])

# versionIfExists = [version for version in versions if version == VERSION_IPA]
# if versionIfExists:  # current version is a duplicate, so reject it
#     print(f"`version` = ${VERSION_IPA} already exists!, new build cannot have an existing version, Aborting!")
#     sys.exit(1)

# # create an entry and keep ready
# new_version = {
#     "version": VERSION_IPA,
#     "date": VERSION_DATE,
#     "localizedDescription": LOCALIZED_DESCRIPTION,
#     "downloadURL": DOWNLOAD_URL,
#     "size": SIZE,
#     "sha256": SHA256,
# }

# if versions is []:
#     versions.append(new_version)
# else:
#     # versions.insert(0, new_version) # insert at front
#     versions[0] = new_version # replace top one


# make it lowercase so channel comparison is case-insensitive
RELEASE_CHANNEL = RELEASE_CHANNEL.lower()

# Only the v2 schema (releaseChannels) is supported by the logic below.
version = data.get("version", 1)
if int(version) < 2:
    print("Only v2 and above are supported for direct updates to sources.json on push")
    sys.exit(1)

for app in data.get("apps", []):
    if app.get("bundleIdentifier") == BUNDLE_IDENTIFIER:
        if RELEASE_CHANNEL == "stable" :
            # Update app-level metadata for store front page
            # (only stable releases change what the storefront shows)
            app.update({
                "version": VERSION_IPA,
                "versionDate": VERSION_DATE,
                "size": SIZE,
                "sha256": SHA256,
                "localizedDescription": LOCALIZED_DESCRIPTION,
                "downloadURL": DOWNLOAD_URL,
            })

        # Process the release channels array.  When absent, attach the
        # same (still empty) list to the app so the insert below is
        # visible through app["releaseChannels"] — deliberate aliasing.
        channels = app.get("releaseChannels", [])
        if not channels:
            app["releaseChannels"] = channels

        # create an entry and keep ready
        new_version = {
            "version": VERSION_IPA,
            "date": VERSION_DATE,
            "localizedDescription": LOCALIZED_DESCRIPTION,
            "downloadURL": DOWNLOAD_URL,
            "size": SIZE,
            "sha256": SHA256,
        }

        tracks = [track for track in channels if track.get("track") == RELEASE_CHANNEL]
        if len(tracks) > 1:
            # NOTE(review): "${...}" inside an f-string prints a literal
            # "$" before the value — presumably a shell-ism typo.
            print(f"Multiple tracks with same `track` name = ${RELEASE_CHANNEL} are not allowed!")
            sys.exit(1)

        if not tracks:
            # there was no entry for this release channel, so create one
            track = {
                "track": RELEASE_CHANNEL,
                "releases": [new_version]
            }
            channels.insert(0, track)
        else:
            track = tracks[0] # first result is the selected track
            # Replace the existing TOP version object entry (history is
            # intentionally not grown — see the legacy block above).
            # NOTE(review): raises IndexError if an existing track has an
            # empty "releases" list — confirm that cannot occur.
            track["releases"][0] = new_version

        updated = True
        break

if not updated:
    print("No app with the specified bundle identifier found.")
    sys.exit(1)

# Save the updated JSON back to the input file
try:
    print("\nUpdated Sources File:\n")
    print(json.dumps(data, indent=2, ensure_ascii=False))
    with open(input_file, "w", encoding="utf-8") as file:
        json.dump(data, file, indent=2, ensure_ascii=False)
    print("JSON successfully updated.")
except Exception as e:
    print(f"Error writing to the file: {e}")
    sys.exit(1)
|
||||
381
scripts/update_release_notes.py
Normal file
381
scripts/update_release_notes.py
Normal file
@@ -0,0 +1,381 @@
|
||||
#!/usr/bin/env python3
|
||||
import subprocess
|
||||
import sys
|
||||
import os
|
||||
import re
|
||||
|
||||
IGNORED_AUTHORS = [
|
||||
|
||||
]
|
||||
|
||||
TAG_MARKER = "###"
|
||||
HEADER_MARKER = "####"
|
||||
|
||||
def run_command(cmd):
    """Execute *cmd* in a shell and return its stdout with surrounding
    whitespace removed."""
    result = subprocess.check_output(cmd, shell=True, text=True)
    return result.strip()
|
||||
|
||||
def get_head_commit():
    """SHA of the current HEAD commit."""
    return run_command("git rev-parse HEAD")
|
||||
|
||||
def get_commit_messages(last_successful, current="HEAD"):
    """Subject lines of the commits in ``last_successful..current``.

    Returns an empty list when the range contains no commits.
    """
    log = run_command(f"git log {last_successful}..{current} --pretty=format:%s")
    return log.splitlines() if log else []
|
||||
|
||||
def get_authors_in_range(commit_range, fmt="%an"):
    """Distinct commit authors in *commit_range* (format *fmt*), minus
    anyone listed in IGNORED_AUTHORS."""
    raw = run_command(f"git log {commit_range} --pretty=format:{fmt}")
    if not raw:
        return set()
    names = {line.strip() for line in raw.splitlines() if line.strip()}
    return names - set(IGNORED_AUTHORS)
|
||||
|
||||
def get_first_commit_of_repo():
    """SHA of the repository's root commit (first of possibly several)."""
    revs = run_command("git rev-list --max-parents=0 HEAD")
    first, *_ = revs.splitlines()
    return first
|
||||
|
||||
def get_branch():
    """Commit where the current branch diverged from the default remote
    branch.

    Falls back to the repository's root commit when origin/HEAD cannot
    be resolved (missing remote, shallow clone, ...).
    """
    try:
        origin_head = run_command("git rev-parse --abbrev-ref origin/HEAD")
        default_branch = origin_head.rsplit('/', 1)[-1]
        return run_command(f"git merge-base HEAD origin/{default_branch}")
    except Exception:
        return get_first_commit_of_repo()
|
||||
|
||||
def get_repo_url():
    """Return the origin remote URL normalized to an https:// web URL.

    SSH remotes (``git@host:org/repo.git``) are rewritten to
    ``https://host/org/repo``; a trailing ``.git`` is stripped in either
    form.
    """
    url = run_command("git config --get remote.origin.url")
    if url.startswith("git@"):
        # Rewrite only the first colon (the host:path separator) and then
        # prefix the scheme.  The old chained replace(":", "/") ran AFTER
        # "https://" was prepended and also mangled the scheme's colon,
        # producing "https///host/...".
        url = "https://" + url[len("git@"):].replace(":", "/", 1)
    if url.endswith(".git"):
        url = url[:-4]
    return url
|
||||
|
||||
def format_contributor(author):
    """Render an author name as a GitHub-style @handle.

    Names already starting with '@' pass through unchanged; otherwise the
    first whitespace-separated token gets an '@' prefix.
    """
    if author[:1] == '@':
        return author
    first_token = author.split()[0]
    return '@' + first_token
|
||||
|
||||
def format_commit_message(msg):
    """Turn a raw commit subject into a markdown bullet line."""
    text = msg.lstrip()
    if text.startswith("-"):
        # strip an existing leading dash so bullets are not doubled
        text = text[1:].strip()
    return f"- {text}"
|
||||
|
||||
# (removed) A stale duplicate of generate_release_notes() lived here: its
# `def` line was commented out but the orphaned body was left behind,
# referencing parameters (last_successful, tag, branch) that do not exist
# at module scope.  The maintained implementation follows below.
|
||||
|
||||
def generate_release_notes(last_successful, tag, branch):
    """Generate a markdown release-notes section for *tag*.

    Builds "### <tag>" followed by "What's Changed", an optional
    "New Contributors" list, and an optional "Full Changelog" compare
    link covering last_successful..HEAD.  *branch* is the git range used
    to decide which authors count as previously seen.
    """
    current_commit = get_head_commit()
    try:
        # Try to get commit messages using the provided last_successful commit
        messages = get_commit_messages(last_successful, current_commit)
    except subprocess.CalledProcessError:
        # If the range is invalid (e.g. a force push made last_successful
        # unreachable), fall back to recent commits on the current branch.
        # NOTE(review): the message says "last 10" but HEAD~5 yields at
        # most 5 commits — confirm which was intended.
        print("\nInvalid revision range error, using last 10 commits as fallback.\n")
        fallback_commit = run_command("git rev-parse HEAD~5")
        messages = get_commit_messages(fallback_commit, current_commit)
        last_successful = fallback_commit

    # Start with the tag header
    new_section = f"{TAG_MARKER} {tag}\n"

    # What's Changed section (always present)
    new_section += f"{HEADER_MARKER} What's Changed\n"

    if not messages or last_successful == current_commit:
        new_section += "- Nothing...\n"
    else:
        for msg in messages:
            new_section += f"{format_commit_message(msg)}\n"

    # New Contributors section (only if there are new contributors):
    # authors of the new range who never appear in the branch's history
    all_previous_authors = get_authors_in_range(f"{branch}")
    recent_authors = get_authors_in_range(f"{last_successful}..{current_commit}")
    new_contributors = recent_authors - all_previous_authors

    if new_contributors:
        new_section += f"\n{HEADER_MARKER} New Contributors\n"
        for author in sorted(new_contributors):
            new_section += f"- {format_contributor(author)} made their first contribution\n"

    # Full Changelog section (only if there are changes)
    if messages and last_successful != current_commit:
        repo_url = get_repo_url()
        changelog_link = f"{repo_url}/compare/{last_successful}...{current_commit}"
        new_section += f"\n{HEADER_MARKER} Full Changelog: [{last_successful[:8]}...{current_commit[:8]}]({changelog_link})\n"

    return new_section
|
||||
|
||||
def update_release_md(existing_content, new_section, tag):
    """
    Merge *new_section* (a "### <tag>" block) into *existing_content*:
    1. If tag exists, update it (replace in place)
    2. Special tags (alpha, beta, nightly) stay at the top in that order
    3. Numbered tags follow special tags
    4. Remove duplicate tags
    5. Insert new numbered tags at the top of the numbered section
    """
    tag_lower = tag.lower()
    is_special_tag = tag_lower in ["alpha", "beta", "nightly"]

    # Empty file: the new section is the whole document.
    if not existing_content:
        return new_section

    # Split the content on "### ..." headers; the capturing group keeps
    # each header as its own list element, so headers land at odd indices
    # with their section body at the following even index.  The header
    # element has no trailing newline ($ matches before it), which the
    # .index(f"{TAG_MARKER} {t}") lookups below rely on.
    pattern = fr'(^{TAG_MARKER} .*$)'
    sections = re.split(pattern, existing_content, flags=re.MULTILINE)

    # Create a list to store the processed content
    processed_sections = []

    # Track special tag positions and whether tag was found
    special_tags_map = {"alpha": False, "beta": False, "nightly": False}
    last_special_index = -1
    tag_found = False
    # NOTE(review): numbered_tag_index is tracked but never read afterwards.
    numbered_tag_index = -1

    i = 0
    while i < len(sections):
        # Check if this is a header
        if i % 2 == 1:  # Headers are at odd indices
            header = sections[i]
            # NOTE(review): `content` is assigned but unused.
            content = sections[i+1] if i+1 < len(sections) else ""
            # drop the "### " prefix to get the bare tag name
            current_tag = header[3:].strip().lower()

            # Check for special tags to track their positions
            if current_tag in special_tags_map:
                special_tags_map[current_tag] = True
                last_special_index = len(processed_sections)

            # Check if this is the first numbered tag
            elif re.match(r'^[0-9]+\.[0-9]+(\.[0-9]+)?$', current_tag) and numbered_tag_index == -1:
                numbered_tag_index = len(processed_sections)

            # If this is the tag we're updating, drop its old header+body;
            # the fresh section is inserted later.  Later duplicates of the
            # same tag are dropped too (rule 4).
            if current_tag == tag_lower:
                if not tag_found:  # Replace the first occurrence
                    tag_found = True
                    i += 2  # Skip the content
                    continue
                else:  # Skip duplicate occurrences
                    i += 2
                    continue

        # Add the current section
        processed_sections.append(sections[i])
        i += 1

    # Determine where to insert the new section
    if tag_found:
        # The tag existed before: re-insert the fresh section at its
        # canonical position.
        if is_special_tag:
            # For special tags, insert after last special tag or at beginning
            # NOTE(review): desired_index is computed but never used.
            desired_index = -1
            for pos, t in enumerate(["alpha", "beta", "nightly"]):
                if t == tag_lower:
                    desired_index = pos

            # Find position to insert: just past the last special tag that
            # precedes ours in the alpha/beta/nightly ordering
            insert_pos = 0
            for pos, t in enumerate(["alpha", "beta", "nightly"]):
                if t == tag_lower:
                    break
                if special_tags_map[t]:
                    insert_pos = processed_sections.index(f"{TAG_MARKER} {t}")
                    insert_pos += 2  # Move past the header and content

            # Insert at the determined position
            processed_sections.insert(insert_pos, new_section)
            if insert_pos > 0 and not processed_sections[insert_pos-1].endswith('\n\n'):
                processed_sections.insert(insert_pos, '\n\n')
        else:
            # For numbered tags, insert after special tags but before other numbered tags
            insert_pos = 0

            if last_special_index >= 0:
                # Insert after the last special tag
                insert_pos = last_special_index + 2  # +2 to skip header and content

            processed_sections.insert(insert_pos, new_section)
            if insert_pos > 0 and not processed_sections[insert_pos-1].endswith('\n\n'):
                processed_sections.insert(insert_pos, '\n\n')
    else:
        # Tag doesn't exist yet, determine insertion point
        if is_special_tag:
            # For special tags, maintain alpha, beta, nightly order
            special_tags = ["alpha", "beta", "nightly"]
            insert_pos = 0

            for i, t in enumerate(special_tags):
                if t == tag_lower:
                    # Check if preceding special tags exist
                    for prev_tag in special_tags[:i]:
                        if special_tags_map[prev_tag]:
                            # Find the position after this tag
                            prev_index = processed_sections.index(f"{TAG_MARKER} {prev_tag}")
                            insert_pos = prev_index + 2  # Skip header and content

            processed_sections.insert(insert_pos, new_section)
            if insert_pos > 0 and not processed_sections[insert_pos-1].endswith('\n\n'):
                processed_sections.insert(insert_pos, '\n\n')
        else:
            # For numbered tags, insert after special tags but before other numbered tags
            insert_pos = 0

            if last_special_index >= 0:
                # Insert after the last special tag
                insert_pos = last_special_index + 2  # +2 to skip header and content

            processed_sections.insert(insert_pos, new_section)
            if insert_pos > 0 and not processed_sections[insert_pos-1].endswith('\n\n'):
                processed_sections.insert(insert_pos, '\n\n')

    # Combine sections ensuring proper spacing
    result = ""
    for i, section in enumerate(processed_sections):
        if i > 0 and section.startswith(f"{TAG_MARKER} "):
            # Ensure single blank line before headers
            if not result.endswith("\n\n"):
                result = result.rstrip("\n") + "\n\n"
        result += section

    return result.rstrip() + "\n"
|
||||
|
||||
|
||||
def retrieve_tag_content(tag, file_path):
    """Return the body of the ``### <tag>`` section in *file_path*.

    Matching is case-insensitive and whole-line.  Returns "" when the
    file or the tag is missing; the returned body excludes the header
    and is stripped of surrounding whitespace.
    """
    if not os.path.exists(file_path):
        return ""

    with open(file_path, "r") as f:
        content = f.read()

    # Locate the tag's own header line
    header_re = re.compile(
        fr'^{TAG_MARKER} ' + re.escape(tag) + r'$',
        re.MULTILINE | re.IGNORECASE,
    )
    found = header_re.search(content)
    if not found:
        return ""

    # Body starts just past the header (and its newline, when present)
    body_start = found.end()
    if body_start < len(content) and content[body_start] == "\n":
        body_start += 1

    # Body runs until the next "### " header, or to the end of the file
    following = re.search(fr'^{TAG_MARKER} ', content[body_start:], re.MULTILINE)
    if following:
        return content[body_start:body_start + following.start()].strip()
    return content[body_start:].strip()
|
||||
|
||||
def main():
    """CLI entry point: generate a new release-notes section, or print an
    existing one via ``--retrieve <tagname>``."""
    release_file = "release-notes.md"
    args = sys.argv[1:]

    if not args:
        print("Usage: python release.py <last_successful_commit> [tag] [branch]")
        print(" or: python release.py --retrieve <tagname>")
        sys.exit(1)

    # Retrieval mode: look up a stored section and print it.
    if args[0] == "--retrieve":
        if len(args) < 2:
            print("Error: Missing tag name after --retrieve")
            sys.exit(1)

        section = retrieve_tag_content(args[1], file_path=release_file)
        if section:
            print(section)
        else:
            print(f"Tag '{args[1]}' not found in '{release_file}'")
        return

    # Generation mode: tag defaults to the HEAD SHA, branch to the CI ref
    # or the computed branch base.
    last_successful = args[0]
    tag = args[1] if len(args) > 1 else get_head_commit()
    branch = args[2] if len(args) > 2 else (os.environ.get("GITHUB_REF") or get_branch())

    new_section = generate_release_notes(last_successful, tag, branch)

    previous = ""
    if os.path.exists(release_file):
        with open(release_file, "r") as f:
            previous = f.read()

    merged = update_release_md(previous, new_section, tag)

    with open(release_file, "w") as f:
        f.write(merged)

    # Echo the freshly generated section for the workflow log.
    print(new_section)
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
Reference in New Issue
Block a user