mirror of
https://github.com/SideStore/SideStore.git
synced 2026-02-27 23:47:39 +01:00
CI: full rewrite - moved logic into ci.py and kept workflow scripts mostly dummy
This commit is contained in:
258
scripts/ci/generate_release_notes.py
Normal file
258
scripts/ci/generate_release_notes.py
Normal file
@@ -0,0 +1,258 @@
|
||||
#!/usr/bin/env python3
|
||||
import subprocess
|
||||
import sys
|
||||
import os
|
||||
import re
|
||||
from pathlib import Path
|
||||
|
||||
# Author names excluded from the "New Contributors" section (e.g. CI bots).
IGNORED_AUTHORS = []

# Markdown heading prefix for per-tag release sections ("### <tag>").
TAG_MARKER = "###"
# Markdown heading prefix for sub-headings inside a section.
HEADER_MARKER = "####"
|
||||
|
||||
|
||||
# ----------------------------------------------------------
|
||||
# helpers
|
||||
# ----------------------------------------------------------
|
||||
|
||||
def run(cmd: str) -> str:
    """Execute *cmd* through the shell and return its stdout, whitespace-stripped."""
    output = subprocess.check_output(cmd, shell=True, text=True)
    return output.strip()
|
||||
|
||||
|
||||
def head_commit():
    """Full SHA of the currently checked-out commit (HEAD)."""
    sha = run("git rev-parse HEAD")
    return sha
|
||||
|
||||
|
||||
def first_commit():
    """SHA of the repository's root (parentless) commit — first one listed."""
    roots = run("git rev-list --max-parents=0 HEAD").splitlines()
    return roots[0]
|
||||
|
||||
|
||||
def repo_url():
    """HTTPS URL of the origin remote, without a trailing ``.git``."""
    origin = run("git config --get remote.origin.url")
    if origin.startswith("git@"):
        # Rewrite the SSH form (git@host:owner/repo) into an HTTPS URL.
        origin = origin.replace("git@", "https://").replace(":", "/")
    return origin.removesuffix(".git")
|
||||
|
||||
|
||||
def commit_messages(start, end="HEAD"):
    """Commit subject lines in ``start..end`` (empty list when the range is empty).

    If git rejects the range (e.g. *start* is unknown), fall back to the
    five commits preceding *end*.
    """
    try:
        log = run(f"git log {start}..{end} --pretty=format:%s")
    except subprocess.CalledProcessError:
        base = run("git rev-parse HEAD~5")
        return run(f"git log {base}..{end} --pretty=format:%s").splitlines()
    return log.splitlines() if log else []
|
||||
|
||||
|
||||
def authors(range_expr, fmt="%an"):
    """Distinct author names appearing in *range_expr*, minus IGNORED_AUTHORS.

    Returns an empty set when git rejects the range expression.
    """
    try:
        raw = run(f"git log {range_expr} --pretty=format:{fmt}")
    except subprocess.CalledProcessError:
        return set()
    names = {line.strip() for line in raw.splitlines() if line.strip()}
    return names - set(IGNORED_AUTHORS)
|
||||
|
||||
|
||||
def branch_base():
    """Merge-base of HEAD with origin's default branch, else the root commit.

    Best-effort: any failure (no origin/HEAD, shallow clone, ...) falls back
    to the repository's first commit.
    """
    try:
        default = run("git rev-parse --abbrev-ref origin/HEAD").split("/")[-1]
        return run(f"git merge-base HEAD origin/{default}")
    except Exception:
        return first_commit()
|
||||
|
||||
|
||||
def fmt_msg(msg):
    """Normalize a commit subject into a single markdown bullet line."""
    text = msg.lstrip()
    if text.startswith("-"):
        # Already bulleted: strip the dash so we don't double it up.
        text = text[1:].strip()
    return f"- {text}"
|
||||
|
||||
|
||||
def fmt_author(author):
    """Render an author as a GitHub-style @handle (first word of the name)."""
    if author.startswith("@"):
        return author
    return f"@{author.split()[0]}"
|
||||
|
||||
|
||||
# ----------------------------------------------------------
|
||||
# release note generation
|
||||
# ----------------------------------------------------------
|
||||
|
||||
def generate_release_notes(last_successful, tag, branch):
    """Build the markdown release-notes section for *tag*.

    Covers commits in ``last_successful..HEAD``: a "What's Changed" bullet
    list, a "New Contributors" list (authors not already present on
    *branch*), and a full-changelog compare link.
    """
    current = head_commit()
    messages = commit_messages(last_successful, current)

    lines = [f"{TAG_MARKER} {tag}\n", f"{HEADER_MARKER} What's Changed\n"]

    if not messages or last_successful == current:
        lines.append("- Nothing...\n")
    else:
        lines.extend(f"{fmt_msg(m)}\n" for m in messages)

    known = authors(branch)
    newcomers = authors(f"{last_successful}..{current}") - known

    if newcomers:
        lines.append(f"\n{HEADER_MARKER} New Contributors\n")
        lines.extend(
            f"- {fmt_author(a)} made their first contribution\n"
            for a in sorted(newcomers)
        )

    if messages and last_successful != current:
        url = repo_url()
        lines.append(
            f"\n{HEADER_MARKER} Full Changelog: "
            f"[{last_successful[:8]}...{current[:8]}]"
            f"({url}/compare/{last_successful}...{current})\n"
        )

    return "".join(lines)
|
||||
|
||||
|
||||
# ----------------------------------------------------------
|
||||
# markdown update
|
||||
# ----------------------------------------------------------
|
||||
|
||||
def update_release_md(existing, new_section, tag):
    """Merge *new_section* for *tag* into the existing release-notes markdown.

    Any previous section for *tag* is removed.  Special channel sections
    ("alpha", "beta", "nightly") stay at the top in that fixed order; any
    other tag is inserted directly after the last special section present.

    Returns the merged document, terminated by a single newline.
    """
    if not existing:
        return new_section

    tag_lower = tag.lower()
    special_order = ["alpha", "beta", "nightly"]
    is_special = tag_lower in special_order

    # Split into [preamble, header, body, header, body, ...]; the captured
    # "### <tag>" lines land at odd indices.
    pattern = fr"(^{TAG_MARKER} .*$)"
    parts = re.split(pattern, existing, flags=re.MULTILINE)

    processed = []
    special_seen = {name: False for name in special_order}
    last_special_idx = -1

    i = 0
    while i < len(parts):
        if i % 2 == 1:
            name = parts[i][len(TAG_MARKER):].strip().lower()
            if name in special_seen:
                special_seen[name] = True
                last_special_idx = len(processed)
            if name == tag_lower:
                # Drop the outdated header + body for this tag.
                i += 2
                continue
        processed.append(parts[i])
        i += 1

    def _header_index(name):
        # Case-insensitive header lookup.  BUG FIX: the previous exact
        # `processed.index(f"### {t}")` raised ValueError whenever the file
        # contained a capitalized header such as "### Alpha", because the
        # seen-names are tracked lowercased.
        prefix = f"{TAG_MARKER} "
        for idx, part in enumerate(processed):
            if part.startswith(prefix) and \
                    part[len(TAG_MARKER):].strip().lower() == name:
                return idx
        return -1

    insert_pos = 0
    if is_special:
        # Insert after the last special section that precedes this tag in
        # the fixed channel order (header at idx, its body at idx + 1).
        for name in special_order:
            if name == tag_lower:
                break
            if special_seen[name]:
                idx = _header_index(name)
                if idx >= 0:
                    insert_pos = idx + 2
    elif last_special_idx >= 0:
        insert_pos = last_special_idx + 2

    processed.insert(insert_pos, new_section)

    # Reassemble, guaranteeing a blank line before every section header.
    # BUG FIX: also require `result` to be non-empty so documents starting
    # with a header no longer accumulate spurious leading blank lines.
    result = ""
    for part in processed:
        if (
            part.startswith(f"{TAG_MARKER} ")
            and result
            and not result.endswith("\n\n")
        ):
            result = result.rstrip("\n") + "\n\n"
        result += part

    return result.rstrip() + "\n"
|
||||
|
||||
|
||||
# ----------------------------------------------------------
|
||||
# retrieval
|
||||
# ----------------------------------------------------------
|
||||
|
||||
def retrieve_tag(tag, file_path):
    """Return the body of the ``### <tag>`` section in *file_path* ("" if absent).

    The header match is case-insensitive; the body runs until the next
    ``### `` header or end of file, and is returned stripped.
    """
    if not file_path.exists():
        return ""

    content = file_path.read_text()

    header = re.search(
        fr"^{TAG_MARKER} {re.escape(tag)}$",
        content,
        re.MULTILINE | re.IGNORECASE,
    )
    if header is None:
        return ""

    begin = header.end()
    # Step past the newline that terminates the header line itself.
    if begin < len(content) and content[begin] == "\n":
        begin += 1

    following = re.search(fr"^{TAG_MARKER} ", content[begin:], re.MULTILINE)
    finish = begin + following.start() if following else len(content)

    return content[begin:finish].strip()
|
||||
|
||||
|
||||
# ----------------------------------------------------------
|
||||
# entrypoint
|
||||
# ----------------------------------------------------------
|
||||
|
||||
def main():
    """CLI entry point.

    Two modes, both operating on <output-dir>/release-notes.md:
      * generation:  <last_successful> [tag] [branch] [--output-dir DIR]
      * retrieval:   --retrieve <tag> [--output-dir DIR]
    """
    args = sys.argv[1:]

    if not args:
        sys.exit(
            "Usage:\n"
            " generate_release_notes.py <last_successful> [tag] [branch] [--output-dir DIR]\n"
            " generate_release_notes.py --retrieve <tag> [--output-dir DIR]"
        )

    # parse optional output dir
    output_dir = Path.cwd()

    if "--output-dir" in args:
        idx = args.index("--output-dir")
        try:
            output_dir = Path(args[idx + 1]).resolve()
        except IndexError:
            sys.exit("Missing value for --output-dir")

        # Remove the flag and its value so positional parsing below is unaffected.
        del args[idx:idx + 2]

    output_dir.mkdir(parents=True, exist_ok=True)
    release_file = output_dir / "release-notes.md"

    # retrieval mode: print the stored section for a tag and stop.
    if args[0] == "--retrieve":
        if len(args) < 2:
            sys.exit("Missing tag after --retrieve")

        print(retrieve_tag(args[1], release_file))
        return

    # generation mode
    last_successful = args[0]
    # Defaults: tag falls back to the full HEAD SHA; branch to the CI ref
    # (GITHUB_REF) or, failing that, the merge base with the default branch.
    tag = args[1] if len(args) > 1 else head_commit()
    branch = args[2] if len(args) > 2 else (
        os.environ.get("GITHUB_REF") or branch_base()
    )

    new_section = generate_release_notes(last_successful, tag, branch)

    existing = (
        release_file.read_text()
        if release_file.exists()
        else ""
    )

    updated = update_release_md(existing, new_section, tag)

    release_file.write_text(updated)

    # Echo the freshly generated section for CI log visibility.
    print(new_section)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
159
scripts/ci/generate_source_metadata.py
Normal file
159
scripts/ci/generate_source_metadata.py
Normal file
@@ -0,0 +1,159 @@
|
||||
#!/usr/bin/env python3
|
||||
import datetime
|
||||
import hashlib
|
||||
import json
|
||||
import subprocess
|
||||
from pathlib import Path
|
||||
import argparse
|
||||
|
||||
|
||||
# ----------------------------------------------------------
|
||||
# helpers
|
||||
# ----------------------------------------------------------
|
||||
|
||||
def sh(cmd: str, cwd: Path) -> str:
    """Run *cmd* in a shell at *cwd*; return stripped stdout (raises on failure)."""
    raw = subprocess.check_output(cmd, shell=True, cwd=cwd)
    return raw.decode().strip()
|
||||
|
||||
|
||||
def file_size(path: Path) -> int:
    """Size of *path* in bytes; exits with an error message if it is missing."""
    if path.exists():
        return path.stat().st_size
    raise SystemExit(f"Missing file: {path}")
|
||||
|
||||
|
||||
def sha256(path: Path) -> str:
    """Hex SHA-256 digest of *path*, streamed in 1 MiB chunks."""
    digest = hashlib.sha256()
    with open(path, "rb") as fh:
        for chunk in iter(lambda: fh.read(1024 * 1024), b""):
            digest.update(chunk)
    return digest.hexdigest()
|
||||
|
||||
|
||||
# ----------------------------------------------------------
|
||||
# entry
|
||||
# ----------------------------------------------------------
|
||||
|
||||
def main():
    """Write source_metadata.json describing a SideStore IPA release.

    Steps: parse CLI args, (re)generate release notes by invoking the
    sibling generate_release_notes.py, retrieve the notes for the release
    tag, compute the IPA's size and SHA-256, then dump the combined
    metadata JSON into --output-dir.
    """
    p = argparse.ArgumentParser()

    p.add_argument(
        "--repo-root",
        required=True,
        help="Repo used for git history + release notes",
    )

    p.add_argument(
        "--ipa",
        required=True,
        help="Path to IPA file",
    )

    p.add_argument(
        "--output-dir",
        required=True,
        help="Output Directory where source_metadata.json is written",
    )

    p.add_argument(
        "--release-notes-dir",
        required=True,
        help="Output Directory where release-notes.md is generated/read",
    )

    p.add_argument("--release-tag", required=True)
    p.add_argument("--version", required=True)
    p.add_argument("--marketing-version", required=True)
    p.add_argument("--short-commit", required=True)
    p.add_argument("--release-channel", required=True)
    p.add_argument("--bundle-id", required=True)
    p.add_argument("--is-beta", action="store_true")

    args = p.parse_args()

    repo_root = Path(args.repo_root).resolve()
    ipa_path = Path(args.ipa).resolve()
    out_dir = Path(args.output_dir).resolve()
    notes_dir = Path(args.release_notes_dir).resolve()

    if not repo_root.is_dir():
        raise SystemExit(f"Invalid repo root: {repo_root}")

    if not ipa_path.is_file():
        raise SystemExit(f"Invalid IPA path: {ipa_path}")

    notes_dir.mkdir(parents=True, exist_ok=True)
    out_dir.mkdir(parents=True, exist_ok=True)

    out_file = out_dir / "source_metadata.json"

    # ------------------------------------------------------
    # ensure release notes exist
    # ------------------------------------------------------

    print("Generating release notes…")

    # NOTE(review): invoked as a bare filename with cwd=repo_root — this
    # only resolves if generate_release_notes.py sits at the repo root;
    # confirm against the actual layout (it appears to live in scripts/ci/).
    sh(
        (
            "python3 generate_release_notes.py "
            f"{args.short_commit} {args.release_tag} "
            f"--output-dir \"{notes_dir}\""
        ),
        cwd=repo_root,
    )

    # ------------------------------------------------------
    # retrieve release notes
    # ------------------------------------------------------

    notes = sh(
        (
            "python3 generate_release_notes.py "
            f"--retrieve {args.release_tag} "
            f"--output-dir \"{notes_dir}\""
        ),
        cwd=repo_root,
    )

    # ------------------------------------------------------
    # compute metadata
    # ------------------------------------------------------

    # Timezone-aware UTC timestamp (datetime.UTC requires Python 3.11+).
    now = datetime.datetime.now(datetime.UTC)
    formatted = now.strftime("%Y-%m-%dT%H:%M:%SZ")
    human = now.strftime("%c")

    localized_description = f"""
This is release for:
- version: "{args.version}"
- revision: "{args.short_commit}"
- timestamp: "{human}"

Release Notes:
{notes}
""".strip()

    # NOTE(review): "version_ipa" is populated from --marketing-version;
    # --version only appears inside the description — confirm intentional.
    metadata = {
        "is_beta": bool(args.is_beta),
        "bundle_identifier": args.bundle_id,
        "version_ipa": args.marketing_version,
        "version_date": formatted,
        "release_channel": args.release_channel.lower(),
        "size": file_size(ipa_path),
        "sha256": sha256(ipa_path),
        "download_url": (
            "https://github.com/SideStore/SideStore/releases/download/"
            f"{args.release_tag}/SideStore.ipa"
        ),
        "localized_description": localized_description,
    }

    with open(out_file, "w", encoding="utf-8") as f:
        json.dump(metadata, f, indent=2, ensure_ascii=False)

    print(f"Wrote {out_file}")
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
180
scripts/ci/update_source_metadata.py
Executable file
180
scripts/ci/update_source_metadata.py
Executable file
@@ -0,0 +1,180 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
import json
|
||||
import sys
|
||||
from pathlib import Path
|
||||
|
||||
|
||||
'''
|
||||
metadata.json template
|
||||
|
||||
{
|
||||
"version_ipa": "0.0.0",
|
||||
"version_date": "2000-12-18T00:00:00Z",
|
||||
"is_beta": true,
|
||||
"release_channel": "alpha",
|
||||
"size": 0,
|
||||
"sha256": "",
|
||||
"localized_description": "Invalid Update",
|
||||
"download_url": "https://github.com/SideStore/SideStore/releases/download/0.0.0/SideStore.ipa",
|
||||
"bundle_identifier": "com.SideStore.SideStore"
|
||||
}
|
||||
'''
|
||||
|
||||
|
||||
# ----------------------------------------------------------
|
||||
# args
|
||||
# ----------------------------------------------------------
|
||||
|
||||
# ----------------------------------------------------------
# args
# ----------------------------------------------------------

# NOTE(review): the usage text still says "update_apps.py" — stale name
# for what is now update_source_metadata.py.
if len(sys.argv) < 3:
    print("Usage: python3 update_apps.py <metadata.json> <source.json>")
    sys.exit(1)

metadata_file = Path(sys.argv[1])
source_file = Path(sys.argv[2])


# ----------------------------------------------------------
# load metadata
# ----------------------------------------------------------

if not metadata_file.exists():
    print(f"Missing metadata file: {metadata_file}")
    sys.exit(1)

with open(metadata_file, "r", encoding="utf-8") as f:
    meta = json.load(f)

# Flat view of the fields produced by generate_source_metadata.py.
VERSION_IPA = meta.get("version_ipa")
VERSION_DATE = meta.get("version_date")
IS_BETA = meta.get("is_beta")
RELEASE_CHANNEL = meta.get("release_channel")
SIZE = meta.get("size")
SHA256 = meta.get("sha256")
LOCALIZED_DESCRIPTION = meta.get("localized_description")
DOWNLOAD_URL = meta.get("download_url")
BUNDLE_IDENTIFIER = meta.get("bundle_identifier")

print(" ====> Required parameter list <====")
print("Bundle Identifier:", BUNDLE_IDENTIFIER)
print("Version:", VERSION_IPA)
print("Version Date:", VERSION_DATE)
print("IsBeta:", IS_BETA)
print("ReleaseChannel:", RELEASE_CHANNEL)
print("Size:", SIZE)
print("Sha256:", SHA256)
print("Localized Description:", LOCALIZED_DESCRIPTION)
print("Download URL:", DOWNLOAD_URL)


# ----------------------------------------------------------
# validation
# ----------------------------------------------------------

# NOTE(review): truthiness checks mean SIZE == 0 would be rejected too.
# IS_BETA is deliberately not validated here, since False is a valid value.
if (
    not BUNDLE_IDENTIFIER
    or not VERSION_IPA
    or not VERSION_DATE
    or not RELEASE_CHANNEL
    or not SIZE
    or not SHA256
    or not LOCALIZED_DESCRIPTION
    or not DOWNLOAD_URL
):
    print("One or more required metadata fields missing")
    sys.exit(1)

SIZE = int(SIZE)
RELEASE_CHANNEL = RELEASE_CHANNEL.lower()


# ----------------------------------------------------------
# load or create source.json
# ----------------------------------------------------------

if source_file.exists():
    with open(source_file, "r", encoding="utf-8") as f:
        data = json.load(f)
else:
    print("source.json missing — creating minimal structure")
    data = {
        "version": 2,
        "apps": []
    }

# Only the v2+ source schema is supported.
if int(data.get("version", 1)) < 2:
    print("Only v2 and above are supported")
    sys.exit(1)


# ----------------------------------------------------------
# ensure app entry exists
# ----------------------------------------------------------

apps = data.setdefault("apps", [])

# Locate this app's entry by bundle identifier (None when absent).
app = next(
    (a for a in apps if a.get("bundleIdentifier") == BUNDLE_IDENTIFIER),
    None
)

if app is None:
    print("App entry missing — creating new app entry")
    app = {
        "bundleIdentifier": BUNDLE_IDENTIFIER,
        "releaseChannels": []
    }
    apps.append(app)


# ----------------------------------------------------------
# update logic
# ----------------------------------------------------------

# A stable release also refreshes the app's top-level (default) fields.
if RELEASE_CHANNEL == "stable":
    app.update({
        "version": VERSION_IPA,
        "versionDate": VERSION_DATE,
        "size": SIZE,
        "sha256": SHA256,
        "localizedDescription": LOCALIZED_DESCRIPTION,
        "downloadURL": DOWNLOAD_URL,
    })

channels = app.setdefault("releaseChannels", [])

new_version = {
    "version": VERSION_IPA,
    "date": VERSION_DATE,
    "localizedDescription": LOCALIZED_DESCRIPTION,
    "downloadURL": DOWNLOAD_URL,
    "size": SIZE,
    "sha256": SHA256,
}

tracks = [t for t in channels if t.get("track") == RELEASE_CHANNEL]

if len(tracks) > 1:
    print(f"Multiple tracks named {RELEASE_CHANNEL}")
    sys.exit(1)

if not tracks:
    # First release on this channel: new track goes to the front.
    channels.insert(0, {
        "track": RELEASE_CHANNEL,
        "releases": [new_version],
    })
else:
    # NOTE(review): this assumes an existing track always has at least one
    # release — an empty "releases" list would raise IndexError here.
    tracks[0]["releases"][0] = new_version


# ----------------------------------------------------------
# save
# ----------------------------------------------------------

print("\nUpdated Sources File:\n")
print(json.dumps(data, indent=2, ensure_ascii=False))

with open(source_file, "w", encoding="utf-8") as f:
    json.dump(data, f, indent=2, ensure_ascii=False)

print("JSON successfully updated.")
|
||||
238
scripts/ci/workflow.py
Normal file
238
scripts/ci/workflow.py
Normal file
@@ -0,0 +1,238 @@
|
||||
#!/usr/bin/env python3
|
||||
import os
|
||||
import sys
|
||||
import subprocess
|
||||
import datetime
|
||||
from pathlib import Path
|
||||
|
||||
# REPO ROOT relative to script dir
# (this file lives at <root>/scripts/ci/workflow.py, so parents[2] is <root>)
ROOT = Path(__file__).resolve().parents[2]
|
||||
|
||||
|
||||
# ----------------------------------------------------------
|
||||
# helpers
|
||||
# ----------------------------------------------------------
|
||||
|
||||
def run(cmd, check=True):
    """Echo *cmd*, execute it through the shell at the repo root, print a blank line.

    With check=True (default) a non-zero exit raises CalledProcessError;
    check=False makes the step best-effort.
    """
    print(f"$ {cmd}", flush=True)
    subprocess.run(cmd, shell=True, cwd=ROOT, check=check)
    print("", flush=True)
|
||||
|
||||
|
||||
def getenv(name, default=""):
    """Environment-variable lookup with a default (mirrors os.environ.get)."""
    value = os.environ.get(name)
    return default if value is None else value
|
||||
|
||||
|
||||
# ----------------------------------------------------------
|
||||
# SHARED
|
||||
# ----------------------------------------------------------
|
||||
|
||||
def short_commit():
    """Abbreviated SHA of HEAD in the repository at ROOT."""
    out = subprocess.check_output(
        "git rev-parse --short HEAD",
        shell=True,
        cwd=ROOT,
    )
    return out.decode().strip()
|
||||
|
||||
|
||||
# ----------------------------------------------------------
|
||||
# VERSION BUMP
|
||||
# ----------------------------------------------------------
|
||||
|
||||
def bump_beta():
    """Append "-<channel>.<date>.<n>+<sha>" to MARKETING_VERSION in Build.xcconfig.

    The running build number *n* is persisted in build_number.txt as
    "<date>,<n>" and incremented on every call.
    """
    date = datetime.datetime.now(datetime.UTC).strftime("%Y.%m.%d")
    release_channel = getenv("RELEASE_CHANNEL", "beta")
    build_file = ROOT / "build_number.txt"

    # Consistency: reuse the shared helper instead of duplicating the
    # inline `git rev-parse --short HEAD` subprocess call.
    short = short_commit()

    def write(num):
        # BSD/macOS sed in-place edit (the empty '' suffix is BSD-specific).
        run(
            f"""sed -e "/MARKETING_VERSION = .*/s/$/-{release_channel}.{date}.{num}+{short}/" -i '' {ROOT}/Build.xcconfig"""
        )
        build_file.write_text(f"{date},{num}")

    if not build_file.exists():
        write(1)
        return

    # BUG FIX: tolerate a malformed/legacy state file (no comma, or a
    # non-numeric counter) instead of crashing with IndexError/ValueError.
    try:
        last = int(build_file.read_text().strip().split(",")[1])
    except (IndexError, ValueError):
        last = 0

    # NOTE(review): the counter is NOT reset when the stored date differs
    # from today's — confirm whether per-day numbering was intended.
    write(last + 1)
|
||||
|
||||
|
||||
# ----------------------------------------------------------
|
||||
# VERSION EXTRACTION
|
||||
# ----------------------------------------------------------
|
||||
|
||||
def extract_version():
    """MARKETING_VERSION value extracted from Build.xcconfig via grep/sed."""
    raw = subprocess.check_output(
        "grep MARKETING_VERSION Build.xcconfig | sed -e 's/MARKETING_VERSION = //g'",
        shell=True,
        cwd=ROOT,
    )
    return raw.decode().strip()
|
||||
|
||||
|
||||
# ----------------------------------------------------------
|
||||
# CLEAN
|
||||
# ----------------------------------------------------------
|
||||
def clean():
    """Remove build products via the project Makefile."""
    run("make clean")


def clean_derived_data():
    """Wipe Xcode's DerivedData cache (best-effort: failures ignored)."""
    run("rm -rf ~/Library/Developer/Xcode/DerivedData/*", check=False)


def clean_spm_cache():
    """Wipe the Swift Package Manager cache (best-effort: failures ignored)."""
    run("rm -rf ~/Library/Caches/org.swift.swiftpm/*", check=False)
|
||||
|
||||
# ----------------------------------------------------------
|
||||
# BUILD
|
||||
# ----------------------------------------------------------
|
||||
|
||||
def build():
    """Clean, build the app, fakesign, package the IPA, and zip the dSYMs."""
    # Consistency: reuse the dedicated clean helpers instead of duplicating
    # their exact shell commands inline.
    clean()
    clean_derived_data()
    run("mkdir -p build/logs")

    run(
        "set -o pipefail && "
        "NSUnbufferedIO=YES make -B build "
        "2>&1 | tee -a build/logs/build.log | xcbeautify --renderer github-actions"
    )

    # BUG FIX: without pipefail, a failing make was masked by tee's exit 0
    # (the main build step above already guards against this).
    run("set -o pipefail && make fakesign | tee -a build/logs/build.log")
    run("set -o pipefail && make ipa | tee -a build/logs/build.log")

    run("zip -r -9 ./SideStore.dSYMs.zip ./SideStore.xcarchive/dSYMs")
|
||||
|
||||
|
||||
# ----------------------------------------------------------
|
||||
# TESTS BUILD
|
||||
# ----------------------------------------------------------
|
||||
|
||||
def tests_build():
    """Build the test bundle, teeing output through xcbeautify."""
    run("mkdir -p build/logs")
    # BUG FIX: match build()'s pipefail so a make failure isn't masked by
    # the tee/xcbeautify pipeline exiting 0.
    run(
        "set -o pipefail && "
        "NSUnbufferedIO=YES make -B build-tests "
        "2>&1 | tee -a build/logs/tests-build.log | xcbeautify --renderer github-actions"
    )
|
||||
|
||||
|
||||
# ----------------------------------------------------------
|
||||
# TESTS RUN
|
||||
# ----------------------------------------------------------
|
||||
|
||||
def tests_run():
    """Boot the simulator in the background, run the test suite, zip results."""
    run("mkdir -p build/logs")
    # Fire-and-forget simulator boot; output is appended to the run log.
    run("nohup make -B boot-sim-async </dev/null >> build/logs/tests-run.log 2>&1 &")

    # BUG FIX: pipefail so a failing make isn't masked by tee's exit status.
    run("set -o pipefail && make -B sim-boot-check | tee -a build/logs/tests-run.log")

    run("set -o pipefail && make run-tests 2>&1 | tee -a build/logs/tests-run.log")

    run("zip -r -9 ./test-results.zip ./build/tests")
|
||||
|
||||
|
||||
# ----------------------------------------------------------
|
||||
# LOG ENCRYPTION
|
||||
# ----------------------------------------------------------
|
||||
|
||||
def encrypt_logs(name):
    """Zip build/logs into <name>.zip, encrypted with BUILD_LOG_ZIP_PASSWORD.

    NOTE(review): the "12345" fallback means logs are only nominally
    protected when the secret is unset — confirm that is acceptable.
    """
    import shlex  # local import: only needed here

    pwd = getenv("BUILD_LOG_ZIP_PASSWORD", "12345")
    # BUG FIX: quote the password so shell metacharacters (quotes, $, ;, spaces)
    # cannot break the command line or inject extra commands.
    run(
        f"cd build/logs && zip -e -P {shlex.quote(pwd)} ../../{name}.zip *"
    )
|
||||
|
||||
|
||||
# ----------------------------------------------------------
|
||||
# RELEASE NOTES
|
||||
# ----------------------------------------------------------
|
||||
def release_notes(tag):
    """Generate release notes for *tag* via the sibling script.

    BUG FIX: run() executes with cwd=ROOT, but this script lives in
    scripts/ci/ — a bare filename would not be found from ROOT, so address
    the sibling script by its absolute path.
    """
    script = Path(__file__).resolve().parent / "generate_release_notes.py"
    run(f"python3 {script} {tag}")
|
||||
|
||||
|
||||
# ----------------------------------------------------------
|
||||
# PUBLISH SOURCE.JSON
|
||||
# ----------------------------------------------------------
|
||||
def publish_apps(release_tag, short_commit):
    """Regenerate source metadata and push the updated source.json to the
    Dependencies/apps-v2.json sub-repository.

    NOTE(review): both helper scripts declare more required arguments than
    are passed here (generate_source_metadata.py requires --repo-root,
    --ipa, ...; update_source_metadata.py takes <metadata.json>
    <source.json>) — confirm the intended invocations.
    """
    repo = ROOT / "Dependencies/apps-v2.json"

    if not repo.exists():
        raise SystemExit("Dependencies/apps-v2.json repo missing")

    # Address sibling scripts by absolute path: run() executes from ROOT,
    # not from scripts/ci/ where these scripts actually live.
    script_dir = Path(__file__).resolve().parent

    # generate metadata + release notes
    run(
        f"python3 {script_dir / 'generate_source_metadata.py'} "
        f"--release-tag {release_tag} "
        f"--short-commit {short_commit}"
    )

    # BUG FIX: each run() call spawns its own shell, so the previous bare
    # `pushd`/`popd` pair could never change the cwd of later run() calls —
    # every git command was actually executing at ROOT.  Prefix each step
    # with an explicit `cd` into the sub-repo instead.
    sub = "cd Dependencies/apps-v2.json && "

    run(sub + "git config user.name 'GitHub Actions'", check=False)
    run(sub + "git config user.email 'github-actions@github.com'", check=False)

    run(sub + f"python3 {script_dir / 'update_source_metadata.py'} './_includes/source.json'")

    run(sub + "git add --verbose ./_includes/source.json", check=False)
    run(sub + f"git commit -m ' - updated for {short_commit} deployment' || true", check=False)
    run(sub + "git push --verbose", check=False)
|
||||
|
||||
# ----------------------------------------------------------
|
||||
# ENTRYPOINT
|
||||
# ----------------------------------------------------------
|
||||
# Maps CLI command name -> (handler, required positional arg count, arg usage).
COMMANDS = {
    "commit-id"          : (short_commit, 0, ""),
    # BUG FIX: "commid-id" was a typo for "commit-id"; the misspelling is
    # kept as a backward-compatible alias for existing workflow scripts.
    "commid-id"          : (short_commit, 0, ""),
    "bump-beta"          : (bump_beta, 0, ""),
    "version"            : (extract_version, 0, ""),
    "clean"              : (clean, 0, ""),
    "clean-derived-data" : (clean_derived_data, 0, ""),
    "clean-spm-cache"    : (clean_spm_cache, 0, ""),
    "build"              : (build, 0, ""),
    "tests-build"        : (tests_build, 0, ""),
    "tests-run"          : (tests_run, 0, ""),
    "encrypt-build"      : (lambda: encrypt_logs("encrypted-build-logs"), 0, ""),
    "encrypt-tests-build": (lambda: encrypt_logs("encrypted-tests-build-logs"), 0, ""),
    "encrypt-tests-run"  : (lambda: encrypt_logs("encrypted-tests-run-logs"), 0, ""),
    "release-notes"      : (release_notes, 1, "<tag>"),
    "deploy"             : (publish_apps, 2, "<release_tag> <short_commit>"),
}
|
||||
|
||||
def main():
    """Dispatch the first CLI argument to its handler from COMMANDS."""

    def usage():
        entries = ["Available commands:"]
        for name, (_, _argc, arg_usage) in COMMANDS.items():
            hint = f" {arg_usage}" if arg_usage else ""
            entries.append(f" - {name}{hint}")
        return "\n".join(entries)

    if len(sys.argv) < 2:
        raise SystemExit(usage())

    cmd = sys.argv[1]

    if cmd not in COMMANDS:
        raise SystemExit(
            f"Unknown command '{cmd}'.\n\n{usage()}"
        )

    func, argc, arg_usage = COMMANDS[cmd]

    if len(sys.argv) - 2 < argc:
        suffix = f" {arg_usage}" if arg_usage else ""
        raise SystemExit(f"Usage: workflow.py {cmd}{suffix}")

    if argc:
        func(*sys.argv[2:2 + argc])
    else:
        func()
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
Reference in New Issue
Block a user