mirror of https://github.com/SideStore/SideStore.git (synced 2026-02-08 22:33:26 +01:00)
- CI: replaced irgaly/xcode-cache with gh-actions-cache
- CI: serialization bug fix - reverted the concurrency lock to workflow level instead of job level, since job-level locks allowed out-of-order execution and the wrong cache to be restored in dependent jobs
- CI: boot the simulator asynchronously instead of in a blocking step
- CI: integrated publishing of release notes
- CI: moved posting to the beta-build-num repo into the build job instead of deploy, because we want to roll the beta build number for each success of the build job even if the workflow as a whole failed
63 .github/maintenance/cache.py vendored Normal file
@@ -0,0 +1,63 @@
import requests
import sys
import os

# Your GitHub Personal Access Token
GITHUB_TOKEN = os.getenv("GITHUB_TOKEN")

# Repository details
REPO_OWNER = "SideStore"
REPO_NAME = "SideStore"


API_URL = f"https://api.github.com/repos/{REPO_OWNER}/{REPO_NAME}/actions/caches"

# Common headers for GitHub API calls
HEADERS = {
    "Accept": "application/vnd.github+json",
    "Authorization": f"Bearer {GITHUB_TOKEN}"
}

def list_caches():
    response = requests.get(API_URL, headers=HEADERS)
    if response.status_code != 200:
        print(f"Failed to list caches. HTTP {response.status_code}")
        print("Response:", response.text)
        sys.exit(1)
    data = response.json()
    return data.get("actions_caches", [])

def delete_cache(cache_id):
    delete_url = f"{API_URL}/{cache_id}"
    response = requests.delete(delete_url, headers=HEADERS)
    return response.status_code

def main():
    caches = list_caches()
    if not caches:
        print("No caches found.")
        return

    print("Found caches:")
    for cache in caches:
        print(f"ID: {cache.get('id')}, Key: {cache.get('key')}")

    print("\nDeleting caches...")
    for cache in caches:
        cache_id = cache.get("id")
        status = delete_cache(cache_id)
        if status == 204:
            print(f"Successfully deleted cache with ID: {cache_id}")
        else:
            print(f"Failed to delete cache with ID: {cache_id}. HTTP status code: {status}")

    print("All caches processed.")

if __name__ == "__main__":
    main()

### How to use
'''
Export GITHUB_TOKEN and then run this script via `python3 cache.py` to delete the caches.
'''
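Note that the caches endpoint is paginated (30 entries per page by default), so `list_caches` above only sees the first page on repositories with many caches. A minimal pagination-aware sketch, reusing the script's own API_URL and HEADERS; `list_all_caches` is a hypothetical extension, not part of the committed file, though `per_page`/`page` are the standard GitHub REST pagination parameters:

import requests

def list_all_caches():
    """Collect caches across all pages; GitHub caps per_page at 100."""
    caches, page = [], 1
    while True:
        response = requests.get(
            API_URL,
            headers=HEADERS,
            params={"per_page": 100, "page": page},
        )
        response.raise_for_status()
        batch = response.json().get("actions_caches", [])
        if not batch:
            break  # no more pages
        caches.extend(batch)
        page += 1
    return caches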
1 .github/workflows/alpha.yml vendored
@@ -11,6 +11,7 @@ concurrency:

jobs:
  Reuseable-build:
    # uses: ./.github/workflows/reuseable-workflows/reusable-build-workflow.yml
    uses: ./.github/workflows/reusable-build-workflow.yml
    with:
      # bundle_id: "com.SideStore.SideStore.Alpha"
7 .github/workflows/nightly.yml vendored
@@ -1,8 +1,8 @@
name: Nightly SideStore Build
on:
  push:
    branches:
      - develop
  # push:
  #   branches:
  #     - develop
  schedule:
    - cron: '0 0 * * *' # Runs every night at midnight UTC
  workflow_dispatch: # Allows manual trigger
@@ -64,6 +64,7 @@ jobs:
      (github.event_name == 'push' ||
      (github.event_name == 'schedule' && needs.check-changes.result == 'success' && needs.check-changes.outputs.has_changes == 'true'))
    needs: check-changes
    # uses: ./.github/workflows/reuseable-workflows/reusable-build-workflow.yml
    uses: ./.github/workflows/reusable-build-workflow.yml
    with:
      # bundle_id: "com.SideStore.SideStore.Nightly"
300 .github/workflows/reusable-build-workflow.yml vendored
@@ -47,13 +47,15 @@ on:
jobs:
  serialize:
    name: Wait for other jobs
    concurrency:
      group: build-number-increment # serialize for build num cache access
    # since build cache, test-build cache, test-run cache are involved, out of order exec if serialization is on individual jobs will wreak all sorts of havoc
    # so we serialize on the entire workflow
    concurrency:
      group: serialize-workflow
    strategy:
      fail-fast: false
    runs-on: 'macos-15'
    steps:
      - run: echo "No other contending jobs are running now...Build is ready to start"
      - run: echo "No other contending jobs are running now..."
      - name: Set short commit hash
        id: commit-id
        run: |
@@ -72,7 +74,7 @@ jobs:
      matrix:
        include:
          - os: 'macos-15'
            version: '16.1'
            version: '16.2'

    runs-on: ${{ matrix.os }}
    outputs:
@@ -88,6 +90,7 @@
        uses: actions/checkout@v4
        with:
          submodules: recursive
          fetch-depth: 0

      - name: Install dependencies - ldid & xcbeautify
        run: |
@@ -174,14 +177,32 @@
        with:
          xcode-version: ${{ matrix.version }}

      - name: (Build) Cache Build
        uses: irgaly/xcode-cache@v1
      - name: (Build) Restore Xcode & SwiftPM Cache (Exact match)
        id: xcode-cache-restore
        uses: actions/cache/restore@v3
        with:
          key: xcode-cache-deriveddata-build-${{ github.sha }}
          restore-keys: xcode-cache-deriveddata-build-
          swiftpm-cache-key: xcode-cache-sourcedata-build-${{ github.sha }}
          swiftpm-cache-restore-keys: |
            xcode-cache-sourcedata-build-
          path: |
            ~/Library/Developer/Xcode/DerivedData
            ~/Library/Caches/org.swift.swiftpm
          key: xcode-cache-build-${{ github.ref_name }}-${{ github.sha }}

      - name: (Build) Restore Xcode & SwiftPM Cache (Last Available)
        id: xcode-cache-restore-recent
        uses: actions/cache/restore@v3
        with:
          path: |
            ~/Library/Developer/Xcode/DerivedData
            ~/Library/Caches/org.swift.swiftpm
          key: xcode-cache-build-${{ github.ref_name }}-

      # - name: (Build) Cache Build
      #   uses: irgaly/xcode-cache@v1.8.1
      #   with:
      #     key: xcode-cache-deriveddata-build-${{ github.ref_name }}-${{ github.sha }}
      #     restore-keys: xcode-cache-deriveddata-build-${{ github.ref_name }}-
      #     swiftpm-cache-key: xcode-cache-sourcedata-build-${{ github.ref_name }}-${{ github.sha }}
      #     swiftpm-cache-restore-keys: |
      #       xcode-cache-sourcedata-build-${{ github.ref_name }}-

      - name: (Build) Restore Pods from Cache (Exact match)
        id: pods-restore
@@ -191,7 +212,7 @@
            ./Podfile.lock
            ./Pods/
            ./AltStore.xcworkspace/
          key: pods-cache-build-${{ hashFiles('Podfile') }}
          key: pods-cache-build-${{ github.ref_name }}-${{ hashFiles('Podfile') }}
          # restore-keys: | # commented out to strictly check cache for this particular podfile
          #   pods-cache-

@@ -204,7 +225,7 @@
            ./Podfile.lock
            ./Pods/
            ./AltStore.xcworkspace/
          key: pods-cache-build-
          key: pods-cache-build-${{ github.ref_name }}-


      - name: (Build) Install CocoaPods
@@ -219,7 +240,7 @@
            ./Podfile.lock
            ./Pods/
            ./AltStore.xcworkspace/
          key: pods-cache-build-${{ hashFiles('Podfile') }}
          key: pods-cache-build-${{ github.ref_name }}-${{ hashFiles('Podfile') }}

      - name: (Build) Clean previous build artifacts
        # using 'tee' to intercept stdout and log for detailed build-log
@@ -266,6 +287,16 @@
      - name: Convert to IPA
        run: make ipa | tee -a build/logs/build.log

      - name: (Build) Save Xcode & SwiftPM Cache
        id: cache-save
        if: ${{ steps.xcode-cache-restore.outputs.cache-hit != 'true' }}
        uses: actions/cache/save@v3
        with:
          path: |
            ~/Library/Developer/Xcode/DerivedData
            ~/Library/Caches/org.swift.swiftpm
          key: xcode-cache-build-${{ github.ref_name }}-${{ github.sha }}

      - name: (Build) List Files and Build artifacts
        run: |
          echo ">>>>>>>>> Workdir <<<<<<<<<<"
@@ -284,6 +315,10 @@
          find SideStore.xcarchive -maxdepth 3 -exec ls -ld {} + || true # List contents if directory exists
          echo ""

          echo ">>>>>>>>> Xcode-Derived-Data <<<<<<<<<<"
          ls -la ~/Library/Developer/Xcode/DerivedData || true # List contents if directory exists
          echo ""

      - name: Encrypt build-logs for upload
        id: encrypt-build-log
        run: |
@@ -322,19 +357,69 @@
          name: SideStore-${{ steps.version.outputs.version }}-dSYMs.zip
          path: SideStore.dSYMs.zip

      - name: Zip beta-build-num & update_apps.py
        run: |
          zip -r -9 ./beta-build-num.zip ./SideStore/beta-build-num update_apps.py

      - name: Upload beta-build-num artifact
      - name: Keep rolling the build numbers for each successful build
        if: ${{ inputs.is_beta }}
        run: |
          pushd SideStore/beta-build-num/

          echo "Configure Git user (committer details)"
          git config user.name "GitHub Actions"
          git config user.email "github-actions@github.com"

          echo "Adding files to commit"
          git add --verbose build_number.txt
          git commit -m " - updated for ${{ inputs.release_tag }} - ${{ needs.serialize.outputs.short-commit }} deployment" || echo "No changes to commit"

          echo "Pushing to remote repo"
          git push --verbose
          popd

      - name: Get last successful commit
        id: get_last_commit
        run: |
          # Try to get the last successful workflow run commit
          LAST_SUCCESS_SHA=$(gh run list --branch "${{ github.ref_name }}" --status success --json headSha --jq '.[0].headSha')
          echo "LAST_SUCCESS_SHA=$LAST_SUCCESS_SHA" >> $GITHUB_OUTPUT
          echo "LAST_SUCCESS_SHA=$LAST_SUCCESS_SHA" >> $GITHUB_ENV
        env:
          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}

      - name: Create release notes
        run: |
          LAST_SUCCESS_SHA=${{ steps.get_last_commit.outputs.LAST_SUCCESS_SHA }}
          echo "Last successful commit SHA: $LAST_SUCCESS_SHA"

          FROM_COMMIT=$LAST_SUCCESS_SHA
          # Check if we got a valid SHA
          if [ -z "$LAST_SUCCESS_SHA" ] || [ "$LAST_SUCCESS_SHA" = "null" ]; then
            echo "No successful run found, using initial commit of branch"
            # Get the first commit of the branch (initial commit)
            FROM_COMMIT=$(git rev-list --max-parents=0 HEAD)
          fi

          python3 update_release_notes.py $FROM_COMMIT ${{ inputs.release_tag }} ${{ github.ref_name }}
          # cat release-notes.md

      - name: Upload release-notes.md
        uses: actions/upload-artifact@v4
        with:
          name: beta-build-num-${{ steps.version.outputs.version }}.zip
          path: beta-build-num.zip
          name: release-notes-${{ needs.serialize.outputs.short-commit }}.md
          path: release-notes.md

      - name: Upload update_release_notes.py
        uses: actions/upload-artifact@v4
        with:
          name: update_release_notes-${{ needs.serialize.outputs.short-commit }}.py
          path: update_release_notes.py

      - name: Upload update_apps.py
        uses: actions/upload-artifact@v4
        with:
          name: update_apps-${{ needs.serialize.outputs.short-commit }}.py
          path: update_apps.py

  tests-build:
    if: ${{ vars.ENABLE_TESTS == '1' && vars.ENABLE_TESTS_BUILD == '1' }}
    name: Tests-Build SideStore - ${{ inputs.release_tag }}
    needs: serialize
    strategy:
@@ -342,7 +427,7 @@
      matrix:
        include:
          - os: 'macos-15'
            version: '16.1'
            version: '16.2'
    runs-on: ${{ matrix.os }}

    steps:
@@ -358,16 +443,36 @@
      - name: Setup Xcode
        uses: maxim-lobanov/setup-xcode@v1.6.0
        with:
          xcode-version: '16.1'
          xcode-version: '16.2'

      - name: (Tests-Build) Cache Build
        uses: irgaly/xcode-cache@v1
      # - name: (Tests-Build) Cache Build
      #   uses: irgaly/xcode-cache@v1.8.1
      #   with:
      #     key: xcode-cache-deriveddata-test-${{ github.ref_name }}-${{ github.sha }}
      #     # tests shouldn't restore cache unless it is same build
      #     # restore-keys: xcode-cache-deriveddata-test-${{ github.ref_name }}-
      #     swiftpm-cache-key: xcode-cache-sourcedata-test-${{ github.ref_name }}-${{ github.sha }}
      #     swiftpm-cache-restore-keys: |
      #       xcode-cache-sourcedata-test-${{ github.ref_name }}-
      #     delete-used-deriveddata-cache: true

      - name: (Tests-Build) Restore Xcode & SwiftPM Cache (Exact match)
        id: xcode-cache-restore
        uses: actions/cache/restore@v3
        with:
          key: xcode-cache-deriveddata-test-${{ github.sha }}
          restore-keys: xcode-cache-deriveddata-test-
          swiftpm-cache-key: xcode-cache-sourcedata-test-${{ github.sha }}
          swiftpm-cache-restore-keys: |
            xcode-cache-sourcedata-test-
          path: |
            ~/Library/Developer/Xcode/DerivedData
            ~/Library/Caches/org.swift.swiftpm
          key: xcode-cache-tests-${{ github.ref_name }}-${{ github.sha }}

      - name: (Tests-Build) Restore Xcode & SwiftPM Cache (Last Available)
        id: xcode-cache-restore-recent
        uses: actions/cache/restore@v3
        with:
          path: |
            ~/Library/Developer/Xcode/DerivedData
            ~/Library/Caches/org.swift.swiftpm
          key: xcode-cache-tests-${{ github.ref_name }}-

      - name: (Tests-Build) Restore Pods from Cache (Exact match)
        id: pods-restore
@@ -377,7 +482,7 @@
            ./Podfile.lock
            ./Pods/
            ./AltStore.xcworkspace/
          key: pods-cache-test-${{ hashFiles('Podfile') }}
          key: pods-cache-test-${{ github.ref_name }}-${{ hashFiles('Podfile') }}

      - name: (Tests-Build) Restore Pods from Cache (Last Available)
        if: ${{ steps.pods-restore.outputs.cache-hit != 'true' }}
@@ -388,7 +493,7 @@
            ./Podfile.lock
            ./Pods/
            ./AltStore.xcworkspace/
          key: pods-cache-test-
          key: pods-cache-test-${{ github.ref_name }}-

      - name: (Tests-Build) Install CocoaPods
        run: pod install
@@ -401,8 +506,15 @@
            ./Podfile.lock
            ./Pods/
            ./AltStore.xcworkspace/
          key: pods-cache-test-${{ hashFiles('Podfile') }}
          key: pods-cache-test-${{ github.ref_name }}-${{ hashFiles('Podfile') }}

      - name: Clean Derived Data (if required)
        if: ${{ vars.PERFORM_CLEAN_TESTS_BUILD == '1' }}
        run: |
          rm -rf ~/Library/Developer/Xcode/DerivedData/
          make clean
          xcodebuild clean

      - name: (Tests-Build) Clean previous build artifacts
        run: |
          make clean
@@ -435,6 +547,16 @@
        run: |
          NSUnbufferedIO=YES make -B build-tests 2>&1 | tee -a build/logs/tests-build.log | xcbeautify --renderer github-actions && exit ${PIPESTATUS[0]}

      - name: (Tests-Build) Save Xcode & SwiftPM Cache
        id: cache-save
        if: ${{ steps.xcode-cache-restore.outputs.cache-hit != 'true' }}
        uses: actions/cache/save@v3
        with:
          path: |
            ~/Library/Developer/Xcode/DerivedData
            ~/Library/Caches/org.swift.swiftpm
          key: xcode-cache-tests-${{ github.ref_name }}-${{ github.sha }}

      - name: (Tests-Build) List Files and Build artifacts
        if: always()
        run: |
@@ -446,6 +568,16 @@
          find build -maxdepth 3 -exec ls -ld {} + || true # List contents if directory exists
          echo ""

          echo ">>>>>>>>> Xcode-Derived-Data <<<<<<<<<<"
          find ~/Library/Developer/Xcode/DerivedData -maxdepth 8 -exec ls -ld {} + | grep "Build/Products" >> tests-build-deriveddata.txt || true
          echo ""

      - uses: actions/upload-artifact@v4
        if: always()
        with:
          name: tests-build-deriveddata-${{ needs.serialize.outputs.short-commit }}.txt
          path: tests-build-deriveddata.txt

      - name: Encrypt tests-build-logs for upload
        id: encrypt-test-log
        if: always()
@@ -471,6 +603,7 @@
          path: encrypted-tests-build-logs.zip

  tests-run:
    if: ${{ vars.ENABLE_TESTS == '1' && vars.ENABLE_TESTS_RUN == '1' }}
    name: Tests-Run SideStore - ${{ inputs.release_tag }}
    needs: [serialize, tests-build]
    strategy:
@@ -478,7 +611,7 @@
      matrix:
        include:
          - os: 'macos-15'
            version: '16.1'
            version: '16.2'
    runs-on: ${{ matrix.os }}

    steps:
@@ -487,25 +620,31 @@
        with:
          submodules: recursive

      - name: Boot Simulator for testing
      - name: Boot Simulator async(nohup) for testing
        run: |
          mkdir -p build/logs
          make -B boot-sim-async | tee -a build/logs/tests-run.log
          exit ${PIPESTATUS[0]}
          nohup make -B boot-sim-async </dev/null >> build/logs/tests-run.log 2>&1 &

      - name: Setup Xcode
        uses: maxim-lobanov/setup-xcode@v1.6.0
        with:
          xcode-version: '16.1'
          xcode-version: '16.2'

      - name: (Tests-Run) Cache Build
        uses: irgaly/xcode-cache@v1
      # - name: (Tests-Run) Cache Build
      #   uses: irgaly/xcode-cache@v1.8.1
      #   with:
      #     # This comes from
      #     key: xcode-cache-deriveddata-test-${{ github.ref_name }}-${{ github.sha }}
      #     swiftpm-cache-key: xcode-cache-sourcedata-test-${{ github.ref_name }}-${{ github.sha }}

      - name: (Tests-Build) Restore Xcode & SwiftPM Cache (Exact match) [from tests-build job]
        id: xcode-cache-restore
        uses: actions/cache/restore@v3
        with:
          key: xcode-cache-deriveddata-test-${{ github.sha }}
          restore-keys: xcode-cache-deriveddata-test-
          swiftpm-cache-key: xcode-cache-sourcedata-test-${{ github.sha }}
          swiftpm-cache-restore-keys: |
            xcode-cache-sourcedata-test-
          path: |
            ~/Library/Developer/Xcode/DerivedData
            ~/Library/Caches/org.swift.swiftpm
          key: xcode-cache-tests-${{ github.ref_name }}-${{ github.sha }}

      - name: (Tests-Run) Restore Pods from Cache (Exact match)
        id: pods-restore
@@ -515,7 +654,7 @@
            ./Podfile.lock
            ./Pods/
            ./AltStore.xcworkspace/
          key: pods-cache-test-${{ hashFiles('Podfile') }}
          key: pods-cache-test-${{ github.ref_name }}-${{ hashFiles('Podfile') }}

      - name: (Tests-Run) Restore Pods from Cache (Last Available)
        if: ${{ steps.pods-restore.outputs.cache-hit != 'true' }}
@@ -526,7 +665,7 @@
            ./Podfile.lock
            ./Pods/
            ./AltStore.xcworkspace/
          key: pods-cache-test-
          key: pods-cache-test-${{ github.ref_name }}-

      - name: (Tests-Run) Install CocoaPods
        run: pod install
@@ -539,7 +678,7 @@
            ./Podfile.lock
            ./Pods/
            ./AltStore.xcworkspace/
          key: pods-cache-test-${{ hashFiles('Podfile') }}
          key: pods-cache-test-${{ github.ref_name }}-${{ hashFiles('Podfile') }}

      - name: (Tests-Run) Clean previous build artifacts
        run: |
@@ -565,9 +704,15 @@
          echo ""

          echo ">>>>>>>>> Xcode-Derived-Data <<<<<<<<<<"
          ls -la ~/Library/Developer/Xcode/DerivedData || true # List contents if directory exists
          find ~/Library/Developer/Xcode/DerivedData -maxdepth 8 -exec ls -ld {} + | grep "Build/Products" >> tests-run-deriveddata.txt || true
          echo ""

      - uses: actions/upload-artifact@v4
        if: always()
        with:
          name: tests-run-deriveddata-${{ needs.serialize.outputs.short-commit }}.txt
          path: tests-run-deriveddata.txt

      # we expect simulator to have been booted by now, so exit otherwise
      - name: Simulator Boot Check
        run: |
@@ -689,34 +834,51 @@
          name: encrypted-build-logs-${{ needs.build.outputs.version }}.zip

      - name: Download encrypted-tests-build-logs artifact
        if: ${{ vars.ENABLE_TESTS == '1' && vars.ENABLE_TESTS_BUILD == '1' }}
        uses: actions/download-artifact@v4
        with:
          name: encrypted-tests-build-logs-${{ needs.serialize.outputs.short-commit }}.zip

      - name: Download encrypted-tests-run-logs artifact
        if: ${{ vars.ENABLE_TESTS == '1' && vars.ENABLE_TESTS_RUN == '1' }}
        uses: actions/download-artifact@v4
        with:
          name: encrypted-tests-run-logs-${{ needs.serialize.outputs.short-commit }}.zip

      - name: Download tests-recording artifact
        if: ${{ vars.ENABLE_TESTS == '1' && vars.ENABLE_TESTS_RUN == '1' }}
        uses: actions/download-artifact@v4
        with:
          name: tests-recording-${{ needs.serialize.outputs.short-commit }}.mp4

      - name: Download test-results artifact
        if: ${{ vars.ENABLE_TESTS == '1' && vars.ENABLE_TESTS_RUN == '1' }}
        uses: actions/download-artifact@v4
        with:
          name: test-results-${{ needs.serialize.outputs.short-commit }}.zip

      - name: Download beta-build-num artifact
        if: ${{ inputs.is_beta }}
        uses: actions/download-artifact@v4
        with:
          name: beta-build-num-${{ needs.build.outputs.version }}.zip
      - name: Un-Zip beta-build-num & update_apps.py
        run: |
          unzip beta-build-num.zip -d .
          name: test-results-${{ needs.serialize.outputs.short-commit }}.zip

      - name: Download release-notes.md
        uses: actions/download-artifact@v4
        with:
          name: release-notes-${{ needs.serialize.outputs.short-commit }}.md

      - name: Download update_release_notes.py
        uses: actions/download-artifact@v4
        with:
          name: update_release_notes-${{ needs.serialize.outputs.short-commit }}.py

      - name: Download update_apps.py
        uses: actions/download-artifact@v4
        with:
          name: update_apps-${{ needs.serialize.outputs.short-commit }}.py

      - name: Read release notes
        id: release_notes
        run: |
          CONTENT=$(python3 update_release_notes.py --retrieve ${{ inputs.release_tag }})
          echo "content<<EOF" >> $GITHUB_OUTPUT
          echo "$CONTENT" >> $GITHUB_OUTPUT
          echo "EOF" >> $GITHUB_OUTPUT

      - name: List files before upload
        run: |
@@ -754,22 +916,7 @@
          Commit SHA: `${{ github.sha }}`
          Version: `${{ needs.build.outputs.version }}`

      - name: Publish to SideStore/beta-build-num
        if: ${{ inputs.is_beta }}
        run: |
          pushd SideStore/beta-build-num/

          echo "Configure Git user (committer details)"
          git config user.name "GitHub Actions"
          git config user.email "github-actions@github.com"

          echo "Adding files to commit"
          git add --verbose build_number.txt
          git commit -m " - updated for ${{ inputs.release_tag }} - ${{ needs.serialize.outputs.short-commit }} deployment" || echo "No changes to commit"

          echo "Pushing to remote repo"
          git push --verbose
          popd
          ${{ steps.release_notes.outputs.content }}

      - name: Get formatted date
        run: |
@@ -803,6 +950,9 @@
          - version: "${{ needs.build.outputs.version }}"
          - revision: "${{ needs.serialize.outputs.short-commit }}"
          - timestamp: "${{ steps.date.outputs.date }}"

          Release Notes:
          ${{ steps.release_notes.outputs.content }}
          EOF
          )

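The "Get last successful commit" and "Create release notes" steps above together pick a changelog window: the head SHA of the last green run on the branch, falling back to the repository's root commit when no such run exists. A minimal Python sketch of the same selection logic, assuming gh and git are on PATH and a token is available to gh; changelog_base is a hypothetical helper name, not part of the workflow:

import subprocess

def changelog_base(branch: str) -> str:
    """Return the commit to diff release notes from: the head SHA of the
    last successful run on `branch`, else the repo's root commit."""
    sha = subprocess.run(
        ["gh", "run", "list", "--branch", branch, "--status", "success",
         "--json", "headSha", "--jq", ".[0].headSha"],
        capture_output=True, text=True,
    ).stdout.strip()
    if not sha or sha == "null":
        # No successful run yet: fall back to the first commit of the repo
        sha = subprocess.check_output(
            ["git", "rev-list", "--max-parents=0", "HEAD"], text=True
        ).strip()
    return sha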
337 update_release_notes.py Normal file
@@ -0,0 +1,337 @@
#!/usr/bin/env python3
import subprocess
import sys
import os
import re

IGNORED_AUTHORS = [

]

TAG_MARKER = "###"
HEADER_MARKER = "####"

def run_command(cmd):
    """Run a shell command and return its trimmed output."""
    return subprocess.check_output(cmd, shell=True, text=True).strip()

def get_head_commit():
    """Return the HEAD commit SHA."""
    return run_command("git rev-parse HEAD")

def get_commit_messages(last_successful, current="HEAD"):
    """Return a list of commit messages between last_successful and current."""
    cmd = f"git log {last_successful}..{current} --pretty=format:%s"
    output = run_command(cmd)
    if not output:
        return []
    return output.splitlines()

def get_authors_in_range(commit_range, fmt="%an"):
    """Return a set of commit authors in the given commit range using the given format."""
    cmd = f"git log {commit_range} --pretty=format:{fmt}"
    output = run_command(cmd)
    if not output:
        return set()
    authors = set(line.strip() for line in output.splitlines() if line.strip())
    authors = set(authors) - set(IGNORED_AUTHORS)
    return authors

def get_first_commit_of_repo():
    """Return the first commit in the repository (root commit)."""
    cmd = "git rev-list --max-parents=0 HEAD"
    output = run_command(cmd)
    return output.splitlines()[0]

def get_branch():
    """
    Attempt to determine the branch base (the commit where the current branch diverged
    from the default remote branch). Falls back to the repo's first commit.
    """
    try:
        default_ref = run_command("git rev-parse --abbrev-ref origin/HEAD")
        default_branch = default_ref.split('/')[-1]
        base_commit = run_command(f"git merge-base HEAD origin/{default_branch}")
        return base_commit
    except Exception:
        return get_first_commit_of_repo()

def get_repo_url():
    """Extract and clean the repository URL from the remote 'origin'."""
    url = run_command("git config --get remote.origin.url")
    if url.startswith("git@"):
        url = url.replace("git@", "https://").replace(":", "/")
    if url.endswith(".git"):
        url = url[:-4]
    return url

def format_contributor(author):
    """
    Convert an author name to a GitHub username or first name.
    If the author already starts with '@', return it;
    otherwise, take the first token and prepend '@'.
    """
    if author.startswith('@'):
        return author
    return f"@{author.split()[0]}"

def format_commit_message(msg):
    """Format a commit message as a bullet point for the release notes."""
    msg_clean = msg.lstrip()  # remove leading spaces
    if msg_clean.startswith("-"):
        msg_clean = msg_clean[1:].strip()  # remove leading '-' and spaces
    return f"- {msg_clean}"

def generate_release_notes(last_successful, tag, branch):
    """Generate release notes for the given tag."""
    current_commit = get_head_commit()
    messages = get_commit_messages(last_successful, current_commit)

    # Start with the tag header
    new_section = f"{TAG_MARKER} {tag}\n"

    # What's Changed section (always present)
    new_section += f"{HEADER_MARKER} What's Changed\n"

    if not messages or last_successful == current_commit:
        new_section += "- Nothing...\n"
    else:
        for msg in messages:
            new_section += f"{format_commit_message(msg)}\n"

    # New Contributors section (only if there are new contributors)
    all_previous_authors = get_authors_in_range(f"{branch}")
    recent_authors = get_authors_in_range(f"{last_successful}..{current_commit}")
    new_contributors = recent_authors - all_previous_authors

    if new_contributors:
        new_section += f"\n{HEADER_MARKER} New Contributors\n"
        for author in sorted(new_contributors):
            new_section += f"- {format_contributor(author)} made their first contribution\n"

    # Full Changelog section (only if there are changes)
    if messages and last_successful != current_commit:
        repo_url = get_repo_url()
        changelog_link = f"{repo_url}/compare/{last_successful}...{current_commit}"
        new_section += f"\n{HEADER_MARKER} Full Changelog: [{last_successful[:8]}...{current_commit[:8]}]({changelog_link})\n"

    return new_section

def update_release_md(existing_content, new_section, tag):
    """
    Update input based on rules:
    1. If tag exists, update it
    2. Special tags (alpha, beta, nightly) stay at the top in that order
    3. Numbered tags follow special tags
    4. Remove duplicate tags
    5. Insert new numbered tags at the top of the numbered section
    """
    tag_lower = tag.lower()
    is_special_tag = tag_lower in ["alpha", "beta", "nightly"]

    # Parse the existing content into sections
    if not existing_content:
        return new_section

    # Split the content into sections by headers
    pattern = fr'(^{TAG_MARKER} .*$)'
    sections = re.split(pattern, existing_content, flags=re.MULTILINE)

    # Create a list to store the processed content
    processed_sections = []

    # Track special tag positions and whether tag was found
    special_tags_map = {"alpha": False, "beta": False, "nightly": False}
    last_special_index = -1
    tag_found = False
    numbered_tag_index = -1

    i = 0
    while i < len(sections):
        # Check if this is a header
        if i % 2 == 1:  # Headers are at odd indices
            header = sections[i]
            content = sections[i+1] if i+1 < len(sections) else ""
            current_tag = header[3:].strip().lower()

            # Check for special tags to track their positions
            if current_tag in special_tags_map:
                special_tags_map[current_tag] = True
                last_special_index = len(processed_sections)

            # Check if this is the first numbered tag
            elif re.match(r'^[0-9]+\.[0-9]+(\.[0-9]+)?$', current_tag) and numbered_tag_index == -1:
                numbered_tag_index = len(processed_sections)

            # If this is the tag we're updating, mark it but don't add yet
            if current_tag == tag_lower:
                if not tag_found:  # Replace the first occurrence
                    tag_found = True
                    i += 2  # Skip the content
                    continue
                else:  # Skip duplicate occurrences
                    i += 2
                    continue

        # Add the current section
        processed_sections.append(sections[i])
        i += 1

    # Determine where to insert the new section
    if tag_found:
        # We need to determine the insertion point
        if is_special_tag:
            # For special tags, insert after last special tag or at beginning
            desired_index = -1
            for pos, t in enumerate(["alpha", "beta", "nightly"]):
                if t == tag_lower:
                    desired_index = pos

            # Find position to insert
            insert_pos = 0
            for pos, t in enumerate(["alpha", "beta", "nightly"]):
                if t == tag_lower:
                    break
                if special_tags_map[t]:
                    insert_pos = processed_sections.index(f"{TAG_MARKER} {t}")
                    insert_pos += 2  # Move past the header and content

            # Insert at the determined position
            processed_sections.insert(insert_pos, new_section)
            if insert_pos > 0 and not processed_sections[insert_pos-1].endswith('\n\n'):
                processed_sections.insert(insert_pos, '\n\n')
        else:
            # For numbered tags, insert after special tags but before other numbered tags
            insert_pos = 0

            if last_special_index >= 0:
                # Insert after the last special tag
                insert_pos = last_special_index + 2  # +2 to skip header and content

            processed_sections.insert(insert_pos, new_section)
            if insert_pos > 0 and not processed_sections[insert_pos-1].endswith('\n\n'):
                processed_sections.insert(insert_pos, '\n\n')
    else:
        # Tag doesn't exist yet, determine insertion point
        if is_special_tag:
            # For special tags, maintain alpha, beta, nightly order
            special_tags = ["alpha", "beta", "nightly"]
            insert_pos = 0

            for i, t in enumerate(special_tags):
                if t == tag_lower:
                    # Check if preceding special tags exist
                    for prev_tag in special_tags[:i]:
                        if special_tags_map[prev_tag]:
                            # Find the position after this tag
                            prev_index = processed_sections.index(f"{TAG_MARKER} {prev_tag}")
                            insert_pos = prev_index + 2  # Skip header and content

            processed_sections.insert(insert_pos, new_section)
            if insert_pos > 0 and not processed_sections[insert_pos-1].endswith('\n\n'):
                processed_sections.insert(insert_pos, '\n\n')
        else:
            # For numbered tags, insert after special tags but before other numbered tags
            insert_pos = 0

            if last_special_index >= 0:
                # Insert after the last special tag
                insert_pos = last_special_index + 2  # +2 to skip header and content

            processed_sections.insert(insert_pos, new_section)
            if insert_pos > 0 and not processed_sections[insert_pos-1].endswith('\n\n'):
                processed_sections.insert(insert_pos, '\n\n')

    # Combine sections ensuring proper spacing
    result = ""
    for i, section in enumerate(processed_sections):
        if i > 0 and section.startswith(f"{TAG_MARKER} "):
            # Ensure single blank line before headers
            if not result.endswith("\n\n"):
                result = result.rstrip("\n") + "\n\n"
        result += section

    return result.rstrip() + "\n"

def retrieve_tag_content(tag, file_path):
    if not os.path.exists(file_path):
        return ""

    with open(file_path, "r") as f:
        content = f.read()

    # Create a pattern for the tag header (case-insensitive)
    pattern = re.compile(fr'^{TAG_MARKER} ' + re.escape(tag) + r'$', re.MULTILINE | re.IGNORECASE)

    # Find the tag header
    match = pattern.search(content)
    if not match:
        return ""

    # Start after the tag line
    start_pos = match.end()

    # Skip a newline if present
    if start_pos < len(content) and content[start_pos] == "\n":
        start_pos += 1

    # Find the next tag header after the current tag's content
    next_tag_match = re.search(fr'^{TAG_MARKER} ', content[start_pos:], re.MULTILINE)

    if next_tag_match:
        end_pos = start_pos + next_tag_match.start()
        return content[start_pos:end_pos].strip()
    else:
        # Return until the end of the file if this is the last tag
        return content[start_pos:].strip()

def main():
    # Update input file
    release_file = "release-notes.md"

    # Usage: python update_release_notes.py <last_successful_commit> [tag] [branch]
    # Or: python update_release_notes.py --retrieve <tagname>
    args = sys.argv[1:]

    if len(args) < 1:
        print("Usage: python update_release_notes.py <last_successful_commit> [tag] [branch]")
        print("   or: python update_release_notes.py --retrieve <tagname>")
        sys.exit(1)

    # Check if we're retrieving a tag
    if args[0] == "--retrieve":
        if len(args) < 2:
            print("Error: Missing tag name after --retrieve")
            sys.exit(1)

        tag_content = retrieve_tag_content(args[1], file_path=release_file)
        if tag_content:
            print(tag_content)
        else:
            print(f"Tag '{args[1]}' not found in '{release_file}'")
        return

    # Original functionality for generating release notes
    last_successful = args[0]
    tag = args[1] if len(args) > 1 else get_head_commit()
    branch = args[2] if len(args) > 2 else (os.environ.get("GITHUB_REF") or get_branch())

    # Generate release notes
    new_section = generate_release_notes(last_successful, tag, branch)

    existing_content = ""
    if os.path.exists(release_file):
        with open(release_file, "r") as f:
            existing_content = f.read()

    updated_content = update_release_md(existing_content, new_section, tag)

    with open(release_file, "w") as f:
        f.write(updated_content)

    # Output the new section for display
    print(new_section)

if __name__ == "__main__":
    main()
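The ordering rules in update_release_md are easiest to see in a concrete run. A minimal sketch, assuming update_release_notes.py is importable as a module from the working directory; the markdown content below is invented purely for illustration:

from update_release_notes import update_release_md

existing = (
    "### beta\n"
    "#### What's Changed\n"
    "- beta fixes\n"
    "\n"
    "### 0.5.9\n"
    "#### What's Changed\n"
    "- older release\n"
)

# A special tag ("nightly") slots in after "beta" (alpha, beta, nightly order),
# ahead of the numbered 0.5.9 section.
nightly_section = "### nightly\n#### What's Changed\n- nightly build\n"
print(update_release_md(existing, nightly_section, "nightly"))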