Compare commits

..

29 Commits

Author SHA1 Message Date
mahee96
85ff9b09ca ci: more fixes 2026-02-24 10:23:31 +05:30
mahee96
4100e8b1b9 ci: more fixes 2026-02-24 10:10:22 +05:30
mahee96
42fae569ca ci: more fixes 2026-02-24 10:04:22 +05:30
mahee96
aa224f68c7 ci: more fixes 2026-02-24 09:13:09 +05:30
mahee96
a02d1c49e8 ci: more fixes 2026-02-24 08:42:52 +05:30
mahee96
226f0dcc6b CI: improve more ci worflow 2026-02-24 08:19:56 +05:30
mahee96
bce38c8743 CI: improve more ci worflow 2026-02-24 07:41:06 +05:30
mahee96
0e72a33af8 CI: improve more ci worflow 2026-02-24 07:23:20 +05:30
mahee96
0677cc287e CI: improve more ci worflow 2026-02-24 07:21:14 +05:30
mahee96
b0bfbf5513 CI: improve more ci worflow 2026-02-24 07:12:56 +05:30
mahee96
ea86b98674 CI: improve more ci worflow 2026-02-24 07:00:17 +05:30
mahee96
3d47d486ef CI: improve more ci worflow 2026-02-24 06:22:03 +05:30
mahee96
3a05485c40 CI: improve more ci worflow 2026-02-24 05:47:38 +05:30
mahee96
31d07534d0 CI: improve more ci worflow 2026-02-24 05:27:15 +05:30
mahee96
99712f0020 CI: improve more ci worflow 2026-02-24 03:58:47 +05:30
mahee96
c5394be883 CI: improve more ci worflow 2026-02-24 03:53:26 +05:30
mahee96
a07657261d CI: improve more ci worflow 2026-02-24 03:47:15 +05:30
mahee96
db00202b37 CI: improve more ci worflow 2026-02-24 03:33:26 +05:30
mahee96
b16dda5590 CI: improve more ci worflow 2026-02-24 03:28:46 +05:30
mahee96
f8c4c558f6 CI: improve more ci worflow 2026-02-24 03:21:19 +05:30
mahee96
ae1bd49a99 CI: improve more ci worflow 2026-02-24 03:16:26 +05:30
mahee96
97b04094eb CI: improve more ci worflow 2026-02-24 03:13:55 +05:30
mahee96
675bdc63ae CI: improve more ci worflow 2026-02-24 02:59:08 +05:30
mahee96
8be9de3b11 CI: improve more ci worflow 2026-02-24 02:53:51 +05:30
mahee96
0403dc3278 CI: improve more ci worflow 2026-02-24 02:43:02 +05:30
mahee96
c546ff6642 CI: improve more ci worflow 2026-02-24 02:40:34 +05:30
mahee96
dc058938ef altsign updated to latest 2026-02-24 02:40:30 +05:30
mahee96
4984e5119f CI: improve more ci worflow 2026-02-24 02:29:13 +05:30
mahee96
bcadc92057 CI: improve more ci worflow 2026-02-24 02:25:50 +05:30
11 changed files with 973 additions and 311 deletions

View File

@@ -2,7 +2,8 @@ name: Alpha SideStore Build
on:
push:
branches: [develop-alpha]
branches: [alpha]
workflow_dispatch:
concurrency:
group: ${{ github.ref }}
@@ -10,7 +11,11 @@ concurrency:
jobs:
build:
runs-on: macos-15
runs-on: macos-26
env:
RELEASE_NAME: Alpha
CHANNEL: alpha
UPSTREAM_CHANNEL: "nightly"
steps:
- uses: actions/checkout@v4
@@ -20,28 +25,181 @@ jobs:
- run: brew install ldid xcbeautify
- name: Shared
id: shared
run: python3 scripts/ci/workflow.py shared
# --------------------------------------------------
# runtime env setup
# --------------------------------------------------
- uses: actions/checkout@v4
with:
repository: "SideStore/beta-build-num"
ref: ${{ env.CHANNEL }}
token: ${{ secrets.CROSS_REPO_PUSH_KEY }}
path: "Dependencies/beta-build-num"
fetch-depth: 1
- name: Beta bump
env:
RELEASE_CHANNEL: alpha
run: python3 scripts/ci/workflow.py bump-beta
- name: Setup Env
run: |
BUILD_NUM=$(python3 scripts/ci/workflow.py reserve_build_number 'Dependencies/beta-build-num')
MARKETING_VERSION=$(python3 scripts/ci/workflow.py get-marketing-version)
SHORT_COMMIT=$(python3 scripts/ci/workflow.py commid-id)
- name: Version
id: version
run: python3 scripts/ci/workflow.py version
QUALIFIED_VERSION=$(python3 scripts/ci/workflow.py compute-qualified \
"$MARKETING_VERSION" \
"$BUILD_NUM" \
"${{ env.CHANNEL }}" \
"$SHORT_COMMIT")
python3 scripts/ci/workflow.py set-marketing-version "$QUALIFIED_VERSION"
echo "BUILD_NUM=$BUILD_NUM" | tee -a $GITHUB_ENV
echo "SHORT_COMMIT=$SHORT_COMMIT" | tee -a $GITHUB_ENV
echo "MARKETING_VERSION=$QUALIFIED_VERSION" | tee -a $GITHUB_ENV
- name: Setup Xcode
uses: maxim-lobanov/setup-xcode@v1.6.0
with:
xcode-version: "26.2"
- name: Restore Cache (exact)
id: xcode-cache-exact
uses: actions/cache/restore@v3
with:
path: |
~/Library/Developer/Xcode/DerivedData
~/Library/Caches/org.swift.swiftpm
key: xcode-build-cache-${{ github.ref_name }}-${{ github.sha }}
- name: Restore Cache (last)
if: steps.xcode-cache-exact.outputs.cache-hit != 'true'
id: xcode-cache-fallback
uses: actions/cache/restore@v3
with:
path: |
~/Library/Developer/Xcode/DerivedData
~/Library/Caches/org.swift.swiftpm
key: xcode-build-cache-${{ github.ref_name }}-
# --------------------------------------------------
# build and test
# --------------------------------------------------
- name: Clean
if: contains(github.event.head_commit.message, '[--clean-build]')
run: |
python3 scripts/ci/workflow.py clean
python3 scripts/ci/workflow.py clean-derived-data
python3 scripts/ci/workflow.py clean-spm-cache
- name: Boot simulator (async)
if: ${{ vars.ENABLE_TESTS == '1' && vars.ENABLE_TESTS_RUN == '1' }}
run: |
mkdir -p build/logs
python3 scripts/ci/workflow.py boot-sim-async "iPhone 17 Pro"
- name: Build
run: python3 scripts/ci/workflow.py build
- name: Encrypt logs
id: build
env:
BUILD_LOG_ZIP_PASSWORD: ${{ secrets.BUILD_LOG_ZIP_PASSWORD }}
run: python3 scripts/ci/workflow.py encrypt-build
run: |
python3 scripts/ci/workflow.py build; STATUS=$?
python3 scripts/ci/workflow.py encrypt-build
echo "encrypted=true" >> $GITHUB_OUTPUT
exit $STATUS
- name: Tests Build
id: test-build
if: ${{ vars.ENABLE_TESTS == '1' && vars.ENABLE_TESTS_BUILD == '1' }}
env:
BUILD_LOG_ZIP_PASSWORD: ${{ secrets.BUILD_LOG_ZIP_PASSWORD }}
run: |
python3 scripts/ci/workflow.py tests-build; STATUS=$?
python3 scripts/ci/workflow.py encrypt-tests-build
exit $STATUS
- name: Save Cache
if: ${{ steps.xcode-cache-fallback.outputs.cache-hit != 'true' }}
uses: actions/cache/save@v3
with:
path: |
~/Library/Developer/Xcode/DerivedData
~/Library/Caches/org.swift.swiftpm
key: xcode-build-cache-${{ github.ref_name }}-${{ github.sha }}
- name: Tests Run
id: test-run
if: ${{ vars.ENABLE_TESTS == '1' && vars.ENABLE_TESTS_RUN == '1' }}
env:
BUILD_LOG_ZIP_PASSWORD: ${{ secrets.BUILD_LOG_ZIP_PASSWORD }}
run: |
python3 scripts/ci/workflow.py tests-run "iPhone 17 Pro"; STATUS=$?
python3 scripts/ci/workflow.py encrypt-tests-run
exit $STATUS
# --------------------------------------------------
# artifacts
# --------------------------------------------------
- uses: actions/upload-artifact@v4
with:
name: encrypted-build-logs-${{ env.MARKETING_VERSION }}.zip
path: encrypted-build-logs.zip
- uses: actions/upload-artifact@v4
if: ${{ vars.ENABLE_TESTS == '1' && vars.ENABLE_TESTS_BUILD == '1' }}
with:
name: encrypted-tests-build-logs-${{ env.SHORT_COMMIT }}.zip
path: encrypted-tests-build-logs.zip
- uses: actions/upload-artifact@v4
if: ${{ vars.ENABLE_TESTS == '1' && vars.ENABLE_TESTS_RUN == '1' }}
with:
name: encrypted-tests-run-logs-${{ env.SHORT_COMMIT }}.zip
path: encrypted-tests-run-logs.zip
- uses: actions/upload-artifact@v4
with:
name: SideStore-${{ steps.version.outputs.version }}.ipa
path: SideStore.ipa
name: SideStore-${{ env.MARKETING_VERSION }}.ipa
path: SideStore.ipa
- uses: actions/upload-artifact@v4
with:
name: SideStore-${{ env.MARKETING_VERSION }}-dSYMs.zip
path: SideStore.dSYMs.zip
- uses: actions/checkout@v4
with:
repository: "SideStore/apps-v2.json"
ref: "main"
token: ${{ secrets.CROSS_REPO_PUSH_KEY }}
path: "SideStore/apps-v2.json"
# --------------------------------------------------
# deploy
# --------------------------------------------------
- name: Deploy
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
PRODUCT_NAME=$(python3 scripts/ci/workflow.py get-product-name)
BUNDLE_ID=$(python3 scripts/ci/workflow.py get-bundle-id)
SOURCE_JSON="_includes/source.json"
IPA_NAME="$PRODUCT_NAME.ipa"
LAST_SUCCESSFUL_COMMIT=$(python3 scripts/ci/workflow.py last-successful-commit \
"${{ github.workflow }}" "$CHANNEL")
python3 scripts/ci/workflow.py deploy \
SideStore/apps-v2.json \
"$SOURCE_JSON" \
"$CHANNEL" \
"$SHORT_COMMIT" \
"$MARKETING_VERSION" \
"$CHANNEL" \
"$BUNDLE_ID" \
"$IPA_NAME" \
"$LAST_SUCCESSFUL_COMMIT"
RELEASE_NOTES=$(python3 scripts/ci/workflow.py retrieve-release-notes "$CHANNEL")
python3 scripts/ci/workflow.py upload-release \
"$RELEASE_NAME" \
"$CHANNEL" \
"$GITHUB_SHA" \
"$GITHUB_REPOSITORY" \
"$UPSTREAM_CHANNEL"

View File

@@ -4,7 +4,7 @@ on:
push:
branches: [develop]
schedule:
- cron: '0 0 * * *'
- cron: "0 0 * * *"
workflow_dispatch:
concurrency:
@@ -14,6 +14,10 @@ concurrency:
jobs:
build:
runs-on: macos-26
env:
RELEASE_NAME: Nightly
CHANNEL: nightly
UPSTREAM_CHANNEL: ""
steps:
- uses: actions/checkout@v4
@@ -28,13 +32,13 @@ jobs:
# --------------------------------------------------
- uses: actions/checkout@v4
with:
repository: 'SideStore/beta-build-num'
ref: ${{ env.ref }}
repository: "SideStore/beta-build-num"
ref: ${{ env.CHANNEL }}
token: ${{ secrets.CROSS_REPO_PUSH_KEY }}
path: 'Dependencies/beta-build-num'
path: "Dependencies/beta-build-num"
fetch-depth: 1
- name: Setup
- name: Setup Env
run: |
BUILD_NUM=$(python3 scripts/ci/workflow.py reserve_build_number 'Dependencies/beta-build-num')
MARKETING_VERSION=$(python3 scripts/ci/workflow.py get-marketing-version)
@@ -43,31 +47,40 @@ jobs:
QUALIFIED_VERSION=$(python3 scripts/ci/workflow.py compute-qualified \
"$MARKETING_VERSION" \
"$BUILD_NUM" \
"${{ env.ref }}" \
"${{ env.CHANNEL }}" \
"$SHORT_COMMIT")
echo "BUILD_NUM=$BUILD_NUM" >> $GITHUB_ENV
echo "MARKETING_VERSION=$MARKETING_VERSION" >> $GITHUB_ENV
echo "SHORT_COMMIT=$SHORT_COMMIT" >> $GITHUB_ENV
echo "VERSION=$QUALIFIED_VERSION" >> $GITHUB_ENV
python3 scripts/ci/workflow.py set-marketing-version "$QUALIFIED_VERSION"
echo "BUILD_NUM=$BUILD_NUM" | tee -a $GITHUB_ENV
echo "SHORT_COMMIT=$SHORT_COMMIT" | tee -a $GITHUB_ENV
echo "MARKETING_VERSION=$QUALIFIED_VERSION" | tee -a $GITHUB_ENV
- name: Setup Xcode
uses: maxim-lobanov/setup-xcode@v1.6.0
with:
xcode-version: '26.2'
xcode-version: "26.2"
- name: Restore Cache
id: xcode-cache
- name: Restore Cache (exact)
id: xcode-cache-exact
uses: actions/cache/restore@v3
with:
path: |
~/Library/Developer/Xcode/DerivedData
~/Library/Caches/org.swift.swiftpm
key: xcode-build-cache-${{ github.ref_name }}-${{ github.sha }}
restore-keys: |
xcode-build-cache-${{ github.ref_name }}-
# --------------------------------------------------
- name: Restore Cache (last)
if: steps.xcode-cache-exact.outputs.cache-hit != 'true'
id: xcode-cache-fallback
uses: actions/cache/restore@v3
with:
path: |
~/Library/Developer/Xcode/DerivedData
~/Library/Caches/org.swift.swiftpm
key: xcode-build-cache-${{ github.ref_name }}-
# --------------------------------------------------
# build and test
# --------------------------------------------------
- name: Clean
@@ -104,7 +117,7 @@ jobs:
exit $STATUS
- name: Save Cache
if: ${{ steps.xcode-cache.outputs.cache-hit != 'true' }}
if: ${{ steps.xcode-cache-fallback.outputs.cache-hit != 'true' }}
uses: actions/cache/save@v3
with:
path: |
@@ -123,11 +136,11 @@ jobs:
exit $STATUS
# --------------------------------------------------
# artifacts
# artifacts
# --------------------------------------------------
- uses: actions/upload-artifact@v4
with:
name: encrypted-build-logs-${{ env.VERSION }}.zip
name: encrypted-build-logs-${{ env.MARKETING_VERSION }}.zip
path: encrypted-build-logs.zip
- uses: actions/upload-artifact@v4
@@ -144,25 +157,51 @@ jobs:
- uses: actions/upload-artifact@v4
with:
name: SideStore-${{ env.VERSION }}.ipa
name: SideStore-${{ env.MARKETING_VERSION }}.ipa
path: SideStore.ipa
- uses: actions/upload-artifact@v4
with:
name: SideStore-${{ env.VERSION }}-dSYMs.zip
name: SideStore-${{ env.MARKETING_VERSION }}-dSYMs.zip
path: SideStore.dSYMs.zip
- uses: actions/checkout@v4
with:
repository: "SideStore/apps-v2.json"
ref: "main"
token: ${{ secrets.CROSS_REPO_PUSH_KEY }}
path: "SideStore/apps-v2.json"
# --------------------------------------------------
# deploy
# deploy
# --------------------------------------------------
- name: Deploy
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
PRODUCT_NAME=$(python3 scripts/ci/workflow.py get-product-name)
BUNDLE_ID=$(python3 scripts/ci/workflow.py get-bundle-id)
SOURCE_JSON="_includes/source.json"
IPA_NAME="$PRODUCT_NAME.ipa"
LAST_SUCCESSFUL_COMMIT=$(python3 scripts/ci/workflow.py last-successful-commit \
"${{ github.workflow }}" "$CHANNEL")
python3 scripts/ci/workflow.py deploy \
Dependencies/apps-v2.json \
"_includes/source.json" \
"${{ env.ref_name }}" \
SideStore/apps-v2.json \
"$SOURCE_JSON" \
"$CHANNEL" \
"$SHORT_COMMIT" \
"$MARKETING_VERSION" \
"$VERSION" \
"${{ env.ref_name }}" \
"com.SideStore.SideStore" \
"SideStore.ipa"
"$CHANNEL" \
"$BUNDLE_ID" \
"$IPA_NAME" \
"$LAST_SUCCESSFUL_COMMIT"
RELEASE_NOTES=$(python3 scripts/ci/workflow.py retrieve-release-notes "$CHANNEL")
python3 scripts/ci/workflow.py upload-release \
"$RELEASE_NAME" \
"$CHANNEL" \
"$GITHUB_SHA" \
"$GITHUB_REPOSITORY" \
"$UPSTREAM_CHANNEL"

View File

@@ -881,6 +881,90 @@
remoteGlobalIDString = 191E5FAA290A5D92001A3B7C;
remoteInfo = minimuxer;
};
A86372AE2F4D1E4400E66784 /* PBXContainerItemProxy */ = {
isa = PBXContainerItemProxy;
containerPortal = A8636E922F4CF74D00E66784 /* libfragmentzip.xcodeproj */;
proxyType = 2;
remoteGlobalIDString = 87B8C3401E0E9C37002F817D;
remoteInfo = "fragmentzip-cli-macOS";
};
A86372B02F4D1E4400E66784 /* PBXContainerItemProxy */ = {
isa = PBXContainerItemProxy;
containerPortal = A8636E922F4CF74D00E66784 /* libfragmentzip.xcodeproj */;
proxyType = 2;
remoteGlobalIDString = B315FDB02866CCF8002E243C;
remoteInfo = "fragmentzip-cli-iOS";
};
A86372B22F4D1E4400E66784 /* PBXContainerItemProxy */ = {
isa = PBXContainerItemProxy;
containerPortal = A8636E922F4CF74D00E66784 /* libfragmentzip.xcodeproj */;
proxyType = 2;
remoteGlobalIDString = B315FDB52866CD91002E243C;
remoteInfo = "fragmentzip-macOS";
};
A86372B42F4D1E4400E66784 /* PBXContainerItemProxy */ = {
isa = PBXContainerItemProxy;
containerPortal = A8636E922F4CF74D00E66784 /* libfragmentzip.xcodeproj */;
proxyType = 2;
remoteGlobalIDString = B315FDCE2866CDD3002E243C;
remoteInfo = "fragmentzip-iOS";
};
A86372B72F4D1E4400E66784 /* PBXContainerItemProxy */ = {
isa = PBXContainerItemProxy;
containerPortal = A8636E6E2F4CF74D00E66784 /* libgeneral.xcodeproj */;
proxyType = 2;
remoteGlobalIDString = 87977F6F227C4B71004F31DA;
remoteInfo = libgeneral;
};
A86372BC2F4D1E4400E66784 /* PBXContainerItemProxy */ = {
isa = PBXContainerItemProxy;
containerPortal = A86372112F4CF74D00E66784 /* Roxas.xcodeproj */;
proxyType = 2;
remoteGlobalIDString = BFADAFF819AE7BB70050CF31;
remoteInfo = Roxas;
};
A86372BE2F4D1E4400E66784 /* PBXContainerItemProxy */ = {
isa = PBXContainerItemProxy;
containerPortal = A86372112F4CF74D00E66784 /* Roxas.xcodeproj */;
proxyType = 2;
remoteGlobalIDString = BF8624801BB742E700C12EEE;
remoteInfo = RoxasTV;
};
A86372C02F4D1E4400E66784 /* PBXContainerItemProxy */ = {
isa = PBXContainerItemProxy;
containerPortal = A86372112F4CF74D00E66784 /* Roxas.xcodeproj */;
proxyType = 2;
remoteGlobalIDString = BFADB00319AE7BB80050CF31;
remoteInfo = RoxasTests;
};
A86372C32F4D1E4400E66784 /* PBXContainerItemProxy */ = {
isa = PBXContainerItemProxy;
containerPortal = A86371872F4CF74D00E66784 /* SampleApp.xcodeproj */;
proxyType = 2;
remoteGlobalIDString = 44E8FA8923D90632009E1D13;
remoteInfo = SampleApp;
};
A86372C72F4D1E4400E66784 /* PBXContainerItemProxy */ = {
isa = PBXContainerItemProxy;
containerPortal = A863716E2F4CF74D00E66784 /* SampleApp.xcodeproj */;
proxyType = 2;
remoteGlobalIDString = 44B1EE7C23DB90D5004E2E29;
remoteInfo = SampleApp;
};
A86372C92F4D1E4400E66784 /* PBXContainerItemProxy */ = {
isa = PBXContainerItemProxy;
containerPortal = A863716E2F4CF74D00E66784 /* SampleApp.xcodeproj */;
proxyType = 2;
remoteGlobalIDString = 445A906A2400612800B487B4;
remoteInfo = "NSAttributedString+MarkdownTests";
};
A86372CC2F4D1E4400E66784 /* PBXContainerItemProxy */ = {
isa = PBXContainerItemProxy;
containerPortal = A86371932F4CF74D00E66784 /* SwiftSampleApp.xcodeproj */;
proxyType = 2;
remoteGlobalIDString = 44962FDA23E7A54A00E2A598;
remoteInfo = SwiftSampleApp;
};
A8A5AC022F4C2CFC00572B4A /* PBXContainerItemProxy */ = {
isa = PBXContainerItemProxy;
containerPortal = A8A5AB6D2F4C2CFC00572B4A /* minimuxer.xcodeproj */;
@@ -2168,6 +2252,12 @@
A85AEC662F4B22F6002E2E11 /* em_proxy.xcodeproj */ = {isa = PBXFileReference; lastKnownFileType = "wrapper.pb-project"; path = em_proxy.xcodeproj; sourceTree = "<group>"; };
A85AEC682F4B22F6002E2E11 /* minimuxer.xcodeproj */ = {isa = PBXFileReference; lastKnownFileType = "wrapper.pb-project"; path = minimuxer.xcodeproj; sourceTree = "<group>"; };
A8635D052F4CF16D00E66784 /* OpenSSL.xcframework */ = {isa = PBXFileReference; expectedSignature = "AppleDeveloperProgram:67RAULRX93:Marcin Krzyzanowski"; lastKnownFileType = wrapper.xcframework; name = OpenSSL.xcframework; path = Dependencies/AltSign/Dependencies/OpenSSL.xcframework; sourceTree = "<group>"; };
A8636E6E2F4CF74D00E66784 /* libgeneral.xcodeproj */ = {isa = PBXFileReference; lastKnownFileType = "wrapper.pb-project"; path = libgeneral.xcodeproj; sourceTree = "<group>"; };
A8636E922F4CF74D00E66784 /* libfragmentzip.xcodeproj */ = {isa = PBXFileReference; lastKnownFileType = "wrapper.pb-project"; path = libfragmentzip.xcodeproj; sourceTree = "<group>"; };
A863716E2F4CF74D00E66784 /* SampleApp.xcodeproj */ = {isa = PBXFileReference; lastKnownFileType = "wrapper.pb-project"; path = SampleApp.xcodeproj; sourceTree = "<group>"; };
A86371872F4CF74D00E66784 /* SampleApp.xcodeproj */ = {isa = PBXFileReference; lastKnownFileType = "wrapper.pb-project"; path = SampleApp.xcodeproj; sourceTree = "<group>"; };
A86371932F4CF74D00E66784 /* SwiftSampleApp.xcodeproj */ = {isa = PBXFileReference; lastKnownFileType = "wrapper.pb-project"; path = SwiftSampleApp.xcodeproj; sourceTree = "<group>"; };
A86372112F4CF74D00E66784 /* Roxas.xcodeproj */ = {isa = PBXFileReference; lastKnownFileType = "wrapper.pb-project"; path = Roxas.xcodeproj; sourceTree = "<group>"; };
A8945AA52D059B6100D86CBE /* Roxas.framework */ = {isa = PBXFileReference; explicitFileType = wrapper.framework; path = Roxas.framework; sourceTree = BUILT_PRODUCTS_DIR; };
A8A5A7E82F4C2CFC00572B4A /* .gitignore */ = {isa = PBXFileReference; lastKnownFileType = text; path = .gitignore; sourceTree = "<group>"; };
A8A5A7E92F4C2CFC00572B4A /* build.rs */ = {isa = PBXFileReference; lastKnownFileType = text; path = build.rs; sourceTree = "<group>"; };
@@ -3231,6 +3321,60 @@
name = Products;
sourceTree = "<group>";
};
A8636E992F4CF74D00E66784 /* Products */ = {
isa = PBXGroup;
children = (
A86372B82F4D1E4400E66784 /* libgeneral */,
);
name = Products;
sourceTree = "<group>";
};
A8636E9B2F4CF74D00E66784 /* Products */ = {
isa = PBXGroup;
children = (
A86372AF2F4D1E4400E66784 /* libfragmentzip */,
A86372B12F4D1E4400E66784 /* libfragmentzip */,
A86372B32F4D1E4400E66784 /* libfragmentzip.a */,
A86372B52F4D1E4400E66784 /* libfragmentzip.a */,
);
name = Products;
sourceTree = "<group>";
};
A863719B2F4CF74D00E66784 /* Products */ = {
isa = PBXGroup;
children = (
A86372C82F4D1E4400E66784 /* SampleApp.app */,
A86372CA2F4D1E4400E66784 /* NSAttributedString+MarkdownTests.xctest */,
);
name = Products;
sourceTree = "<group>";
};
A863719D2F4CF74D00E66784 /* Products */ = {
isa = PBXGroup;
children = (
A86372CD2F4D1E4400E66784 /* SwiftSampleApp.app */,
);
name = Products;
sourceTree = "<group>";
};
A863719F2F4CF74D00E66784 /* Products */ = {
isa = PBXGroup;
children = (
A86372C42F4D1E4400E66784 /* SampleApp.app */,
);
name = Products;
sourceTree = "<group>";
};
A86372122F4CF74D00E66784 /* Products */ = {
isa = PBXGroup;
children = (
A86372BD2F4D1E4400E66784 /* Roxas.framework */,
A86372BF2F4D1E4400E66784 /* Roxas.framework */,
A86372C12F4D1E4400E66784 /* RoxasTests.xctest */,
);
name = Products;
sourceTree = "<group>";
};
A8A5A7F42F4C2CFC00572B4A /* em_proxy */ = {
isa = PBXGroup;
children = (
@@ -4497,6 +4641,10 @@
ProductGroup = A8A5B06A2F4C347700572B4A /* Products */;
ProjectRef = A8A5B0622F4C347700572B4A /* libfragmentzip.xcodeproj */;
},
{
ProductGroup = A8636E9B2F4CF74D00E66784 /* Products */;
ProjectRef = A8636E922F4CF74D00E66784 /* libfragmentzip.xcodeproj */;
},
{
ProductGroup = A8FAC0612F4B50D10061A851 /* Products */;
ProjectRef = A8FABCE72F4B50D10061A851 /* libfragmentzip.xcodeproj */;
@@ -4609,6 +4757,10 @@
ProductGroup = A8EEDA212F4B19B000F2436D /* Products */;
ProjectRef = A8EED1D72F4B19B000F2436D /* libgeneral.xcodeproj */;
},
{
ProductGroup = A8636E992F4CF74D00E66784 /* Products */;
ProjectRef = A8636E6E2F4CF74D00E66784 /* libgeneral.xcodeproj */;
},
{
ProductGroup = A81197412F4C1C710013ABD0 /* Products */;
ProjectRef = A81193E22F4C1C710013ABD0 /* libgeneral.xcodeproj */;
@@ -4773,6 +4925,10 @@
ProductGroup = A8A5B7332F4C4C8600572B4A /* Products */;
ProjectRef = A8A5B7322F4C4C8600572B4A /* Roxas.xcodeproj */;
},
{
ProductGroup = A86372122F4CF74D00E66784 /* Products */;
ProjectRef = A86372112F4CF74D00E66784 /* Roxas.xcodeproj */;
},
{
ProductGroup = A8EEDA272F4B19B000F2436D /* Products */;
ProjectRef = A8EEDA1E2F4B19B000F2436D /* Roxas.xcodeproj */;
@@ -4817,6 +4973,10 @@
ProductGroup = A81180DC2F4C1B230013ABD0 /* Products */;
ProjectRef = A81180302F4C1B230013ABD0 /* SampleApp.xcodeproj */;
},
{
ProductGroup = A863719F2F4CF74D00E66784 /* Products */;
ProjectRef = A86371872F4CF74D00E66784 /* SampleApp.xcodeproj */;
},
{
ProductGroup = A81197372F4C1C710013ABD0 /* Products */;
ProjectRef = A81196912F4C1C710013ABD0 /* SampleApp.xcodeproj */;
@@ -4865,6 +5025,10 @@
ProductGroup = A8A5ACE92F4C339400572B4A /* Products */;
ProjectRef = A8A5ACD02F4C339400572B4A /* SampleApp.xcodeproj */;
},
{
ProductGroup = A863719B2F4CF74D00E66784 /* Products */;
ProjectRef = A863716E2F4CF74D00E66784 /* SampleApp.xcodeproj */;
},
{
ProductGroup = A8A5ACE72F4C339400572B4A /* Products */;
ProjectRef = A8A5ACB72F4C339400572B4A /* SampleApp.xcodeproj */;
@@ -4973,6 +5137,10 @@
ProductGroup = A81197392F4C1C710013ABD0 /* Products */;
ProjectRef = A811969D2F4C1C710013ABD0 /* SwiftSampleApp.xcodeproj */;
},
{
ProductGroup = A863719D2F4CF74D00E66784 /* Products */;
ProjectRef = A86371932F4CF74D00E66784 /* SwiftSampleApp.xcodeproj */;
},
{
ProductGroup = A8FAC0592F4B50D10061A851 /* Products */;
ProjectRef = A8FABF7E2F4B50D10061A851 /* SwiftSampleApp.xcodeproj */;
@@ -5860,6 +6028,90 @@
remoteRef = A85AED342F4B2315002E2E11 /* PBXContainerItemProxy */;
sourceTree = BUILT_PRODUCTS_DIR;
};
A86372AF2F4D1E4400E66784 /* libfragmentzip */ = {
isa = PBXReferenceProxy;
fileType = "compiled.mach-o.executable";
path = libfragmentzip;
remoteRef = A86372AE2F4D1E4400E66784 /* PBXContainerItemProxy */;
sourceTree = BUILT_PRODUCTS_DIR;
};
A86372B12F4D1E4400E66784 /* libfragmentzip */ = {
isa = PBXReferenceProxy;
fileType = "compiled.mach-o.executable";
path = libfragmentzip;
remoteRef = A86372B02F4D1E4400E66784 /* PBXContainerItemProxy */;
sourceTree = BUILT_PRODUCTS_DIR;
};
A86372B32F4D1E4400E66784 /* libfragmentzip.a */ = {
isa = PBXReferenceProxy;
fileType = archive.ar;
path = libfragmentzip.a;
remoteRef = A86372B22F4D1E4400E66784 /* PBXContainerItemProxy */;
sourceTree = BUILT_PRODUCTS_DIR;
};
A86372B52F4D1E4400E66784 /* libfragmentzip.a */ = {
isa = PBXReferenceProxy;
fileType = archive.ar;
path = libfragmentzip.a;
remoteRef = A86372B42F4D1E4400E66784 /* PBXContainerItemProxy */;
sourceTree = BUILT_PRODUCTS_DIR;
};
A86372B82F4D1E4400E66784 /* libgeneral */ = {
isa = PBXReferenceProxy;
fileType = "compiled.mach-o.executable";
path = libgeneral;
remoteRef = A86372B72F4D1E4400E66784 /* PBXContainerItemProxy */;
sourceTree = BUILT_PRODUCTS_DIR;
};
A86372BD2F4D1E4400E66784 /* Roxas.framework */ = {
isa = PBXReferenceProxy;
fileType = wrapper.framework;
path = Roxas.framework;
remoteRef = A86372BC2F4D1E4400E66784 /* PBXContainerItemProxy */;
sourceTree = BUILT_PRODUCTS_DIR;
};
A86372BF2F4D1E4400E66784 /* Roxas.framework */ = {
isa = PBXReferenceProxy;
fileType = wrapper.framework;
path = Roxas.framework;
remoteRef = A86372BE2F4D1E4400E66784 /* PBXContainerItemProxy */;
sourceTree = BUILT_PRODUCTS_DIR;
};
A86372C12F4D1E4400E66784 /* RoxasTests.xctest */ = {
isa = PBXReferenceProxy;
fileType = wrapper.cfbundle;
path = RoxasTests.xctest;
remoteRef = A86372C02F4D1E4400E66784 /* PBXContainerItemProxy */;
sourceTree = BUILT_PRODUCTS_DIR;
};
A86372C42F4D1E4400E66784 /* SampleApp.app */ = {
isa = PBXReferenceProxy;
fileType = wrapper.application;
path = SampleApp.app;
remoteRef = A86372C32F4D1E4400E66784 /* PBXContainerItemProxy */;
sourceTree = BUILT_PRODUCTS_DIR;
};
A86372C82F4D1E4400E66784 /* SampleApp.app */ = {
isa = PBXReferenceProxy;
fileType = wrapper.application;
path = SampleApp.app;
remoteRef = A86372C72F4D1E4400E66784 /* PBXContainerItemProxy */;
sourceTree = BUILT_PRODUCTS_DIR;
};
A86372CA2F4D1E4400E66784 /* NSAttributedString+MarkdownTests.xctest */ = {
isa = PBXReferenceProxy;
fileType = wrapper.cfbundle;
path = "NSAttributedString+MarkdownTests.xctest";
remoteRef = A86372C92F4D1E4400E66784 /* PBXContainerItemProxy */;
sourceTree = BUILT_PRODUCTS_DIR;
};
A86372CD2F4D1E4400E66784 /* SwiftSampleApp.app */ = {
isa = PBXReferenceProxy;
fileType = wrapper.application;
path = SwiftSampleApp.app;
remoteRef = A86372CC2F4D1E4400E66784 /* PBXContainerItemProxy */;
sourceTree = BUILT_PRODUCTS_DIR;
};
A8A5AC032F4C2CFC00572B4A /* libminimuxer_static.a */ = {
isa = PBXReferenceProxy;
fileType = archive.ar;

37
release-notes.md Normal file
View File

@@ -0,0 +1,37 @@
### nightly
#### What's Changed
- CI: improve more ci worflow
- CI: improve more ci worflow
- CI: improve more ci worflow
- CI: improve more ci worflow
- CI: improve more ci worflow
- CI: improve more ci worflow
- CI: improve more ci worflow
- CI: improve more ci worflow
- CI: improve more ci worflow
- CI: improve more ci worflow
- CI: improve more ci worflow
- CI: improve more ci worflow
- altsign updated to latest
- CI: improve more ci worflow
- CI: improve more ci worflow
- CI: improve more ci worflow
- CI: improve more ci worflow
- CI: improve more ci worflow
- re added openSSL from new path
- updated altsign to use xcframework for openSSL which was causing huge download of 1.2 GB each time
- CI: improve more ci worflow
- CI: improve more ci worflow
- CI: improve more ci worflow
- CI: improve more ci worflow
- CI: improve more ci worflow
- CI: improve more ci worflow
- CI: improve more ci worflow
- CI: improve more ci worflow
- CI: improve more ci worflow
- CI: improve more ci worflow
- CI: full rewrite - moved logic into ci.py and kept workflow scripts mostly dummy
#### Full Changelog: [38715283...99712f00](https://github.com/SideStore/SideStore/compare/38715283073ea37949a462b889ce3cad403ea499...99712f0020a4f2ae57d8d781514fa735f893c23a)

View File

@@ -19,6 +19,20 @@ def run(cmd: str) -> str:
return subprocess.check_output(cmd, shell=True, text=True).strip()
def commit_exists(rev: str) -> bool:
if not rev:
return False
try:
subprocess.check_output(
f"git rev-parse --verify {rev}^{{commit}}",
shell=True,
stderr=subprocess.DEVNULL,
)
return True
except subprocess.CalledProcessError:
return False
def head_commit():
return run("git rev-parse HEAD")
@@ -35,12 +49,8 @@ def repo_url():
def commit_messages(start, end="HEAD"):
try:
out = run(f"git log {start}..{end} --pretty=format:%s")
return out.splitlines() if out else []
except subprocess.CalledProcessError:
fallback = run("git rev-parse HEAD~5")
return run(f"git log {fallback}..{end} --pretty=format:%s").splitlines()
out = run(f"git log {start}..{end} --pretty=format:%s")
return out.splitlines() if out else []
def authors(range_expr, fmt="%an"):
@@ -76,10 +86,35 @@ def fmt_author(author):
# release note generation
# ----------------------------------------------------------
def resolve_start_commit(last_successful: str):
if commit_exists(last_successful):
return last_successful
try:
return run("git rev-parse HEAD~10")
except Exception:
return first_commit()
def generate_release_notes(last_successful, tag, branch):
current = head_commit()
# fallback if missing/invalid
if not last_successful or not commit_exists(last_successful):
try:
last_successful = run("git rev-parse HEAD~10")
except Exception:
last_successful = first_commit()
messages = commit_messages(last_successful, current)
# fallback if empty range
if not messages:
try:
last_successful = run("git rev-parse HEAD~10")
except Exception:
last_successful = first_commit()
messages = commit_messages(last_successful, current)
section = f"{TAG_MARKER} {tag}\n"
section += f"{HEADER_MARKER} What's Changed\n"
@@ -90,7 +125,8 @@ def generate_release_notes(last_successful, tag, branch):
section += f"{fmt_msg(m)}\n"
prev_authors = authors(branch)
new_authors = authors(f"{last_successful}..{current}") - prev_authors
recent_authors = authors(f"{last_successful}..{current}")
new_authors = recent_authors - prev_authors
if new_authors:
section += f"\n{HEADER_MARKER} New Contributors\n"
@@ -170,7 +206,7 @@ def update_release_md(existing, new_section, tag):
# retrieval
# ----------------------------------------------------------
def retrieve_tag(tag, file_path):
def retrieve_tag(tag, file_path: Path):
if not file_path.exists():
return ""
@@ -209,30 +245,20 @@ def main():
" generate_release_notes.py --retrieve <tag> [--output-dir DIR]"
)
# parse optional output dir
output_dir = Path.cwd()
if "--output-dir" in args:
idx = args.index("--output-dir")
try:
output_dir = Path(args[idx + 1]).resolve()
except IndexError:
sys.exit("Missing value for --output-dir")
output_dir = Path(args[idx + 1]).resolve()
del args[idx:idx + 2]
output_dir.mkdir(parents=True, exist_ok=True)
release_file = output_dir / "release-notes.md"
# retrieval mode
if args[0] == "--retrieve":
if len(args) < 2:
sys.exit("Missing tag after --retrieve")
print(retrieve_tag(args[1], release_file))
return
# generation mode
last_successful = args[0]
tag = args[1] if len(args) > 1 else head_commit()
branch = args[2] if len(args) > 2 else (
@@ -241,12 +267,7 @@ def main():
new_section = generate_release_notes(last_successful, tag, branch)
existing = (
release_file.read_text()
if release_file.exists()
else ""
)
existing = release_file.read_text() if release_file.exists() else ""
updated = update_release_md(existing, new_section, tag)
release_file.write_text(updated)

View File

@@ -5,16 +5,34 @@ import json
import subprocess
from pathlib import Path
import argparse
import textwrap
import sys
SCRIPT_DIR = Path(__file__).resolve().parent
# ----------------------------------------------------------
# helpers
# ----------------------------------------------------------
def resolve_script(name: str) -> Path:
p = Path.cwd() / name
if p.exists():
return p
return SCRIPT_DIR / name
def sh(cmd: str, cwd: Path) -> str:
return subprocess.check_output(
cmd, shell=True, cwd=cwd
).decode().strip()
try:
return subprocess.check_output(
cmd,
shell=True,
cwd=cwd,
stderr=subprocess.STDOUT,
).decode().strip()
except subprocess.CalledProcessError as e:
print(e.output.decode(), file=sys.stderr)
raise SystemExit(f"Command failed: {cmd}")
def file_size(path: Path) -> int:
@@ -38,36 +56,26 @@ def sha256(path: Path) -> str:
def main():
p = argparse.ArgumentParser()
p.add_argument(
"--repo-root",
required=True,
help="Repo used for git history + release notes",
)
p.add_argument("--repo-root", required=True)
p.add_argument("--ipa", required=True)
p.add_argument("--output-dir", required=True)
p.add_argument(
"--ipa",
required=True,
help="Path to IPA file",
"--output-name",
default="source_metadata.json",
)
p.add_argument(
"--output-dir",
required=True,
help="Output Directory where source_metadata.json is written",
)
p.add_argument(
"--release-notes-dir",
required=True,
help="Output Directory where release-notes.md is generated/read",
)
p.add_argument("--release-notes-dir", required=True)
p.add_argument("--release-tag", required=True)
p.add_argument("--version", required=True)
p.add_argument("--marketing-version", required=True)
p.add_argument("--short-commit", required=True)
p.add_argument("--release-channel", required=True)
p.add_argument("--bundle-id", required=True)
# optional
p.add_argument("--last-successful-commit")
p.add_argument("--is-beta", action="store_true")
args = p.parse_args()
@@ -86,22 +94,30 @@ def main():
notes_dir.mkdir(parents=True, exist_ok=True)
out_dir.mkdir(parents=True, exist_ok=True)
out_file = out_dir / "source_metadata.json"
out_file = out_dir / args.output_name
# ------------------------------------------------------
# ensure release notes exist
# generate release notes
# ------------------------------------------------------
print("Generating release notes…")
sh(
(
"python3 generate_release_notes.py "
script = resolve_script("generate_release_notes.py")
if args.last_successful_commit:
gen_cmd = (
f"python3 {script} "
f"{args.last_successful_commit} {args.release_tag} "
f"--output-dir \"{notes_dir}\""
)
else:
gen_cmd = (
f"python3 {script} "
f"{args.short_commit} {args.release_tag} "
f"--output-dir \"{notes_dir}\""
),
cwd=repo_root,
)
)
sh(gen_cmd, cwd=repo_root)
# ------------------------------------------------------
# retrieve release notes
@@ -109,7 +125,7 @@ def main():
notes = sh(
(
"python3 generate_release_notes.py "
f"python3 {script} "
f"--retrieve {args.release_tag} "
f"--output-dir \"{notes_dir}\""
),
@@ -120,19 +136,19 @@ def main():
# compute metadata
# ------------------------------------------------------
now = datetime.datetime.now(datetime.UTC)
now = datetime.datetime.now(datetime.timezone.utc)
formatted = now.strftime("%Y-%m-%dT%H:%M:%SZ")
human = now.strftime("%c")
localized_description = f"""
This is release for:
- version: "{args.version}"
- revision: "{args.short_commit}"
- timestamp: "{human}"
localized_description = textwrap.dedent(f"""
This is release for:
- version: "{args.marketing_version}"
- revision: "{args.short_commit}"
- timestamp: "{human}"
Release Notes:
{notes}
""".strip()
Release Notes:
{notes}
""").strip()
metadata = {
"is_beta": bool(args.is_beta),

View File

@@ -5,176 +5,177 @@ import sys
from pathlib import Path
'''
metadata.json template
# ----------------------------------------------------------
# metadata
# ----------------------------------------------------------
{
"version_ipa": "0.0.0",
"version_date": "2000-12-18T00:00:00Z",
"is_beta": true,
"release_channel": "alpha",
"size": 0,
"sha256": "",
"localized_description": "Invalid Update",
"download_url": "https://github.com/SideStore/SideStore/releases/download/0.0.0/SideStore.ipa",
"bundle_identifier": "com.SideStore.SideStore"
}
'''
def load_metadata(metadata_file: Path):
    """Load and validate the build-metadata JSON file.

    Prints every key/value for CI log visibility, verifies that all
    required fields are present and non-empty, then normalizes
    `size` to int and lower-cases `release_channel`.

    Raises SystemExit when the file is missing or fields are absent.
    """
    if not metadata_file.exists():
        raise SystemExit(f"Missing metadata file: {metadata_file}")
    with open(metadata_file, "r", encoding="utf-8") as f:
        meta = json.load(f)
    print(" ====> Required parameter list <====")
    for k, v in meta.items():
        print(f"{k}: {v}")
    required = [
        "bundle_identifier",
        "version_ipa",
        "version_date",
        "release_channel",
        "size",
        "sha256",
        "localized_description",
        "download_url",
    ]
    # Name the offending fields instead of a generic message so CI
    # logs pinpoint the problem immediately.
    missing = [r for r in required if not meta.get(r)]
    if missing:
        raise SystemExit(
            f"Required metadata fields missing/empty: {', '.join(missing)}"
        )
    meta["size"] = int(meta["size"])
    meta["release_channel"] = meta["release_channel"].lower()
    return meta
# ----------------------------------------------------------
# source loading
# ----------------------------------------------------------
def load_source(source_file: Path):
    """Load source.json, creating a minimal v2 skeleton when absent.

    Raises SystemExit for schema versions older than 2.
    """
    if source_file.exists():
        with open(source_file, "r", encoding="utf-8") as f:
            data = json.load(f)
    else:
        print("source.json missing — creating minimal structure")
        data = {"version": 2, "apps": []}
    # Missing "version" is treated as v1 (legacy) and rejected.
    if int(data.get("version", 1)) < 2:
        raise SystemExit("Only v2 and above are supported")
    return data
# ----------------------------------------------------------
# locate app
# ----------------------------------------------------------
def ensure_app(data, bundle_id):
    """Return the app entry matching `bundle_id`, creating and
    appending a fresh entry (empty releaseChannels) when absent."""
    apps = data.setdefault("apps", [])
    app = next(
        (a for a in apps if a.get("bundleIdentifier") == bundle_id),
        None,
    )
    if app is None:
        print("App entry missing — creating new app entry")
        app = {
            "bundleIdentifier": bundle_id,
            "releaseChannels": [],
        }
        apps.append(app)
    return app
# ----------------------------------------------------------
# update storefront
# ----------------------------------------------------------
def update_storefront_if_needed(app, meta):
    """Mirror release data onto the app's top-level storefront fields.

    Only stable releases update the storefront view; other channels
    leave the app entry untouched.
    """
    if meta["release_channel"] == "stable":
        app.update({
            "version": meta["version_ipa"],
            "versionDate": meta["version_date"],
            "size": meta["size"],
            "sha256": meta["sha256"],
            "localizedDescription": meta["localized_description"],
            "downloadURL": meta["download_url"],
        })
# ----------------------------------------------------------
# update release channel (ORIGINAL FORMAT)
# ----------------------------------------------------------
def update_release_channel(app, meta):
    """Insert or replace the latest release in the app's track.

    Locates the releaseChannels entry whose "track" matches the
    metadata's release_channel. A new track is inserted at index 0
    when absent; otherwise the track's first (latest) release is
    replaced in place. Raises SystemExit on duplicate track names.
    """
    channels = app.setdefault("releaseChannels", [])
    new_version = {
        "version": meta["version_ipa"],
        "date": meta["version_date"],
        "localizedDescription": meta["localized_description"],
        "downloadURL": meta["download_url"],
        "size": meta["size"],
        "sha256": meta["sha256"],
    }
    tracks = [
        t for t in channels
        if isinstance(t, dict)
        and t.get("track") == meta["release_channel"]
    ]
    if len(tracks) > 1:
        raise SystemExit(f"Multiple tracks named {meta['release_channel']}")
    if not tracks:
        channels.insert(0, {
            "track": meta["release_channel"],
            "releases": [new_version],
        })
    else:
        releases = tracks[0].setdefault("releases", [])
        # Only the newest (index 0) release is maintained per track.
        if not releases:
            releases.append(new_version)
        else:
            releases[0] = new_version
# ----------------------------------------------------------
# save
# ----------------------------------------------------------
def save_source(source_file: Path, data):
    """Pretty-print the updated source data and persist it to disk.

    Creates the parent directory if needed before writing.
    """
    print("\nUpdated Sources File:\n")
    print(json.dumps(data, indent=2, ensure_ascii=False))
    # Ensure the destination directory exists before opening for write.
    source_file.parent.mkdir(parents=True, exist_ok=True)
    with open(source_file, "w", encoding="utf-8") as f:
        json.dump(data, f, indent=2, ensure_ascii=False)
    print("JSON successfully updated.")
# ----------------------------------------------------------
# main
# ----------------------------------------------------------
def main():
    """CLI entry point: update_apps.py <metadata.json> <source.json>."""
    if len(sys.argv) < 3:
        print("Usage: python3 update_apps.py <metadata.json> <source.json>")
        sys.exit(1)

    meta_path = Path(sys.argv[1])
    src_path = Path(sys.argv[2])

    meta = load_metadata(meta_path)
    source_data = load_source(src_path)

    entry = ensure_app(source_data, meta["bundle_identifier"])
    update_storefront_if_needed(entry, meta)
    update_release_channel(entry, meta)
    save_source(src_path, source_data)


if __name__ == "__main__":
    main()

View File

@@ -6,10 +6,12 @@ import datetime
from pathlib import Path
import time
import json
import inspect
import re
# REPO ROOT relative to script dir
ROOT = Path(__file__).resolve().parents[2]
SCRIPTS = ROOT / 'scripts/ci'
# ----------------------------------------------------------
# helpers
@@ -104,13 +106,6 @@ def reserve_build_number(repo, max_attempts=5):
write(data)
run("git add version.json", check=False, cwd=repo)
print("---- DEBUG reserve_build_number ----", file=sys.stderr)
print(f"attempt: {attempt}", file=sys.stderr)
print(f"data: {data!r}", file=sys.stderr)
print(version_json.read_text(), file=sys.stderr)
print("------------------------------------", file=sys.stderr)
run(f"git commit -m '{data['tag']} - build no: {data['build']}' || true", check=False, cwd=repo)
rc = subprocess.call("git push", shell=True, cwd=repo)
@@ -125,17 +120,33 @@ def reserve_build_number(repo, max_attempts=5):
raise SystemExit("Failed reserving build number")
# ----------------------------------------------------------
# MARKETING VERSION
# PROJECT INFO
# ----------------------------------------------------------
def get_product_name():
    """Query xcodebuild for the effective PRODUCT_NAME build setting."""
    # NOTE(review): unlike get_bundle_id this pipeline does not merge
    # stderr (no 2>&1) — confirm whether that asymmetry is intentional.
    pipeline = (
        "xcodebuild -showBuildSettings"
        " | grep PRODUCT_NAME"
        " | tail -1"
        " | sed -e 's/.*= //g'"
    )
    return runAndGet(pipeline)
def get_bundle_id():
    """Query xcodebuild for PRODUCT_BUNDLE_IDENTIFIER (stderr merged)."""
    pipeline = (
        "xcodebuild -showBuildSettings 2>&1"
        " | grep 'PRODUCT_BUNDLE_IDENTIFIER = '"
        " | tail -1"
        " | sed -e 's/.*= //g'"
    )
    return runAndGet(pipeline)
def get_marketing_version():
    """Read MARKETING_VERSION from Build.xcconfig at the repo root.

    The diff residue carried both the old (cwd-relative) and new
    (ROOT-anchored) return lines; only the ROOT-anchored one is kept.
    """
    return runAndGet(
        f"grep MARKETING_VERSION {ROOT}/Build.xcconfig "
        "| sed -e 's/MARKETING_VERSION = //g'"
    )
def set_marketing_version(qualified):
    """Rewrite MARKETING_VERSION in Build.xcconfig to `qualified`.

    Uses BSD sed in-place syntax (-i '') as used on macOS runners.
    The merged diff left old and new sed fragments fused into one
    broken command; this keeps only the new argument order.
    """
    run(
        f"sed -E -i '' "
        f"'s/^MARKETING_VERSION = .*/MARKETING_VERSION = {qualified}/' "
        f"{ROOT}/Build.xcconfig"
    )
def compute_qualified_version(marketing, build_num, channel, short):
@@ -160,16 +171,12 @@ def clean_spm_cache():
# ----------------------------------------------------------
def build():
run("make clean")
run("rm -rf ~/Library/Developer/Xcode/DerivedData/*", check=False)
run("mkdir -p build/logs")
run(
"set -o pipefail && "
"NSUnbufferedIO=YES make -B build "
"2>&1 | tee -a build/logs/build.log | xcbeautify --renderer github-actions"
)
run("make fakesign | tee -a build/logs/build.log")
run("make ipa | tee -a build/logs/build.log")
run("zip -r -9 ./SideStore.dSYMs.zip ./SideStore.xcarchive/dSYMs")
@@ -253,19 +260,28 @@ def encrypt_logs(name):
def release_notes(tag):
    """Generate release notes for `tag` via the helper script in SCRIPTS.

    Diff residue duplicated the command prefix (old cwd-relative line
    plus new SCRIPTS-anchored line); only the SCRIPTS form is kept.
    """
    run(
        f"python3 {SCRIPTS}/generate_release_notes.py "
        f"{tag} "
        f"--repo-root {ROOT} "
        f"--output-dir {ROOT}"
    )
def retrieve_release_notes(tag):
    """Fetch previously generated release notes for `tag`."""
    cmd = (
        f"python3 {SCRIPTS}/generate_release_notes.py "
        f"--retrieve {tag} "
        f"--output-dir {ROOT}"
    )
    return runAndGet(cmd)
# ----------------------------------------------------------
# DEPLOY SOURCE.JSON
# ----------------------------------------------------------
def deploy(repo, source_json, release_tag, short_commit, marketing_version, channel, bundle_id, ipa_name, last_successful_commit=None):
    """Generate source metadata and push the updated source.json.

    `repo` is resolved relative to ROOT; `source_json` is relative to
    that repo. Regenerates the metadata, then retries the
    update/commit/push cycle (re-fetching on conflict) up to 5 times.
    The merged diff left the old pushd/popd flow interleaved here;
    only the cwd-based flow is kept.

    Raises SystemExit when paths are missing or all pushes fail.
    """
    repo = (ROOT / repo).resolve()
    ipa_path = ROOT / ipa_name
    source_json_path = repo / source_json
    metadata = 'source-metadata.json'

    if not repo.exists():
        raise SystemExit(f"{repo} repo missing")
    if not ipa_path.exists():
        raise SystemExit(f"{ipa_path} missing")
    if not source_json_path.exists():
        raise SystemExit(f"{source_json} missing inside repo")

    cmd = (
        f"python3 {SCRIPTS}/generate_source_metadata.py "
        f"--repo-root {ROOT} "
        f"--ipa {ipa_path} "
        f"--output-dir {ROOT} "
        f"--output-name {metadata} "
        f"--release-notes-dir {ROOT} "
        f"--release-tag {release_tag} "
        f"--marketing-version {marketing_version} "
        f"--short-commit {short_commit} "
        f"--release-channel {channel} "
        f"--bundle-id {bundle_id}"
    )
    # pass only if provided
    if last_successful_commit:
        cmd += f" --last-successful-commit {last_successful_commit}"
    run(cmd)

    run("git config user.name 'GitHub Actions'", check=False)
    run("git config user.email 'github-actions@github.com'", check=False)

    # Start from a clean, current main before attempting the push loop.
    run("git fetch origin main", check=False, cwd=repo)
    run("git switch main || git switch -c main origin/main", cwd=repo)
    run("git reset --hard origin/main", cwd=repo)

    max_attempts = 5
    for attempt in range(1, max_attempts + 1):
        if attempt > 1:
            run("git fetch --depth=1 origin HEAD", check=False, cwd=repo)
            run("git reset --hard FETCH_HEAD", check=False, cwd=repo)
        # regenerate after reset so we don't lose changes
        run(f"python3 {SCRIPTS}/update_source_metadata.py '{ROOT}/{metadata}' '{source_json_path}'", cwd=repo)
        run(f"git add --verbose {source_json}", cwd=repo)
        run(f"git commit -m '{release_tag} - deployed {marketing_version}' || true", cwd=repo)
        rc = subprocess.call("git push", shell=True, cwd=repo)
        if rc == 0:
            print("Deploy push succeeded", file=sys.stderr)
            break
        print(f"Push rejected (attempt {attempt}/{max_attempts}), retrying...", file=sys.stderr)
        time.sleep(0.5)
    else:
        raise SystemExit("Deploy push failed after retries")
def last_successful_commit(workflow, branch):
    """Return the head SHA of the most recent successful run of
    `workflow` on `branch`, or None when none is found.

    Best-effort: any failure (gh missing, auth, parse error) is
    swallowed and None is returned.
    """
    import json
    try:
        out = runAndGet(
            f'gh run list '
            f'--workflow "{workflow}" '
            f'--json headSha,conclusion,headBranch'
        )
        runs = json.loads(out)
        for r in runs:
            if r.get("conclusion") == "success" and r.get("headBranch") == branch:
                return r["headSha"]
    except Exception:
        pass
    return None
def upload_release(release_name, release_tag, commit_sha, repo, upstream_recommendation):
    """Edit the GitHub release for `release_tag` with a generated body
    and upload the build artifacts.

    Reads build facts from ROOT/source-metadata.json, retrieves the
    stored release notes, normalizes their section header, then calls
    `gh release edit` and `gh release upload`.

    Bug fixed: the upload f-string concatenation was missing a space,
    producing the literal argument "encrypted-build-logs.zip--clobber".
    """
    token = getenv("GH_TOKEN")
    if token:
        os.environ["GH_TOKEN"] = token

    metadata_path = ROOT / "source-metadata.json"
    if not metadata_path.exists():
        raise SystemExit("source-metadata.json missing")

    meta = json.loads(metadata_path.read_text())
    marketing_version = meta.get("version_ipa")
    is_beta = bool(meta.get("is_beta"))
    build_datetime = meta.get("version_date")
    dt = datetime.datetime.fromisoformat(
        build_datetime.replace("Z", "+00:00")
    )
    built_time = dt.strftime("%a %b %d %H:%M:%S %Y")
    built_date = dt.strftime("%Y-%m-%d")

    release_notes = runAndGet(
        f"python3 {SCRIPTS}/generate_release_notes.py "
        f"--retrieve {release_tag} "
        f"--output-dir {ROOT}"
    )
    # normalize section header
    release_notes = re.sub(
        r'^\s*#{1,6}\s*what(?:\'?s|\s+is)?\s+(?:new|changed).*',
        "## What's Changed",
        release_notes,
        flags=re.IGNORECASE | re.MULTILINE,
    )

    upstream_block = ""
    if upstream_recommendation and upstream_recommendation.strip():
        upstream_block = upstream_recommendation.strip() + "\n\n"

    # NOTE(review): paragraph spacing reconstructed from a rendering
    # that strips blank lines — confirm against a published release.
    raw_body = f"""
This is an ⚠️ **EXPERIMENTAL** ⚠️ {release_name} build for commit [{commit_sha}](https://github.com/{repo}/commit/{commit_sha}).

{release_name} builds are **extremely experimental builds only meant to be used by developers and beta testers. They often contain bugs and experimental features. Use at your own risk!**

{upstream_block}## Build Info
Built at (UTC): `{built_time}`
Built at (UTC date): `{built_date}`
Commit SHA: `{commit_sha}`
Version: `{marketing_version}`
"""
    header = inspect.cleandoc(raw_body)
    body = header + "\n\n" + release_notes.lstrip() + "\n"
    body_file = ROOT / "release_body.md"
    body_file.write_text(body, encoding="utf-8")

    prerelease_flag = "--prerelease" if is_beta else ""
    run(
        f'gh release edit "{release_tag}" '
        f'--title "{release_name}" '
        f'--notes-file "{body_file}" '
        f'{prerelease_flag}'
    )
    run(
        f'gh release upload "{release_tag}" '
        f'SideStore.ipa SideStore.dSYMs.zip encrypted-build-logs.zip '
        f'--clobber'
    )
# ----------------------------------------------------------
# ENTRYPOINT
@@ -333,12 +448,14 @@ COMMANDS = {
"commid-id" : (short_commit, 0, ""),
# ----------------------------------------------------------
# VERSION / MARKETING
# PROJECT INFO
# ----------------------------------------------------------
"get-marketing-version" : (get_marketing_version, 0, ""),
"set-marketing-version" : (set_marketing_version, 1, "<qualified_version>"),
"compute-qualified" : (compute_qualified_version, 4, "<marketing> <build_num> <channel> <short_commit>"),
"reserve_build_number" : (reserve_build_number, 1, "<repo>"),
"get-product-name" : (get_product_name, 0, ""),
"get-bundle-id" : (get_bundle_id, 0, ""),
# ----------------------------------------------------------
# CLEAN
@@ -370,8 +487,12 @@ COMMANDS = {
# ----------------------------------------------------------
# RELEASE / DEPLOY
# ----------------------------------------------------------
"release-notes" : (release_notes, 1, "<tag>"),
"deploy" : (deploy, 9, "<repo> <source_json> <release_tag> <short_commit> <marketing_version> <version> <channel> <bundle_id> <ipa_name>"),
"last-successful-commit" : (last_successful_commit, 2, "<workflow_name> <branch>"),
"release-notes" : (release_notes, 1, "<tag>"),
"retrieve-release-notes" : (retrieve_release_notes, 1, "<tag>"),
"deploy" : (deploy, 9,
"<repo> <source_json> <release_tag> <short_commit> <marketing_version> <channel> <bundle_id> <ipa_name> [last_successful_commit]"),
"upload-release" : (upload_release, 5, "<release_name> <release_tag> <commit_sha> <repo> <upstream_recommendation>"),
}
def main():
@@ -399,7 +520,13 @@ def main():
raise SystemExit(f"Usage: workflow.py {cmd}{suffix}")
args = sys.argv[2:2 + argc]
func(*args) if argc else func()
result = func(*args) if argc else func()
# ONLY real outputs go to stdout
if result is not None:
sys.stdout.write(str(result))
sys.stdout.flush()
if __name__ == "__main__":

11
source-metadata.json Normal file
View File

@@ -0,0 +1,11 @@
{
"is_beta": false,
"bundle_identifier": "com.SideStore.SideStore",
"version_ipa": "0.6.3",
"version_date": "2026-02-23T23:38:22Z",
"release_channel": "nightly",
"size": 29313346,
"sha256": "51ec327bca0b0056ccd4c2eb1a130cb7c5bb21de2f303251eea3e0a7336699c4",
"download_url": "https://github.com/SideStore/SideStore/releases/download/nightly/SideStore.ipa",
"localized_description": "This is release for:\n - version: \"0.6.3-nightly.2026.02.24.42+abc123de\"\n - revision: \"99712f00\"\n - timestamp: \"Mon Feb 23 23:38:22 2026\"\n\nRelease Notes:\n#### What's Changed\n- CI: improve more ci worflow\n- CI: improve more ci worflow\n- CI: improve more ci worflow\n- CI: improve more ci worflow\n- CI: improve more ci worflow\n- CI: improve more ci worflow\n- CI: improve more ci worflow\n- CI: improve more ci worflow\n- CI: improve more ci worflow\n- CI: improve more ci worflow\n- CI: improve more ci worflow\n- CI: improve more ci worflow\n- altsign updated to latest\n- CI: improve more ci worflow\n- CI: improve more ci worflow\n- CI: improve more ci worflow\n- CI: improve more ci worflow\n- CI: improve more ci worflow\n- re added openSSL from new path\n- updated altsign to use xcframework for openSSL which was causing huge download of 1.2 GB each time\n- CI: improve more ci worflow\n- CI: improve more ci worflow\n- CI: improve more ci worflow\n- CI: improve more ci worflow\n- CI: improve more ci worflow\n- CI: improve more ci worflow\n- CI: improve more ci worflow\n- CI: improve more ci worflow\n- CI: improve more ci worflow\n- CI: improve more ci worflow\n- CI: full rewrite - moved logic into ci.py and kept workflow scripts mostly dummy\n\n#### Full Changelog: [38715283...99712f00](https://github.com/SideStore/SideStore/compare/38715283073ea37949a462b889ce3cad403ea499...99712f0020a4f2ae57d8d781514fa735f893c23a)"
}