mirror of
https://github.com/dortania/OpenCore-Legacy-Patcher.git
synced 2026-04-17 13:22:54 +10:00
Compare commits
153 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
29d3e98b81 | ||
|
|
462fff9d66 | ||
|
|
158f6f2da9 | ||
|
|
a57c29d35e | ||
|
|
bc18bdce03 | ||
|
|
ef9b5bb64a | ||
|
|
c225b77400 | ||
|
|
b69cf09556 | ||
|
|
85203aef5b | ||
|
|
ec80afdd17 | ||
|
|
e288e27b7b | ||
|
|
961c9c7a68 | ||
|
|
55abbab145 | ||
|
|
2986aea152 | ||
|
|
0135d6cccf | ||
|
|
fd2f58da84 | ||
|
|
efb1d81231 | ||
|
|
7fe0432a47 | ||
|
|
88fa7a4b7a | ||
|
|
e0e74ee494 | ||
|
|
e579199246 | ||
|
|
1108d248e5 | ||
|
|
6013eaffe9 | ||
|
|
ea96c546ca | ||
|
|
531fc8ab18 | ||
|
|
35a6f5183c | ||
|
|
afd1b5d2a7 | ||
|
|
d5ffa9a8cf | ||
|
|
0c68618ddf | ||
|
|
ea80d41ff6 | ||
|
|
483819caef | ||
|
|
0ca8ccd3dc | ||
|
|
a1f1da25b5 | ||
|
|
e105d6077e | ||
|
|
eedd4d771a | ||
|
|
3f2aadc016 | ||
|
|
e980054a5d | ||
|
|
a89a7740ca | ||
|
|
2767f35fc5 | ||
|
|
48cf258366 | ||
|
|
8e7706fb12 | ||
|
|
2eb6542931 | ||
|
|
e66e851933 | ||
|
|
9bfcf78ff9 | ||
|
|
8d3ab82ddd | ||
|
|
7b7f68453a | ||
|
|
adec8ebd05 | ||
|
|
ca24aa6ce5 | ||
|
|
e8f44e4eaa | ||
|
|
6a3023301a | ||
|
|
44369e2faa | ||
|
|
0d7186236e | ||
|
|
8806d29a35 | ||
|
|
12b7cf7fcd | ||
|
|
0f7f079dd8 | ||
|
|
040edfdd25 | ||
|
|
2e7afae29b | ||
|
|
b81899092b | ||
|
|
418a966081 | ||
|
|
520c9c315c | ||
|
|
cffc463bde | ||
|
|
2f7965440c | ||
|
|
56efd9743d | ||
|
|
b5b4d84bc9 | ||
|
|
594f6dcbe5 | ||
|
|
c5eb52ac5e | ||
|
|
e83e260db7 | ||
|
|
66a5f5a9ad | ||
|
|
4154b01d40 | ||
|
|
925003e3f1 | ||
|
|
d40d6607b3 | ||
|
|
3aadfe6002 | ||
|
|
d305515c28 | ||
|
|
48471ce4d3 | ||
|
|
f9c7273106 | ||
|
|
e076260a1a | ||
|
|
0671828c9b | ||
|
|
61e5ff1c83 | ||
|
|
3b5e4f10f6 | ||
|
|
dd06932fe5 | ||
|
|
7ee631859d | ||
|
|
7be168bf14 | ||
|
|
6b86e64b5b | ||
|
|
4ae494db86 | ||
|
|
c5f72c10ff | ||
|
|
971a2b0d02 | ||
|
|
8b18c59d8a | ||
|
|
0d38bc0edf | ||
|
|
4c4cacf114 | ||
|
|
dd1afd77e4 | ||
|
|
2a91b2a11c | ||
|
|
b3ed101ad9 | ||
|
|
7b33e77947 | ||
|
|
ea35eaca2e | ||
|
|
b5c613242f | ||
|
|
2eb98b6327 | ||
|
|
6629e9dcef | ||
|
|
6c441e835c | ||
|
|
fe8a2d253a | ||
|
|
68af20d2fa | ||
|
|
625da17f1e | ||
|
|
118d635264 | ||
|
|
bd471df48f | ||
|
|
d70daaf5a4 | ||
|
|
7fc9f3af7c | ||
|
|
6f4c110318 | ||
|
|
dfdb5b4c68 | ||
|
|
5ac3343205 | ||
|
|
42dddfdccf | ||
|
|
ace93b8a3e | ||
|
|
6081ec0d73 | ||
|
|
fc85789f86 | ||
|
|
52fbe23a16 | ||
|
|
99968dbb57 | ||
|
|
b141452005 | ||
|
|
b97a3bc4d8 | ||
|
|
f4ed623c76 | ||
|
|
ad9ae96c7f | ||
|
|
bece5d4b3b | ||
|
|
f915199b92 | ||
|
|
cba9d1e224 | ||
|
|
4782ccdab1 | ||
|
|
1ab2409176 | ||
|
|
3d8bae1142 | ||
|
|
baf25319d7 | ||
|
|
43ef3e18ec | ||
|
|
08710bc47b | ||
|
|
6c294902c2 | ||
|
|
976f14eeb3 | ||
|
|
eb1e29f95b | ||
|
|
4d89c220bf | ||
|
|
571f297906 | ||
|
|
7bf4fd0150 | ||
|
|
d4f42c8b32 | ||
|
|
664972fd2e | ||
|
|
f33cc2d5bb | ||
|
|
da45a964cb | ||
|
|
6c555db237 | ||
|
|
c093f400a6 | ||
|
|
92d57cb6d5 | ||
|
|
3a6f87c9c8 | ||
|
|
08a67e5d37 | ||
|
|
6ed55ff462 | ||
|
|
ef912277b6 | ||
|
|
cef860dbf1 | ||
|
|
e5b89e9c6b | ||
|
|
7ebdb83af7 | ||
|
|
da2fc5c4c3 | ||
|
|
94cfeabdfd | ||
|
|
e7727adcc6 | ||
|
|
8becb554fc | ||
|
|
97024361cd | ||
|
|
71ca6731fa |
1
.github/ISSUE_TEMPLATE/bug_report.yml
vendored
1
.github/ISSUE_TEMPLATE/bug_report.yml
vendored
@@ -95,7 +95,6 @@ body:
|
|||||||
description: What variant of our software are you running?
|
description: What variant of our software are you running?
|
||||||
options:
|
options:
|
||||||
- GUI (Graphical User Interface)
|
- GUI (Graphical User Interface)
|
||||||
- TUI (Text User Interface)
|
|
||||||
- CLI (Command Line Interface)
|
- CLI (Command Line Interface)
|
||||||
- Other/Non-Applicable
|
- Other/Non-Applicable
|
||||||
validations:
|
validations:
|
||||||
|
|||||||
12
.github/workflows/build-app-wxpython.yml
vendored
12
.github/workflows/build-app-wxpython.yml
vendored
@@ -10,16 +10,21 @@ jobs:
|
|||||||
build:
|
build:
|
||||||
name: Build wxPython
|
name: Build wxPython
|
||||||
runs-on: x86_64_mojave
|
runs-on: x86_64_mojave
|
||||||
|
if: github.repository_owner == 'dortania'
|
||||||
|
|
||||||
env:
|
env:
|
||||||
branch: ${{ github.ref }}
|
branch: ${{ github.ref }}
|
||||||
commiturl: ${{ github.event.head_commit.url }}${{ github.event.release.html_url }}
|
commiturl: ${{ github.event.head_commit.url }}${{ github.event.release.html_url }}
|
||||||
commitdate: ${{ github.event.head_commit.timestamp }}${{ github.event.release.published_at }}
|
commitdate: ${{ github.event.head_commit.timestamp }}${{ github.event.release.published_at }}
|
||||||
|
MAC_NOTARIZATION_USERNAME: ${{ secrets.MAC_NOTARIZATION_USERNAME }}
|
||||||
|
MAC_NOTARIZATION_PASSWORD: ${{ secrets.MAC_NOTARIZATION_PASSWORD }}
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v3
|
- uses: actions/checkout@v3
|
||||||
- run: /Library/Frameworks/Python.framework/Versions/3.9/bin/python3 Build-Binary.command --reset_binaries --branch "${{ env.branch }}" --commit "${{ env.commiturl }}" --commit_date "${{ env.commitdate }}"
|
- run: /Library/Frameworks/Python.framework/Versions/3.10/bin/python3 Build-Binary.command --reset_binaries --branch "${{ env.branch }}" --commit "${{ env.commiturl }}" --commit_date "${{ env.commitdate }}"
|
||||||
- run: 'codesign -s "Developer ID Application: Mykola Grymalyuk (S74BDJXQMD)" -v --force --deep --timestamp --entitlements ./payloads/entitlements.plist -o runtime "dist/OpenCore-Patcher.app"'
|
- run: 'codesign -s "Developer ID Application: Mykola Grymalyuk (S74BDJXQMD)" -v --force --deep --timestamp --entitlements ./payloads/entitlements.plist -o runtime "dist/OpenCore-Patcher.app"'
|
||||||
- run: cd dist; ditto -c -k --sequesterRsrc --keepParent OpenCore-Patcher.app ../OpenCore-Patcher-wxPython.app.zip
|
- run: cd dist; ditto -c -k --sequesterRsrc --keepParent OpenCore-Patcher.app ../OpenCore-Patcher-wxPython.app.zip
|
||||||
- run: ./../sign-wxpython.sh
|
- run: xcrun altool --notarize-app --primary-bundle-id "com.dortania.opencore-legacy-patcher" --username "${{ env.MAC_NOTARIZATION_USERNAME }}" --password "${{ env.MAC_NOTARIZATION_PASSWORD }}" --file OpenCore-Patcher-wxPython.app.zip
|
||||||
- run: packagesbuild ./payloads/InstallPackage/AutoPkg-Assets-Setup.pkgproj
|
- run: packagesbuild ./payloads/InstallPackage/AutoPkg-Assets-Setup.pkgproj
|
||||||
- run: mv ./OpenCore-Patcher-wxPython.app.zip ./OpenCore-Patcher-GUI.app.zip
|
- run: mv ./OpenCore-Patcher-wxPython.app.zip ./OpenCore-Patcher-GUI.app.zip
|
||||||
- name: Upload App to Artifacts
|
- name: Upload App to Artifacts
|
||||||
@@ -27,11 +32,13 @@ jobs:
|
|||||||
with:
|
with:
|
||||||
name: OpenCore-Patcher.app (GUI)
|
name: OpenCore-Patcher.app (GUI)
|
||||||
path: OpenCore-Patcher-GUI.app.zip
|
path: OpenCore-Patcher-GUI.app.zip
|
||||||
|
|
||||||
- name: Upload Package to Artifacts
|
- name: Upload Package to Artifacts
|
||||||
uses: actions/upload-artifact@v3
|
uses: actions/upload-artifact@v3
|
||||||
with:
|
with:
|
||||||
name: AutoPkg-Assets.pkg
|
name: AutoPkg-Assets.pkg
|
||||||
path: ./dist/AutoPkg-Assets.pkg
|
path: ./dist/AutoPkg-Assets.pkg
|
||||||
|
|
||||||
- name: Upload Binary to Release
|
- name: Upload Binary to Release
|
||||||
if: github.event_name == 'release'
|
if: github.event_name == 'release'
|
||||||
uses: svenstaro/upload-release-action@e74ff71f7d8a4c4745b560a485cc5fdb9b5b999d
|
uses: svenstaro/upload-release-action@e74ff71f7d8a4c4745b560a485cc5fdb9b5b999d
|
||||||
@@ -40,6 +47,7 @@ jobs:
|
|||||||
file: OpenCore-Patcher-GUI.app.zip
|
file: OpenCore-Patcher-GUI.app.zip
|
||||||
tag: ${{ github.ref }}
|
tag: ${{ github.ref }}
|
||||||
file_glob: true
|
file_glob: true
|
||||||
|
|
||||||
- name: Upload Package to Release
|
- name: Upload Package to Release
|
||||||
if: github.event_name == 'release'
|
if: github.event_name == 'release'
|
||||||
uses: svenstaro/upload-release-action@e74ff71f7d8a4c4745b560a485cc5fdb9b5b999d
|
uses: svenstaro/upload-release-action@e74ff71f7d8a4c4745b560a485cc5fdb9b5b999d
|
||||||
|
|||||||
30
.github/workflows/build-app.yml
vendored
30
.github/workflows/build-app.yml
vendored
@@ -1,30 +0,0 @@
|
|||||||
name: CI - Build TUI
|
|
||||||
|
|
||||||
on:
|
|
||||||
push:
|
|
||||||
workflow_dispatch:
|
|
||||||
release:
|
|
||||||
types: [published]
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
build:
|
|
||||||
name: Build TUI
|
|
||||||
runs-on: x86_64_mojave
|
|
||||||
env:
|
|
||||||
branch: ${{ github.ref }}
|
|
||||||
commiturl: ${{ github.event.head_commit.url }}${{ github.event.release.html_url }}
|
|
||||||
commitdate: ${{ github.event.head_commit.timestamp }}${{ github.event.release.published_at }}
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v3
|
|
||||||
- run: /Library/Frameworks/Python.framework/Versions/3.9/bin/python3 Build-Binary.command --build_tui --reset_binaries --branch "${{ env.branch }}" --commit "${{ env.commiturl }}" --commit_date "${{ env.commitdate }}"
|
|
||||||
- run: 'codesign -s "Developer ID Application: Mykola Grymalyuk (S74BDJXQMD)" -v --force --deep --timestamp --entitlements ./payloads/entitlements.plist -o runtime "dist/OpenCore-Patcher.app"'
|
|
||||||
- run: cd dist; zip -r ../OpenCore-Patcher-TUI.app.zip OpenCore-Patcher.app
|
|
||||||
- run: ./../sign-tui.sh
|
|
||||||
- name: Upload App to Artifacts
|
|
||||||
uses: actions/upload-artifact@v3
|
|
||||||
with:
|
|
||||||
name: OpenCore-Patcher-TUI.app
|
|
||||||
path: OpenCore-Patcher-TUI.app.zip
|
|
||||||
|
|
||||||
- name: Validate OpenCore
|
|
||||||
run: ./dist/OpenCore-Patcher.app/Contents/MacOS/OpenCore-Patcher --validate
|
|
||||||
1
.github/workflows/build-site.yml
vendored
1
.github/workflows/build-site.yml
vendored
@@ -9,6 +9,7 @@ jobs:
|
|||||||
build:
|
build:
|
||||||
name: Build Site and Deploy
|
name: Build Site and Deploy
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
|
if: github.repository_owner == 'dortania'
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/setup-node@v3
|
- uses: actions/setup-node@v3
|
||||||
with:
|
with:
|
||||||
|
|||||||
20
.github/workflows/validate.yml
vendored
Normal file
20
.github/workflows/validate.yml
vendored
Normal file
@@ -0,0 +1,20 @@
|
|||||||
|
name: CI - Validation
|
||||||
|
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
workflow_dispatch:
|
||||||
|
release:
|
||||||
|
types: [published]
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
build:
|
||||||
|
name: Validate
|
||||||
|
runs-on: x86_64_mojave
|
||||||
|
if: github.repository_owner == 'dortania'
|
||||||
|
env:
|
||||||
|
branch: ${{ github.ref }}
|
||||||
|
commiturl: ${{ github.event.head_commit.url }}${{ github.event.release.html_url }}
|
||||||
|
commitdate: ${{ github.event.head_commit.timestamp }}${{ github.event.release.published_at }}
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v3
|
||||||
|
- run: /Library/Frameworks/Python.framework/Versions/3.10/bin/python3 OpenCore-Patcher-GUI.command --validate
|
||||||
1
.gitignore
vendored
1
.gitignore
vendored
@@ -32,3 +32,4 @@ __pycache__/
|
|||||||
/payloads.dmg
|
/payloads.dmg
|
||||||
/payloads/OpenCore-Legacy-Patcher-*.plist
|
/payloads/OpenCore-Legacy-Patcher-*.plist
|
||||||
/payloads/KDK.dmg
|
/payloads/KDK.dmg
|
||||||
|
*.log
|
||||||
|
|||||||
@@ -1,14 +1,7 @@
|
|||||||
#!/usr/bin/env python3
|
#!/usr/bin/env python3
|
||||||
|
|
||||||
# This script's main purpose is to handle the following:
|
# Generate stand alone application for OpenCore-Patcher
|
||||||
# - Download PatcherSupportPkg resources
|
# Copyright (C) 2022-2023 - Mykola Grymalyuk
|
||||||
# - Convert payloads directory into DMG (GUI only)
|
|
||||||
# - Build Binary via Pyinstaller
|
|
||||||
# - Add Launcher.sh (TUI only)
|
|
||||||
# - Patch 'LC_VERSION_MIN_MACOSX' to OS X 10.10
|
|
||||||
# - Add commit data to Info.plist
|
|
||||||
|
|
||||||
# Copyright (C) 2022 - Mykola Grymalyuk
|
|
||||||
|
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
import time
|
import time
|
||||||
@@ -21,26 +14,49 @@ import sys
|
|||||||
|
|
||||||
from resources import constants
|
from resources import constants
|
||||||
|
|
||||||
class create_binary:
|
|
||||||
|
class CreateBinary:
|
||||||
|
"""
|
||||||
|
Library for creating OpenCore-Patcher application
|
||||||
|
|
||||||
|
This script's main purpose is to handle the following:
|
||||||
|
- Download external dependancies (ex. PatcherSupportPkg)
|
||||||
|
- Convert payloads directory into DMG
|
||||||
|
- Build Binary via Pyinstaller
|
||||||
|
- Patch 'LC_VERSION_MIN_MACOSX' to OS X 10.10
|
||||||
|
- Add commit data to Info.plist
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
def __init__(self):
|
def __init__(self):
|
||||||
start = time.time()
|
start = time.time()
|
||||||
print("- Starting build script")
|
print("- Starting build script")
|
||||||
self.set_cwd()
|
|
||||||
self.args = self.parse_arguments()
|
|
||||||
|
|
||||||
self.preflight_processes()
|
self.args = self._parse_arguments()
|
||||||
self.build_binary()
|
|
||||||
self.postflight_processes()
|
self._set_cwd()
|
||||||
|
|
||||||
|
self._preflight_processes()
|
||||||
|
self._build_binary()
|
||||||
|
self._postflight_processes()
|
||||||
print(f"- Build script completed in {str(round(time.time() - start, 2))} seconds")
|
print(f"- Build script completed in {str(round(time.time() - start, 2))} seconds")
|
||||||
|
|
||||||
def set_cwd(self):
|
|
||||||
|
def _set_cwd(self):
|
||||||
|
"""
|
||||||
|
Initialize current working directory to parent of this script
|
||||||
|
"""
|
||||||
|
|
||||||
os.chdir(Path(__file__).resolve().parent)
|
os.chdir(Path(__file__).resolve().parent)
|
||||||
print(f"- Current Working Directory: \n\t{os.getcwd()}")
|
print(f"- Current Working Directory: \n\t{os.getcwd()}")
|
||||||
|
|
||||||
def parse_arguments(self):
|
|
||||||
|
def _parse_arguments(self):
|
||||||
|
"""
|
||||||
|
Parse arguments passed to script
|
||||||
|
"""
|
||||||
|
|
||||||
parser = argparse.ArgumentParser(description='Builds OpenCore-Patcher binary')
|
parser = argparse.ArgumentParser(description='Builds OpenCore-Patcher binary')
|
||||||
parser.add_argument('--build_tui', action='store_true', help='Builds TUI binary, if omitted GUI binary is built')
|
|
||||||
parser.add_argument('--branch', type=str, help='Git branch name')
|
parser.add_argument('--branch', type=str, help='Git branch name')
|
||||||
parser.add_argument('--commit', type=str, help='Git commit URL')
|
parser.add_argument('--commit', type=str, help='Git commit URL')
|
||||||
parser.add_argument('--commit_date', type=str, help='Git commit date')
|
parser.add_argument('--commit_date', type=str, help='Git commit date')
|
||||||
@@ -48,7 +64,12 @@ class create_binary:
|
|||||||
args = parser.parse_args()
|
args = parser.parse_args()
|
||||||
return args
|
return args
|
||||||
|
|
||||||
def setup_pathing(self):
|
|
||||||
|
def _setup_pathing(self):
|
||||||
|
"""
|
||||||
|
Initialize pathing for pyinstaller
|
||||||
|
"""
|
||||||
|
|
||||||
python_path = sys.executable
|
python_path = sys.executable
|
||||||
python_binary = python_path.split("/")[-1]
|
python_binary = python_path.split("/")[-1]
|
||||||
python_bin_dir = python_path.strip(python_binary)
|
python_bin_dir = python_path.strip(python_binary)
|
||||||
@@ -70,25 +91,36 @@ class create_binary:
|
|||||||
|
|
||||||
self.pyinstaller_path = pyinstaller_path
|
self.pyinstaller_path = pyinstaller_path
|
||||||
|
|
||||||
def preflight_processes(self):
|
|
||||||
|
def _preflight_processes(self):
|
||||||
|
"""
|
||||||
|
Start preflight processes
|
||||||
|
"""
|
||||||
|
|
||||||
print("- Starting preflight processes")
|
print("- Starting preflight processes")
|
||||||
self.setup_pathing()
|
self._setup_pathing()
|
||||||
self.delete_extra_binaries()
|
self._delete_extra_binaries()
|
||||||
self.download_resources()
|
self._download_resources()
|
||||||
if not self.args.build_tui:
|
self._generate_payloads_dmg()
|
||||||
# payloads.dmg is only needed for GUI builds
|
|
||||||
self.generate_payloads_dmg()
|
|
||||||
|
def _postflight_processes(self):
|
||||||
|
"""
|
||||||
|
Start postflight processes
|
||||||
|
"""
|
||||||
|
|
||||||
def postflight_processes(self):
|
|
||||||
print("- Starting postflight processes")
|
print("- Starting postflight processes")
|
||||||
if self.args.build_tui:
|
self._patch_load_command()
|
||||||
self.move_launcher()
|
self._add_commit_data()
|
||||||
self.patch_load_command()
|
self._post_flight_cleanup()
|
||||||
self.add_commit_data()
|
self._mini_validate()
|
||||||
self.post_flight_cleanup()
|
|
||||||
self.mini_validate()
|
|
||||||
|
def _build_binary(self):
|
||||||
|
"""
|
||||||
|
Build binary via pyinstaller
|
||||||
|
"""
|
||||||
|
|
||||||
def build_binary(self):
|
|
||||||
if Path(f"./dist/OpenCore-Patcher.app").exists():
|
if Path(f"./dist/OpenCore-Patcher.app").exists():
|
||||||
print("- Found OpenCore-Patcher.app, removing...")
|
print("- Found OpenCore-Patcher.app, removing...")
|
||||||
rm_output = subprocess.run(
|
rm_output = subprocess.run(
|
||||||
@@ -101,12 +133,8 @@ class create_binary:
|
|||||||
raise Exception("Remove failed")
|
raise Exception("Remove failed")
|
||||||
|
|
||||||
|
|
||||||
if self.args.build_tui:
|
print("- Building GUI binary...")
|
||||||
print("- Building TUI binary...")
|
build_args = [self.pyinstaller_path, "./OpenCore-Patcher-GUI.spec", "--noconfirm"]
|
||||||
build_args = [self.pyinstaller_path, "./OpenCore-Patcher.spec", "--noconfirm"]
|
|
||||||
else:
|
|
||||||
print("- Building GUI binary...")
|
|
||||||
build_args = [self.pyinstaller_path, "./OpenCore-Patcher-GUI.spec", "--noconfirm"]
|
|
||||||
|
|
||||||
build_result = subprocess.run(build_args, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
|
build_result = subprocess.run(build_args, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
|
||||||
if build_result.returncode != 0:
|
if build_result.returncode != 0:
|
||||||
@@ -114,27 +142,52 @@ class create_binary:
|
|||||||
print(build_result.stderr.decode('utf-8'))
|
print(build_result.stderr.decode('utf-8'))
|
||||||
raise Exception("Build failed")
|
raise Exception("Build failed")
|
||||||
|
|
||||||
def delete_extra_binaries(self):
|
|
||||||
delete_files = [
|
def _delete_extra_binaries(self):
|
||||||
"AutoPkg-Assets.pkg",
|
"""
|
||||||
"AutoPkg-Assets.pkg.zip",
|
Delete extra binaries from payloads directory
|
||||||
"InstallAssistant.pkg",
|
"""
|
||||||
"InstallAssistant.pkg.integrityDataV1",
|
|
||||||
"KDK.dmg",
|
whitelist_folders = [
|
||||||
|
"ACPI",
|
||||||
|
"Config",
|
||||||
|
"Drivers",
|
||||||
|
"Icon",
|
||||||
|
"InstallPackage",
|
||||||
|
"Kexts",
|
||||||
|
"OpenCore",
|
||||||
|
"Tools",
|
||||||
]
|
]
|
||||||
|
|
||||||
|
whitelist_files = [
|
||||||
|
"com.dortania.opencore-legacy-patcher.auto-patch.plist",
|
||||||
|
"entitlements.plist",
|
||||||
|
"launcher.sh",
|
||||||
|
"OC-Patcher-TUI.icns",
|
||||||
|
"OC-Patcher.icns",
|
||||||
|
"Universal-Binaries.zip",
|
||||||
|
]
|
||||||
|
|
||||||
|
|
||||||
print("- Deleting extra binaries...")
|
print("- Deleting extra binaries...")
|
||||||
for file in Path("payloads").glob(pattern="*"):
|
for file in Path("payloads").glob(pattern="*"):
|
||||||
if file.name in delete_files or file.name.startswith("OpenCore-Legacy-Patcher"):
|
if file.is_dir():
|
||||||
|
if file.name in whitelist_folders:
|
||||||
|
continue
|
||||||
print(f" - Deleting {file.name}")
|
print(f" - Deleting {file.name}")
|
||||||
file.unlink()
|
|
||||||
elif (Path(file) / Path("Contents/Resources/createinstallmedia")).exists():
|
|
||||||
print(f" - Deleting {file}")
|
|
||||||
subprocess.run(["rm", "-rf", file])
|
|
||||||
elif Path(file).is_dir() and file.name == "Universal-Binaries":
|
|
||||||
print(f" - Deleting {file}")
|
|
||||||
subprocess.run(["rm", "-rf", file])
|
subprocess.run(["rm", "-rf", file])
|
||||||
|
else:
|
||||||
|
if file.name in whitelist_files:
|
||||||
|
continue
|
||||||
|
print(f" - Deleting {file.name}")
|
||||||
|
subprocess.run(["rm", "-f", file])
|
||||||
|
|
||||||
|
|
||||||
|
def _download_resources(self):
|
||||||
|
"""
|
||||||
|
Download required dependencies
|
||||||
|
"""
|
||||||
|
|
||||||
def download_resources(self):
|
|
||||||
patcher_support_pkg_version = constants.Constants().patcher_support_pkg_version
|
patcher_support_pkg_version = constants.Constants().patcher_support_pkg_version
|
||||||
required_resources = [
|
required_resources = [
|
||||||
"Universal-Binaries.zip"
|
"Universal-Binaries.zip"
|
||||||
@@ -181,21 +234,29 @@ class create_binary:
|
|||||||
print(mv_output.stderr.decode('utf-8'))
|
print(mv_output.stderr.decode('utf-8'))
|
||||||
raise Exception("Move failed")
|
raise Exception("Move failed")
|
||||||
|
|
||||||
def generate_payloads_dmg(self):
|
|
||||||
|
def _generate_payloads_dmg(self):
|
||||||
|
"""
|
||||||
|
Generate disk image containing all payloads
|
||||||
|
Disk image will be password protected due to issues with
|
||||||
|
Apple's notarization system and inclusion of kernel extensions
|
||||||
|
"""
|
||||||
|
|
||||||
if Path("./payloads.dmg").exists():
|
if Path("./payloads.dmg").exists():
|
||||||
if self.args.reset_binaries:
|
if not self.args.reset_binaries:
|
||||||
print(" - Removing old payloads.dmg")
|
|
||||||
rm_output = subprocess.run(
|
|
||||||
["rm", "-rf", "./payloads.dmg"],
|
|
||||||
stdout=subprocess.PIPE, stderr=subprocess.PIPE
|
|
||||||
)
|
|
||||||
if rm_output.returncode != 0:
|
|
||||||
print("- Remove failed")
|
|
||||||
print(rm_output.stderr.decode('utf-8'))
|
|
||||||
raise Exception("Remove failed")
|
|
||||||
else:
|
|
||||||
print(" - payloads.dmg already exists, skipping creation")
|
print(" - payloads.dmg already exists, skipping creation")
|
||||||
return
|
return
|
||||||
|
|
||||||
|
print(" - Removing old payloads.dmg")
|
||||||
|
rm_output = subprocess.run(
|
||||||
|
["rm", "-rf", "./payloads.dmg"],
|
||||||
|
stdout=subprocess.PIPE, stderr=subprocess.PIPE
|
||||||
|
)
|
||||||
|
if rm_output.returncode != 0:
|
||||||
|
print("- Remove failed")
|
||||||
|
print(rm_output.stderr.decode('utf-8'))
|
||||||
|
raise Exception("Remove failed")
|
||||||
|
|
||||||
print(" - Generating DMG...")
|
print(" - Generating DMG...")
|
||||||
dmg_output = subprocess.run([
|
dmg_output = subprocess.run([
|
||||||
'hdiutil', 'create', './payloads.dmg',
|
'hdiutil', 'create', './payloads.dmg',
|
||||||
@@ -213,7 +274,12 @@ class create_binary:
|
|||||||
|
|
||||||
print(" - DMG generation complete")
|
print(" - DMG generation complete")
|
||||||
|
|
||||||
def add_commit_data(self):
|
|
||||||
|
def _add_commit_data(self):
|
||||||
|
"""
|
||||||
|
Add commit data to Info.plist
|
||||||
|
"""
|
||||||
|
|
||||||
if not self.args.branch and not self.args.commit and not self.args.commit_date:
|
if not self.args.branch and not self.args.commit and not self.args.commit_date:
|
||||||
print(" - No commit data provided, adding source info")
|
print(" - No commit data provided, adding source info")
|
||||||
branch = "Built from source"
|
branch = "Built from source"
|
||||||
@@ -233,20 +299,25 @@ class create_binary:
|
|||||||
}
|
}
|
||||||
plistlib.dump(plist, Path(plist_path).open("wb"), sort_keys=True)
|
plistlib.dump(plist, Path(plist_path).open("wb"), sort_keys=True)
|
||||||
|
|
||||||
def patch_load_command(self):
|
|
||||||
# Patches LC_VERSION_MIN_MACOSX in Load Command to report 10.10
|
def _patch_load_command(self):
|
||||||
#
|
"""
|
||||||
# By default Pyinstaller will create binaries supporting 10.13+
|
Patch LC_VERSION_MIN_MACOSX in Load Command to report 10.10
|
||||||
# However this limitation is entirely arbitrary for our libraries
|
|
||||||
# and instead we're able to support 10.10 without issues.
|
By default Pyinstaller will create binaries supporting 10.13+
|
||||||
#
|
However this limitation is entirely arbitrary for our libraries
|
||||||
# To verify set version:
|
and instead we're able to support 10.10 without issues.
|
||||||
# otool -l ./dist/OpenCore-Patcher.app/Contents/MacOS/OpenCore-Patcher
|
|
||||||
#
|
To verify set version:
|
||||||
# cmd LC_VERSION_MIN_MACOSX
|
otool -l ./dist/OpenCore-Patcher.app/Contents/MacOS/OpenCore-Patcher
|
||||||
# cmdsize 16
|
|
||||||
# version 10.13
|
cmd LC_VERSION_MIN_MACOSX
|
||||||
# sdk 10.9
|
cmdsize 16
|
||||||
|
version 10.13
|
||||||
|
sdk 10.9
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
print(" - Patching LC_VERSION_MIN_MACOSX")
|
print(" - Patching LC_VERSION_MIN_MACOSX")
|
||||||
path = './dist/OpenCore-Patcher.app/Contents/MacOS/OpenCore-Patcher'
|
path = './dist/OpenCore-Patcher.app/Contents/MacOS/OpenCore-Patcher'
|
||||||
find = b'\x00\x0D\x0A\x00' # 10.13 (0xA0D)
|
find = b'\x00\x0D\x0A\x00' # 10.13 (0xA0D)
|
||||||
@@ -257,19 +328,12 @@ class create_binary:
|
|||||||
with open(path, 'wb') as f:
|
with open(path, 'wb') as f:
|
||||||
f.write(data)
|
f.write(data)
|
||||||
|
|
||||||
def move_launcher(self):
|
|
||||||
print(" - Adding TUI launcher")
|
|
||||||
mv_output = subprocess.run(
|
|
||||||
["cp", "./payloads/launcher.sh", "./dist/OpenCore-Patcher.app/Contents/MacOS/Launcher"],
|
|
||||||
stdout=subprocess.PIPE, stderr=subprocess.PIPE
|
|
||||||
)
|
|
||||||
if mv_output.returncode != 0:
|
|
||||||
print(" - Move failed")
|
|
||||||
print(mv_output.stderr.decode('utf-8'))
|
|
||||||
raise Exception("Move failed")
|
|
||||||
|
|
||||||
def post_flight_cleanup(self):
|
def _post_flight_cleanup(self):
|
||||||
# Remove ./dist/OpenCore-Patcher
|
"""
|
||||||
|
Post flight cleanup
|
||||||
|
"""
|
||||||
|
|
||||||
path = "./dist/OpenCore-Patcher"
|
path = "./dist/OpenCore-Patcher"
|
||||||
print(f" - Removing {path}")
|
print(f" - Removing {path}")
|
||||||
rm_output = subprocess.run(
|
rm_output = subprocess.run(
|
||||||
@@ -281,9 +345,12 @@ class create_binary:
|
|||||||
print(rm_output.stderr.decode('utf-8'))
|
print(rm_output.stderr.decode('utf-8'))
|
||||||
raise Exception(f"Remove failed: {path}")
|
raise Exception(f"Remove failed: {path}")
|
||||||
|
|
||||||
def mini_validate(self):
|
|
||||||
# Ensure binary can start
|
def _mini_validate(self):
|
||||||
# Only build a single config, TUI CI will do in-depth validation
|
"""
|
||||||
|
Validate generated binary
|
||||||
|
"""
|
||||||
|
|
||||||
print(" - Validating binary")
|
print(" - Validating binary")
|
||||||
validate_output = subprocess.run(
|
validate_output = subprocess.run(
|
||||||
["./dist/OpenCore-Patcher.app/Contents/MacOS/OpenCore-Patcher", "--build", "--model", "MacPro3,1"],
|
["./dist/OpenCore-Patcher.app/Contents/MacOS/OpenCore-Patcher", "--build", "--model", "MacPro3,1"],
|
||||||
@@ -294,5 +361,6 @@ class create_binary:
|
|||||||
print(validate_output.stderr.decode('utf-8'))
|
print(validate_output.stderr.decode('utf-8'))
|
||||||
raise Exception("Validation failed")
|
raise Exception("Validation failed")
|
||||||
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
if __name__ == "__main__":
|
||||||
create_binary()
|
CreateBinary()
|
||||||
70
CHANGELOG.md
70
CHANGELOG.md
@@ -1,5 +1,75 @@
|
|||||||
# OpenCore Legacy Patcher changelog
|
# OpenCore Legacy Patcher changelog
|
||||||
|
|
||||||
|
## 0.6.2
|
||||||
|
- Work around Black Box rendering issues on certain Display Color Profiles
|
||||||
|
- Limited to Ventura currently due to limitations with other color profiles
|
||||||
|
- Applicable for HD3000-based machines (ex. MacBookAir4,x, MacBookPro8,x, Macmini5,x)
|
||||||
|
- Ensure `Moraea_BlurBeta` is set on non-Metal systems
|
||||||
|
- Implement proper Root Unpatching verification in GUI
|
||||||
|
- Removes arbitrary patch requirements used against unpatching (ex. network connection)
|
||||||
|
- Implement Kernel Debug Kit installation during OS installs
|
||||||
|
- Avoids network requirement for first time installs
|
||||||
|
- Paired along side AutoPkgInstaller
|
||||||
|
- Implement Kernel Debug Kit backup system
|
||||||
|
- Allows for easy restoration of KDKs if OS updates corrupted installed KDKs
|
||||||
|
- Update Wireless binaries
|
||||||
|
- Fixed WiFi preferences crash with legacy wifi patches
|
||||||
|
- Update non-Metal Binaries
|
||||||
|
- Improved menubar blur saturation
|
||||||
|
- Fixed System Settings hover effects, including Bluetooth connect button
|
||||||
|
- Add Books hacks (reimplement cover image generation, disable broken page curl animation)
|
||||||
|
- Fixed unresponsive buttons
|
||||||
|
- Implement Hardware Encoding support for AMD GCN 1-3, Polaris and Vega GPUs
|
||||||
|
- Applicable for pre-Haswell Macs on macOS Ventura
|
||||||
|
- Resolves DRM playback issues on Netflix, Disney+, etc.
|
||||||
|
- Note: GCN 1-3 DRM is functional, however hardware video encoding is still experimental
|
||||||
|
- AppleTV+ may be unstable due to this
|
||||||
|
- Implement support for AMD Navi and Lexa MXM GPUs in 2009-2011 iMacs
|
||||||
|
- Primarily applicable for MXM 3.0 variants of AMD WX3200 (0x6981) and AMD RX5500XT (0x7340)
|
||||||
|
- Credit to [Ausdauersportler](https://github.com/Ausdauersportler) for implementation
|
||||||
|
- Implement Continuity Camera Unlocking for pre-Kaby Lake CPUs
|
||||||
|
- Applicable for all legacy Macs in macOS Ventura
|
||||||
|
- Resolve boot support for 3802-based GPUs with macOS 13.3
|
||||||
|
- Applicable for following GPUs:
|
||||||
|
- Intel Ivy Bridge and Haswell iGPUs
|
||||||
|
- Nvidia Kepler dGPUs
|
||||||
|
- Note: patchset now requires AMFI to be disabled, patchset still in active development to remove this requirement
|
||||||
|
- Backend Changes:
|
||||||
|
- Refactored kdk_handler.py
|
||||||
|
- Prioritizes KdkSupportPkg repository for downloads
|
||||||
|
- Skips calls to Apple's now defunct Developer Portal API
|
||||||
|
- Support local loose matching when no network connection is available
|
||||||
|
- Implement pkg receipt verification to validate integrity of KDKs
|
||||||
|
- Implemented logging framework usage for more reliable logging
|
||||||
|
- Logs are stored under `~/Library/Logs/OpenCore-Patcher.log`
|
||||||
|
- Subsequent runs are appended to the log, allowing for easy debugging
|
||||||
|
- Implemented new network_handler.py module
|
||||||
|
- Allows for more reliable network calls and downloads
|
||||||
|
- Better supports network timeouts and disconnects
|
||||||
|
- Dramatically less noise in console during downloads
|
||||||
|
- Implemented new macOS Installer handler
|
||||||
|
- Removed unused modules:
|
||||||
|
- sys_patch_downloader.py
|
||||||
|
- run.py
|
||||||
|
- TUI modules
|
||||||
|
- Build Server Changes:
|
||||||
|
- Upgrade Python backend to 3.10.9
|
||||||
|
- Upgrade Python modules:
|
||||||
|
- requests - 2.28.2
|
||||||
|
- pyobjc - 9.0.1
|
||||||
|
- wxpython - 4.2.0
|
||||||
|
- pyinstaller - 5.7.0
|
||||||
|
- packaging - 23.0
|
||||||
|
- Increment Binaries:
|
||||||
|
- PatcherSupportPkg 0.8.7 - release
|
||||||
|
- AutoPkgInstaller 1.0.2 - release
|
||||||
|
- FeatureUnlock 1.1.4 - rolling (0e8d87f)
|
||||||
|
- Lilu 1.6.4 - release
|
||||||
|
- WhateverGreen 1.6.4 - release
|
||||||
|
- NVMeFix 1.1.0 - release
|
||||||
|
- Innie 1.3.1 - release
|
||||||
|
- OpenCorePkg 0.9.0 - release
|
||||||
|
|
||||||
## 0.6.1
|
## 0.6.1
|
||||||
- Avoid usage of KDKlessWorkaround on hardware not requiring it
|
- Avoid usage of KDKlessWorkaround on hardware not requiring it
|
||||||
- Resolves AMD Graphics Regression from 0.5.3
|
- Resolves AMD Graphics Regression from 0.5.3
|
||||||
|
|||||||
@@ -3,4 +3,4 @@
|
|||||||
from resources import main
|
from resources import main
|
||||||
|
|
||||||
if __name__ == '__main__':
|
if __name__ == '__main__':
|
||||||
main.OpenCoreLegacyPatcher(True)
|
main.OpenCoreLegacyPatcher()
|
||||||
@@ -1,6 +0,0 @@
|
|||||||
#!/usr/bin/env python3
|
|
||||||
# Copyright (C) 2020-2022, Dhinak G, Mykola Grymalyuk
|
|
||||||
from resources import main
|
|
||||||
|
|
||||||
if __name__ == '__main__':
|
|
||||||
main.OpenCoreLegacyPatcher()
|
|
||||||
@@ -1,44 +0,0 @@
|
|||||||
# -*- mode: python ; coding: utf-8 -*-
|
|
||||||
import sys, os
|
|
||||||
sys.path.append(os.path.abspath(os.getcwd()))
|
|
||||||
from resources import constants
|
|
||||||
block_cipher = None
|
|
||||||
|
|
||||||
|
|
||||||
a = Analysis(['OpenCore-Patcher.command'],
|
|
||||||
pathex=['resources', 'data'],
|
|
||||||
binaries=[],
|
|
||||||
datas=[('payloads', 'payloads')],
|
|
||||||
hiddenimports=[],
|
|
||||||
hookspath=[],
|
|
||||||
runtime_hooks=[],
|
|
||||||
excludes=['wxPython', 'wxpython'],
|
|
||||||
win_no_prefer_redirects=False,
|
|
||||||
win_private_assemblies=False,
|
|
||||||
cipher=block_cipher,
|
|
||||||
noarchive=False)
|
|
||||||
pyz = PYZ(a.pure, a.zipped_data,
|
|
||||||
cipher=block_cipher)
|
|
||||||
exe = EXE(pyz,
|
|
||||||
a.scripts,
|
|
||||||
a.binaries,
|
|
||||||
a.zipfiles,
|
|
||||||
a.datas,
|
|
||||||
[],
|
|
||||||
name='OpenCore-Patcher',
|
|
||||||
debug=False,
|
|
||||||
bootloader_ignore_signals=False,
|
|
||||||
strip=False,
|
|
||||||
upx=True,
|
|
||||||
upx_exclude=[],
|
|
||||||
runtime_tmpdir=None,
|
|
||||||
console=True )
|
|
||||||
app = BUNDLE(exe,
|
|
||||||
name='OpenCore-Patcher.app',
|
|
||||||
icon="payloads/OC-Patcher-TUI.icns",
|
|
||||||
bundle_identifier="com.dortania.opencore-legacy-patcher-tui",
|
|
||||||
info_plist={
|
|
||||||
"CFBundleShortVersionString": constants.Constants().patcher_version,
|
|
||||||
"CFBundleExecutable": "MacOS/Launcher",
|
|
||||||
"NSHumanReadableCopyright": constants.Constants().copyright_date,
|
|
||||||
})
|
|
||||||
@@ -15,7 +15,7 @@ Our project's main goal is to breath new life to Macs no longer supported by App
|
|||||||
|
|
||||||
Noteworthy features of OpenCore Legacy Patcher:
|
Noteworthy features of OpenCore Legacy Patcher:
|
||||||
|
|
||||||
* Support for macOS Big Sur and Monterey
|
* Support for macOS Big Sur, Monterey and Ventura
|
||||||
* Native Over the Air(OTA) System Updates
|
* Native Over the Air(OTA) System Updates
|
||||||
* Supports Penryn and newer Macs
|
* Supports Penryn and newer Macs
|
||||||
* Full support for WPA Wifi and Personal Hotspot on BCM943224 and newer chipsets
|
* Full support for WPA Wifi and Personal Hotspot on BCM943224 and newer chipsets
|
||||||
@@ -32,7 +32,7 @@ Note: Only clean-installs and upgrades are supported, macOS Big Sur installs alr
|
|||||||
|
|
||||||
* You can however reinstall macOS with this patcher and retain your original data
|
* You can however reinstall macOS with this patcher and retain your original data
|
||||||
|
|
||||||
Note 2: Currently OpenCore Legacy Patcher officially supports patching to run macOS Big Sur and Monterey installs. For older OSes, OpenCore may function however support is currently not provided from Dortania.
|
Note 2: Currently OpenCore Legacy Patcher officially supports patching to run macOS Big Sur through Ventura installs. For older OSes, OpenCore may function however support is currently not provided from Dortania.
|
||||||
|
|
||||||
* For macOS Mojave and Catalina support, we recommend the use of [dosdude1's patchers](http://dosdude1.com)
|
* For macOS Mojave and Catalina support, we recommend the use of [dosdude1's patchers](http://dosdude1.com)
|
||||||
|
|
||||||
|
|||||||
24
SOURCE.md
24
SOURCE.md
@@ -1,11 +1,10 @@
|
|||||||
# Build and run from source
|
# Build and run from source
|
||||||
|
|
||||||
OpenCore Legacy Patcher at its core is a python-based TUI/CLI based application. This means that to run the project from source, you simply need to invoke the OpenCore-Patcher.command file via Python.
|
OpenCore Legacy Patcher at its core is a python-based GUI/CLI based application. This means that to run the project from source, you simply need to invoke the OpenCore-Patcher-GUI.command file via Python.
|
||||||
|
|
||||||
For developers wishing to validate mainline changes, you may use these nightly links:
|
For developers wishing to validate mainline changes, you may use these nightly links:
|
||||||
|
|
||||||
* [GUI (Graphical Based App)](https://nightly.link/dortania/OpenCore-Legacy-Patcher/workflows/build-app-wxpython/main/OpenCore-Patcher.app%20%28GUI%29.zip)
|
* [GUI (Graphical Based App)](https://nightly.link/dortania/OpenCore-Legacy-Patcher/workflows/build-app-wxpython/main/OpenCore-Patcher.app%20%28GUI%29.zip)
|
||||||
* [TUI (Text Based App)](https://nightly.link/dortania/OpenCore-Legacy-Patcher/workflows/build-app/main/OpenCore-Patcher-TUI.app.zip)
|
|
||||||
|
|
||||||
**Warning**: These binaries should not be used without first consulting the [CHANGELOG](./CHANGELOG.md). Do not distribute these links in forums, instead direct to this file.
|
**Warning**: These binaries should not be used without first consulting the [CHANGELOG](./CHANGELOG.md). Do not distribute these links in forums, instead direct to this file.
|
||||||
|
|
||||||
@@ -32,41 +31,31 @@ pip3 install -r requirements.txt
|
|||||||
|
|
||||||
If you have installation error, see following troubleshooting options:
|
If you have installation error, see following troubleshooting options:
|
||||||
|
|
||||||
* Use Python 3.9
|
* Use Python 3.10
|
||||||
* Currently our build server uses py3.9 for generating binaries used in releases
|
* Currently our build server uses py3.10 for generating binaries used in releases
|
||||||
* Use .whl snapshots for installing additional dependencies
|
* Use .whl snapshots for installing additional dependencies
|
||||||
* [wxPython 4.1.1 wheel for py3.9](https://files.pythonhosted.org/packages/2c/a8/7027e8ca3ba20dc2ed2acd556e31941cb44097ab87d6f81d646a79de4eab/wxPython-4.1.1-cp39-cp39-macosx_10_10_x86_64.whl)
|
|
||||||
* [PyObjc 8.5 wheel for py3](https://files.pythonhosted.org/packages/69/3d/786f379dd669a078cf0c4a686e242c9b643071c23367bfbd3d9a7eb589ec/pyobjc-8.5-py3-none-any.whl)
|
|
||||||
* [Requests 2.27.1 for py2/py3](https://files.pythonhosted.org/packages/2d/61/08076519c80041bc0ffa1a8af0cbd3bf3e2b62af10435d269a9d0f40564d/requests-2.27.1-py2.py3-none-any.whl)
|
|
||||||
* [pyinstaller 5.3 for py3](https://files.pythonhosted.org/packages/65/70/625e86e5a45cb975a9c32a10a721394d10771275c69207308b80bc6a758e/pyinstaller-5.3-py3-none-macosx_10_13_universal2.whl)
|
|
||||||
|
|
||||||
|
|
||||||
## Running OpenCore Legacy Patcher
|
## Running OpenCore Legacy Patcher
|
||||||
|
|
||||||
To run the project from source, simply invoke via python3:
|
To run the project from source, simply invoke via python3:
|
||||||
|
|
||||||
```sh
|
|
||||||
# Launch TUI
|
|
||||||
python3 OpenCore-Patcher.command
|
|
||||||
```
|
|
||||||
|
|
||||||
```sh
|
```sh
|
||||||
# Launch GUI
|
# Launch GUI
|
||||||
python3 OpenCore-Patcher-GUI.command
|
python3 OpenCore-Patcher-GUI.command
|
||||||
```
|
```
|
||||||
|
|
||||||
Note that the OpenCore-Patcher.command file can be run as both a TUI and a CLI utility for other programs to call. If no core arguments are passed, the TUI is initialized. Otherwise the CLI will start:
|
Note that the OpenCore-Patcher-GUI.command file can be run as both a GUI and a CLI utility for other programs to call. If no core arguments are passed, the GUI is initialized. Otherwise the CLI will start:
|
||||||
|
|
||||||
```sh
|
```sh
|
||||||
# Launch CLI
|
# Launch CLI
|
||||||
python3 OpenCore-Patcher.command --build --model iMac12,2 --verbose
|
python3 OpenCore-Patcher-GUI.command --build --model iMac12,2 --verbose
|
||||||
```
|
```
|
||||||
|
|
||||||
See `-h`/`--help` for more information on supported CLI arguments.
|
See `-h`/`--help` for more information on supported CLI arguments.
|
||||||
|
|
||||||
## Generating prebuilt binaries
|
## Generating prebuilt binaries
|
||||||
|
|
||||||
The main goal of generating prebuilt binaries is to strip the requirement of a local python installation for users. For developers, there's very little benefit besides enabling dark mode support in the GUI. For development, simply use the OpenCore-Patcher.command file with a python3 installation.
|
The main goal of generating prebuilt binaries is to strip the requirement of a local python installation for users. For developers, there's very little benefit besides enabling dark mode support in the GUI. For development, simply use the OpenCore-Patcher-GUI.command file with a python3 installation.
|
||||||
|
|
||||||
* Note that due to PyInstaller's linking mechanism, binaries generated on Catalina and newer are not compatible with High Sierra and older
|
* Note that due to PyInstaller's linking mechanism, binaries generated on Catalina and newer are not compatible with High Sierra and older
|
||||||
* To ensure the largest compatibility, generate binaries on macOS Mojave. These binaries will be compatible with macOS 10.9 to macOS 12.
|
* To ensure the largest compatibility, generate binaries on macOS Mojave. These binaries will be compatible with macOS 10.9 to macOS 12.
|
||||||
@@ -79,7 +68,6 @@ pip3 install pyinstaller
|
|||||||
cd ~/Developer/OpenCore-Legacy-Patcher/
|
cd ~/Developer/OpenCore-Legacy-Patcher/
|
||||||
# Create the pyinstaller based Application
|
# Create the pyinstaller based Application
|
||||||
# Optional Arguments
|
# Optional Arguments
|
||||||
# '--build_tui': Create TUI vairant
|
|
||||||
# '--reset_binaries': Redownload and generate support files
|
# '--reset_binaries': Redownload and generate support files
|
||||||
python3 Build-Binary.command
|
python3 Build-Binary.command
|
||||||
# Open build folder
|
# Open build folder
|
||||||
|
|||||||
@@ -6,19 +6,20 @@
|
|||||||
# - 0x3 used in 11.0.1 dyld source:
|
# - 0x3 used in 11.0.1 dyld source:
|
||||||
# - https://github.com/apple-oss-distributions/dyld/blob/5c9192436bb195e7a8fe61f22a229ee3d30d8222/testing/test-cases/kernel-hello-world.dtest/main.c#L2
|
# - https://github.com/apple-oss-distributions/dyld/blob/5c9192436bb195e7a8fe61f22a229ee3d30d8222/testing/test-cases/kernel-hello-world.dtest/main.c#L2
|
||||||
|
|
||||||
class apple_mobile_file_integrity:
|
import enum
|
||||||
|
|
||||||
|
class AppleMobileFileIntegrity(enum.IntEnum):
|
||||||
# Names set are solely for readability
|
# Names set are solely for readability
|
||||||
# Internal names are unknown
|
# Internal names are unknown
|
||||||
amfi_values = {
|
AMFI_ALLOW_TASK_FOR_PID: int = 0x1 # Allow Task for PID (alt. amfi_unrestrict_task_for_pid=0x1)
|
||||||
"AMFI_ALLOW_TASK_FOR_PID": False, # 0x1 - Allow Task for PID (alt. amfi_unrestrict_task_for_pid=0x1)
|
AMFI_ALLOW_INVALID_SIGNATURE: int = 0x2 # Reduce sig enforcement (alt. amfi_allow_any_signature=0x1)
|
||||||
"AMFI_ALLOW_INVALID_SIGNATURE": False, # 0x2 - Reduce sig enforcement (alt. amfi_allow_any_signature=0x1)
|
AMFI_LV_ENFORCE_THIRD_PARTY: int = 0x4 # Don't mark external binaries as platform binaries
|
||||||
"AMFI_LV_ENFORCE_THIRD_PARTY": False, # 0x4 - Don't mark external binaries as platform binaries
|
AMFI_UNKNOWN_1: int = 0x8
|
||||||
"AMFI_UNKNOWN_1": False, # 0x8
|
AMFI_UNKNOWN_2: int = 0x10
|
||||||
"AMFI_UNKNOWN_2": False, # 0x10
|
AMFI_UNKNOWN_3: int = 0x20
|
||||||
"AMFI_UNKNOWN_3": False, # 0x20
|
AMFI_UNKNOWN_4: int = 0x40
|
||||||
"AMFI_UNKNOWN_4": False, # 0x40
|
AMFI_ALLOW_EVERYTHING: int = 0x80 # Disable sig enforcement and Library Validation (alt. amfi_get_out_of_my_way=0x1)
|
||||||
"AMFI_ALLOW_EVERYTHING": False, # 0x80 - Disable sig enforcement and Library Validation (alt. amfi_get_out_of_my_way=0x1)
|
|
||||||
},
|
|
||||||
|
|
||||||
# Internally within AMFI.kext, Apple references 0x2 and 0x80 as both 'Disable signature enforcement'
|
# Internally within AMFI.kext, Apple references 0x2 and 0x80 as both 'Disable signature enforcement'
|
||||||
# However 0x80 is a higher privilege than 0x2, and breaks TCC support in OS (ex. Camera, Microphone, etc prompts)
|
# However 0x80 is a higher privilege than 0x2, and breaks TCC support in OS (ex. Camera, Microphone, etc prompts)
|
||||||
|
|||||||
@@ -1,14 +0,0 @@
|
|||||||
# Data for SkyLightShim Plugin systems
|
|
||||||
class shim_list:
|
|
||||||
shim_pathing = {
|
|
||||||
"CoreWLAN.dylib": "/System/Library/CoreServices/WiFiAgent.app/Contents/MacOS/WiFiAgent",
|
|
||||||
"BacklightFixup.dylib": "/System/Library/CoreServices/loginwindow.app/Contents/MacOS/loginwindow",
|
|
||||||
}
|
|
||||||
|
|
||||||
shim_legacy_accel = [
|
|
||||||
"CoreWLAN.dylib",
|
|
||||||
]
|
|
||||||
|
|
||||||
shim_legacy_accel_keyboard = [
|
|
||||||
"BacklightFixup.dylib",
|
|
||||||
]
|
|
||||||
@@ -1,11 +0,0 @@
|
|||||||
# Mirrors of Apple's InstallAssistant.pkg
|
|
||||||
# Currently only listing important Installers no longer on Apple's servers
|
|
||||||
|
|
||||||
Install_macOS_Big_Sur_11_2_3 = {
|
|
||||||
"Version": "11.2.3",
|
|
||||||
"Build": "20D91",
|
|
||||||
"Link": "https://archive.org/download/install-assistant-20D91/InstallAssistant.pkg",
|
|
||||||
"Size": 12211077798,
|
|
||||||
"Source": "Archive.org",
|
|
||||||
"integrity": None,
|
|
||||||
}
|
|
||||||
@@ -249,6 +249,9 @@ IntelNvidiaDRM = [
|
|||||||
# Mac Pro and Xserve
|
# Mac Pro and Xserve
|
||||||
MacPro = ["MacPro3,1", "MacPro4,1", "MacPro5,1", "Xserve2,1", "Xserve3,1", "Dortania1,1"]
|
MacPro = ["MacPro3,1", "MacPro4,1", "MacPro5,1", "Xserve2,1", "Xserve3,1", "Dortania1,1"]
|
||||||
|
|
||||||
|
# MXM iMac
|
||||||
|
MXMiMac = ["iMac11,1", "iMac11,2", "iMac11,3", "iMac10,1", "iMac12,1", "iMac12,2", "Dortania1,1"]
|
||||||
|
|
||||||
NoAGPMSupport = ["MacBook4,1", "MacBookPro4,1", "iMac7,1", "iMac8,1", "MacPro3,1", "Xserve2,1", "Dortania1,1"]
|
NoAGPMSupport = ["MacBook4,1", "MacBookPro4,1", "iMac7,1", "iMac8,1", "MacPro3,1", "Xserve2,1", "Dortania1,1"]
|
||||||
|
|
||||||
AGDPSupport = [
|
AGDPSupport = [
|
||||||
@@ -350,4 +353,4 @@ Missing_USB_Map_Ventura = [
|
|||||||
"iMac16,1",
|
"iMac16,1",
|
||||||
"iMac16,2",
|
"iMac16,2",
|
||||||
"iMac17,1",
|
"iMac17,1",
|
||||||
]
|
]
|
||||||
|
|||||||
@@ -796,6 +796,11 @@ class amd_ids:
|
|||||||
0x67CF, # Unknown
|
0x67CF, # Unknown
|
||||||
]
|
]
|
||||||
|
|
||||||
|
polaris_spoof_ids = [
|
||||||
|
# Polaris 12 (Lexa)
|
||||||
|
0x6981, # Lexa XT [Radeon PRO WX 3200]
|
||||||
|
]
|
||||||
|
|
||||||
vega_ids = [
|
vega_ids = [
|
||||||
# GCN v5
|
# GCN v5
|
||||||
# AMDRadeonX5000
|
# AMDRadeonX5000
|
||||||
|
|||||||
File diff suppressed because it is too large
Load Diff
11
data/video_bios_data.py
Normal file
11
data/video_bios_data.py
Normal file
File diff suppressed because one or more lines are too long
@@ -15,7 +15,7 @@
|
|||||||
* [Black Boxes on HD3000 iGPUs](#black-boxes-on-hd3000-igpus)
|
* [Black Boxes on HD3000 iGPUs](#black-boxes-on-hd3000-igpus)
|
||||||
* [Cannot Pair Bluetooth Devices](#cannot-pair-bluetooth-devices)
|
* [Cannot Pair Bluetooth Devices](#cannot-pair-bluetooth-devices)
|
||||||
|
|
||||||
The below page is for users experiencing issues with their overall usage of macOS Big Sur / macOS Monterey and the Legacy Graphics Acceleration patches. Note that the following GPUs currently do not have acceleration support in Big Sur / Monterey:
|
The below page is for users experiencing issues with their overall usage of macOS Big Sur / Monterey / Ventura and the Legacy Graphics Acceleration patches. Note that the following GPUs currently do not have acceleration support in Big Sur / Monterey / Ventura:
|
||||||
|
|
||||||
* Intel 3rd and 4th Gen - GMA series
|
* Intel 3rd and 4th Gen - GMA series
|
||||||
|
|
||||||
@@ -23,7 +23,7 @@ For those unfamiliar with what is considered a non-Metal GPU, see the chart belo
|
|||||||
|
|
||||||
::: details macOS GPU Chart
|
::: details macOS GPU Chart
|
||||||
|
|
||||||
Metal is Apple's in-house graphics API that acts as a replacement for OpenGL/OpenCL, introduced in 2015. With the release of macOS Mojave, every system without a Metal-capable GPU was dropped.
|
Metal is Apple's in-house graphics API that acts as a replacement for OpenGL/OpenCL, introduced in 2015. With the release of macOS Mojave, every system without a Metal-capable GPU was dropped.
|
||||||
|
|
||||||
| Graphics Vendor | Architecture | Series | Supports Metal |
|
| Graphics Vendor | Architecture | Series | Supports Metal |
|
||||||
| :--- | :--- | :--- | :--- |
|
| :--- | :--- | :--- | :--- |
|
||||||
@@ -31,7 +31,7 @@ Metal is Apple's in-house graphics API that acts as a replacement for OpenGL/Ope
|
|||||||
| ^^ | TeraScale 2 | HD5000 - HD6000 | ^^ |
|
| ^^ | TeraScale 2 | HD5000 - HD6000 | ^^ |
|
||||||
| AMD | GCN (and newer) | HD7000+ | <span style="color:green">Yes</span> |
|
| AMD | GCN (and newer) | HD7000+ | <span style="color:green">Yes</span> |
|
||||||
| NVIDIA | Tesla | 8000GT - GT300 | <span style="color:red">No</span> |
|
| NVIDIA | Tesla | 8000GT - GT300 | <span style="color:red">No</span> |
|
||||||
| ^^ | Fermi | GT400 - GT500 | ^^ |
|
| ^^ | Fermi | GT400 - GT500 | ^^ |
|
||||||
| ^^ | Kepler | GT600 - GT700 | <span style="color:green">Yes</span> |
|
| ^^ | Kepler | GT600 - GT700 | <span style="color:green">Yes</span> |
|
||||||
| Intel | GMA | GMA900 - GMA3000 | <span style="color:red">No</span> |
|
| Intel | GMA | GMA900 - GMA3000 | <span style="color:red">No</span> |
|
||||||
| ^^ | Iron Lake | HD series | ^^ |
|
| ^^ | Iron Lake | HD series | ^^ |
|
||||||
@@ -102,7 +102,7 @@ Due to the Metal Backend, the enhanced color output of these apps seems to heavi
|
|||||||
|
|
||||||
## Cannot press "Done" when editing a Sidebar Widget
|
## Cannot press "Done" when editing a Sidebar Widget
|
||||||
|
|
||||||
Workaround: Press some combination of Tab, or Tab and then Shift-Tab, or just Shift-Tab until the "Done" button is highlighted. Then press spacebar to activate the button, the same as in any other dialog with a highlighted button halo.
|
Workaround: Press some combination of Tab, or Tab and then Shift-Tab, or just Shift-Tab until the "Done" button is highlighted. Then press spacebar to activate the button, the same as in any other dialog with a highlighted button halo.
|
||||||
|
|
||||||
## Wake from sleep heavily distorted on AMD/ATI in macOS 11.3 and newer
|
## Wake from sleep heavily distorted on AMD/ATI in macOS 11.3 and newer
|
||||||
|
|
||||||
|
|||||||
@@ -18,11 +18,11 @@ The below table will list all supported and unsupported functions of the patcher
|
|||||||
|
|
||||||
Regarding OS support, see below:
|
Regarding OS support, see below:
|
||||||
|
|
||||||
* Machines listing `YES - Monterey and older` means they cannot run macOS Ventura at this time. Machines with only `YES` can run all of the supported macOS versions offered by OpenCore Legacy Patcher.
|
* Machines listing `YES - Ventura and older` means they cannot run macOS Ventura at this time. Machines with only `YES` can run all of the supported macOS versions offered by OpenCore Legacy Patcher.
|
||||||
|
|
||||||
| Support Entry | Supported OSes | Description | Comment |
|
| Support Entry | Supported OSes | Description | Comment |
|
||||||
| :--- | :--- | :--- | :--- |
|
| :--- | :--- | :--- | :--- |
|
||||||
| HostOS | macOS 10.9 - macOS 13 | Refers to OSes where running OpenCore-Patcher.app are supported | Supports 10.7+ if [Python 3.9 or higher](https://www.python.org/downloads/) is manually installed, simply run the `OpenCore-Patcher.command` located in the repo |
|
| HostOS | macOS 10.9 - macOS 13 | Refers to OSes where running OpenCore-Patcher.app are supported | Supports 10.7+ if [Python 3.9 or higher](https://www.python.org/downloads/) is manually installed, simply run the `OpenCore-Patcher-GUI.command` located in the repo |
|
||||||
| TargetOS | macOS 11 - macOS 13 | Refers to OSes that can be patched to run with OpenCore | May support 10.4 and newer (in a potentially broken state). No support provided. |
|
| TargetOS | macOS 11 - macOS 13 | Refers to OSes that can be patched to run with OpenCore | May support 10.4 and newer (in a potentially broken state). No support provided. |
|
||||||
|
|
||||||
### MacBook
|
### MacBook
|
||||||
@@ -32,12 +32,12 @@ Regarding OS support, see below:
|
|||||||
| MacBook1,1 | Mid-2006 | <span style="color:red"> NO </span> | 32-Bit CPU limitation |
|
| MacBook1,1 | Mid-2006 | <span style="color:red"> NO </span> | 32-Bit CPU limitation |
|
||||||
| MacBook2,1 | Late 2006 | ^^ | 32-Bit Firmware limitation |
|
| MacBook2,1 | Late 2006 | ^^ | 32-Bit Firmware limitation |
|
||||||
| MacBook3,1 | Late 2007 | ^^ | ^^ |
|
| MacBook3,1 | Late 2007 | ^^ | ^^ |
|
||||||
| MacBook4,1 | Early 2008 | <span style="color:#30BCD5"> YES - Monterey and older</span> | - No GPU Acceleration in Mavericks and newer<br/>- No Keyboard and Trackpad<br/>- No USB |
|
| MacBook4,1 | Early 2008 | ^^ | - No GPU Acceleration in Mavericks and newer<br/>- No Keyboard and Trackpad<br/>- No USB |
|
||||||
| MacBook5,1 | Late 2008 | ^^ | - GPU Acceleration in Public Beta, see current issues ([#108](https://github.com/dortania/OpenCore-Legacy-Patcher/issues/108))<br/> - Supports macOS Monterey and older ([Ventura support in development](./VENTURA-DROP.md))|
|
| MacBook5,1 | Late 2008 | <span style="color:#30BCD5"> YES </span> | - GPU Acceleration in Public Beta, see current issues ([#108](https://github.com/dortania/OpenCore-Legacy-Patcher/issues/108))<br/> - UHCI/OHCI support in Public Beta, see current issues ([#1021](https://github.com/dortania/OpenCore-Legacy-Patcher/issues/1021))|
|
||||||
| MacBook5,2 | Early 2009 | ^^ | ^^ |
|
| MacBook5,2 | Early 2009 | ^^ | ^^ |
|
||||||
| MacBook6,1 | Late 2009 | ^^ | ^^ |
|
| MacBook6,1 | Late 2009 | ^^ | ^^ |
|
||||||
| MacBook7,1 | Mid-2010 | ^^ | ^^ |
|
| MacBook7,1 | Mid-2010 | ^^ | ^^ |
|
||||||
| MacBook8,1 | Mid-2015 | <span style="color:#30BCD5"> YES </span> | <span style="color:green"> Everything is supported</span> |
|
| MacBook8,1 | Mid-2015 | ^^ | <span style="color:green"> Everything is supported</span> |
|
||||||
| MacBook9,1 | Early 2016 | ^^ | ^^ |
|
| MacBook9,1 | Early 2016 | ^^ | ^^ |
|
||||||
|
|
||||||
### MacBook Air
|
### MacBook Air
|
||||||
@@ -45,12 +45,12 @@ Regarding OS support, see below:
|
|||||||
| SMBIOS | Year | Supported | Comment |
|
| SMBIOS | Year | Supported | Comment |
|
||||||
| :--- | :--- | :--- | :--- |
|
| :--- | :--- | :--- | :--- |
|
||||||
| MacBookAir1,1 | Early 2008 | <span style="color:red"> NO </span> | Requires SSE4.1 CPU |
|
| MacBookAir1,1 | Early 2008 | <span style="color:red"> NO </span> | Requires SSE4.1 CPU |
|
||||||
| MacBookAir2,1 | Late 2008 | <span style="color:#30BCD5"> YES - Monterey and older</span> | - GPU Acceleration in Public Beta, see current issues ([#108](https://github.com/dortania/OpenCore-Legacy-Patcher/issues/108))<br/> - Supports macOS Monterey and older ([Ventura support in development](./VENTURA-DROP.md))|
|
| MacBookAir2,1 | Late 2008 | <span style="color:#30BCD5"> YES </span> | - GPU Acceleration in Public Beta, see current issues ([#108](https://github.com/dortania/OpenCore-Legacy-Patcher/issues/108))<br/> - UHCI/OHCI support in Public Beta, see current issues ([#1021](https://github.com/dortania/OpenCore-Legacy-Patcher/issues/1021))|
|
||||||
| MacBookAir3,1 | Late 2010 | ^^ | ^^ |
|
| MacBookAir3,1 | Late 2010 | ^^ | ^^ |
|
||||||
| MacBookAir3,2 | ^^ | ^^ | ^^ |
|
| MacBookAir3,2 | ^^ | ^^ | ^^ |
|
||||||
| MacBookAir4,1 | Mid-2011 | ^^ | ^^ |
|
| MacBookAir4,1 | Mid-2011 | ^^ | - GPU Acceleration in Public Beta, see current issues ([#108](https://github.com/dortania/OpenCore-Legacy-Patcher/issues/108))|
|
||||||
| MacBookAir4,2 | ^^ | ^^ | ^^ |
|
| MacBookAir4,2 | ^^ | ^^ | ^^ |
|
||||||
| MacBookAir5,1 | Mid-2012 | <span style="color:#30BCD5"> YES </span> | <span style="color:green"> Everything is supported</span> |
|
| MacBookAir5,1 | Mid-2012 | ^^ | <span style="color:green"> Everything is supported</span> |
|
||||||
| MacBookAir5,2 | ^^ | ^^ | ^^ |
|
| MacBookAir5,2 | ^^ | ^^ | ^^ |
|
||||||
| MacBookAir6,1 | Mid-2013, Early 2014 | ^^ | ^^ |
|
| MacBookAir6,1 | Mid-2013, Early 2014 | ^^ | ^^ |
|
||||||
| MacBookAir6,2 | ^^ | ^^ | ^^ |
|
| MacBookAir6,2 | ^^ | ^^ | ^^ |
|
||||||
@@ -66,19 +66,19 @@ Regarding OS support, see below:
|
|||||||
| MacBookPro2,1 | Late 2006 | ^^ | 32-Bit Firmware limitation |
|
| MacBookPro2,1 | Late 2006 | ^^ | 32-Bit Firmware limitation |
|
||||||
| MacBookPro2,2 | Late 2006 | ^^ | ^^ |
|
| MacBookPro2,2 | Late 2006 | ^^ | ^^ |
|
||||||
| MacBookPro3,1 | Mid-2007 | ^^ | Requires SSE4.1 CPU |
|
| MacBookPro3,1 | Mid-2007 | ^^ | Requires SSE4.1 CPU |
|
||||||
| MacBookPro4,1 | Early 2008 | <span style="color:#30BCD5"> YES - Monterey and older</span> | - GPU Acceleration in Public Beta, see current issues ([#108](https://github.com/dortania/OpenCore-Legacy-Patcher/issues/108))<br/> - Supports macOS Monterey and older ([Ventura support in development](./VENTURA-DROP.md))|
|
| MacBookPro4,1 | Early 2008 | <span style="color:#30BCD5"> YES </span> | - GPU Acceleration in Public Beta, see current issues ([#108](https://github.com/dortania/OpenCore-Legacy-Patcher/issues/108))<br/> - UHCI/OHCI support in Public Beta, see current issues ([#1021](https://github.com/dortania/OpenCore-Legacy-Patcher/issues/1021))|
|
||||||
| MacBookPro5,1 | Late 2008 | ^^ | ^^ |
|
| MacBookPro5,1 | Late 2008 | ^^ | ^^ |
|
||||||
| MacBookPro5,2 | Early 2009 | ^^ | ^^ |
|
| MacBookPro5,2 | Early 2009 | ^^ | ^^ |
|
||||||
| MacBookPro5,3 | Mid-2009 | ^^ | ^^ |
|
| MacBookPro5,3 | Mid-2009 | ^^ | ^^ |
|
||||||
| MacBookPro5,4 | ^^ | ^^ | ^^ |
|
| MacBookPro5,4 | ^^ | ^^ | ^^ |
|
||||||
| MacBookPro5,5 | ^^ | ^^ | ^^ |
|
| MacBookPro5,5 | ^^ | ^^ | ^^ |
|
||||||
| MacBookPro6,1 | Mid-2010 | ^^ | ^^ |
|
| MacBookPro6,1 | Mid-2010 | ^^ | - GPU Acceleration in Public Beta, see current issues ([#108](https://github.com/dortania/OpenCore-Legacy-Patcher/issues/108))|
|
||||||
| MacBookPro6,2 | ^^ | ^^ | ^^ |
|
| MacBookPro6,2 | ^^ | ^^ | ^^ |
|
||||||
| MacBookPro7,1 | ^^ | ^^ | ^^ |
|
| MacBookPro7,1 | ^^ | ^^ | - GPU Acceleration in Public Beta, see current issues ([#108](https://github.com/dortania/OpenCore-Legacy-Patcher/issues/108))<br/> - UHCI/OHCI support in Public Beta, see current issues ([#1021](https://github.com/dortania/OpenCore-Legacy-Patcher/issues/1021))|
|
||||||
| MacBookPro8,1 | Early 2011 | ^^ | ^^ |
|
| MacBookPro8,1 | Early 2011 | ^^ | - GPU Acceleration in Public Beta, see current issues ([#108](https://github.com/dortania/OpenCore-Legacy-Patcher/issues/108))|
|
||||||
| MacBookPro8,2 | ^^ | ^^ | ^^ |
|
| MacBookPro8,2 | ^^ | ^^ | ^^ |
|
||||||
| MacBookPro8,3 | ^^ | ^^ | ^^ |
|
| MacBookPro8,3 | ^^ | ^^ | ^^ |
|
||||||
| MacBookPro9,1 | Mid-2012 | <span style="color:#30BCD5"> YES </span> | <span style="color:green"> Everything is supported</span> |
|
| MacBookPro9,1 | Mid-2012 | ^^ | <span style="color:green"> Everything is supported</span> |
|
||||||
| MacBookPro9,2 | ^^ | ^^ | ^^ |
|
| MacBookPro9,2 | ^^ | ^^ | ^^ |
|
||||||
| MacBookPro10,1 | Mid-2012, Early 2013 | ^^ | ^^ |
|
| MacBookPro10,1 | Mid-2012, Early 2013 | ^^ | ^^ |
|
||||||
| MacBookPro10,2 | Late 2012, Early 2013 | ^^ | ^^ |
|
| MacBookPro10,2 | Late 2012, Early 2013 | ^^ | ^^ |
|
||||||
@@ -98,12 +98,12 @@ Regarding OS support, see below:
|
|||||||
| :--- | :--- | :--- | :--- |
|
| :--- | :--- | :--- | :--- |
|
||||||
| Macmini1,1 | Early 2006 | <span style="color:red"> NO </span> | 32-Bit CPU limitation |
|
| Macmini1,1 | Early 2006 | <span style="color:red"> NO </span> | 32-Bit CPU limitation |
|
||||||
| Macmini2,1 | Mid-2007 | ^^ | 32-Bit Firmware limitation |
|
| Macmini2,1 | Mid-2007 | ^^ | 32-Bit Firmware limitation |
|
||||||
| Macmini3,1 | Early 2009 | <span style="color:#30BCD5"> YES - Monterey and older</span> | - GPU Acceleration in Public Beta, see current issues ([#108](https://github.com/dortania/OpenCore-Legacy-Patcher/issues/108)) <br/> - Supports macOS Monterey and older ([Ventura support in development](./VENTURA-DROP.md)) |
|
| Macmini3,1 | Early 2009 | <span style="color:#30BCD5"> YES </span> | - GPU Acceleration in Public Beta, see current issues ([#108](https://github.com/dortania/OpenCore-Legacy-Patcher/issues/108))<br/> - UHCI/OHCI support in Public Beta, see current issues ([#1021](https://github.com/dortania/OpenCore-Legacy-Patcher/issues/1021))|
|
||||||
| Macmini4,1 | Mid-2010 | ^^ | ^^ |
|
| Macmini4,1 | Mid-2010 | ^^ | ^^ |
|
||||||
| Macmini5,1 | Mid-2011 | ^^ | ^^ |
|
| Macmini5,1 | Mid-2011 | ^^ | - GPU Acceleration in Public Beta, see current issues ([#108](https://github.com/dortania/OpenCore-Legacy-Patcher/issues/108))|
|
||||||
| Macmini5,2 | ^^ | ^^ | ^^ |
|
| Macmini5,2 | ^^ | ^^ | ^^ |
|
||||||
| Macmini5,3 | ^^ | ^^ | ^^ |
|
| Macmini5,3 | ^^ | ^^ | ^^ |
|
||||||
| Macmini6,1 | Late 2012 | <span style="color:#30BCD5"> YES </span> | <span style="color:green"> Everything is supported</span> |
|
| Macmini6,1 | Late 2012 | ^^ | <span style="color:green"> Everything is supported</span> |
|
||||||
| Macmini6,2 | ^^ | ^^ | ^^ |
|
| Macmini6,2 | ^^ | ^^ | ^^ |
|
||||||
| Macmini7,1 | Mid-2014 | ^^ | ^^ |
|
| Macmini7,1 | Mid-2014 | ^^ | ^^ |
|
||||||
|
|
||||||
@@ -116,16 +116,16 @@ Regarding OS support, see below:
|
|||||||
| iMac5,1 | Late 2006 | ^^ | 32-Bit Firmware limitation |
|
| iMac5,1 | Late 2006 | ^^ | 32-Bit Firmware limitation |
|
||||||
| iMac5,2 | ^^ | ^^ | ^^ |
|
| iMac5,2 | ^^ | ^^ | ^^ |
|
||||||
| iMac6,1 | ^^ | ^^ | ^^ |
|
| iMac6,1 | ^^ | ^^ | ^^ |
|
||||||
| iMac7,1 | Mid-2007 | <span style="color:#30BCD5"> YES - Monterey and older</span> | - Requires an [SSE4.1 CPU Upgrade](https://lowendmac.com/2018/penryn-t9300-9500-cpu-upgrades-for-the-2007-imac/)<br/>- GPU Acceleration in Public Beta, see current issues ([#108](https://github.com/dortania/OpenCore-Legacy-Patcher/issues/108))<br/>- Stock Bluetooth 2.0 card non-functional<br/> - Supports macOS Monterey and older ([Ventura support in development](./VENTURA-DROP.md)) |
|
| iMac7,1 | Mid-2007 | <span style="color:#30BCD5"> YES </span> | - Requires an [SSE4.1 CPU Upgrade](https://lowendmac.com/2018/penryn-t9300-9500-cpu-upgrades-for-the-2007-imac/)<br/>- GPU Acceleration in Public Beta, see current issues ([#108](https://github.com/dortania/OpenCore-Legacy-Patcher/issues/108))<br/>- Stock Bluetooth 2.0 card non-functional<br/> - UHCI/OHCI support in Public Beta, see current issues ([#1021](https://github.com/dortania/OpenCore-Legacy-Patcher/issues/1021))|
|
||||||
| iMac8,1 | Early 2008 | ^^ | - GPU Acceleration in Public Beta, see current issues ([#108](https://github.com/dortania/OpenCore-Legacy-Patcher/issues/108)))<br/> - Supports macOS Monterey and older ([Ventura support in development](./VENTURA-DROP.md))|
|
| iMac8,1 | Early 2008 | ^^ | - GPU Acceleration in Public Beta, see current issues ([#108](https://github.com/dortania/OpenCore-Legacy-Patcher/issues/108))<br/> - UHCI/OHCI support in Public Beta, see current issues ([#1021](https://github.com/dortania/OpenCore-Legacy-Patcher/issues/1021))|
|
||||||
| iMac9,1 | Early 2009 | ^^ | ^^ |
|
| iMac9,1 | Early 2009 | ^^ | ^^ |
|
||||||
| iMac10,1 | Late 2009 | ^^ | - GPU is socketed, [recommend upgrading to Metal GPU](https://forums.macrumors.com/threads/2011-imac-graphics-card-upgrade.1596614/?post=17425857#post-17425857) <br/>- GPU Acceleration in Public Beta, see current issues ([#108](https://github.com/dortania/OpenCore-Legacy-Patcher/issues/108)) <br/> - Supports macOS Monterey and older ([Ventura support in development](./VENTURA-DROP.md))|
|
| iMac10,1 | Late 2009 | ^^ | - GPU is socketed, [recommend upgrading to Metal GPU](https://forums.macrumors.com/threads/2011-imac-graphics-card-upgrade.1596614/?post=17425857#post-17425857) <br/>- GPU Acceleration in Public Beta, see current issues ([#108](https://github.com/dortania/OpenCore-Legacy-Patcher/issues/108)) <br/> - UHCI/OHCI support in Public Beta, see current issues ([#1021](https://github.com/dortania/OpenCore-Legacy-Patcher/issues/1021))|
|
||||||
| iMac11,1 | ^^ | ^^ | ^^ |
|
| iMac11,1 | ^^ | ^^ | - GPU Acceleration in Public Beta, see current issues ([#108](https://github.com/dortania/OpenCore-Legacy-Patcher/issues/108))|
|
||||||
| iMac11,2 | Mid-2010 | ^^ | ^^ |
|
| iMac11,2 | Mid-2010 | ^^ | ^^ |
|
||||||
| iMac11,3 | ^^ | ^^ | ^^ |
|
| iMac11,3 | ^^ | ^^ | ^^ |
|
||||||
| iMac12,1 | Mid-2011 | ^^ | ^^ |
|
| iMac12,1 | Mid-2011 | ^^ | ^^ |
|
||||||
| iMac12,2 | ^^ | ^^ | ^^ |
|
| iMac12,2 | ^^ | ^^ | ^^ |
|
||||||
| iMac13,1 | Late 2012 | <span style="color:#30BCD5"> YES </span> | <span style="color:green"> Everything is supported</span> |
|
| iMac13,1 | Late 2012 | ^^ | <span style="color:green"> Everything is supported</span> |
|
||||||
| iMac13,2 | ^^ | ^^ | ^^ |
|
| iMac13,2 | ^^ | ^^ | ^^ |
|
||||||
| iMac13,3 | ^^ | ^^ | ^^ |
|
| iMac13,3 | ^^ | ^^ | ^^ |
|
||||||
| iMac14,1 | Late 2013 | ^^ | ^^ |
|
| iMac14,1 | Late 2013 | ^^ | ^^ |
|
||||||
@@ -145,8 +145,8 @@ Regarding OS support, see below:
|
|||||||
| :--- | :--- | :--- | :--- |
|
| :--- | :--- | :--- | :--- |
|
||||||
| MacPro1,1 | Mid-2006 | <span style="color:red"> NO </span> | 32-Bit Firmware limitation |
|
| MacPro1,1 | Mid-2006 | <span style="color:red"> NO </span> | 32-Bit Firmware limitation |
|
||||||
| MacPro2,1 | Mid-2007 | ^^ | ^^ |
|
| MacPro2,1 | Mid-2007 | ^^ | ^^ |
|
||||||
| MacPro3,1 | Early 2008 | <span style="color:#30BCD5"> YES - Monterey and older</span> | - Potential boot issues with built-in USB 1.1 ports (recommend using a USB 2.0 hub or dedicated USB PCIe controller) <br/>- Potential boot issues with stock Bluetooth card, recommend removing to avoid kernel panics |
|
| MacPro3,1 | Early 2008 | <span style="color:#30BCD5"> YES </span> | - UHCI/OHCI support in Public Beta, see current issues ([#1021](https://github.com/dortania/OpenCore-Legacy-Patcher/issesues/1021)) <br/>- Potential boot issues with stock Bluetooth card, recommend removing to avoid kernel panics |
|
||||||
| MacPro4,1 | Early 2009 | ^^ | - Everything is supported as long as GPU is Metal capable <br/> - Supports macOS Monterey and older ([Ventura support in development](./VENTURA-DROP.md)) |
|
| MacPro4,1 | Early 2009 | ^^ | - Everything is supported as long as GPU is Metal capable <br/> - UHCI/OHCI support in Public Beta, see current issues ([#1021](https://github.com/dortania/OpenCore-Legacy-Patcher/issesues/1021))|
|
||||||
| MacPro5,1 | Mid-2010, Mid-2012 | ^^ | ^^ |
|
| MacPro5,1 | Mid-2010, Mid-2012 | ^^ | ^^ |
|
||||||
| MacPro6,1 | Late 2013 | ^^ | - CPU Power Management currently unsupported<br/> - No DRM support |
|
| MacPro6,1 | Late 2013 | ^^ | - CPU Power Management currently unsupported<br/> - No DRM support |
|
||||||
|
|
||||||
@@ -155,7 +155,7 @@ Regarding OS support, see below:
|
|||||||
| SMBIOS | Year | Supported | Comment |
|
| SMBIOS | Year | Supported | Comment |
|
||||||
| :--- | :--- | :--- | :--- |
|
| :--- | :--- | :--- | :--- |
|
||||||
| Xserve1,1 | Mid-2006 | <span style="color:red"> NO </span> | 32-Bit Firmware limitation |
|
| Xserve1,1 | Mid-2006 | <span style="color:red"> NO </span> | 32-Bit Firmware limitation |
|
||||||
| Xserve2,1 | Early 2008 | <span style="color:#30BCD5"> YES - Monterey and older</span> | - Everything is supported as long as GPU is Metal capable <br/> - Supports macOS Monterey and older ([Ventura support in development](./VENTURA-DROP.md))|
|
| Xserve2,1 | Early 2008 | <span style="color:#30BCD5"> YES </span> | - UHCI/OHCI support in Public Beta, see current issues ([#1021](https://github.com/dortania/OpenCore-Legacy-Patcher/issues/1021))|
|
||||||
| Xserve3,1 | Early 2009 | ^^ | ^^ |
|
| Xserve3,1 | Early 2009 | ^^ | ^^ |
|
||||||
|
|
||||||
# Once you've verified your hardware is supported, head to [Download and build macOS Installers](./INSTALLER.md)
|
# Once you've verified your hardware is supported, head to [Download and build macOS Installers](./INSTALLER.md)
|
||||||
|
|||||||
@@ -20,14 +20,11 @@ And voila! No more USB drive required.
|
|||||||
|
|
||||||
To do this, run the OpenCore Patcher and head to Patcher Settings:
|
To do this, run the OpenCore Patcher and head to Patcher Settings:
|
||||||
|
|
||||||
| GUI Settings | TUI Settings
|

|
||||||
| :--- | :--- |
|
|
||||||
| |  |
|
|
||||||
|
|
||||||
Here you can change different patcher settings, however the main interest is:
|
Here you can change different patcher settings, however the main interest is:
|
||||||
|
|
||||||
* Show Boot Picker (GUI)
|
* Show Boot Picker
|
||||||
* Set ShowPicker Mode (TUI)
|
|
||||||
|
|
||||||
Once you've toggled them both off, build your OpenCore EFI once again and install to your desired drive. Now to show the OpenCore selector, you can simply hold down the "ESC" key while clicking on EFI boot, and then you can release the "ESC" key when you see the cursor arrow at the top left.
|
Once you've toggled them both off, build your OpenCore EFI once again and install to your desired drive. Now to show the OpenCore selector, you can simply hold down the "ESC" key while clicking on EFI boot, and then you can release the "ESC" key when you see the cursor arrow at the top left.
|
||||||
|
|
||||||
|
|||||||
@@ -83,7 +83,7 @@ With OpenCore Legacy Patcher v0.6.0, basic support has been implemented via Root
|
|||||||
MacPro3,1 suffers from Ethernet driver dying after returning from sleep, current workaround is to use a USB Ethernet adapter or disable sleep.
|
MacPro3,1 suffers from Ethernet driver dying after returning from sleep, current workaround is to use a USB Ethernet adapter or disable sleep.
|
||||||
|
|
||||||
|
|
||||||
::: details Legacy Wireless Support (Resolved in v0.6.0)
|
::: details Legacy Wireless Support (Resolved in v0.6.0 and newer)
|
||||||
|
|
||||||
|
|
||||||
### Legacy Wireless Support
|
### Legacy Wireless Support
|
||||||
@@ -109,7 +109,7 @@ Currently BCM943224, BCM94331, BCM94360 and BCM943602 are still fully supported
|
|||||||
:::
|
:::
|
||||||
|
|
||||||
|
|
||||||
::: details Non-Metal Graphics Acceleration (Resolved in 0.6.0 and newer)
|
::: details Non-Metal Graphics Acceleration (Resolved in v0.6.0 and newer)
|
||||||
|
|
||||||
|
|
||||||
### Non-Metal Graphics Acceleration
|
### Non-Metal Graphics Acceleration
|
||||||
|
|||||||
@@ -2699,6 +2699,8 @@
|
|||||||
<true/>
|
<true/>
|
||||||
<key>ResizeGpuBars</key>
|
<key>ResizeGpuBars</key>
|
||||||
<integer>-1</integer>
|
<integer>-1</integer>
|
||||||
|
<key>ResizeUsePciRbIo</key>
|
||||||
|
<false/>
|
||||||
<key>TscSyncTimeout</key>
|
<key>TscSyncTimeout</key>
|
||||||
<integer>0</integer>
|
<integer>0</integer>
|
||||||
<key>UnblockFsConnect</key>
|
<key>UnblockFsConnect</key>
|
||||||
|
|||||||
Binary file not shown.
Binary file not shown.
BIN
payloads/Kexts/Acidanthera/AutoPkgInstaller-v1.0.2-DEBUG.zip
Normal file
BIN
payloads/Kexts/Acidanthera/AutoPkgInstaller-v1.0.2-DEBUG.zip
Normal file
Binary file not shown.
BIN
payloads/Kexts/Acidanthera/AutoPkgInstaller-v1.0.2-RELEASE.zip
Normal file
BIN
payloads/Kexts/Acidanthera/AutoPkgInstaller-v1.0.2-RELEASE.zip
Normal file
Binary file not shown.
Binary file not shown.
Binary file not shown.
BIN
payloads/Kexts/Acidanthera/FeatureUnlock-v1.1.4-DEBUG.zip
Normal file
BIN
payloads/Kexts/Acidanthera/FeatureUnlock-v1.1.4-DEBUG.zip
Normal file
Binary file not shown.
BIN
payloads/Kexts/Acidanthera/FeatureUnlock-v1.1.4-RELEASE.zip
Normal file
BIN
payloads/Kexts/Acidanthera/FeatureUnlock-v1.1.4-RELEASE.zip
Normal file
Binary file not shown.
Binary file not shown.
Binary file not shown.
BIN
payloads/Kexts/Acidanthera/Lilu-v1.6.4-DEBUG.zip
Normal file
BIN
payloads/Kexts/Acidanthera/Lilu-v1.6.4-DEBUG.zip
Normal file
Binary file not shown.
BIN
payloads/Kexts/Acidanthera/Lilu-v1.6.4-RELEASE.zip
Normal file
BIN
payloads/Kexts/Acidanthera/Lilu-v1.6.4-RELEASE.zip
Normal file
Binary file not shown.
Binary file not shown.
Binary file not shown.
BIN
payloads/Kexts/Acidanthera/NVMeFix-v1.1.0-DEBUG.zip
Normal file
BIN
payloads/Kexts/Acidanthera/NVMeFix-v1.1.0-DEBUG.zip
Normal file
Binary file not shown.
BIN
payloads/Kexts/Acidanthera/NVMeFix-v1.1.0-RELEASE.zip
Normal file
BIN
payloads/Kexts/Acidanthera/NVMeFix-v1.1.0-RELEASE.zip
Normal file
Binary file not shown.
@@ -0,0 +1,2 @@
|
|||||||
|
Following patch required for iMacs with Navi MXM GPUs
|
||||||
|
Source: https://github.com/kingo132/a51m-r2-5700m-hackintosh/issues/13
|
||||||
3319
payloads/Kexts/Acidanthera/WhateverGreen-Navi-Backlight.patch
Normal file
3319
payloads/Kexts/Acidanthera/WhateverGreen-Navi-Backlight.patch
Normal file
File diff suppressed because it is too large
Load Diff
Binary file not shown.
Binary file not shown.
BIN
payloads/Kexts/Acidanthera/WhateverGreen-v1.6.4-DEBUG.zip
Normal file
BIN
payloads/Kexts/Acidanthera/WhateverGreen-v1.6.4-DEBUG.zip
Normal file
Binary file not shown.
BIN
payloads/Kexts/Acidanthera/WhateverGreen-v1.6.4-Navi-DEBUG.zip
Normal file
BIN
payloads/Kexts/Acidanthera/WhateverGreen-v1.6.4-Navi-DEBUG.zip
Normal file
Binary file not shown.
BIN
payloads/Kexts/Acidanthera/WhateverGreen-v1.6.4-Navi-RELEASE.zip
Normal file
BIN
payloads/Kexts/Acidanthera/WhateverGreen-v1.6.4-Navi-RELEASE.zip
Normal file
Binary file not shown.
BIN
payloads/Kexts/Acidanthera/WhateverGreen-v1.6.4-RELEASE.zip
Normal file
BIN
payloads/Kexts/Acidanthera/WhateverGreen-v1.6.4-RELEASE.zip
Normal file
Binary file not shown.
Binary file not shown.
BIN
payloads/Kexts/Misc/Innie-v1.3.1-DEBUG.zip
Normal file
BIN
payloads/Kexts/Misc/Innie-v1.3.1-DEBUG.zip
Normal file
Binary file not shown.
BIN
payloads/Kexts/Misc/Innie-v1.3.1-RELEASE.zip
Normal file
BIN
payloads/Kexts/Misc/Innie-v1.3.1-RELEASE.zip
Normal file
Binary file not shown.
253
payloads/Kexts/Update-Kexts.command
Executable file
253
payloads/Kexts/Update-Kexts.command
Executable file
@@ -0,0 +1,253 @@
|
|||||||
|
#!/usr/bin/env python3
|
||||||
|
|
||||||
|
import os
|
||||||
|
import subprocess
|
||||||
|
from pathlib import Path
|
||||||
|
import requests
|
||||||
|
import packaging.version
|
||||||
|
import tempfile
|
||||||
|
|
||||||
|
|
||||||
|
# For kexts with basic handling requirements
|
||||||
|
KEXT_DICTIONARY = {
|
||||||
|
|
||||||
|
"Acidanthera": {
|
||||||
|
"AirportBrcmFixup": {
|
||||||
|
"Repository": "https://github.com/acidanthera/AirportBrcmFixup",
|
||||||
|
"Constants Variable": "self.airportbcrmfixup_version",
|
||||||
|
},
|
||||||
|
# Due to issues with legacy Macs, don't update
|
||||||
|
# "AppleALC": {
|
||||||
|
# "Repository": "https://github.com/acidanthera/AppleALC",
|
||||||
|
# "Constants Variable": "self.applealc_version",
|
||||||
|
# },
|
||||||
|
"BlueToolFixup": {
|
||||||
|
"Repository": "https://github.com/acidanthera/BrcmPatchRAM",
|
||||||
|
"Constants Variable": "self.bluetoolfixup_version",
|
||||||
|
"Override": "BrcmPatchRAM",
|
||||||
|
},
|
||||||
|
"CPUFriend": {
|
||||||
|
"Repository": "https://github.com/acidanthera/CPUFriend",
|
||||||
|
"Constants Variable": "self.cpufriend_version",
|
||||||
|
},
|
||||||
|
"CryptexFixup": {
|
||||||
|
"Repository": "https://github.com/acidanthera/CryptexFixup",
|
||||||
|
"Constants Variable": "self.cryptexfixup_version",
|
||||||
|
},
|
||||||
|
"DebugEnhancer": {
|
||||||
|
"Repository": "https://github.com/acidanthera/DebugEnhancer",
|
||||||
|
"Constants Variable": "self.debugenhancer_version",
|
||||||
|
},
|
||||||
|
"FeatureUnlock": {
|
||||||
|
"Repository": "https://github.com/acidanthera/FeatureUnlock",
|
||||||
|
"Constants Variable": "self.featureunlock_version",
|
||||||
|
},
|
||||||
|
"Lilu": {
|
||||||
|
"Repository": "https://github.com/acidanthera/Lilu",
|
||||||
|
"Constants Variable": "self.lilu_version",
|
||||||
|
},
|
||||||
|
"NVMeFix": {
|
||||||
|
"Repository": "https://github.com/acidanthera/NVMeFix",
|
||||||
|
"Constants Variable": "self.nvmefix_version",
|
||||||
|
},
|
||||||
|
"RestrictEvents": {
|
||||||
|
"Repository": "https://github.com/acidanthera/RestrictEvents",
|
||||||
|
"Constants Variable": "self.restrictevents_version",
|
||||||
|
},
|
||||||
|
"RSRHelper": {
|
||||||
|
"Repository": "https://github.com/khronokernel/RSRHelper",
|
||||||
|
"Constants Variable": "self.rsrhelper_version",
|
||||||
|
},
|
||||||
|
"WhateverGreen": {
|
||||||
|
"Repository": "https://github.com/acidanthera/WhateverGreen",
|
||||||
|
"Constants Variable": "self.whatevergreen_version",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
|
||||||
|
"Misc": {
|
||||||
|
"Innie": {
|
||||||
|
"Repository": "https://github.com/cdf/Innie",
|
||||||
|
"Constants Variable": "self.innie_version",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
class GenerateKexts:
|
||||||
|
|
||||||
|
def __init__(self):
|
||||||
|
self.weg_version = None
|
||||||
|
self.weg_old = None
|
||||||
|
self.lilu_version = None
|
||||||
|
|
||||||
|
self._set_cwd()
|
||||||
|
self._iterate_over_kexts()
|
||||||
|
self._special_kext_handling()
|
||||||
|
|
||||||
|
|
||||||
|
def _set_cwd(self):
|
||||||
|
# Set working directory to script location
|
||||||
|
script_path = Path(__file__).parent.absolute()
|
||||||
|
os.chdir(script_path)
|
||||||
|
|
||||||
|
def _special_kext_handling(self):
|
||||||
|
# Generate custom WhateverGreen
|
||||||
|
if self.weg_version is None or self.lilu_version is None or self.weg_old is None:
|
||||||
|
raise Exception("Unable to find latest WEG version!")
|
||||||
|
|
||||||
|
if packaging.version.parse(self.weg_version) <= packaging.version.parse(self.weg_old):
|
||||||
|
print(" WEG is up to date!")
|
||||||
|
return
|
||||||
|
|
||||||
|
# WhateverGreen
|
||||||
|
print("Building modified WhateverGreen...")
|
||||||
|
# We have to compile WEG ourselves
|
||||||
|
weg_source_url = f"https://github.com/acidanthera/WhateverGreen/archive/refs/tags/{self.weg_version}.zip"
|
||||||
|
lilu_url = f"https://github.com/acidanthera/Lilu/releases/download/{self.lilu_version}/Lilu-{self.lilu_version}-DEBUG.zip"
|
||||||
|
with tempfile.TemporaryDirectory() as temp_dir:
|
||||||
|
# Download source
|
||||||
|
weg_source_zip = f"{temp_dir}/WhateverGreen-{self.weg_version}.zip"
|
||||||
|
subprocess.run(["curl", "-L", weg_source_url, "-o", weg_source_zip], check=True, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
|
||||||
|
|
||||||
|
# Unzip source
|
||||||
|
subprocess.run(["unzip", weg_source_zip, "-d", temp_dir], check=True, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
|
||||||
|
|
||||||
|
# Git clone MacKernelSDK into source
|
||||||
|
subprocess.run(["git", "clone", "https://github.com/acidanthera/MacKernelSDK", f"{temp_dir}/WhateverGreen-{self.weg_version}/MacKernelSDK"], check=True, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
|
||||||
|
|
||||||
|
# Grab latest Lilu release, debug version
|
||||||
|
lilu_zip = f"{temp_dir}/Lilu-{self.lilu_version}-DEBUG.zip"
|
||||||
|
subprocess.run(["curl", "-L", lilu_url, "-o", lilu_zip], check=True, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
|
||||||
|
|
||||||
|
# Unzip Lilu into WEG source
|
||||||
|
subprocess.run(["unzip", lilu_zip, "-d", f"{temp_dir}/WhateverGreen-{self.weg_version}"], check=True, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
|
||||||
|
|
||||||
|
# Apply patch
|
||||||
|
patch_path = Path("./Acidanthera/WhateverGreen-Navi-Backlight.patch").absolute()
|
||||||
|
subprocess.run(["git", "apply", patch_path], check=True, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL, cwd=f"{temp_dir}/WhateverGreen-{self.weg_version}")
|
||||||
|
|
||||||
|
# Build WEG
|
||||||
|
for variant in ["Release", "Debug"]:
|
||||||
|
subprocess.run(["xcodebuild", "-configuration", variant], cwd=f"{temp_dir}/WhateverGreen-{self.weg_version}", check=True, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
|
||||||
|
|
||||||
|
|
||||||
|
# Zip Release
|
||||||
|
for variant in ["RELEASE", "DEBUG"]:
|
||||||
|
dst_path = Path(f"./Acidanthera/WhateverGreen-v{self.weg_version}-Navi-{variant}.zip").absolute()
|
||||||
|
subprocess.run(["zip", "-r", dst_path, "WhateverGreen.kext"], check=True, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL, cwd=f"{temp_dir}/WhateverGreen-{self.weg_version}/build/{'Release' if variant == 'RELEASE' else 'Debug'}")
|
||||||
|
if Path(f"./Acidanthera/WhateverGreen-v{self.weg_old}-Navi-{variant}.zip").exists():
|
||||||
|
subprocess.run(["rm", f"./Acidanthera/WhateverGreen-v{self.weg_old}-Navi-{variant}.zip"], check=True, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
|
||||||
|
|
||||||
|
self._update_constants_file("self.whatevergreen_navi_version", f"{self.weg_old}-Navi", f"{self.weg_version}-Navi")
|
||||||
|
|
||||||
|
|
||||||
|
def _iterate_over_kexts(self):
|
||||||
|
for kext_folder in KEXT_DICTIONARY:
|
||||||
|
for kext_name in KEXT_DICTIONARY[kext_folder]:
|
||||||
|
print(f"Checking {kext_name}...")
|
||||||
|
if "Override" in KEXT_DICTIONARY[kext_folder][kext_name]:
|
||||||
|
self._get_latest_release(kext_folder, kext_name, override_kext_zip_name=KEXT_DICTIONARY[kext_folder][kext_name]["Override"])
|
||||||
|
else:
|
||||||
|
self._get_latest_release(kext_folder, kext_name)
|
||||||
|
|
||||||
|
|
||||||
|
def _get_latest_release(self, kext_folder, kext_name, override_kext_zip_name=None):
|
||||||
|
# Get latest release from GitHub API
|
||||||
|
repo_url = KEXT_DICTIONARY[kext_folder][kext_name]["Repository"].replace("https://github.com", "https://api.github.com/repos")
|
||||||
|
latest_release = requests.get(f"{repo_url}/releases/latest").json()
|
||||||
|
|
||||||
|
for variant in ["RELEASE", "DEBUG"]:
|
||||||
|
|
||||||
|
if "tag_name" not in latest_release:
|
||||||
|
print(f" Error: {latest_release['message']}")
|
||||||
|
continue
|
||||||
|
|
||||||
|
remote_version = latest_release["tag_name"]
|
||||||
|
if remote_version.startswith("v"):
|
||||||
|
remote_version = remote_version[1:]
|
||||||
|
|
||||||
|
if kext_name == "WhateverGreen":
|
||||||
|
self.weg_version = remote_version
|
||||||
|
elif kext_name == "Lilu":
|
||||||
|
self.lilu_version = remote_version
|
||||||
|
|
||||||
|
local_version = self._get_local_version(kext_folder, kext_name, variant)
|
||||||
|
if kext_name == "WhateverGreen":
|
||||||
|
self.weg_old = local_version
|
||||||
|
|
||||||
|
if packaging.version.parse(remote_version) <= packaging.version.parse(local_version):
|
||||||
|
print(f" {kext_name} {variant} is up to date: v{local_version}")
|
||||||
|
continue
|
||||||
|
|
||||||
|
for asset in latest_release["assets"]:
|
||||||
|
if not asset["name"].endswith(f"{variant}.zip"):
|
||||||
|
continue
|
||||||
|
print(f" Downloading {kext_name} {variant}: v{remote_version}...")
|
||||||
|
zip_name = f"{override_kext_zip_name}-v{remote_version}-{variant}.zip" if override_kext_zip_name else f"{kext_name}-v{remote_version}-{variant}.zip"
|
||||||
|
|
||||||
|
self._download_file(asset["browser_download_url"], f"./{kext_folder}/{zip_name}", f"{kext_name}.kext")
|
||||||
|
if Path(f"./{kext_folder}/{zip_name}").exists():
|
||||||
|
subprocess.run(["rm", "-rf", f"./{kext_folder}/{zip_name.replace(f'v{remote_version}', f'v{local_version}')}"])
|
||||||
|
self._update_constants_file(KEXT_DICTIONARY[kext_folder][kext_name]["Constants Variable"], local_version, remote_version)
|
||||||
|
|
||||||
|
if override_kext_zip_name:
|
||||||
|
# rename zip file
|
||||||
|
os.rename(f"./{kext_folder}/{zip_name}", f"./{kext_folder}/{kext_name}-v{remote_version}-{variant}.zip")
|
||||||
|
subprocess.run(["rm", "-rf", f"./{kext_folder}/{kext_name}-v{local_version}-{variant}.zip"])
|
||||||
|
|
||||||
|
|
||||||
|
def _get_local_version(self, kext_folder, kext_name, variant):
|
||||||
|
loose_name_start = f"{kext_name}-v"
|
||||||
|
loose_name_end = f"-{variant}.zip"
|
||||||
|
|
||||||
|
for file in Path(f"./{kext_folder}").iterdir():
|
||||||
|
if file.name.startswith(loose_name_start) and file.name.endswith(loose_name_end):
|
||||||
|
local_version = file.name.replace(loose_name_start, "").replace(loose_name_end, "")
|
||||||
|
if local_version.startswith("v"):
|
||||||
|
local_version = local_version[1:]
|
||||||
|
return local_version[:5]
|
||||||
|
|
||||||
|
raise Exception(f"Could not find local version for {kext_name} {variant}")
|
||||||
|
|
||||||
|
|
||||||
|
def _download_file(self, url, file_path, file):
|
||||||
|
# Download file
|
||||||
|
if Path(file_path).exists():
|
||||||
|
os.remove(file_path)
|
||||||
|
|
||||||
|
with tempfile.TemporaryDirectory() as temp_dir:
|
||||||
|
download = requests.get(url)
|
||||||
|
with open(f"{temp_dir}/temp.zip", "wb") as f:
|
||||||
|
f.write(download.content)
|
||||||
|
|
||||||
|
# Unzip file
|
||||||
|
subprocess.run(["unzip", "-q", f"{temp_dir}/temp.zip", "-d", f"{temp_dir}"], check=True)
|
||||||
|
|
||||||
|
print(f" Moving {file} to {file_path}...")
|
||||||
|
# Zip file
|
||||||
|
subprocess.run(["zip", "-q", "-r", Path(file_path).name, file], cwd=f"{temp_dir}", check=True)
|
||||||
|
|
||||||
|
# Move file
|
||||||
|
subprocess.run(["mv", f"{temp_dir}/{Path(file_path).name}", file_path], check=True)
|
||||||
|
|
||||||
|
|
||||||
|
def _update_constants_file(self, variable_name, old_version, new_version):
|
||||||
|
print(f" Updating {variable_name} to {new_version}...")
|
||||||
|
constants_file = Path("../../resources/constants.py")
|
||||||
|
if not constants_file.exists():
|
||||||
|
raise Exception("Constants file does not exist")
|
||||||
|
constants_file_contents = constants_file.read_text()
|
||||||
|
|
||||||
|
# Replace version
|
||||||
|
for line in constants_file_contents.splitlines():
|
||||||
|
if variable_name in line:
|
||||||
|
constants_file_contents = constants_file_contents.replace(line, line.replace(old_version, new_version))
|
||||||
|
break
|
||||||
|
|
||||||
|
# Write file
|
||||||
|
constants_file.write_text(constants_file_contents)
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == '__main__':
|
||||||
|
GenerateKexts()
|
||||||
Binary file not shown.
Binary file not shown.
Binary file not shown.
@@ -1,27 +1,55 @@
|
|||||||
# Determine AppleMobileFileIntegrity's OS configuration
|
# Determine AppleMobileFileIntegrity's OS configuration
|
||||||
|
# Copyright (C) 2022-2023, Mykola Grymalyuk
|
||||||
|
|
||||||
|
import enum
|
||||||
from resources import utilities
|
from resources import utilities
|
||||||
|
from data import amfi_data
|
||||||
class amfi_configuration_detection:
|
|
||||||
|
|
||||||
def __init__(self):
|
|
||||||
self.AMFI_ALLOW_TASK_FOR_PID = False
|
|
||||||
self.AMFI_ALLOW_INVALID_SIGNATURE = False
|
|
||||||
self.AMFI_LV_ENFORCE_THIRD_PARTY = False
|
|
||||||
self.AMFI_ALLOW_EVERYTHING = False
|
|
||||||
self.SKIP_LIBRARY_VALIDATION = False
|
|
||||||
|
|
||||||
self.boot_args = []
|
|
||||||
self.oclp_args = []
|
|
||||||
|
|
||||||
self.init_nvram_dicts()
|
|
||||||
|
|
||||||
self.parse_amfi_bitmask()
|
|
||||||
self.parse_amfi_boot_args()
|
|
||||||
self.parse_oclp_configuration()
|
|
||||||
|
|
||||||
|
|
||||||
def init_nvram_dicts(self):
|
class AmfiConfigDetectLevel(enum.IntEnum):
|
||||||
|
"""
|
||||||
|
Configuration levels used by AmfiConfigurationDetection
|
||||||
|
"""
|
||||||
|
|
||||||
|
NO_CHECK: int = 0
|
||||||
|
LIBRARY_VALIDATION: int = 1 # For Ventura, use LIBRARY_VALIDATION_AND_SIG
|
||||||
|
LIBRARY_VALIDATION_AND_SIG: int = 2
|
||||||
|
ALLOW_ALL: int = 3
|
||||||
|
|
||||||
|
|
||||||
|
class AmfiConfigurationDetection:
|
||||||
|
"""
|
||||||
|
Detect AppleMobileFileIntegrity's OS configuration
|
||||||
|
|
||||||
|
Usage:
|
||||||
|
|
||||||
|
>>> import amfi_detect
|
||||||
|
>>> can_patch = amfi_detect.AmfiConfigurationDetection().check_config(amfi_detect.AmfiConfigDetectLevel.ALLOW_ALL)
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self) -> None:
|
||||||
|
self.AMFI_ALLOW_TASK_FOR_PID: bool = False
|
||||||
|
self.AMFI_ALLOW_INVALID_SIGNATURE: bool = False
|
||||||
|
self.AMFI_LV_ENFORCE_THIRD_PARTY: bool = False
|
||||||
|
self.AMFI_ALLOW_EVERYTHING: bool = False
|
||||||
|
self.SKIP_LIBRARY_VALIDATION: bool = False
|
||||||
|
|
||||||
|
self.boot_args: list = []
|
||||||
|
self.oclp_args: list = []
|
||||||
|
|
||||||
|
self._init_nvram_dicts()
|
||||||
|
|
||||||
|
self._parse_amfi_bitmask()
|
||||||
|
self._parse_amfi_boot_args()
|
||||||
|
self._parse_oclp_configuration()
|
||||||
|
|
||||||
|
|
||||||
|
def _init_nvram_dicts(self) -> None:
|
||||||
|
"""
|
||||||
|
Initialize the boot-args and OCLP-Settings NVRAM dictionaries
|
||||||
|
"""
|
||||||
|
|
||||||
boot_args = utilities.get_nvram("boot-args", decode=True)
|
boot_args = utilities.get_nvram("boot-args", decode=True)
|
||||||
oclp_args = utilities.get_nvram("OCLP-Settings", "4D1FDA02-38C7-4A6A-9CC6-4BCCA8B30102", decode=True)
|
oclp_args = utilities.get_nvram("OCLP-Settings", "4D1FDA02-38C7-4A6A-9CC6-4BCCA8B30102", decode=True)
|
||||||
|
|
||||||
@@ -32,40 +60,47 @@ class amfi_configuration_detection:
|
|||||||
self.oclp_args = oclp_args.split(" ")
|
self.oclp_args = oclp_args.split(" ")
|
||||||
|
|
||||||
|
|
||||||
def parse_amfi_bitmask(self):
|
def _parse_amfi_bitmask(self) -> None:
|
||||||
# See data/amfi_data.py for more information
|
"""
|
||||||
|
Parse the AMFI bitmask from boot-args
|
||||||
|
See data/amfi_data.py for more information
|
||||||
|
"""
|
||||||
|
|
||||||
amfi_value = 0
|
amfi_value = 0
|
||||||
for arg in self.boot_args:
|
for arg in self.boot_args:
|
||||||
if arg.startswith("amfi="):
|
if not arg.startswith("amfi="):
|
||||||
try:
|
continue
|
||||||
amfi_value = arg.split("=")
|
try:
|
||||||
if len(amfi_value) != 2:
|
amfi_value = arg.split("=")
|
||||||
return
|
if len(amfi_value) != 2:
|
||||||
amfi_value = amfi_value[1]
|
|
||||||
if amfi_value.startswith("0x"):
|
|
||||||
amfi_value = int(amfi_value, 16)
|
|
||||||
else:
|
|
||||||
amfi_value = int(amfi_value)
|
|
||||||
except:
|
|
||||||
return
|
return
|
||||||
break
|
amfi_value = amfi_value[1]
|
||||||
|
if amfi_value.startswith("0x"):
|
||||||
|
amfi_value = int(amfi_value, 16)
|
||||||
|
else:
|
||||||
|
amfi_value = int(amfi_value)
|
||||||
|
except:
|
||||||
|
return
|
||||||
|
break
|
||||||
|
|
||||||
if amfi_value == 0:
|
if amfi_value == 0:
|
||||||
return
|
return
|
||||||
|
|
||||||
if amfi_value & 0x1:
|
self.AMFI_ALLOW_TASK_FOR_PID: bool = amfi_value & amfi_data.AppleMobileFileIntegrity.AMFI_ALLOW_TASK_FOR_PID
|
||||||
self.AMFI_ALLOW_TASK_FOR_PID = True
|
self.AMFI_ALLOW_INVALID_SIGNATURE: bool = amfi_value & amfi_data.AppleMobileFileIntegrity.AMFI_ALLOW_INVALID_SIGNATURE
|
||||||
if amfi_value & 0x2:
|
self.AMFI_LV_ENFORCE_THIRD_PARTY: bool = amfi_value & amfi_data.AppleMobileFileIntegrity.AMFI_LV_ENFORCE_THIRD_PARTY
|
||||||
self.AMFI_ALLOW_INVALID_SIGNATURE = True
|
|
||||||
if amfi_value & 0x4:
|
if amfi_value & amfi_data.AppleMobileFileIntegrity.AMFI_ALLOW_EVERYTHING:
|
||||||
self.AMFI_LV_ENFORCE_THIRD_PARTY = True
|
self.AMFI_ALLOW_EVERYTHING = True
|
||||||
if amfi_value & 0x80:
|
self.SKIP_LIBRARY_VALIDATION = True
|
||||||
self.AMFI_ALLOW_EVERYTHING = True
|
|
||||||
self.SKIP_LIBRARY_VALIDATION = True
|
|
||||||
self.AMFI_ALLOW_INVALID_SIGNATURE = True
|
self.AMFI_ALLOW_INVALID_SIGNATURE = True
|
||||||
|
|
||||||
|
|
||||||
def parse_amfi_boot_args(self):
|
def _parse_amfi_boot_args(self) -> None:
|
||||||
|
"""
|
||||||
|
Parse the AMFI boot-args
|
||||||
|
"""
|
||||||
|
|
||||||
for arg in self.boot_args:
|
for arg in self.boot_args:
|
||||||
if arg.startswith("amfi_unrestrict_task_for_pid"):
|
if arg.startswith("amfi_unrestrict_task_for_pid"):
|
||||||
value = arg.split("=")
|
value = arg.split("=")
|
||||||
@@ -86,26 +121,34 @@ class amfi_configuration_detection:
|
|||||||
self.AMFI_ALLOW_INVALID_SIGNATURE = True
|
self.AMFI_ALLOW_INVALID_SIGNATURE = True
|
||||||
|
|
||||||
|
|
||||||
def parse_oclp_configuration(self):
|
def _parse_oclp_configuration(self) -> None:
|
||||||
|
"""
|
||||||
|
Parse the OCLP configuration
|
||||||
|
"""
|
||||||
|
|
||||||
if "-allow_amfi" in self.oclp_args:
|
if "-allow_amfi" in self.oclp_args:
|
||||||
self.SKIP_LIBRARY_VALIDATION = True
|
self.SKIP_LIBRARY_VALIDATION = True
|
||||||
|
|
||||||
|
|
||||||
def check_config(self, level):
|
def check_config(self, level: int) -> bool:
|
||||||
# Levels:
|
"""
|
||||||
# - 0: No checks
|
Check the AMFI configuration based on provided AMFI level
|
||||||
# - 1. Library Validation (Monterey and Older)
|
See AmfiConfigLevel enum for valid levels
|
||||||
# - 2. Library Validation and Signature Checks (Ventura and Newer)
|
|
||||||
# - 3. Disable all AMFI checks
|
|
||||||
|
|
||||||
if level == 0:
|
Parameters:
|
||||||
|
level (int): The level of AMFI checks to check for
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
bool: True if the AMFI configuration matches the level, False otherwise
|
||||||
|
"""
|
||||||
|
|
||||||
|
if level == AmfiConfigDetectLevel.NO_CHECK:
|
||||||
return True
|
return True
|
||||||
|
if level == AmfiConfigDetectLevel.LIBRARY_VALIDATION:
|
||||||
if level == 1:
|
|
||||||
return self.SKIP_LIBRARY_VALIDATION
|
return self.SKIP_LIBRARY_VALIDATION
|
||||||
if level == 2:
|
if level == AmfiConfigDetectLevel.LIBRARY_VALIDATION_AND_SIG:
|
||||||
return bool(self.SKIP_LIBRARY_VALIDATION and self.AMFI_ALLOW_INVALID_SIGNATURE)
|
return bool(self.SKIP_LIBRARY_VALIDATION and self.AMFI_ALLOW_INVALID_SIGNATURE)
|
||||||
if level == 3:
|
if level == AmfiConfigDetectLevel.ALLOW_ALL:
|
||||||
return self.AMFI_ALLOW_EVERYTHING
|
return self.AMFI_ALLOW_EVERYTHING
|
||||||
|
|
||||||
return False
|
return False
|
||||||
@@ -1,115 +1,186 @@
|
|||||||
|
import threading
|
||||||
|
import time
|
||||||
|
import logging
|
||||||
import sys
|
import sys
|
||||||
from resources import defaults, utilities, validation
|
|
||||||
|
from resources import defaults, utilities, validation, constants
|
||||||
from resources.sys_patch import sys_patch, sys_patch_auto
|
from resources.sys_patch import sys_patch, sys_patch_auto
|
||||||
from resources.build import build
|
from resources.build import build
|
||||||
from data import model_array
|
from data import model_array
|
||||||
import threading
|
|
||||||
import time
|
|
||||||
|
|
||||||
# Generic building args
|
# Generic building args
|
||||||
class arguments:
|
class arguments:
|
||||||
def __init__(self):
|
|
||||||
|
def __init__(self, global_constants: constants.Constants) -> None:
|
||||||
|
self.constants: constants.Constants = global_constants
|
||||||
|
|
||||||
self.args = utilities.check_cli_args()
|
self.args = utilities.check_cli_args()
|
||||||
|
|
||||||
def parse_arguments(self, settings):
|
self._parse_arguments()
|
||||||
|
|
||||||
|
|
||||||
|
def _parse_arguments(self) -> None:
|
||||||
|
"""
|
||||||
|
Parses arguments passed to the patcher
|
||||||
|
"""
|
||||||
|
|
||||||
if self.args.validate:
|
if self.args.validate:
|
||||||
validation.validate(settings)
|
self._validation_handler()
|
||||||
elif self.args.build:
|
return
|
||||||
|
|
||||||
|
if self.args.build:
|
||||||
|
self._build_handler()
|
||||||
|
return
|
||||||
|
|
||||||
|
if self.args.patch_sys_vol:
|
||||||
|
self._sys_patch_handler()
|
||||||
|
return
|
||||||
|
|
||||||
|
if self.args.unpatch_sys_vol:
|
||||||
|
self._sys_unpatch_handler()
|
||||||
|
return
|
||||||
|
|
||||||
|
if self.args.auto_patch:
|
||||||
|
self._sys_patch_auto_handler()
|
||||||
|
return
|
||||||
|
|
||||||
|
|
||||||
|
def _validation_handler(self) -> None:
|
||||||
|
"""
|
||||||
|
Enter validation mode
|
||||||
|
"""
|
||||||
|
|
||||||
|
validation.PatcherValidation(self.constants)
|
||||||
|
|
||||||
|
|
||||||
|
def _sys_patch_handler(self) -> None:
|
||||||
|
"""
|
||||||
|
Start root volume patching
|
||||||
|
"""
|
||||||
|
|
||||||
|
logging.info("- Set System Volume patching")
|
||||||
|
if "Library/InstallerSandboxes/" in str(self.constants.payload_path):
|
||||||
|
logging.info("- Running from Installer Sandbox")
|
||||||
|
thread = threading.Thread(target=sys_patch.PatchSysVolume(self.constants.custom_model or self.constants.computer.real_model, self.constants, None).start_patch)
|
||||||
|
thread.start()
|
||||||
|
while thread.is_alive():
|
||||||
|
utilities.block_os_updaters()
|
||||||
|
time.sleep(1)
|
||||||
|
else:
|
||||||
|
sys_patch.PatchSysVolume(self.constants.custom_model or self.constants.computer.real_model, self.constants, None).start_patch()
|
||||||
|
|
||||||
|
|
||||||
|
def _sys_unpatch_handler(self) -> None:
|
||||||
|
"""
|
||||||
|
Start root volume unpatching
|
||||||
|
"""
|
||||||
|
logging.info("- Set System Volume unpatching")
|
||||||
|
sys_patch.PatchSysVolume(self.constants.custom_model or self.constants.computer.real_model, self.constants, None).start_unpatch()
|
||||||
|
|
||||||
|
|
||||||
|
def _sys_patch_auto_handler(self) -> None:
|
||||||
|
"""
|
||||||
|
Start root volume auto patching
|
||||||
|
"""
|
||||||
|
|
||||||
|
logging.info("- Set Auto patching")
|
||||||
|
sys_patch_auto.AutomaticSysPatch(self.constants).start_auto_patch()
|
||||||
|
|
||||||
|
|
||||||
|
def _build_handler(self) -> None:
|
||||||
|
"""
|
||||||
|
Start config building process
|
||||||
|
"""
|
||||||
|
|
||||||
|
if self.args.model:
|
||||||
if self.args.model:
|
if self.args.model:
|
||||||
if self.args.model:
|
logging.info(f"- Using custom model: {self.args.model}")
|
||||||
print(f"- Using custom model: {self.args.model}")
|
self.constants.custom_model = self.args.model
|
||||||
settings.custom_model = self.args.model
|
defaults.GenerateDefaults(self.constants.custom_model, False, self.constants)
|
||||||
defaults.generate_defaults(settings.custom_model, False, settings)
|
elif self.constants.computer.real_model not in model_array.SupportedSMBIOS and self.constants.allow_oc_everywhere is False:
|
||||||
elif settings.computer.real_model not in model_array.SupportedSMBIOS and settings.allow_oc_everywhere is False:
|
logging.info(
|
||||||
print(
|
"""Your model is not supported by this patcher for running unsupported OSes!"
|
||||||
"""Your model is not supported by this patcher for running unsupported OSes!"
|
|
||||||
|
|
||||||
If you plan to create the USB for another machine, please select the "Change Model" option in the menu."""
|
If you plan to create the USB for another machine, please select the "Change Model" option in the menu."""
|
||||||
)
|
)
|
||||||
sys.exit(1)
|
sys.exit(1)
|
||||||
else:
|
else:
|
||||||
print(f"- Using detected model: {settings.computer.real_model}")
|
logging.info(f"- Using detected model: {self.constants.computer.real_model}")
|
||||||
defaults.generate_defaults(settings.custom_model, True, settings)
|
defaults.GenerateDefaults(self.constants.custom_model, True, self.constants)
|
||||||
|
|
||||||
if self.args.disk:
|
if self.args.verbose:
|
||||||
print(f"- Install Disk set: {self.args.disk}")
|
logging.info("- Set verbose configuration")
|
||||||
settings.disk = self.args.disk
|
self.constants.verbose_debug = True
|
||||||
if self.args.verbose:
|
else:
|
||||||
print("- Set verbose configuration")
|
self.constants.verbose_debug = False # Override Defaults detected
|
||||||
settings.verbose_debug = True
|
|
||||||
else:
|
|
||||||
settings.verbose_debug = False # Override Defaults detected
|
|
||||||
if self.args.debug_oc:
|
|
||||||
print("- Set OpenCore DEBUG configuration")
|
|
||||||
settings.opencore_debug = True
|
|
||||||
settings.opencore_build = "DEBUG"
|
|
||||||
if self.args.debug_kext:
|
|
||||||
print("- Set kext DEBUG configuration")
|
|
||||||
settings.kext_debug = True
|
|
||||||
if self.args.hide_picker:
|
|
||||||
print("- Set HidePicker configuration")
|
|
||||||
settings.showpicker = False
|
|
||||||
if self.args.disable_sip:
|
|
||||||
print("- Set Disable SIP configuration")
|
|
||||||
settings.sip_status = False
|
|
||||||
else:
|
|
||||||
settings.sip_status = True # Override Defaults detected
|
|
||||||
if self.args.disable_smb:
|
|
||||||
print("- Set Disable SecureBootModel configuration")
|
|
||||||
settings.secure_status = False
|
|
||||||
else:
|
|
||||||
settings.secure_status = True # Override Defaults detected
|
|
||||||
if self.args.vault:
|
|
||||||
print("- Set Vault configuration")
|
|
||||||
settings.vault = True
|
|
||||||
if self.args.firewire:
|
|
||||||
print("- Set FireWire Boot configuration")
|
|
||||||
settings.firewire_boot = True
|
|
||||||
if self.args.nvme:
|
|
||||||
print("- Set NVMe Boot configuration")
|
|
||||||
settings.nvme_boot = True
|
|
||||||
if self.args.wlan:
|
|
||||||
print("- Set Wake on WLAN configuration")
|
|
||||||
settings.enable_wake_on_wlan = True
|
|
||||||
if self.args.disable_tb:
|
|
||||||
print("- Set Disable Thunderbolt configuration")
|
|
||||||
settings.disable_tb = True
|
|
||||||
if self.args.force_surplus:
|
|
||||||
print("- Forcing SurPlus override configuration")
|
|
||||||
settings.force_surplus = True
|
|
||||||
if self.args.moderate_smbios:
|
|
||||||
print("- Set Moderate SMBIOS Patching configuration")
|
|
||||||
settings.serial_settings = "Moderate"
|
|
||||||
if self.args.smbios_spoof:
|
|
||||||
if self.args.smbios_spoof == "Minimal":
|
|
||||||
settings.serial_settings = "Minimal"
|
|
||||||
elif self.args.smbios_spoof == "Moderate":
|
|
||||||
settings.serial_settings = "Moderate"
|
|
||||||
elif self.args.smbios_spoof == "Advanced":
|
|
||||||
settings.serial_settings = "Advanced"
|
|
||||||
else:
|
|
||||||
print(f"- Unknown SMBIOS arg passed: {self.args.smbios_spoof}")
|
|
||||||
|
|
||||||
if self.args.support_all:
|
if self.args.debug_oc:
|
||||||
print("- Building for natively supported model")
|
logging.info("- Set OpenCore DEBUG configuration")
|
||||||
settings.allow_oc_everywhere = True
|
self.constants.opencore_debug = True
|
||||||
settings.serial_settings = "None"
|
self.constants.opencore_build = "DEBUG"
|
||||||
build.build_opencore(settings.custom_model or settings.computer.real_model, settings).build_opencore()
|
|
||||||
elif self.args.patch_sys_vol:
|
|
||||||
print("- Set System Volume patching")
|
|
||||||
|
|
||||||
if "Library/InstallerSandboxes/" in str(settings.payload_path):
|
if self.args.debug_kext:
|
||||||
print("- Running from Installer Sandbox")
|
logging.info("- Set kext DEBUG configuration")
|
||||||
thread = threading.Thread(target=sys_patch.PatchSysVolume(settings.custom_model or settings.computer.real_model, settings, None).start_patch)
|
self.constants.kext_debug = True
|
||||||
thread.start()
|
|
||||||
while thread.is_alive():
|
if self.args.hide_picker:
|
||||||
utilities.block_os_updaters()
|
logging.info("- Set HidePicker configuration")
|
||||||
time.sleep(1)
|
self.constants.showpicker = False
|
||||||
|
|
||||||
|
if self.args.disable_sip:
|
||||||
|
logging.info("- Set Disable SIP configuration")
|
||||||
|
self.constants.sip_status = False
|
||||||
|
else:
|
||||||
|
self.constants.sip_status = True # Override Defaults detected
|
||||||
|
|
||||||
|
if self.args.disable_smb:
|
||||||
|
logging.info("- Set Disable SecureBootModel configuration")
|
||||||
|
self.constants.secure_status = False
|
||||||
|
else:
|
||||||
|
self.constants.secure_status = True # Override Defaults detected
|
||||||
|
|
||||||
|
if self.args.vault:
|
||||||
|
logging.info("- Set Vault configuration")
|
||||||
|
self.constants.vault = True
|
||||||
|
|
||||||
|
if self.args.firewire:
|
||||||
|
logging.info("- Set FireWire Boot configuration")
|
||||||
|
self.constants.firewire_boot = True
|
||||||
|
|
||||||
|
if self.args.nvme:
|
||||||
|
logging.info("- Set NVMe Boot configuration")
|
||||||
|
self.constants.nvme_boot = True
|
||||||
|
|
||||||
|
if self.args.wlan:
|
||||||
|
logging.info("- Set Wake on WLAN configuration")
|
||||||
|
self.constants.enable_wake_on_wlan = True
|
||||||
|
|
||||||
|
if self.args.disable_tb:
|
||||||
|
logging.info("- Set Disable Thunderbolt configuration")
|
||||||
|
self.constants.disable_tb = True
|
||||||
|
|
||||||
|
if self.args.force_surplus:
|
||||||
|
logging.info("- Forcing SurPlus override configuration")
|
||||||
|
self.constants.force_surplus = True
|
||||||
|
|
||||||
|
if self.args.moderate_smbios:
|
||||||
|
logging.info("- Set Moderate SMBIOS Patching configuration")
|
||||||
|
self.constants.serial_settings = "Moderate"
|
||||||
|
|
||||||
|
if self.args.smbios_spoof:
|
||||||
|
if self.args.smbios_spoof == "Minimal":
|
||||||
|
self.constants.serial_settings = "Minimal"
|
||||||
|
elif self.args.smbios_spoof == "Moderate":
|
||||||
|
self.constants.serial_settings = "Moderate"
|
||||||
|
elif self.args.smbios_spoof == "Advanced":
|
||||||
|
self.constants.serial_settings = "Advanced"
|
||||||
else:
|
else:
|
||||||
sys_patch.PatchSysVolume(settings.custom_model or settings.computer.real_model, settings, None).start_patch()
|
logging.info(f"- Unknown SMBIOS arg passed: {self.args.smbios_spoof}")
|
||||||
elif self.args.unpatch_sys_vol:
|
|
||||||
print("- Set System Volume unpatching")
|
if self.args.support_all:
|
||||||
sys_patch.PatchSysVolume(settings.custom_model or settings.computer.real_model, settings, None).start_unpatch()
|
logging.info("- Building for natively supported model")
|
||||||
elif self.args.auto_patch:
|
self.constants.allow_oc_everywhere = True
|
||||||
print("- Set Auto patching")
|
self.constants.serial_settings = "None"
|
||||||
sys_patch_auto.AutomaticSysPatch(settings).start_auto_patch()
|
|
||||||
|
build.build_opencore(self.constants.custom_model or self.constants.computer.real_model, self.constants).build_opencore()
|
||||||
|
|||||||
@@ -5,6 +5,8 @@ from resources import constants, device_probe
|
|||||||
from resources.build import support
|
from resources.build import support
|
||||||
from data import smbios_data, bluetooth_data
|
from data import smbios_data, bluetooth_data
|
||||||
|
|
||||||
|
import logging
|
||||||
|
|
||||||
class build_bluetooth:
|
class build_bluetooth:
|
||||||
|
|
||||||
def __init__(self, model, versions, config):
|
def __init__(self, model, versions, config):
|
||||||
@@ -24,7 +26,7 @@ class build_bluetooth:
|
|||||||
|
|
||||||
def on_model(self):
|
def on_model(self):
|
||||||
if self.computer.bluetooth_chipset in ["BRCM2070 Hub", "BRCM2046 Hub"]:
|
if self.computer.bluetooth_chipset in ["BRCM2070 Hub", "BRCM2046 Hub"]:
|
||||||
print("- Fixing Legacy Bluetooth for macOS Monterey")
|
logging.info("- Fixing Legacy Bluetooth for macOS Monterey")
|
||||||
support.build_support(self.model, self.constants, self.config).enable_kext("BlueToolFixup.kext", self.constants.bluetool_version, self.constants.bluetool_path)
|
support.build_support(self.model, self.constants, self.config).enable_kext("BlueToolFixup.kext", self.constants.bluetool_version, self.constants.bluetool_path)
|
||||||
support.build_support(self.model, self.constants, self.config).enable_kext("Bluetooth-Spoof.kext", self.constants.btspoof_version, self.constants.btspoof_path)
|
support.build_support(self.model, self.constants, self.config).enable_kext("Bluetooth-Spoof.kext", self.constants.btspoof_version, self.constants.btspoof_path)
|
||||||
self.config["NVRAM"]["Add"]["7C436110-AB2A-4BBB-A880-FE41995C9F82"]["boot-args"] += " -btlfxallowanyaddr"
|
self.config["NVRAM"]["Add"]["7C436110-AB2A-4BBB-A880-FE41995C9F82"]["boot-args"] += " -btlfxallowanyaddr"
|
||||||
@@ -34,12 +36,12 @@ class build_bluetooth:
|
|||||||
# Due to this, BlueToolFixup is required to resolve Firmware Uploading on legacy chipsets
|
# Due to this, BlueToolFixup is required to resolve Firmware Uploading on legacy chipsets
|
||||||
if self.computer.wifi:
|
if self.computer.wifi:
|
||||||
if self.computer.wifi.chipset == device_probe.Broadcom.Chipsets.AirPortBrcm4360:
|
if self.computer.wifi.chipset == device_probe.Broadcom.Chipsets.AirPortBrcm4360:
|
||||||
print("- Fixing Legacy Bluetooth for macOS Monterey")
|
logging.info("- Fixing Legacy Bluetooth for macOS Monterey")
|
||||||
support.build_support(self.model, self.constants, self.config).enable_kext("BlueToolFixup.kext", self.constants.bluetool_version, self.constants.bluetool_path)
|
support.build_support(self.model, self.constants, self.config).enable_kext("BlueToolFixup.kext", self.constants.bluetool_version, self.constants.bluetool_path)
|
||||||
elif self.computer.bluetooth_chipset == "3rd Party Bluetooth 4.0 Hub":
|
elif self.computer.bluetooth_chipset == "3rd Party Bluetooth 4.0 Hub":
|
||||||
print("- Detected 3rd Party Bluetooth Chipset")
|
logging.info("- Detected 3rd Party Bluetooth Chipset")
|
||||||
support.build_support(self.model, self.constants, self.config).enable_kext("BlueToolFixup.kext", self.constants.bluetool_version, self.constants.bluetool_path)
|
support.build_support(self.model, self.constants, self.config).enable_kext("BlueToolFixup.kext", self.constants.bluetool_version, self.constants.bluetool_path)
|
||||||
print("- Enabling Bluetooth FeatureFlags")
|
logging.info("- Enabling Bluetooth FeatureFlags")
|
||||||
self.config["Kernel"]["Quirks"]["ExtendBTFeatureFlags"] = True
|
self.config["Kernel"]["Quirks"]["ExtendBTFeatureFlags"] = True
|
||||||
|
|
||||||
|
|
||||||
@@ -50,7 +52,7 @@ class build_bluetooth:
|
|||||||
return
|
return
|
||||||
|
|
||||||
if smbios_data.smbios_dictionary[self.model]["Bluetooth Model"] <= bluetooth_data.bluetooth_data.BRCM20702_v1.value:
|
if smbios_data.smbios_dictionary[self.model]["Bluetooth Model"] <= bluetooth_data.bluetooth_data.BRCM20702_v1.value:
|
||||||
print("- Fixing Legacy Bluetooth for macOS Monterey")
|
logging.info("- Fixing Legacy Bluetooth for macOS Monterey")
|
||||||
support.build_support(self.model, self.constants, self.config).enable_kext("BlueToolFixup.kext", self.constants.bluetool_version, self.constants.bluetool_path)
|
support.build_support(self.model, self.constants, self.config).enable_kext("BlueToolFixup.kext", self.constants.bluetool_version, self.constants.bluetool_path)
|
||||||
if smbios_data.smbios_dictionary[self.model]["Bluetooth Model"] <= bluetooth_data.bluetooth_data.BRCM2070.value:
|
if smbios_data.smbios_dictionary[self.model]["Bluetooth Model"] <= bluetooth_data.bluetooth_data.BRCM2070.value:
|
||||||
self.config["NVRAM"]["Add"]["7C436110-AB2A-4BBB-A880-FE41995C9F82"]["boot-args"] += " -btlfxallowanyaddr"
|
self.config["NVRAM"]["Add"]["7C436110-AB2A-4BBB-A880-FE41995C9F82"]["boot-args"] += " -btlfxallowanyaddr"
|
||||||
|
|||||||
@@ -8,6 +8,7 @@ import shutil
|
|||||||
import zipfile
|
import zipfile
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
from datetime import date
|
from datetime import date
|
||||||
|
import logging
|
||||||
|
|
||||||
from resources import constants, utilities
|
from resources import constants, utilities
|
||||||
from resources.build import bluetooth, firmware, graphics_audio, support, storage, smbios, security, misc
|
from resources.build import bluetooth, firmware, graphics_audio, support, storage, smbios, security, misc
|
||||||
@@ -30,9 +31,9 @@ class build_opencore:
|
|||||||
def build_efi(self):
|
def build_efi(self):
|
||||||
utilities.cls()
|
utilities.cls()
|
||||||
if not self.constants.custom_model:
|
if not self.constants.custom_model:
|
||||||
print(f"Building Configuration on model: {self.model}")
|
logging.info(f"Building Configuration on model: {self.model}")
|
||||||
else:
|
else:
|
||||||
print(f"Building Configuration for external model: {self.model}")
|
logging.info(f"Building Configuration for external model: {self.model}")
|
||||||
|
|
||||||
self.generate_base()
|
self.generate_base()
|
||||||
self.set_revision()
|
self.set_revision()
|
||||||
@@ -54,31 +55,32 @@ class build_opencore:
|
|||||||
|
|
||||||
# Work-around ocvalidate
|
# Work-around ocvalidate
|
||||||
if self.constants.validate is False:
|
if self.constants.validate is False:
|
||||||
print("- Adding bootmgfw.efi BlessOverride")
|
logging.info("- Adding bootmgfw.efi BlessOverride")
|
||||||
self.config["Misc"]["BlessOverride"] += ["\\EFI\\Microsoft\\Boot\\bootmgfw.efi"]
|
self.config["Misc"]["BlessOverride"] += ["\\EFI\\Microsoft\\Boot\\bootmgfw.efi"]
|
||||||
|
|
||||||
|
|
||||||
def generate_base(self):
|
def generate_base(self):
|
||||||
# Generate OpenCore base folder and config
|
# Generate OpenCore base folder and config
|
||||||
if not Path(self.constants.build_path).exists():
|
if not Path(self.constants.build_path).exists():
|
||||||
print("Creating build folder")
|
logging.info("Creating build folder")
|
||||||
Path(self.constants.build_path).mkdir()
|
Path(self.constants.build_path).mkdir()
|
||||||
else:
|
else:
|
||||||
print("Build folder already present, skipping")
|
logging.info("Build folder already present, skipping")
|
||||||
|
|
||||||
if Path(self.constants.opencore_zip_copied).exists():
|
if Path(self.constants.opencore_zip_copied).exists():
|
||||||
print("Deleting old copy of OpenCore zip")
|
logging.info("Deleting old copy of OpenCore zip")
|
||||||
Path(self.constants.opencore_zip_copied).unlink()
|
Path(self.constants.opencore_zip_copied).unlink()
|
||||||
if Path(self.constants.opencore_release_folder).exists():
|
if Path(self.constants.opencore_release_folder).exists():
|
||||||
print("Deleting old copy of OpenCore folder")
|
logging.info("Deleting old copy of OpenCore folder")
|
||||||
shutil.rmtree(self.constants.opencore_release_folder, onerror=rmtree_handler, ignore_errors=True)
|
shutil.rmtree(self.constants.opencore_release_folder, onerror=rmtree_handler, ignore_errors=True)
|
||||||
|
|
||||||
print(f"\n- Adding OpenCore v{self.constants.opencore_version} {self.constants.opencore_build}")
|
logging.info("")
|
||||||
|
logging.info(f"- Adding OpenCore v{self.constants.opencore_version} {self.constants.opencore_build}")
|
||||||
shutil.copy(self.constants.opencore_zip_source, self.constants.build_path)
|
shutil.copy(self.constants.opencore_zip_source, self.constants.build_path)
|
||||||
zipfile.ZipFile(self.constants.opencore_zip_copied).extractall(self.constants.build_path)
|
zipfile.ZipFile(self.constants.opencore_zip_copied).extractall(self.constants.build_path)
|
||||||
|
|
||||||
# Setup config.plist for editing
|
# Setup config.plist for editing
|
||||||
print("- Adding config.plist for OpenCore")
|
logging.info("- Adding config.plist for OpenCore")
|
||||||
shutil.copy(self.constants.plist_template, self.constants.oc_folder)
|
shutil.copy(self.constants.plist_template, self.constants.oc_folder)
|
||||||
self.config = plistlib.load(Path(self.constants.plist_path).open("rb"))
|
self.config = plistlib.load(Path(self.constants.plist_path).open("rb"))
|
||||||
|
|
||||||
@@ -115,9 +117,9 @@ class build_opencore:
|
|||||||
support.build_support(self.model, self.constants, self.config).sign_files()
|
support.build_support(self.model, self.constants, self.config).sign_files()
|
||||||
support.build_support(self.model, self.constants, self.config).validate_pathing()
|
support.build_support(self.model, self.constants, self.config).validate_pathing()
|
||||||
|
|
||||||
print("")
|
logging.info("")
|
||||||
print(f"Your OpenCore EFI for {self.model} has been built at:")
|
logging.info(f"Your OpenCore EFI for {self.model} has been built at:")
|
||||||
print(f" {self.constants.opencore_release_folder}")
|
logging.info(f" {self.constants.opencore_release_folder}")
|
||||||
print("")
|
logging.info("")
|
||||||
if self.constants.gui_mode is False:
|
if self.constants.gui_mode is False:
|
||||||
input("Press [Enter] to continue\n")
|
input("Press [Enter] to continue\n")
|
||||||
|
|||||||
@@ -5,7 +5,7 @@ from resources import constants, generate_smbios
|
|||||||
from resources.build import support
|
from resources.build import support
|
||||||
from data import smbios_data, cpu_data
|
from data import smbios_data, cpu_data
|
||||||
|
|
||||||
import binascii, shutil
|
import binascii, shutil, logging
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
|
|
||||||
class build_firmware:
|
class build_firmware:
|
||||||
@@ -50,7 +50,7 @@ class build_firmware:
|
|||||||
# This breaks AppleIntelCPUPowerManagement.kext matching as it no longer matches against the correct criteria
|
# This breaks AppleIntelCPUPowerManagement.kext matching as it no longer matches against the correct criteria
|
||||||
#
|
#
|
||||||
# To resolve, we patched AICPUPM to attach regardless of the value of 'intel_cpupm_matching'
|
# To resolve, we patched AICPUPM to attach regardless of the value of 'intel_cpupm_matching'
|
||||||
print("- Enabling legacy power management support")
|
logging.info("- Enabling legacy power management support")
|
||||||
support.build_support(self.model, self.constants, self.config).enable_kext("AppleIntelCPUPowerManagement.kext", self.constants.aicpupm_version, self.constants.aicpupm_path)
|
support.build_support(self.model, self.constants, self.config).enable_kext("AppleIntelCPUPowerManagement.kext", self.constants.aicpupm_version, self.constants.aicpupm_path)
|
||||||
support.build_support(self.model, self.constants, self.config).enable_kext("AppleIntelCPUPowerManagementClient.kext", self.constants.aicpupm_version, self.constants.aicpupm_client_path)
|
support.build_support(self.model, self.constants, self.config).enable_kext("AppleIntelCPUPowerManagementClient.kext", self.constants.aicpupm_version, self.constants.aicpupm_client_path)
|
||||||
|
|
||||||
@@ -60,14 +60,14 @@ class build_firmware:
|
|||||||
# This causes power management to break on pre-Ivy Bridge CPUs as they don't have correct
|
# This causes power management to break on pre-Ivy Bridge CPUs as they don't have correct
|
||||||
# power management tables provided.
|
# power management tables provided.
|
||||||
# This patch will simply increase ASPP's 'IOProbeScore' to outmatch X86PP
|
# This patch will simply increase ASPP's 'IOProbeScore' to outmatch X86PP
|
||||||
print("- Overriding ACPI SMC matching")
|
logging.info("- Overriding ACPI SMC matching")
|
||||||
support.build_support(self.model, self.constants, self.config).enable_kext("ASPP-Override.kext", self.constants.aspp_override_version, self.constants.aspp_override_path)
|
support.build_support(self.model, self.constants, self.config).enable_kext("ASPP-Override.kext", self.constants.aspp_override_version, self.constants.aspp_override_path)
|
||||||
if self.constants.disable_xcpm is True:
|
if self.constants.disable_xcpm is True:
|
||||||
# Only inject on older OSes if user requests
|
# Only inject on older OSes if user requests
|
||||||
support.build_support(self.model, self.constants, self.config).get_item_by_kv(self.config["Kernel"]["Add"], "BundlePath", "ASPP-Override.kext")["MinKernel"] = ""
|
support.build_support(self.model, self.constants, self.config).get_item_by_kv(self.config["Kernel"]["Add"], "BundlePath", "ASPP-Override.kext")["MinKernel"] = ""
|
||||||
|
|
||||||
if self.constants.disable_msr_power_ctl is True and smbios_data.smbios_dictionary[self.model]["CPU Generation"] >= cpu_data.cpu_data.nehalem.value:
|
if self.constants.disable_msr_power_ctl is True and smbios_data.smbios_dictionary[self.model]["CPU Generation"] >= cpu_data.cpu_data.nehalem.value:
|
||||||
print("- Disabling Firmware Throttling")
|
logging.info("- Disabling Firmware Throttling")
|
||||||
# Nehalem and newer systems force firmware throttling via MSR_POWER_CTL
|
# Nehalem and newer systems force firmware throttling via MSR_POWER_CTL
|
||||||
support.build_support(self.model, self.constants, self.config).enable_kext("SimpleMSR.kext", self.constants.simplemsr_version, self.constants.simplemsr_path)
|
support.build_support(self.model, self.constants, self.config).enable_kext("SimpleMSR.kext", self.constants.simplemsr_version, self.constants.simplemsr_path)
|
||||||
|
|
||||||
@@ -82,14 +82,14 @@ class build_firmware:
|
|||||||
# CPBG device in ACPI is a Co-Processor Bridge Device, which is not actually physically present
|
# CPBG device in ACPI is a Co-Processor Bridge Device, which is not actually physically present
|
||||||
# IOPCIFamily will error when enumerating this device, thus we'll power it off via _STA (has no effect in older OSes)
|
# IOPCIFamily will error when enumerating this device, thus we'll power it off via _STA (has no effect in older OSes)
|
||||||
if smbios_data.smbios_dictionary[self.model]["CPU Generation"] == cpu_data.cpu_data.nehalem.value and not (self.model.startswith("MacPro") or self.model.startswith("Xserve")):
|
if smbios_data.smbios_dictionary[self.model]["CPU Generation"] == cpu_data.cpu_data.nehalem.value and not (self.model.startswith("MacPro") or self.model.startswith("Xserve")):
|
||||||
print("- Adding SSDT-CPBG.aml")
|
logging.info("- Adding SSDT-CPBG.aml")
|
||||||
support.build_support(self.model, self.constants, self.config).get_item_by_kv(self.config["ACPI"]["Add"], "Path", "SSDT-CPBG.aml")["Enabled"] = True
|
support.build_support(self.model, self.constants, self.config).get_item_by_kv(self.config["ACPI"]["Add"], "Path", "SSDT-CPBG.aml")["Enabled"] = True
|
||||||
shutil.copy(self.constants.pci_ssdt_path, self.constants.acpi_path)
|
shutil.copy(self.constants.pci_ssdt_path, self.constants.acpi_path)
|
||||||
|
|
||||||
if cpu_data.cpu_data.sandy_bridge <= smbios_data.smbios_dictionary[self.model]["CPU Generation"] <= cpu_data.cpu_data.ivy_bridge.value and self.model != "MacPro6,1":
|
if cpu_data.cpu_data.sandy_bridge <= smbios_data.smbios_dictionary[self.model]["CPU Generation"] <= cpu_data.cpu_data.ivy_bridge.value and self.model != "MacPro6,1":
|
||||||
# Based on: https://egpu.io/forums/pc-setup/fix-dsdt-override-to-correct-error-12/
|
# Based on: https://egpu.io/forums/pc-setup/fix-dsdt-override-to-correct-error-12/
|
||||||
# Applicable for Sandy and Ivy Bridge Macs
|
# Applicable for Sandy and Ivy Bridge Macs
|
||||||
print("- Enabling Windows 10 UEFI Audio support")
|
logging.info("- Enabling Windows 10 UEFI Audio support")
|
||||||
support.build_support(self.model, self.constants, self.config).get_item_by_kv(self.config["ACPI"]["Add"], "Path", "SSDT-PCI.aml")["Enabled"] = True
|
support.build_support(self.model, self.constants, self.config).get_item_by_kv(self.config["ACPI"]["Add"], "Path", "SSDT-PCI.aml")["Enabled"] = True
|
||||||
support.build_support(self.model, self.constants, self.config).get_item_by_kv(self.config["ACPI"]["Patch"], "Comment", "BUF0 to BUF1")["Enabled"] = True
|
support.build_support(self.model, self.constants, self.config).get_item_by_kv(self.config["ACPI"]["Patch"], "Comment", "BUF0 to BUF1")["Enabled"] = True
|
||||||
shutil.copy(self.constants.windows_ssdt_path, self.constants.acpi_path)
|
shutil.copy(self.constants.windows_ssdt_path, self.constants.acpi_path)
|
||||||
@@ -110,7 +110,7 @@ class build_firmware:
|
|||||||
# Force Rosetta Cryptex installation in macOS Ventura
|
# Force Rosetta Cryptex installation in macOS Ventura
|
||||||
# Restores support for CPUs lacking AVX2.0 support
|
# Restores support for CPUs lacking AVX2.0 support
|
||||||
if smbios_data.smbios_dictionary[self.model]["CPU Generation"] <= cpu_data.cpu_data.ivy_bridge.value:
|
if smbios_data.smbios_dictionary[self.model]["CPU Generation"] <= cpu_data.cpu_data.ivy_bridge.value:
|
||||||
print("- Enabling Rosetta Cryptex support in Ventura")
|
logging.info("- Enabling Rosetta Cryptex support in Ventura")
|
||||||
support.build_support(self.model, self.constants, self.config).enable_kext("CryptexFixup.kext", self.constants.cryptexfixup_version, self.constants.cryptexfixup_path)
|
support.build_support(self.model, self.constants, self.config).enable_kext("CryptexFixup.kext", self.constants.cryptexfixup_version, self.constants.cryptexfixup_path)
|
||||||
|
|
||||||
# i3 Ivy Bridge iMacs don't support RDRAND
|
# i3 Ivy Bridge iMacs don't support RDRAND
|
||||||
@@ -119,13 +119,13 @@ class build_firmware:
|
|||||||
(smbios_data.smbios_dictionary[self.model]["CPU Generation"] <= cpu_data.cpu_data.sandy_bridge.value):
|
(smbios_data.smbios_dictionary[self.model]["CPU Generation"] <= cpu_data.cpu_data.sandy_bridge.value):
|
||||||
# Ref: https://github.com/reenigneorcim/SurPlus
|
# Ref: https://github.com/reenigneorcim/SurPlus
|
||||||
# Enable for all systems missing RDRAND support
|
# Enable for all systems missing RDRAND support
|
||||||
print("- Adding SurPlus Patch for Race Condition")
|
logging.info("- Adding SurPlus Patch for Race Condition")
|
||||||
support.build_support(self.model, self.constants, self.config).get_item_by_kv(self.config["Kernel"]["Patch"], "Comment", "SurPlus v1 - PART 1 of 2 - Patch read_erandom (inlined in _early_random)")["Enabled"] = True
|
support.build_support(self.model, self.constants, self.config).get_item_by_kv(self.config["Kernel"]["Patch"], "Comment", "SurPlus v1 - PART 1 of 2 - Patch read_erandom (inlined in _early_random)")["Enabled"] = True
|
||||||
support.build_support(self.model, self.constants, self.config).get_item_by_kv(self.config["Kernel"]["Patch"], "Comment", "SurPlus v1 - PART 2 of 2 - Patch register_and_init_prng")["Enabled"] = True
|
support.build_support(self.model, self.constants, self.config).get_item_by_kv(self.config["Kernel"]["Patch"], "Comment", "SurPlus v1 - PART 2 of 2 - Patch register_and_init_prng")["Enabled"] = True
|
||||||
if self.constants.force_surplus is True:
|
if self.constants.force_surplus is True:
|
||||||
# Syncretic forces SurPlus to only run on Beta 7 and older by default for saftey reasons
|
# Syncretic forces SurPlus to only run on Beta 7 and older by default for saftey reasons
|
||||||
# If users desires, allow forcing in newer OSes
|
# If users desires, allow forcing in newer OSes
|
||||||
print("- Allowing SurPlus on all newer OSes")
|
logging.info("- Allowing SurPlus on all newer OSes")
|
||||||
support.build_support(self.model, self.constants, self.config).get_item_by_kv(self.config["Kernel"]["Patch"], "Comment", "SurPlus v1 - PART 1 of 2 - Patch read_erandom (inlined in _early_random)")["MaxKernel"] = ""
|
support.build_support(self.model, self.constants, self.config).get_item_by_kv(self.config["Kernel"]["Patch"], "Comment", "SurPlus v1 - PART 1 of 2 - Patch read_erandom (inlined in _early_random)")["MaxKernel"] = ""
|
||||||
support.build_support(self.model, self.constants, self.config).get_item_by_kv(self.config["Kernel"]["Patch"], "Comment", "SurPlus v1 - PART 2 of 2 - Patch register_and_init_prng")["MaxKernel"] = ""
|
support.build_support(self.model, self.constants, self.config).get_item_by_kv(self.config["Kernel"]["Patch"], "Comment", "SurPlus v1 - PART 2 of 2 - Patch register_and_init_prng")["MaxKernel"] = ""
|
||||||
|
|
||||||
@@ -143,7 +143,7 @@ class build_firmware:
|
|||||||
|
|
||||||
# HID patches
|
# HID patches
|
||||||
if smbios_data.smbios_dictionary[self.model]["CPU Generation"] <= cpu_data.cpu_data.penryn.value:
|
if smbios_data.smbios_dictionary[self.model]["CPU Generation"] <= cpu_data.cpu_data.penryn.value:
|
||||||
print("- Adding IOHIDFamily patch")
|
logging.info("- Adding IOHIDFamily patch")
|
||||||
support.build_support(self.model, self.constants, self.config).get_item_by_kv(self.config["Kernel"]["Patch"], "Identifier", "com.apple.iokit.IOHIDFamily")["Enabled"] = True
|
support.build_support(self.model, self.constants, self.config).get_item_by_kv(self.config["Kernel"]["Patch"], "Identifier", "com.apple.iokit.IOHIDFamily")["Enabled"] = True
|
||||||
|
|
||||||
|
|
||||||
@@ -157,20 +157,20 @@ class build_firmware:
|
|||||||
# Exfat check
|
# Exfat check
|
||||||
if smbios_data.smbios_dictionary[self.model]["CPU Generation"] < cpu_data.cpu_data.sandy_bridge.value:
|
if smbios_data.smbios_dictionary[self.model]["CPU Generation"] < cpu_data.cpu_data.sandy_bridge.value:
|
||||||
# Sandy Bridge and newer Macs natively support ExFat
|
# Sandy Bridge and newer Macs natively support ExFat
|
||||||
print("- Adding ExFatDxeLegacy.efi")
|
logging.info("- Adding ExFatDxeLegacy.efi")
|
||||||
shutil.copy(self.constants.exfat_legacy_driver_path, self.constants.drivers_path)
|
shutil.copy(self.constants.exfat_legacy_driver_path, self.constants.drivers_path)
|
||||||
support.build_support(self.model, self.constants, self.config).get_efi_binary_by_path("ExFatDxeLegacy.efi", "UEFI", "Drivers")["Enabled"] = True
|
support.build_support(self.model, self.constants, self.config).get_efi_binary_by_path("ExFatDxeLegacy.efi", "UEFI", "Drivers")["Enabled"] = True
|
||||||
|
|
||||||
# NVMe check
|
# NVMe check
|
||||||
if self.constants.nvme_boot is True:
|
if self.constants.nvme_boot is True:
|
||||||
print("- Enabling NVMe boot support")
|
logging.info("- Enabling NVMe boot support")
|
||||||
shutil.copy(self.constants.nvme_driver_path, self.constants.drivers_path)
|
shutil.copy(self.constants.nvme_driver_path, self.constants.drivers_path)
|
||||||
support.build_support(self.model, self.constants, self.config).get_efi_binary_by_path("NvmExpressDxe.efi", "UEFI", "Drivers")["Enabled"] = True
|
support.build_support(self.model, self.constants, self.config).get_efi_binary_by_path("NvmExpressDxe.efi", "UEFI", "Drivers")["Enabled"] = True
|
||||||
|
|
||||||
# USB check
|
# USB check
|
||||||
if self.constants.xhci_boot is True:
|
if self.constants.xhci_boot is True:
|
||||||
print("- Adding USB 3.0 Controller Patch")
|
logging.info("- Adding USB 3.0 Controller Patch")
|
||||||
print("- Adding XhciDxe.efi and UsbBusDxe.efi")
|
logging.info("- Adding XhciDxe.efi and UsbBusDxe.efi")
|
||||||
shutil.copy(self.constants.xhci_driver_path, self.constants.drivers_path)
|
shutil.copy(self.constants.xhci_driver_path, self.constants.drivers_path)
|
||||||
shutil.copy(self.constants.usb_bus_driver_path, self.constants.drivers_path)
|
shutil.copy(self.constants.usb_bus_driver_path, self.constants.drivers_path)
|
||||||
support.build_support(self.model, self.constants, self.config).get_efi_binary_by_path("XhciDxe.efi", "UEFI", "Drivers")["Enabled"] = True
|
support.build_support(self.model, self.constants, self.config).get_efi_binary_by_path("XhciDxe.efi", "UEFI", "Drivers")["Enabled"] = True
|
||||||
@@ -178,7 +178,7 @@ class build_firmware:
|
|||||||
|
|
||||||
# PCIe Link Rate check
|
# PCIe Link Rate check
|
||||||
if self.model == "MacPro3,1":
|
if self.model == "MacPro3,1":
|
||||||
print("- Adding PCIe Link Rate Patch")
|
logging.info("- Adding PCIe Link Rate Patch")
|
||||||
shutil.copy(self.constants.link_rate_driver_path, self.constants.drivers_path)
|
shutil.copy(self.constants.link_rate_driver_path, self.constants.drivers_path)
|
||||||
support.build_support(self.model, self.constants, self.config).get_efi_binary_by_path("FixPCIeLinkRate.efi", "UEFI", "Drivers")["Enabled"] = True
|
support.build_support(self.model, self.constants, self.config).get_efi_binary_by_path("FixPCIeLinkRate.efi", "UEFI", "Drivers")["Enabled"] = True
|
||||||
|
|
||||||
@@ -193,7 +193,7 @@ class build_firmware:
|
|||||||
# Waiting for XNU source to be released to fix this properly
|
# Waiting for XNU source to be released to fix this properly
|
||||||
# Ref: https://forums.macrumors.com/threads/opencore-on-the-mac-pro.2207814/
|
# Ref: https://forums.macrumors.com/threads/opencore-on-the-mac-pro.2207814/
|
||||||
if self.model in ["MacPro6,1", "iMac7,1", "iMac8,1", "MacBookPro4,1"] or self.constants.set_vmm_cpuid is True:
|
if self.model in ["MacPro6,1", "iMac7,1", "iMac8,1", "MacBookPro4,1"] or self.constants.set_vmm_cpuid is True:
|
||||||
print("- Enabling VMM patch")
|
logging.info("- Enabling VMM patch")
|
||||||
self.config["Kernel"]["Emulate"]["Cpuid1Data"] = binascii.unhexlify("00000000000000000000008000000000")
|
self.config["Kernel"]["Emulate"]["Cpuid1Data"] = binascii.unhexlify("00000000000000000000008000000000")
|
||||||
self.config["Kernel"]["Emulate"]["Cpuid1Mask"] = binascii.unhexlify("00000000000000000000008000000000")
|
self.config["Kernel"]["Emulate"]["Cpuid1Mask"] = binascii.unhexlify("00000000000000000000008000000000")
|
||||||
self.config["Kernel"]["Emulate"]["MinKernel"] = "22.0.0"
|
self.config["Kernel"]["Emulate"]["MinKernel"] = "22.0.0"
|
||||||
@@ -207,17 +207,17 @@ class build_firmware:
|
|||||||
):
|
):
|
||||||
# Fix Virtual Machine support for non-macOS OSes
|
# Fix Virtual Machine support for non-macOS OSes
|
||||||
# Haswell and Broadwell MacBooks lock out the VMX bit if booting UEFI Windows
|
# Haswell and Broadwell MacBooks lock out the VMX bit if booting UEFI Windows
|
||||||
print("- Enabling VMX Bit for non-macOS OSes")
|
logging.info("- Enabling VMX Bit for non-macOS OSes")
|
||||||
self.config["UEFI"]["Quirks"]["EnableVmx"] = True
|
self.config["UEFI"]["Quirks"]["EnableVmx"] = True
|
||||||
|
|
||||||
# Works-around Hibernation bug where connecting all firmware drivers breaks the transition from S4
|
# Works-around Hibernation bug where connecting all firmware drivers breaks the transition from S4
|
||||||
# Mainly applicable for MacBookPro9,1
|
# Mainly applicable for MacBookPro9,1
|
||||||
if self.constants.disable_connectdrivers is True:
|
if self.constants.disable_connectdrivers is True:
|
||||||
print("- Disabling ConnectDrivers")
|
logging.info("- Disabling ConnectDrivers")
|
||||||
self.config["UEFI"]["ConnectDrivers"] = False
|
self.config["UEFI"]["ConnectDrivers"] = False
|
||||||
|
|
||||||
if self.constants.nvram_write is False:
|
if self.constants.nvram_write is False:
|
||||||
print("- Disabling Hardware NVRAM Write")
|
logging.info("- Disabling Hardware NVRAM Write")
|
||||||
self.config["NVRAM"]["WriteFlash"] = False
|
self.config["NVRAM"]["WriteFlash"] = False
|
||||||
|
|
||||||
if self.constants.serial_settings != "None":
|
if self.constants.serial_settings != "None":
|
||||||
@@ -247,7 +247,7 @@ class build_firmware:
|
|||||||
if "5K Display" not in smbios_data.smbios_dictionary[self.model]:
|
if "5K Display" not in smbios_data.smbios_dictionary[self.model]:
|
||||||
return
|
return
|
||||||
|
|
||||||
print("- Adding 5K Display Patch")
|
logging.info("- Adding 5K Display Patch")
|
||||||
# Set LauncherPath to '/boot.efi'
|
# Set LauncherPath to '/boot.efi'
|
||||||
# This is to ensure that only the Mac's firmware presents the boot option, but not OpenCore
|
# This is to ensure that only the Mac's firmware presents the boot option, but not OpenCore
|
||||||
# https://github.com/acidanthera/OpenCorePkg/blob/0.7.6/Library/OcAppleBootPolicyLib/OcAppleBootPolicyLib.c#L50-L73
|
# https://github.com/acidanthera/OpenCorePkg/blob/0.7.6/Library/OcAppleBootPolicyLib/OcAppleBootPolicyLib.c#L50-L73
|
||||||
|
|||||||
@@ -1,13 +1,14 @@
|
|||||||
# Class for handling Graphics and Audio Patches, invocation from build.py
|
# Class for handling Graphics and Audio Patches, invocation from build.py
|
||||||
# Copyright (C) 2020-2022, Dhinak G, Mykola Grymalyuk
|
# Copyright (C) 2020-2022, Dhinak G, Mykola Grymalyuk
|
||||||
|
|
||||||
from resources import constants, device_probe, utilities
|
import shutil, binascii, logging
|
||||||
from resources.build import support
|
|
||||||
from data import smbios_data, model_array, os_data, cpu_data
|
|
||||||
|
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
|
|
||||||
import shutil, binascii
|
from resources import constants, device_probe, utilities
|
||||||
|
from resources.build import support
|
||||||
|
from data import smbios_data, model_array, os_data, cpu_data, video_bios_data
|
||||||
|
|
||||||
|
|
||||||
class build_graphics_audio:
|
class build_graphics_audio:
|
||||||
|
|
||||||
@@ -21,58 +22,59 @@ class build_graphics_audio:
|
|||||||
|
|
||||||
|
|
||||||
def build(self):
|
def build(self):
|
||||||
|
self.imac_mxm_patching()
|
||||||
self.graphics_handling()
|
self.graphics_handling()
|
||||||
self.audio_handling()
|
self.audio_handling()
|
||||||
self.firmware_handling()
|
self.firmware_handling()
|
||||||
self.spoof_handling()
|
self.spoof_handling()
|
||||||
self.imac_mxm_patching()
|
|
||||||
self.ioaccel_workaround()
|
self.ioaccel_workaround()
|
||||||
|
|
||||||
|
|
||||||
def graphics_handling(self):
|
def graphics_handling(self):
|
||||||
if self.constants.allow_oc_everywhere is False and self.constants.serial_settings != "None":
|
if self.constants.allow_oc_everywhere is False and self.constants.serial_settings != "None":
|
||||||
support.build_support(self.model, self.constants, self.config).enable_kext("WhateverGreen.kext", self.constants.whatevergreen_version, self.constants.whatevergreen_path)
|
if not support.build_support(self.model, self.constants, self.config).get_kext_by_bundle_path("WhateverGreen.kext")["Enabled"] is True:
|
||||||
|
support.build_support(self.model, self.constants, self.config).enable_kext("WhateverGreen.kext", self.constants.whatevergreen_version, self.constants.whatevergreen_path)
|
||||||
|
|
||||||
# Mac Pro handling
|
# Mac Pro handling
|
||||||
if self.model in model_array.MacPro:
|
if self.model in model_array.MacPro:
|
||||||
if not self.constants.custom_model:
|
if not self.constants.custom_model:
|
||||||
for i, device in enumerate(self.computer.gpus):
|
for i, device in enumerate(self.computer.gpus):
|
||||||
print(f"- Found dGPU ({i + 1}): {utilities.friendly_hex(device.vendor_id)}:{utilities.friendly_hex(device.device_id)}")
|
logging.info(f"- Found dGPU ({i + 1}): {utilities.friendly_hex(device.vendor_id)}:{utilities.friendly_hex(device.device_id)}")
|
||||||
self.config["#Revision"][f"Hardware-MacPro-dGPU-{i + 1}"] = f"{utilities.friendly_hex(device.vendor_id)}:{utilities.friendly_hex(device.device_id)}"
|
self.config["#Revision"][f"Hardware-MacPro-dGPU-{i + 1}"] = f"{utilities.friendly_hex(device.vendor_id)}:{utilities.friendly_hex(device.device_id)}"
|
||||||
|
|
||||||
if device.pci_path and device.acpi_path:
|
if device.pci_path and device.acpi_path:
|
||||||
print(f"- Found dGPU ({i + 1}) at {device.pci_path}")
|
logging.info(f"- Found dGPU ({i + 1}) at {device.pci_path}")
|
||||||
if isinstance(device, device_probe.AMD):
|
if isinstance(device, device_probe.AMD):
|
||||||
print("- Adding Mac Pro, Xserve DRM patches")
|
logging.info("- Adding Mac Pro, Xserve DRM patches")
|
||||||
self.config["DeviceProperties"]["Add"][device.pci_path] = {"shikigva": 128, "unfairgva": 1, "rebuild-device-tree": 1, "agdpmod": "pikera", "enable-gva-support": 1}
|
self.config["DeviceProperties"]["Add"][device.pci_path] = {"shikigva": 128, "unfairgva": 1, "rebuild-device-tree": 1, "agdpmod": "pikera", "enable-gva-support": 1}
|
||||||
elif isinstance(device, device_probe.NVIDIA):
|
elif isinstance(device, device_probe.NVIDIA):
|
||||||
print("- Enabling Nvidia Output Patch")
|
logging.info("- Enabling Nvidia Output Patch")
|
||||||
self.config["DeviceProperties"]["Add"][device.pci_path] = {"rebuild-device-tree": 1, "agdpmod": "vit9696"}
|
self.config["DeviceProperties"]["Add"][device.pci_path] = {"rebuild-device-tree": 1, "agdpmod": "vit9696"}
|
||||||
self.config["UEFI"]["Quirks"]["ForgeUefiSupport"] = True
|
self.config["UEFI"]["Quirks"]["ForgeUefiSupport"] = True
|
||||||
self.config["UEFI"]["Quirks"]["ReloadOptionRoms"] = True
|
self.config["UEFI"]["Quirks"]["ReloadOptionRoms"] = True
|
||||||
|
|
||||||
else:
|
else:
|
||||||
print(f"- Failed to find Device path for dGPU {i + 1}")
|
logging.info(f"- Failed to find Device path for dGPU {i + 1}")
|
||||||
if isinstance(device, device_probe.AMD):
|
if isinstance(device, device_probe.AMD):
|
||||||
print("- Adding Mac Pro, Xserve DRM patches")
|
logging.info("- Adding Mac Pro, Xserve DRM patches")
|
||||||
if "shikigva=128 unfairgva=1" not in self.config["NVRAM"]["Add"]["7C436110-AB2A-4BBB-A880-FE41995C9F82"]["boot-args"]:
|
if "shikigva=128 unfairgva=1" not in self.config["NVRAM"]["Add"]["7C436110-AB2A-4BBB-A880-FE41995C9F82"]["boot-args"]:
|
||||||
print("- Falling back to boot-args")
|
logging.info("- Falling back to boot-args")
|
||||||
self.config["NVRAM"]["Add"]["7C436110-AB2A-4BBB-A880-FE41995C9F82"]["boot-args"] += " shikigva=128 unfairgva=1 agdpmod=pikera radgva=1" + (
|
self.config["NVRAM"]["Add"]["7C436110-AB2A-4BBB-A880-FE41995C9F82"]["boot-args"] += " shikigva=128 unfairgva=1 agdpmod=pikera radgva=1" + (
|
||||||
" -wegtree" if "-wegtree" not in self.config["NVRAM"]["Add"]["7C436110-AB2A-4BBB-A880-FE41995C9F82"]["boot-args"] else ""
|
" -wegtree" if "-wegtree" not in self.config["NVRAM"]["Add"]["7C436110-AB2A-4BBB-A880-FE41995C9F82"]["boot-args"] else ""
|
||||||
)
|
)
|
||||||
elif isinstance(device, device_probe.NVIDIA):
|
elif isinstance(device, device_probe.NVIDIA):
|
||||||
print("- Enabling Nvidia Output Patch")
|
logging.info("- Enabling Nvidia Output Patch")
|
||||||
if "-wegtree agdpmod=vit9696" not in self.config["NVRAM"]["Add"]["7C436110-AB2A-4BBB-A880-FE41995C9F82"]["boot-args"]:
|
if "-wegtree agdpmod=vit9696" not in self.config["NVRAM"]["Add"]["7C436110-AB2A-4BBB-A880-FE41995C9F82"]["boot-args"]:
|
||||||
print("- Falling back to boot-args")
|
logging.info("- Falling back to boot-args")
|
||||||
self.config["NVRAM"]["Add"]["7C436110-AB2A-4BBB-A880-FE41995C9F82"]["boot-args"] += " -wegtree agdpmod=vit9696"
|
self.config["NVRAM"]["Add"]["7C436110-AB2A-4BBB-A880-FE41995C9F82"]["boot-args"] += " -wegtree agdpmod=vit9696"
|
||||||
self.config["UEFI"]["Quirks"]["ForgeUefiSupport"] = True
|
self.config["UEFI"]["Quirks"]["ForgeUefiSupport"] = True
|
||||||
self.config["UEFI"]["Quirks"]["ReloadOptionRoms"] = True
|
self.config["UEFI"]["Quirks"]["ReloadOptionRoms"] = True
|
||||||
|
|
||||||
if not self.computer.gpus:
|
if not self.computer.gpus:
|
||||||
print("- No socketed dGPU found")
|
logging.info("- No socketed dGPU found")
|
||||||
|
|
||||||
else:
|
else:
|
||||||
print("- Adding Mac Pro, Xserve DRM patches")
|
logging.info("- Adding Mac Pro, Xserve DRM patches")
|
||||||
self.config["NVRAM"]["Add"]["7C436110-AB2A-4BBB-A880-FE41995C9F82"]["boot-args"] += " shikigva=128 unfairgva=1 -wegtree"
|
self.config["NVRAM"]["Add"]["7C436110-AB2A-4BBB-A880-FE41995C9F82"]["boot-args"] += " shikigva=128 unfairgva=1 -wegtree"
|
||||||
|
|
||||||
if not support.build_support(self.model, self.constants, self.config).get_kext_by_bundle_path("WhateverGreen.kext")["Enabled"] is True:
|
if not support.build_support(self.model, self.constants, self.config).get_kext_by_bundle_path("WhateverGreen.kext")["Enabled"] is True:
|
||||||
@@ -86,7 +88,7 @@ class build_graphics_audio:
|
|||||||
device.arch in [device_probe.NVIDIA.Archs.Fermi, device_probe.NVIDIA.Archs.Maxwell, device_probe.NVIDIA.Archs.Pascal] or
|
device.arch in [device_probe.NVIDIA.Archs.Fermi, device_probe.NVIDIA.Archs.Maxwell, device_probe.NVIDIA.Archs.Pascal] or
|
||||||
(self.constants.force_nv_web is True and device.arch in [device_probe.NVIDIA.Archs.Tesla, device_probe.NVIDIA.Archs.Kepler])
|
(self.constants.force_nv_web is True and device.arch in [device_probe.NVIDIA.Archs.Tesla, device_probe.NVIDIA.Archs.Kepler])
|
||||||
):
|
):
|
||||||
print(f"- Enabling Web Driver Patches for GPU ({i + 1}): {utilities.friendly_hex(device.vendor_id)}:{utilities.friendly_hex(device.device_id)}")
|
logging.info(f"- Enabling Web Driver Patches for GPU ({i + 1}): {utilities.friendly_hex(device.vendor_id)}:{utilities.friendly_hex(device.device_id)}")
|
||||||
if device.pci_path and device.acpi_path:
|
if device.pci_path and device.acpi_path:
|
||||||
if device.pci_path in self.config["DeviceProperties"]["Add"]:
|
if device.pci_path in self.config["DeviceProperties"]["Add"]:
|
||||||
self.config["DeviceProperties"]["Add"][device.pci_path].update({"disable-metal": 1, "force-compat": 1})
|
self.config["DeviceProperties"]["Add"][device.pci_path].update({"disable-metal": 1, "force-compat": 1})
|
||||||
@@ -104,14 +106,36 @@ class build_graphics_audio:
|
|||||||
if "nvda_drv" not in self.config["NVRAM"]["Delete"]["7C436110-AB2A-4BBB-A880-FE41995C9F82"]:
|
if "nvda_drv" not in self.config["NVRAM"]["Delete"]["7C436110-AB2A-4BBB-A880-FE41995C9F82"]:
|
||||||
self.config["NVRAM"]["Delete"]["7C436110-AB2A-4BBB-A880-FE41995C9F82"] += ["nvda_drv"]
|
self.config["NVRAM"]["Delete"]["7C436110-AB2A-4BBB-A880-FE41995C9F82"] += ["nvda_drv"]
|
||||||
|
|
||||||
|
|
||||||
def backlight_path_detection(self):
|
def backlight_path_detection(self):
|
||||||
|
|
||||||
|
# self.constants.custom_model: iMac has been modded with new dGPU
|
||||||
|
# self.computer.dgpu: dGPU has been found using the GFX0 path
|
||||||
|
# self.computer.dgpu.pci_path:
|
||||||
if not self.constants.custom_model and self.computer.dgpu and self.computer.dgpu.pci_path:
|
if not self.constants.custom_model and self.computer.dgpu and self.computer.dgpu.pci_path:
|
||||||
self.gfx0_path = self.computer.dgpu.pci_path
|
for i, device in enumerate(self.computer.gpus):
|
||||||
print(f"- Found GFX0 Device Path: {self.gfx0_path}")
|
logging.info(f"- Found dGPU ({i + 1}): {utilities.friendly_hex(device.vendor_id)}:{utilities.friendly_hex(device.device_id)}")
|
||||||
|
self.config["#Revision"][f"Hardware-iMac-dGPU-{i + 1}"] = f"{utilities.friendly_hex(device.vendor_id)}:{utilities.friendly_hex(device.device_id)}"
|
||||||
|
|
||||||
|
if device.pci_path != self.computer.dgpu.pci_path:
|
||||||
|
logging.info("- device path and GFX0 Device path are different")
|
||||||
|
self.gfx0_path = device.pci_path
|
||||||
|
logging.info(f"- Set GFX0 Device Path: {self.gfx0_path}")
|
||||||
|
self.computer.dgpu.device_id = device.device_id
|
||||||
|
self.device_id = device.device_id
|
||||||
|
logging.info(f"- Found GPU Arch: {device.arch}")
|
||||||
|
if device.arch in [device_probe.AMD.Archs.Navi]:
|
||||||
|
self.computer.dgpu.arch = device.arch
|
||||||
|
|
||||||
|
# self.computer.dgpu.vendor_id = device.vendor_id
|
||||||
|
# self.vendor_id = device.vendor_id
|
||||||
|
else:
|
||||||
|
self.gfx0_path = self.computer.dgpu.pci_path
|
||||||
|
logging.info(f"- Found GFX0 Device Path: {self.gfx0_path}")
|
||||||
|
logging.info(f"- Found GPU Arch: {self.computer.dgpu.arch}")
|
||||||
|
|
||||||
else:
|
else:
|
||||||
if not self.constants.custom_model:
|
if not self.constants.custom_model:
|
||||||
print("- Failed to find GFX0 Device path, falling back on known logic")
|
logging.info("- Failed to find GFX0 Device path, falling back on known logic")
|
||||||
if self.model in ["iMac11,1", "iMac11,3"]:
|
if self.model in ["iMac11,1", "iMac11,3"]:
|
||||||
self.gfx0_path = "PciRoot(0x0)/Pci(0x3,0x0)/Pci(0x0,0x0)"
|
self.gfx0_path = "PciRoot(0x0)/Pci(0x3,0x0)/Pci(0x0,0x0)"
|
||||||
elif self.model == "iMac10,1":
|
elif self.model == "iMac10,1":
|
||||||
@@ -123,9 +147,9 @@ class build_graphics_audio:
|
|||||||
def nvidia_mxm_patch(self, backlight_path):
|
def nvidia_mxm_patch(self, backlight_path):
|
||||||
if not support.build_support(self.model, self.constants, self.config).get_kext_by_bundle_path("WhateverGreen.kext")["Enabled"] is True:
|
if not support.build_support(self.model, self.constants, self.config).get_kext_by_bundle_path("WhateverGreen.kext")["Enabled"] is True:
|
||||||
# Ensure WEG is enabled as we need if for Backlight patching
|
# Ensure WEG is enabled as we need if for Backlight patching
|
||||||
support.build_support(self.model, self.constants, self.config).enable_kext("WhateverGreen.kext", self.constants.whatevergreen_version, self.constants.whatevergreen_path)
|
support.build_support(self.model, self.constants, self.config).enable_kext("WhateverGreen.kext", self.constants.whatevergreen_navi_version, self.constants.whatevergreen_navi_path)
|
||||||
if self.model in ["iMac11,1", "iMac11,2", "iMac11,3", "iMac10,1"]:
|
if self.model in ["iMac11,1", "iMac11,2", "iMac11,3", "iMac10,1"]:
|
||||||
print("- Adding Nvidia Brightness Control and DRM patches")
|
logging.info("- Adding Nvidia Brightness Control and DRM patches")
|
||||||
self.config["DeviceProperties"]["Add"][backlight_path] = {
|
self.config["DeviceProperties"]["Add"][backlight_path] = {
|
||||||
"applbkl": binascii.unhexlify("01000000"),
|
"applbkl": binascii.unhexlify("01000000"),
|
||||||
"@0,backlight-control": binascii.unhexlify("01000000"),
|
"@0,backlight-control": binascii.unhexlify("01000000"),
|
||||||
@@ -144,7 +168,7 @@ class build_graphics_audio:
|
|||||||
"agdpmod": "vit9696",
|
"agdpmod": "vit9696",
|
||||||
}
|
}
|
||||||
elif self.model in ["iMac12,1", "iMac12,2"]:
|
elif self.model in ["iMac12,1", "iMac12,2"]:
|
||||||
print("- Adding Nvidia Brightness Control and DRM patches")
|
logging.info("- Adding Nvidia Brightness Control and DRM patches")
|
||||||
self.config["DeviceProperties"]["Add"][backlight_path] = {
|
self.config["DeviceProperties"]["Add"][backlight_path] = {
|
||||||
"applbkl": binascii.unhexlify("01000000"),
|
"applbkl": binascii.unhexlify("01000000"),
|
||||||
"@0,backlight-control": binascii.unhexlify("01000000"),
|
"@0,backlight-control": binascii.unhexlify("01000000"),
|
||||||
@@ -152,10 +176,9 @@ class build_graphics_audio:
|
|||||||
"shikigva": 256,
|
"shikigva": 256,
|
||||||
"agdpmod": "vit9696",
|
"agdpmod": "vit9696",
|
||||||
}
|
}
|
||||||
print("- Disabling unsupported iGPU")
|
logging.info("- Disabling unsupported iGPU")
|
||||||
self.config["DeviceProperties"]["Add"]["PciRoot(0x0)/Pci(0x2,0x0)"] = {
|
self.config["DeviceProperties"]["Add"]["PciRoot(0x0)/Pci(0x2,0x0)"] = {
|
||||||
"name": binascii.unhexlify("23646973706C6179"),
|
"name": binascii.unhexlify("23646973706C6179"),
|
||||||
"IOName": "#display",
|
|
||||||
"class-code": binascii.unhexlify("FFFFFFFF"),
|
"class-code": binascii.unhexlify("FFFFFFFF"),
|
||||||
}
|
}
|
||||||
shutil.copy(self.constants.backlight_injector_path, self.constants.kexts_path)
|
shutil.copy(self.constants.backlight_injector_path, self.constants.kexts_path)
|
||||||
@@ -165,27 +188,40 @@ class build_graphics_audio:
|
|||||||
|
|
||||||
|
|
||||||
def amd_mxm_patch(self, backlight_path):
|
def amd_mxm_patch(self, backlight_path):
|
||||||
print("- Adding AMD DRM patches")
|
logging.info("- Adding AMD DRM patches")
|
||||||
if not support.build_support(self.model, self.constants, self.config).get_kext_by_bundle_path("WhateverGreen.kext")["Enabled"] is True:
|
if not support.build_support(self.model, self.constants, self.config).get_kext_by_bundle_path("WhateverGreen.kext")["Enabled"] is True:
|
||||||
# Ensure WEG is enabled as we need if for Backlight patching
|
# Ensure WEG is enabled as we need if for Backlight patching
|
||||||
support.build_support(self.model, self.constants, self.config).enable_kext("WhateverGreen.kext", self.constants.whatevergreen_version, self.constants.whatevergreen_path)
|
support.build_support(self.model, self.constants, self.config).enable_kext("WhateverGreen.kext", self.constants.whatevergreen_navi_version, self.constants.whatevergreen_navi_path)
|
||||||
self.config["DeviceProperties"]["Add"][backlight_path] = {"shikigva": 128, "unfairgva": 1, "agdpmod": "pikera", "rebuild-device-tree": 1, "enable-gva-support": 1}
|
|
||||||
|
if not self.constants.custom_model:
|
||||||
|
if self.computer.dgpu.device_id == 0x7340:
|
||||||
|
logging.info(f"- Adding AMD RX5500XT vBIOS injection")
|
||||||
|
self.config["DeviceProperties"]["Add"][backlight_path] = {"shikigva": 128, "unfairgva": 1, "agdpmod": "pikera", "rebuild-device-tree": 1, "enable-gva-support": 1, "ATY,bin_image": binascii.unhexlify(video_bios_data.RX5500XT_64K) }
|
||||||
|
logging.info(f"- Adding AMD RX5500XT boot-args")
|
||||||
|
self.config["NVRAM"]["Add"]["7C436110-AB2A-4BBB-A880-FE41995C9F82"]["boot-args"] += " agdpmod=pikera applbkl=3"
|
||||||
|
elif self.computer.dgpu.device_id_unspoofed == 0x6981:
|
||||||
|
logging.info(f"- Adding AMD WX3200 device spoofing")
|
||||||
|
self.config["DeviceProperties"]["Add"][backlight_path] = {"shikigva": 128, "unfairgva": 1, "agdpmod": "pikera", "rebuild-device-tree": 1, "enable-gva-support": 1, "model": "AMD Radeon Pro WX 3200", "device-id": binascii.unhexlify("FF67")}
|
||||||
|
else:
|
||||||
|
self.config["DeviceProperties"]["Add"][backlight_path] = {"shikigva": 128, "unfairgva": 1, "agdpmod": "pikera", "rebuild-device-tree": 1, "enable-gva-support": 1}
|
||||||
|
else:
|
||||||
|
self.config["DeviceProperties"]["Add"][backlight_path] = {"shikigva": 128, "unfairgva": 1, "agdpmod": "pikera", "rebuild-device-tree": 1, "enable-gva-support": 1}
|
||||||
|
|
||||||
if self.constants.custom_model and self.model == "iMac11,2":
|
if self.constants.custom_model and self.model == "iMac11,2":
|
||||||
# iMac11,2 can have either PciRoot(0x0)/Pci(0x3,0x0)/Pci(0x0,0x0) or PciRoot(0x0)/Pci(0x1,0x0)/Pci(0x0,0x0)
|
# iMac11,2 can have either PciRoot(0x0)/Pci(0x3,0x0)/Pci(0x0,0x0) or PciRoot(0x0)/Pci(0x1,0x0)/Pci(0x0,0x0)
|
||||||
# Set both properties when we cannot run hardware detection
|
# Set both properties when we cannot run hardware detection
|
||||||
self.config["DeviceProperties"]["Add"]["PciRoot(0x0)/Pci(0x3,0x0)/Pci(0x0,0x0)"] = {"shikigva": 128, "unfairgva": 1, "agdpmod": "pikera", "rebuild-device-tree": 1, "enable-gva-support": 1}
|
self.config["DeviceProperties"]["Add"]["PciRoot(0x0)/Pci(0x3,0x0)/Pci(0x0,0x0)"] = {"shikigva": 128, "unfairgva": 1, "agdpmod": "pikera", "rebuild-device-tree": 1, "enable-gva-support": 1}
|
||||||
if self.model in ["iMac12,1", "iMac12,2"]:
|
if self.model in ["iMac12,1", "iMac12,2"]:
|
||||||
print("- Disabling unsupported iGPU")
|
logging.info("- Disabling unsupported iGPU")
|
||||||
self.config["DeviceProperties"]["Add"]["PciRoot(0x0)/Pci(0x2,0x0)"] = {
|
self.config["DeviceProperties"]["Add"]["PciRoot(0x0)/Pci(0x2,0x0)"] = {
|
||||||
"name": binascii.unhexlify("23646973706C6179"),
|
"name": binascii.unhexlify("23646973706C6179"),
|
||||||
"IOName": "#display",
|
|
||||||
"class-code": binascii.unhexlify("FFFFFFFF"),
|
"class-code": binascii.unhexlify("FFFFFFFF"),
|
||||||
}
|
}
|
||||||
elif self.model == "iMac10,1":
|
elif self.model == "iMac10,1":
|
||||||
support.build_support(self.model, self.constants, self.config).enable_kext("AAAMouSSE.kext", self.constants.mousse_version, self.constants.mousse_path)
|
support.build_support(self.model, self.constants, self.config).enable_kext("AAAMouSSE.kext", self.constants.mousse_version, self.constants.mousse_path)
|
||||||
if self.computer and self.computer.dgpu:
|
if self.computer and self.computer.dgpu:
|
||||||
if self.computer.dgpu.arch == device_probe.AMD.Archs.Legacy_GCN_7000:
|
if self.computer.dgpu.arch == device_probe.AMD.Archs.Legacy_GCN_7000:
|
||||||
print("- Adding Legacy GCN Power Gate Patches")
|
logging.info("- Adding Legacy GCN Power Gate Patches")
|
||||||
self.config["DeviceProperties"]["Add"][backlight_path].update({
|
self.config["DeviceProperties"]["Add"][backlight_path].update({
|
||||||
"CAIL,CAIL_DisableDrmdmaPowerGating": 1,
|
"CAIL,CAIL_DisableDrmdmaPowerGating": 1,
|
||||||
"CAIL,CAIL_DisableGfxCGPowerGating": 1,
|
"CAIL,CAIL_DisableGfxCGPowerGating": 1,
|
||||||
@@ -193,7 +229,7 @@ class build_graphics_audio:
|
|||||||
"CAIL,CAIL_DisableVCEPowerGating": 1,
|
"CAIL,CAIL_DisableVCEPowerGating": 1,
|
||||||
})
|
})
|
||||||
if self.constants.imac_model == "Legacy GCN":
|
if self.constants.imac_model == "Legacy GCN":
|
||||||
print("- Adding Legacy GCN Power Gate Patches")
|
logging.info("- Adding Legacy GCN Power Gate Patches")
|
||||||
self.config["DeviceProperties"]["Add"][backlight_path].update({
|
self.config["DeviceProperties"]["Add"][backlight_path].update({
|
||||||
"CAIL,CAIL_DisableDrmdmaPowerGating": 1,
|
"CAIL,CAIL_DisableDrmdmaPowerGating": 1,
|
||||||
"CAIL,CAIL_DisableGfxCGPowerGating": 1,
|
"CAIL,CAIL_DisableGfxCGPowerGating": 1,
|
||||||
@@ -207,7 +243,27 @@ class build_graphics_audio:
|
|||||||
"CAIL,CAIL_DisableUVDPowerGating": 1,
|
"CAIL,CAIL_DisableUVDPowerGating": 1,
|
||||||
"CAIL,CAIL_DisableVCEPowerGating": 1,
|
"CAIL,CAIL_DisableVCEPowerGating": 1,
|
||||||
})
|
})
|
||||||
|
elif self.constants.imac_model == "AMD Lexa":
|
||||||
|
logging.info("- Adding Lexa Spoofing Patches")
|
||||||
|
self.config["DeviceProperties"]["Add"][backlight_path].update({
|
||||||
|
"model": "AMD Radeon Pro WX 3200",
|
||||||
|
"device-id": binascii.unhexlify("FF67"),
|
||||||
|
})
|
||||||
|
if self.model == "iMac11,2":
|
||||||
|
self.config["DeviceProperties"]["Add"]["PciRoot(0x0)/Pci(0x3,0x0)/Pci(0x0,0x0)"].update({
|
||||||
|
"model": "AMD Radeon Pro WX 3200",
|
||||||
|
"device-id": binascii.unhexlify("FF67"),
|
||||||
|
})
|
||||||
|
elif self.constants.imac_model == "AMD Navi":
|
||||||
|
logging.info("- Adding Navi Spoofing Patches")
|
||||||
|
navi_backlight_path = backlight_path+"/Pci(0x0,0x0)/Pci(0x0,0x0)"
|
||||||
|
self.config["DeviceProperties"]["Add"][navi_backlight_path] = {
|
||||||
|
"ATY,bin_image": binascii.unhexlify(video_bios_data.RX5500XT_64K),
|
||||||
|
"shikigva": 128,
|
||||||
|
"unfairgva": 1,
|
||||||
|
"rebuild-device-tree": 1,
|
||||||
|
"enable-gva-support": 1
|
||||||
|
}
|
||||||
|
|
||||||
def audio_handling(self):
|
def audio_handling(self):
|
||||||
if (self.model in model_array.LegacyAudio or self.model in model_array.MacPro) and self.constants.set_alc_usage is True:
|
if (self.model in model_array.LegacyAudio or self.model in model_array.MacPro) and self.constants.set_alc_usage is True:
|
||||||
@@ -249,12 +305,12 @@ class build_graphics_audio:
|
|||||||
def firmware_handling(self):
|
def firmware_handling(self):
|
||||||
# Add UGA to GOP layer
|
# Add UGA to GOP layer
|
||||||
if "UGA Graphics" in smbios_data.smbios_dictionary[self.model]:
|
if "UGA Graphics" in smbios_data.smbios_dictionary[self.model]:
|
||||||
print("- Adding UGA to GOP Patch")
|
logging.info("- Adding UGA to GOP Patch")
|
||||||
self.config["UEFI"]["Output"]["GopPassThrough"] = "Apple"
|
self.config["UEFI"]["Output"]["GopPassThrough"] = "Apple"
|
||||||
|
|
||||||
# GMUX handling
|
# GMUX handling
|
||||||
if self.constants.software_demux is True and self.model in ["MacBookPro8,2", "MacBookPro8,3"]:
|
if self.constants.software_demux is True and self.model in ["MacBookPro8,2", "MacBookPro8,3"]:
|
||||||
print("- Enabling software demux")
|
logging.info("- Enabling software demux")
|
||||||
# Add ACPI patches
|
# Add ACPI patches
|
||||||
support.build_support(self.model, self.constants, self.config).get_item_by_kv(self.config["ACPI"]["Add"], "Path", "SSDT-DGPU.aml")["Enabled"] = True
|
support.build_support(self.model, self.constants, self.config).get_item_by_kv(self.config["ACPI"]["Add"], "Path", "SSDT-DGPU.aml")["Enabled"] = True
|
||||||
support.build_support(self.model, self.constants, self.config).get_item_by_kv(self.config["ACPI"]["Patch"], "Comment", "_INI to XINI")["Enabled"] = True
|
support.build_support(self.model, self.constants, self.config).get_item_by_kv(self.config["ACPI"]["Patch"], "Comment", "_INI to XINI")["Enabled"] = True
|
||||||
@@ -272,24 +328,24 @@ class build_graphics_audio:
|
|||||||
support.build_support(self.model, self.constants, self.config).enable_kext("AMDGPUWakeHandler.kext", self.constants.gpu_wake_version, self.constants.gpu_wake_path)
|
support.build_support(self.model, self.constants, self.config).enable_kext("AMDGPUWakeHandler.kext", self.constants.gpu_wake_version, self.constants.gpu_wake_path)
|
||||||
|
|
||||||
if self.constants.dGPU_switch is True and "Switchable GPUs" in smbios_data.smbios_dictionary[self.model]:
|
if self.constants.dGPU_switch is True and "Switchable GPUs" in smbios_data.smbios_dictionary[self.model]:
|
||||||
print("- Allowing GMUX switching in Windows")
|
logging.info("- Allowing GMUX switching in Windows")
|
||||||
self.config["Booter"]["Quirks"]["SignalAppleOS"] = True
|
self.config["Booter"]["Quirks"]["SignalAppleOS"] = True
|
||||||
|
|
||||||
# Force Output support PC VBIOS on Mac Pros
|
# Force Output support PC VBIOS on Mac Pros
|
||||||
if self.constants.force_output_support is True:
|
if self.constants.force_output_support is True:
|
||||||
print("- Forcing GOP Support")
|
logging.info("- Forcing GOP Support")
|
||||||
self.config["UEFI"]["Quirks"]["ForgeUefiSupport"] = True
|
self.config["UEFI"]["Quirks"]["ForgeUefiSupport"] = True
|
||||||
self.config["UEFI"]["Quirks"]["ReloadOptionRoms"] = True
|
self.config["UEFI"]["Quirks"]["ReloadOptionRoms"] = True
|
||||||
|
|
||||||
# AMD GOP VBIOS injection for AMD GCN 1-4 GPUs
|
# AMD GOP VBIOS injection for AMD GCN 1-4 GPUs
|
||||||
if self.constants.amd_gop_injection is True:
|
if self.constants.amd_gop_injection is True:
|
||||||
print("- Adding AMDGOP.efi")
|
logging.info("- Adding AMDGOP.efi")
|
||||||
shutil.copy(self.constants.amd_gop_driver_path, self.constants.drivers_path)
|
shutil.copy(self.constants.amd_gop_driver_path, self.constants.drivers_path)
|
||||||
support.build_support(self.model, self.constants, self.config).get_efi_binary_by_path("AMDGOP.efi", "UEFI", "Drivers")["Enabled"] = True
|
support.build_support(self.model, self.constants, self.config).get_efi_binary_by_path("AMDGOP.efi", "UEFI", "Drivers")["Enabled"] = True
|
||||||
|
|
||||||
# Nvidia Kepler GOP VBIOS injection
|
# Nvidia Kepler GOP VBIOS injection
|
||||||
if self.constants.nvidia_kepler_gop_injection is True:
|
if self.constants.nvidia_kepler_gop_injection is True:
|
||||||
print("- Adding NVGOP_GK.efi")
|
logging.info("- Adding NVGOP_GK.efi")
|
||||||
shutil.copy(self.constants.nvidia_kepler_gop_driver_path, self.constants.drivers_path)
|
shutil.copy(self.constants.nvidia_kepler_gop_driver_path, self.constants.drivers_path)
|
||||||
support.build_support(self.model, self.constants, self.config).get_efi_binary_by_path("NVGOP_GK.efi", "UEFI", "Drivers")["Enabled"] = True
|
support.build_support(self.model, self.constants, self.config).get_efi_binary_by_path("NVGOP_GK.efi", "UEFI", "Drivers")["Enabled"] = True
|
||||||
|
|
||||||
@@ -300,7 +356,7 @@ class build_graphics_audio:
|
|||||||
|
|
||||||
# AppleMuxControl Override
|
# AppleMuxControl Override
|
||||||
if self.model == "MacBookPro9,1":
|
if self.model == "MacBookPro9,1":
|
||||||
print("- Adding AppleMuxControl Override")
|
logging.info("- Adding AppleMuxControl Override")
|
||||||
amc_map_path = Path(self.constants.plist_folder_path) / Path("AppleMuxControl/Info.plist")
|
amc_map_path = Path(self.constants.plist_folder_path) / Path("AppleMuxControl/Info.plist")
|
||||||
self.config["DeviceProperties"]["Add"]["PciRoot(0x0)/Pci(0x1,0x0)/Pci(0x0,0x0)"] = {"agdpmod": "vit9696"}
|
self.config["DeviceProperties"]["Add"]["PciRoot(0x0)/Pci(0x1,0x0)/Pci(0x0,0x0)"] = {"agdpmod": "vit9696"}
|
||||||
Path(self.constants.amc_kext_folder).mkdir()
|
Path(self.constants.amc_kext_folder).mkdir()
|
||||||
@@ -309,7 +365,7 @@ class build_graphics_audio:
|
|||||||
support.build_support(self.model, self.constants, self.config).get_kext_by_bundle_path("AMC-Override.kext")["Enabled"] = True
|
support.build_support(self.model, self.constants, self.config).get_kext_by_bundle_path("AMC-Override.kext")["Enabled"] = True
|
||||||
|
|
||||||
if self.model not in model_array.NoAGPMSupport:
|
if self.model not in model_array.NoAGPMSupport:
|
||||||
print("- Adding AppleGraphicsPowerManagement Override")
|
logging.info("- Adding AppleGraphicsPowerManagement Override")
|
||||||
agpm_map_path = Path(self.constants.plist_folder_path) / Path("AppleGraphicsPowerManagement/Info.plist")
|
agpm_map_path = Path(self.constants.plist_folder_path) / Path("AppleGraphicsPowerManagement/Info.plist")
|
||||||
Path(self.constants.agpm_kext_folder).mkdir()
|
Path(self.constants.agpm_kext_folder).mkdir()
|
||||||
Path(self.constants.agpm_contents_folder).mkdir()
|
Path(self.constants.agpm_contents_folder).mkdir()
|
||||||
@@ -317,7 +373,7 @@ class build_graphics_audio:
|
|||||||
support.build_support(self.model, self.constants, self.config).get_kext_by_bundle_path("AGPM-Override.kext")["Enabled"] = True
|
support.build_support(self.model, self.constants, self.config).get_kext_by_bundle_path("AGPM-Override.kext")["Enabled"] = True
|
||||||
|
|
||||||
if self.model in model_array.AGDPSupport:
|
if self.model in model_array.AGDPSupport:
|
||||||
print("- Adding AppleGraphicsDevicePolicy Override")
|
logging.info("- Adding AppleGraphicsDevicePolicy Override")
|
||||||
agdp_map_path = Path(self.constants.plist_folder_path) / Path("AppleGraphicsDevicePolicy/Info.plist")
|
agdp_map_path = Path(self.constants.plist_folder_path) / Path("AppleGraphicsDevicePolicy/Info.plist")
|
||||||
Path(self.constants.agdp_kext_folder).mkdir()
|
Path(self.constants.agdp_kext_folder).mkdir()
|
||||||
Path(self.constants.agdp_contents_folder).mkdir()
|
Path(self.constants.agdp_contents_folder).mkdir()
|
||||||
@@ -326,17 +382,17 @@ class build_graphics_audio:
|
|||||||
|
|
||||||
# AGPM Patch
|
# AGPM Patch
|
||||||
if self.model in model_array.DualGPUPatch:
|
if self.model in model_array.DualGPUPatch:
|
||||||
print("- Adding dual GPU patch")
|
logging.info("- Adding dual GPU patch")
|
||||||
if not self.constants.custom_model and self.computer.dgpu and self.computer.dgpu.pci_path:
|
if not self.constants.custom_model and self.computer.dgpu and self.computer.dgpu.pci_path:
|
||||||
self.gfx0_path = self.computer.dgpu.pci_path
|
self.gfx0_path = self.computer.dgpu.pci_path
|
||||||
print(f"- Found GFX0 Device Path: {self.gfx0_path}")
|
logging.info(f"- Found GFX0 Device Path: {self.gfx0_path}")
|
||||||
else:
|
else:
|
||||||
if not self.constants.custom_model:
|
if not self.constants.custom_model:
|
||||||
print("- Failed to find GFX0 Device path, falling back on known logic")
|
logging.info("- Failed to find GFX0 Device path, falling back on known logic")
|
||||||
self.gfx0_path = "PciRoot(0x0)/Pci(0x1,0x0)/Pci(0x0,0x0)"
|
self.gfx0_path = "PciRoot(0x0)/Pci(0x1,0x0)/Pci(0x0,0x0)"
|
||||||
|
|
||||||
if self.model in model_array.IntelNvidiaDRM and self.constants.drm_support is True:
|
if self.model in model_array.IntelNvidiaDRM and self.constants.drm_support is True:
|
||||||
print("- Prioritizing DRM support over Intel QuickSync")
|
logging.info("- Prioritizing DRM support over Intel QuickSync")
|
||||||
self.config["DeviceProperties"]["Add"][self.gfx0_path] = {"agdpmod": "vit9696", "shikigva": 256}
|
self.config["DeviceProperties"]["Add"][self.gfx0_path] = {"agdpmod": "vit9696", "shikigva": 256}
|
||||||
self.config["DeviceProperties"]["Add"]["PciRoot(0x0)/Pci(0x2,0x0)"] = {
|
self.config["DeviceProperties"]["Add"]["PciRoot(0x0)/Pci(0x2,0x0)"] = {
|
||||||
"name": binascii.unhexlify("23646973706C6179"),
|
"name": binascii.unhexlify("23646973706C6179"),
|
||||||
@@ -344,7 +400,8 @@ class build_graphics_audio:
|
|||||||
"class-code": binascii.unhexlify("FFFFFFFF"),
|
"class-code": binascii.unhexlify("FFFFFFFF"),
|
||||||
}
|
}
|
||||||
elif self.constants.serial_settings != "None":
|
elif self.constants.serial_settings != "None":
|
||||||
self.config["DeviceProperties"]["Add"][self.gfx0_path] = {"agdpmod": "vit9696"}
|
if self.gfx0_path not in self.config["DeviceProperties"]["Add"] or "agdpmod" not in self.config["DeviceProperties"]["Add"][self.gfx0_path]:
|
||||||
|
self.config["DeviceProperties"]["Add"][self.gfx0_path] = {"agdpmod": "vit9696"}
|
||||||
|
|
||||||
if self.model.startswith("iMac14,1"):
|
if self.model.startswith("iMac14,1"):
|
||||||
# Ensure that agdpmod is applied to iMac14,x with iGPU only
|
# Ensure that agdpmod is applied to iMac14,x with iGPU only
|
||||||
@@ -352,30 +409,29 @@ class build_graphics_audio:
|
|||||||
|
|
||||||
|
|
||||||
def imac_mxm_patching(self):
|
def imac_mxm_patching(self):
|
||||||
|
self.backlight_path_detection()
|
||||||
# Check GPU Vendor
|
# Check GPU Vendor
|
||||||
if self.constants.metal_build is True:
|
if self.constants.metal_build is True:
|
||||||
self.backlight_path_detection()
|
logging.info("- Adding Metal GPU patches on request")
|
||||||
print("- Adding Metal GPU patches on request")
|
|
||||||
if self.constants.imac_vendor == "AMD":
|
if self.constants.imac_vendor == "AMD":
|
||||||
self.amd_mxm_patch(self.gfx0_path)
|
self.amd_mxm_patch(self.gfx0_path)
|
||||||
elif self.constants.imac_vendor == "Nvidia":
|
elif self.constants.imac_vendor == "Nvidia":
|
||||||
self.nvidia_mxm_patch(self.gfx0_path)
|
self.nvidia_mxm_patch(self.gfx0_path)
|
||||||
else:
|
else:
|
||||||
print("- Failed to find vendor")
|
logging.info("- Failed to find vendor")
|
||||||
elif not self.constants.custom_model and self.model in model_array.LegacyGPU and self.computer.dgpu:
|
elif not self.constants.custom_model and self.model in model_array.LegacyGPU and self.computer.dgpu:
|
||||||
print(f"- Detected dGPU: {utilities.friendly_hex(self.computer.dgpu.vendor_id)}:{utilities.friendly_hex(self.computer.dgpu.device_id)}")
|
logging.info(f"- Detected dGPU: {utilities.friendly_hex(self.computer.dgpu.vendor_id)}:{utilities.friendly_hex(self.computer.dgpu.device_id)}")
|
||||||
if self.computer.dgpu.arch in [
|
if self.computer.dgpu.arch in [
|
||||||
device_probe.AMD.Archs.Legacy_GCN_7000,
|
device_probe.AMD.Archs.Legacy_GCN_7000,
|
||||||
device_probe.AMD.Archs.Legacy_GCN_8000,
|
device_probe.AMD.Archs.Legacy_GCN_8000,
|
||||||
device_probe.AMD.Archs.Legacy_GCN_9000,
|
device_probe.AMD.Archs.Legacy_GCN_9000,
|
||||||
device_probe.AMD.Archs.Polaris,
|
device_probe.AMD.Archs.Polaris,
|
||||||
|
device_probe.AMD.Archs.Polaris_Spoof,
|
||||||
device_probe.AMD.Archs.Vega,
|
device_probe.AMD.Archs.Vega,
|
||||||
device_probe.AMD.Archs.Navi,
|
device_probe.AMD.Archs.Navi,
|
||||||
]:
|
]:
|
||||||
self.backlight_path_detection()
|
|
||||||
self.amd_mxm_patch(self.gfx0_path)
|
self.amd_mxm_patch(self.gfx0_path)
|
||||||
elif self.computer.dgpu.arch == device_probe.NVIDIA.Archs.Kepler:
|
elif self.computer.dgpu.arch == device_probe.NVIDIA.Archs.Kepler:
|
||||||
self.backlight_path_detection()
|
|
||||||
self.nvidia_mxm_patch(self.gfx0_path)
|
self.nvidia_mxm_patch(self.gfx0_path)
|
||||||
|
|
||||||
def ioaccel_workaround(self):
|
def ioaccel_workaround(self):
|
||||||
@@ -430,6 +486,7 @@ class build_graphics_audio:
|
|||||||
if gpu in [
|
if gpu in [
|
||||||
# Metal KDK (pre-AVX2.0)
|
# Metal KDK (pre-AVX2.0)
|
||||||
device_probe.AMD.Archs.Polaris,
|
device_probe.AMD.Archs.Polaris,
|
||||||
|
device_probe.AMD.Archs.Polaris_Spoof,
|
||||||
device_probe.AMD.Archs.Vega,
|
device_probe.AMD.Archs.Vega,
|
||||||
device_probe.AMD.Archs.Navi,
|
device_probe.AMD.Archs.Navi,
|
||||||
]:
|
]:
|
||||||
@@ -454,8 +511,9 @@ class build_graphics_audio:
|
|||||||
gpu = gpu.arch
|
gpu = gpu.arch
|
||||||
if gpu in [
|
if gpu in [
|
||||||
device_probe.AMD.Archs.Polaris,
|
device_probe.AMD.Archs.Polaris,
|
||||||
|
device_probe.AMD.Archs.Polaris_Spoof,
|
||||||
device_probe.AMD.Archs.Vega,
|
device_probe.AMD.Archs.Vega,
|
||||||
device_probe.AMD.Archs.Navi,
|
device_probe.AMD.Archs.Navi,
|
||||||
]:
|
]:
|
||||||
support.build_support(self.model, self.constants, self.config).enable_kext("KDKlessWorkaround.kext", self.constants.kdkless_version, self.constants.kdkless_path)
|
support.build_support(self.model, self.constants, self.config).enable_kext("KDKlessWorkaround.kext", self.constants.kdkless_version, self.constants.kdkless_path)
|
||||||
return
|
return
|
||||||
|
|||||||
@@ -5,7 +5,7 @@ from resources import constants, device_probe, generate_smbios, utilities
|
|||||||
from resources.build import support
|
from resources.build import support
|
||||||
from data import model_array, smbios_data, cpu_data
|
from data import model_array, smbios_data, cpu_data
|
||||||
|
|
||||||
import binascii, shutil
|
import binascii, shutil, logging
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
|
|
||||||
|
|
||||||
@@ -38,7 +38,7 @@ class build_misc:
|
|||||||
if self.constants.fu_status is True:
|
if self.constants.fu_status is True:
|
||||||
support.build_support(self.model, self.constants, self.config).enable_kext("FeatureUnlock.kext", self.constants.featureunlock_version, self.constants.featureunlock_path)
|
support.build_support(self.model, self.constants, self.config).enable_kext("FeatureUnlock.kext", self.constants.featureunlock_version, self.constants.featureunlock_path)
|
||||||
if self.constants.fu_arguments is not None:
|
if self.constants.fu_arguments is not None:
|
||||||
print(f"- Adding additional FeatureUnlock args: {self.constants.fu_arguments}")
|
logging.info(f"- Adding additional FeatureUnlock args: {self.constants.fu_arguments}")
|
||||||
self.config["NVRAM"]["Add"]["7C436110-AB2A-4BBB-A880-FE41995C9F82"]["boot-args"] += self.constants.fu_arguments
|
self.config["NVRAM"]["Add"]["7C436110-AB2A-4BBB-A880-FE41995C9F82"]["boot-args"] += self.constants.fu_arguments
|
||||||
|
|
||||||
def restrict_events_handling(self):
|
def restrict_events_handling(self):
|
||||||
@@ -49,7 +49,7 @@ class build_misc:
|
|||||||
if self.model in ["MacBookPro6,1", "MacBookPro6,2", "MacBookPro9,1", "MacBookPro10,1"]:
|
if self.model in ["MacBookPro6,1", "MacBookPro6,2", "MacBookPro9,1", "MacBookPro10,1"]:
|
||||||
block_args += "gmux,"
|
block_args += "gmux,"
|
||||||
if self.model in model_array.MacPro:
|
if self.model in model_array.MacPro:
|
||||||
print("- Disabling memory error reporting")
|
logging.info("- Disabling memory error reporting")
|
||||||
block_args += "pcie,"
|
block_args += "pcie,"
|
||||||
gpu_dict = []
|
gpu_dict = []
|
||||||
if not self.constants.custom_model:
|
if not self.constants.custom_model:
|
||||||
@@ -65,20 +65,20 @@ class build_misc:
|
|||||||
device_probe.Intel.Archs.Haswell,
|
device_probe.Intel.Archs.Haswell,
|
||||||
device_probe.NVIDIA.Archs.Kepler,
|
device_probe.NVIDIA.Archs.Kepler,
|
||||||
]:
|
]:
|
||||||
print("- Disabling mediaanalysisd")
|
logging.info("- Disabling mediaanalysisd")
|
||||||
block_args += "media,"
|
block_args += "media,"
|
||||||
break
|
break
|
||||||
if block_args.endswith(","):
|
if block_args.endswith(","):
|
||||||
block_args = block_args[:-1]
|
block_args = block_args[:-1]
|
||||||
|
|
||||||
if block_args != "":
|
if block_args != "":
|
||||||
print(f"- Setting RestrictEvents block arguments: {block_args}")
|
logging.info(f"- Setting RestrictEvents block arguments: {block_args}")
|
||||||
support.build_support(self.model, self.constants, self.config).enable_kext("RestrictEvents.kext", self.constants.restrictevents_version, self.constants.restrictevents_path)
|
support.build_support(self.model, self.constants, self.config).enable_kext("RestrictEvents.kext", self.constants.restrictevents_version, self.constants.restrictevents_path)
|
||||||
self.config["NVRAM"]["Add"]["4D1FDA02-38C7-4A6A-9CC6-4BCCA8B30102"]["revblock"] = block_args
|
self.config["NVRAM"]["Add"]["4D1FDA02-38C7-4A6A-9CC6-4BCCA8B30102"]["revblock"] = block_args
|
||||||
|
|
||||||
patch_args = ""
|
patch_args = ""
|
||||||
if support.build_support(self.model, self.constants, self.config).get_item_by_kv(self.config["Kernel"]["Patch"], "Comment", "Reroute kern.hv_vmm_present patch (1)")["Enabled"] is True and self.constants.set_content_caching is True:
|
if support.build_support(self.model, self.constants, self.config).get_item_by_kv(self.config["Kernel"]["Patch"], "Comment", "Reroute kern.hv_vmm_present patch (1)")["Enabled"] is True and self.constants.set_content_caching is True:
|
||||||
print("- Fixing Content Caching support")
|
logging.info("- Fixing Content Caching support")
|
||||||
patch_args += "asset,"
|
patch_args += "asset,"
|
||||||
|
|
||||||
if patch_args.endswith(","):
|
if patch_args.endswith(","):
|
||||||
@@ -89,17 +89,17 @@ class build_misc:
|
|||||||
patch_args = "none"
|
patch_args = "none"
|
||||||
|
|
||||||
if patch_args != "":
|
if patch_args != "":
|
||||||
print(f"- Setting RestrictEvents patch arguments: {patch_args}")
|
logging.info(f"- Setting RestrictEvents patch arguments: {patch_args}")
|
||||||
support.build_support(self.model, self.constants, self.config).enable_kext("RestrictEvents.kext", self.constants.restrictevents_version, self.constants.restrictevents_path)
|
support.build_support(self.model, self.constants, self.config).enable_kext("RestrictEvents.kext", self.constants.restrictevents_version, self.constants.restrictevents_path)
|
||||||
self.config["NVRAM"]["Add"]["4D1FDA02-38C7-4A6A-9CC6-4BCCA8B30102"]["revpatch"] = patch_args
|
self.config["NVRAM"]["Add"]["4D1FDA02-38C7-4A6A-9CC6-4BCCA8B30102"]["revpatch"] = patch_args
|
||||||
|
|
||||||
if self.constants.custom_cpu_model == 0 or self.constants.custom_cpu_model == 1:
|
if self.constants.custom_cpu_model == 0 or self.constants.custom_cpu_model == 1:
|
||||||
self.config["NVRAM"]["Add"]["4D1FDA02-38C7-4A6A-9CC6-4BCCA8B30102"]["revcpu"] = self.constants.custom_cpu_model
|
self.config["NVRAM"]["Add"]["4D1FDA02-38C7-4A6A-9CC6-4BCCA8B30102"]["revcpu"] = self.constants.custom_cpu_model
|
||||||
if self.constants.custom_cpu_model_value != "":
|
if self.constants.custom_cpu_model_value != "":
|
||||||
print(f"- Adding custom CPU Name: {self.constants.custom_cpu_model_value}")
|
logging.info(f"- Adding custom CPU Name: {self.constants.custom_cpu_model_value}")
|
||||||
self.config["NVRAM"]["Add"]["4D1FDA02-38C7-4A6A-9CC6-4BCCA8B30102"]["revcpuname"] = self.constants.custom_cpu_model_value
|
self.config["NVRAM"]["Add"]["4D1FDA02-38C7-4A6A-9CC6-4BCCA8B30102"]["revcpuname"] = self.constants.custom_cpu_model_value
|
||||||
else:
|
else:
|
||||||
print("- Adding CPU Name Patch")
|
logging.info("- Adding CPU Name Patch")
|
||||||
support.build_support(self.model, self.constants, self.config).enable_kext("RestrictEvents.kext", self.constants.restrictevents_version, self.constants.restrictevents_path)
|
support.build_support(self.model, self.constants, self.config).enable_kext("RestrictEvents.kext", self.constants.restrictevents_version, self.constants.restrictevents_path)
|
||||||
|
|
||||||
if support.build_support(self.model, self.constants, self.config).get_kext_by_bundle_path("RestrictEvents.kext")["Enabled"] is False:
|
if support.build_support(self.model, self.constants, self.config).get_kext_by_bundle_path("RestrictEvents.kext")["Enabled"] is False:
|
||||||
@@ -125,7 +125,7 @@ class build_misc:
|
|||||||
if self.constants.firewire_boot is True and generate_smbios.check_firewire(self.model) is True:
|
if self.constants.firewire_boot is True and generate_smbios.check_firewire(self.model) is True:
|
||||||
# Enable FireWire Boot Support
|
# Enable FireWire Boot Support
|
||||||
# Applicable for both native FireWire and Thunderbolt to FireWire adapters
|
# Applicable for both native FireWire and Thunderbolt to FireWire adapters
|
||||||
print("- Enabling FireWire Boot Support")
|
logging.info("- Enabling FireWire Boot Support")
|
||||||
support.build_support(self.model, self.constants, self.config).enable_kext("IOFireWireFamily.kext", self.constants.fw_kext, self.constants.fw_family_path)
|
support.build_support(self.model, self.constants, self.config).enable_kext("IOFireWireFamily.kext", self.constants.fw_kext, self.constants.fw_family_path)
|
||||||
support.build_support(self.model, self.constants, self.config).enable_kext("IOFireWireSBP2.kext", self.constants.fw_kext, self.constants.fw_sbp2_path)
|
support.build_support(self.model, self.constants, self.config).enable_kext("IOFireWireSBP2.kext", self.constants.fw_kext, self.constants.fw_sbp2_path)
|
||||||
support.build_support(self.model, self.constants, self.config).enable_kext("IOFireWireSerialBusProtocolTransport.kext", self.constants.fw_kext, self.constants.fw_bus_path)
|
support.build_support(self.model, self.constants, self.config).enable_kext("IOFireWireSerialBusProtocolTransport.kext", self.constants.fw_kext, self.constants.fw_bus_path)
|
||||||
@@ -148,7 +148,7 @@ class build_misc:
|
|||||||
|
|
||||||
def thunderbolt_handling(self):
|
def thunderbolt_handling(self):
|
||||||
if self.constants.disable_tb is True and self.model in ["MacBookPro11,1", "MacBookPro11,2", "MacBookPro11,3", "MacBookPro11,4", "MacBookPro11,5"]:
|
if self.constants.disable_tb is True and self.model in ["MacBookPro11,1", "MacBookPro11,2", "MacBookPro11,3", "MacBookPro11,4", "MacBookPro11,5"]:
|
||||||
print("- Disabling 2013-2014 laptop Thunderbolt Controller")
|
logging.info("- Disabling 2013-2014 laptop Thunderbolt Controller")
|
||||||
if self.model in ["MacBookPro11,3", "MacBookPro11,5"]:
|
if self.model in ["MacBookPro11,3", "MacBookPro11,5"]:
|
||||||
# 15" dGPU models: IOACPIPlane:/_SB/PCI0@0/PEG1@10001/UPSB@0/DSB0@0/NHI0@0
|
# 15" dGPU models: IOACPIPlane:/_SB/PCI0@0/PEG1@10001/UPSB@0/DSB0@0/NHI0@0
|
||||||
tb_device_path = "PciRoot(0x0)/Pci(0x1,0x1)/Pci(0x0,0x0)/Pci(0x0,0x0)/Pci(0x0,0x0)"
|
tb_device_path = "PciRoot(0x0)/Pci(0x1,0x1)/Pci(0x0,0x0)/Pci(0x0,0x0)/Pci(0x0,0x0)"
|
||||||
@@ -175,7 +175,7 @@ class build_misc:
|
|||||||
(self.model in model_array.Missing_USB_Map or self.model in model_array.Missing_USB_Map_Ventura)
|
(self.model in model_array.Missing_USB_Map or self.model in model_array.Missing_USB_Map_Ventura)
|
||||||
or self.constants.serial_settings in ["Moderate", "Advanced"])
|
or self.constants.serial_settings in ["Moderate", "Advanced"])
|
||||||
):
|
):
|
||||||
print("- Adding USB-Map.kext")
|
logging.info("- Adding USB-Map.kext")
|
||||||
Path(self.constants.map_kext_folder).mkdir()
|
Path(self.constants.map_kext_folder).mkdir()
|
||||||
Path(self.constants.map_contents_folder).mkdir()
|
Path(self.constants.map_contents_folder).mkdir()
|
||||||
shutil.copy(usb_map_path, self.constants.map_contents_folder)
|
shutil.copy(usb_map_path, self.constants.map_contents_folder)
|
||||||
@@ -196,7 +196,7 @@ class build_misc:
|
|||||||
smbios_data.smbios_dictionary[self.model]["CPU Generation"] <= cpu_data.cpu_data.penryn.value or \
|
smbios_data.smbios_dictionary[self.model]["CPU Generation"] <= cpu_data.cpu_data.penryn.value or \
|
||||||
self.model in ["MacPro4,1", "MacPro5,1"]
|
self.model in ["MacPro4,1", "MacPro5,1"]
|
||||||
):
|
):
|
||||||
print("- Adding UHCI/OHCI USB support")
|
logging.info("- Adding UHCI/OHCI USB support")
|
||||||
shutil.copy(self.constants.apple_usb_11_injector_path, self.constants.kexts_path)
|
shutil.copy(self.constants.apple_usb_11_injector_path, self.constants.kexts_path)
|
||||||
support.build_support(self.model, self.constants, self.config).get_kext_by_bundle_path("USB1.1-Injector.kext/Contents/PlugIns/AppleUSBOHCI.kext")["Enabled"] = True
|
support.build_support(self.model, self.constants, self.config).get_kext_by_bundle_path("USB1.1-Injector.kext/Contents/PlugIns/AppleUSBOHCI.kext")["Enabled"] = True
|
||||||
support.build_support(self.model, self.constants, self.config).get_kext_by_bundle_path("USB1.1-Injector.kext/Contents/PlugIns/AppleUSBOHCIPCI.kext")["Enabled"] = True
|
support.build_support(self.model, self.constants, self.config).get_kext_by_bundle_path("USB1.1-Injector.kext/Contents/PlugIns/AppleUSBOHCIPCI.kext")["Enabled"] = True
|
||||||
@@ -207,11 +207,11 @@ class build_misc:
|
|||||||
# DEBUG Settings (OpenCorePkg and Kernel Space)
|
# DEBUG Settings (OpenCorePkg and Kernel Space)
|
||||||
|
|
||||||
if self.constants.verbose_debug is True:
|
if self.constants.verbose_debug is True:
|
||||||
print("- Enabling Verbose boot")
|
logging.info("- Enabling Verbose boot")
|
||||||
self.config["NVRAM"]["Add"]["7C436110-AB2A-4BBB-A880-FE41995C9F82"]["boot-args"] += " -v"
|
self.config["NVRAM"]["Add"]["7C436110-AB2A-4BBB-A880-FE41995C9F82"]["boot-args"] += " -v"
|
||||||
|
|
||||||
if self.constants.kext_debug is True:
|
if self.constants.kext_debug is True:
|
||||||
print("- Enabling DEBUG Kexts")
|
logging.info("- Enabling DEBUG Kexts")
|
||||||
self.config["NVRAM"]["Add"]["7C436110-AB2A-4BBB-A880-FE41995C9F82"]["boot-args"] += " -liludbgall liludump=90"
|
self.config["NVRAM"]["Add"]["7C436110-AB2A-4BBB-A880-FE41995C9F82"]["boot-args"] += " -liludbgall liludump=90"
|
||||||
# Disabled due to macOS Monterey crashing shortly after kernel init
|
# Disabled due to macOS Monterey crashing shortly after kernel init
|
||||||
# Use DebugEnhancer.kext instead
|
# Use DebugEnhancer.kext instead
|
||||||
@@ -219,7 +219,7 @@ class build_misc:
|
|||||||
support.build_support(self.model, self.constants, self.config).enable_kext("DebugEnhancer.kext", self.constants.debugenhancer_version, self.constants.debugenhancer_path)
|
support.build_support(self.model, self.constants, self.config).enable_kext("DebugEnhancer.kext", self.constants.debugenhancer_version, self.constants.debugenhancer_path)
|
||||||
|
|
||||||
if self.constants.opencore_debug is True:
|
if self.constants.opencore_debug is True:
|
||||||
print("- Enabling DEBUG OpenCore")
|
logging.info("- Enabling DEBUG OpenCore")
|
||||||
self.config["Misc"]["Debug"]["Target"] = 0x43
|
self.config["Misc"]["Debug"]["Target"] = 0x43
|
||||||
self.config["Misc"]["Debug"]["DisplayLevel"] = 0x80000042
|
self.config["Misc"]["Debug"]["DisplayLevel"] = 0x80000042
|
||||||
|
|
||||||
@@ -227,7 +227,7 @@ class build_misc:
|
|||||||
# OpenCorePkg Settings
|
# OpenCorePkg Settings
|
||||||
|
|
||||||
# OpenCanopy Settings (GUI)
|
# OpenCanopy Settings (GUI)
|
||||||
print("- Adding OpenCanopy GUI")
|
logging.info("- Adding OpenCanopy GUI")
|
||||||
shutil.rmtree(self.constants.resources_path, onerror=self.rmtree_handler)
|
shutil.rmtree(self.constants.resources_path, onerror=self.rmtree_handler)
|
||||||
shutil.copy(self.constants.gui_path, self.constants.oc_folder)
|
shutil.copy(self.constants.gui_path, self.constants.oc_folder)
|
||||||
support.build_support(self.model, self.constants, self.config).get_efi_binary_by_path("OpenCanopy.efi", "UEFI", "Drivers")["Enabled"] = True
|
support.build_support(self.model, self.constants, self.config).get_efi_binary_by_path("OpenCanopy.efi", "UEFI", "Drivers")["Enabled"] = True
|
||||||
@@ -236,14 +236,14 @@ class build_misc:
|
|||||||
support.build_support(self.model, self.constants, self.config).get_efi_binary_by_path("ResetNvramEntry.efi", "UEFI", "Drivers")["Enabled"] = True
|
support.build_support(self.model, self.constants, self.config).get_efi_binary_by_path("ResetNvramEntry.efi", "UEFI", "Drivers")["Enabled"] = True
|
||||||
|
|
||||||
if self.constants.showpicker is False:
|
if self.constants.showpicker is False:
|
||||||
print("- Hiding OpenCore picker")
|
logging.info("- Hiding OpenCore picker")
|
||||||
self.config["Misc"]["Boot"]["ShowPicker"] = False
|
self.config["Misc"]["Boot"]["ShowPicker"] = False
|
||||||
|
|
||||||
if self.constants.oc_timeout != 5:
|
if self.constants.oc_timeout != 5:
|
||||||
print(f"- Setting custom OpenCore picker timeout to {self.constants.oc_timeout} seconds")
|
logging.info(f"- Setting custom OpenCore picker timeout to {self.constants.oc_timeout} seconds")
|
||||||
self.config["Misc"]["Boot"]["Timeout"] = self.constants.oc_timeout
|
self.config["Misc"]["Boot"]["Timeout"] = self.constants.oc_timeout
|
||||||
|
|
||||||
if self.constants.vault is True and utilities.check_command_line_tools() is True:
|
if self.constants.vault is True and utilities.check_command_line_tools() is True:
|
||||||
print("- Setting Vault configuration")
|
logging.info("- Setting Vault configuration")
|
||||||
self.config["Misc"]["Security"]["Vault"] = "Secure"
|
self.config["Misc"]["Security"]["Vault"] = "Secure"
|
||||||
support.build_support(self.model, self.constants, self.config).get_efi_binary_by_path("OpenShell.efi", "Misc", "Tools")["Enabled"] = False
|
support.build_support(self.model, self.constants, self.config).get_efi_binary_by_path("OpenShell.efi", "Misc", "Tools")["Enabled"] = False
|
||||||
@@ -5,6 +5,8 @@ from resources import constants, device_probe, utilities
|
|||||||
from resources.build import support
|
from resources.build import support
|
||||||
from data import smbios_data
|
from data import smbios_data
|
||||||
|
|
||||||
|
import logging
|
||||||
|
|
||||||
class build_wireless:
|
class build_wireless:
|
||||||
|
|
||||||
def __init__(self, model, versions, config):
|
def __init__(self, model, versions, config):
|
||||||
@@ -24,22 +26,22 @@ class build_wireless:
|
|||||||
|
|
||||||
|
|
||||||
def on_model(self):
|
def on_model(self):
|
||||||
print(f"- Found Wireless Device {utilities.friendly_hex(self.computer.wifi.vendor_id)}:{utilities.friendly_hex(self.computer.wifi.device_id)}")
|
logging.info(f"- Found Wireless Device {utilities.friendly_hex(self.computer.wifi.vendor_id)}:{utilities.friendly_hex(self.computer.wifi.device_id)}")
|
||||||
self.config["#Revision"]["Hardware-Wifi"] = f"{utilities.friendly_hex(self.computer.wifi.vendor_id)}:{utilities.friendly_hex(self.computer.wifi.device_id)}"
|
self.config["#Revision"]["Hardware-Wifi"] = f"{utilities.friendly_hex(self.computer.wifi.vendor_id)}:{utilities.friendly_hex(self.computer.wifi.device_id)}"
|
||||||
|
|
||||||
if isinstance(self.computer.wifi, device_probe.Broadcom):
|
if isinstance(self.computer.wifi, device_probe.Broadcom):
|
||||||
# This works around OCLP spoofing the Wifi card and therefore unable to actually detect the correct device
|
# This works around OCLP spoofing the Wifi card and therefore unable to actually detect the correct device
|
||||||
if self.computer.wifi.chipset == device_probe.Broadcom.Chipsets.AirportBrcmNIC and self.constants.validate is False and self.computer.wifi.country_code:
|
if self.computer.wifi.chipset == device_probe.Broadcom.Chipsets.AirportBrcmNIC and self.constants.validate is False and self.computer.wifi.country_code:
|
||||||
support.build_support(self.model, self.constants, self.config).enable_kext("AirportBrcmFixup.kext", self.constants.airportbcrmfixup_version, self.constants.airportbcrmfixup_path)
|
support.build_support(self.model, self.constants, self.config).enable_kext("AirportBrcmFixup.kext", self.constants.airportbcrmfixup_version, self.constants.airportbcrmfixup_path)
|
||||||
print(f"- Setting Wireless Card's Country Code: {self.computer.wifi.country_code}")
|
logging.info(f"- Setting Wireless Card's Country Code: {self.computer.wifi.country_code}")
|
||||||
if self.computer.wifi.pci_path:
|
if self.computer.wifi.pci_path:
|
||||||
arpt_path = self.computer.wifi.pci_path
|
arpt_path = self.computer.wifi.pci_path
|
||||||
print(f"- Found ARPT device at {arpt_path}")
|
logging.info(f"- Found ARPT device at {arpt_path}")
|
||||||
self.config["DeviceProperties"]["Add"][arpt_path] = {"brcmfx-country": self.computer.wifi.country_code}
|
self.config["DeviceProperties"]["Add"][arpt_path] = {"brcmfx-country": self.computer.wifi.country_code}
|
||||||
else:
|
else:
|
||||||
self.config["NVRAM"]["Add"]["7C436110-AB2A-4BBB-A880-FE41995C9F82"]["boot-args"] += f" brcmfx-country={self.computer.wifi.country_code}"
|
self.config["NVRAM"]["Add"]["7C436110-AB2A-4BBB-A880-FE41995C9F82"]["boot-args"] += f" brcmfx-country={self.computer.wifi.country_code}"
|
||||||
if self.constants.enable_wake_on_wlan is True:
|
if self.constants.enable_wake_on_wlan is True:
|
||||||
print("- Enabling Wake on WLAN support")
|
logging.info("- Enabling Wake on WLAN support")
|
||||||
self.config["NVRAM"]["Add"]["7C436110-AB2A-4BBB-A880-FE41995C9F82"]["boot-args"] += f" -brcmfxwowl"
|
self.config["NVRAM"]["Add"]["7C436110-AB2A-4BBB-A880-FE41995C9F82"]["boot-args"] += f" -brcmfxwowl"
|
||||||
elif self.computer.wifi.chipset == device_probe.Broadcom.Chipsets.AirPortBrcm4360:
|
elif self.computer.wifi.chipset == device_probe.Broadcom.Chipsets.AirPortBrcm4360:
|
||||||
self.wifi_fake_id()
|
self.wifi_fake_id()
|
||||||
@@ -63,20 +65,20 @@ class build_wireless:
|
|||||||
if not "Wireless Model" in smbios_data.smbios_dictionary[self.model]:
|
if not "Wireless Model" in smbios_data.smbios_dictionary[self.model]:
|
||||||
return
|
return
|
||||||
if smbios_data.smbios_dictionary[self.model]["Wireless Model"] == device_probe.Broadcom.Chipsets.AirPortBrcm4360:
|
if smbios_data.smbios_dictionary[self.model]["Wireless Model"] == device_probe.Broadcom.Chipsets.AirPortBrcm4360:
|
||||||
print("- Enabling BCM943224 and BCM94331 Networking Support")
|
logging.info("- Enabling BCM943224 and BCM94331 Networking Support")
|
||||||
self.wifi_fake_id()
|
self.wifi_fake_id()
|
||||||
elif smbios_data.smbios_dictionary[self.model]["Wireless Model"] == device_probe.Broadcom.Chipsets.AirPortBrcm4331:
|
elif smbios_data.smbios_dictionary[self.model]["Wireless Model"] == device_probe.Broadcom.Chipsets.AirPortBrcm4331:
|
||||||
print("- Enabling BCM94328 Networking Support")
|
logging.info("- Enabling BCM94328 Networking Support")
|
||||||
support.build_support(self.model, self.constants, self.config).enable_kext("corecaptureElCap.kext", self.constants.corecaptureelcap_version, self.constants.corecaptureelcap_path)
|
support.build_support(self.model, self.constants, self.config).enable_kext("corecaptureElCap.kext", self.constants.corecaptureelcap_version, self.constants.corecaptureelcap_path)
|
||||||
support.build_support(self.model, self.constants, self.config).enable_kext("IO80211ElCap.kext", self.constants.io80211elcap_version, self.constants.io80211elcap_path)
|
support.build_support(self.model, self.constants, self.config).enable_kext("IO80211ElCap.kext", self.constants.io80211elcap_version, self.constants.io80211elcap_path)
|
||||||
support.build_support(self.model, self.constants, self.config).get_kext_by_bundle_path("IO80211ElCap.kext/Contents/PlugIns/AirPortBrcm4331.kext")["Enabled"] = True
|
support.build_support(self.model, self.constants, self.config).get_kext_by_bundle_path("IO80211ElCap.kext/Contents/PlugIns/AirPortBrcm4331.kext")["Enabled"] = True
|
||||||
elif smbios_data.smbios_dictionary[self.model]["Wireless Model"] == device_probe.Broadcom.Chipsets.AirPortBrcm43224:
|
elif smbios_data.smbios_dictionary[self.model]["Wireless Model"] == device_probe.Broadcom.Chipsets.AirPortBrcm43224:
|
||||||
print("- Enabling BCM94328 Networking Support")
|
logging.info("- Enabling BCM94328 Networking Support")
|
||||||
support.build_support(self.model, self.constants, self.config).enable_kext("corecaptureElCap.kext", self.constants.corecaptureelcap_version, self.constants.corecaptureelcap_path)
|
support.build_support(self.model, self.constants, self.config).enable_kext("corecaptureElCap.kext", self.constants.corecaptureelcap_version, self.constants.corecaptureelcap_path)
|
||||||
support.build_support(self.model, self.constants, self.config).enable_kext("IO80211ElCap.kext", self.constants.io80211elcap_version, self.constants.io80211elcap_path)
|
support.build_support(self.model, self.constants, self.config).enable_kext("IO80211ElCap.kext", self.constants.io80211elcap_version, self.constants.io80211elcap_path)
|
||||||
support.build_support(self.model, self.constants, self.config).get_kext_by_bundle_path("IO80211ElCap.kext/Contents/PlugIns/AppleAirPortBrcm43224.kext")["Enabled"] = True
|
support.build_support(self.model, self.constants, self.config).get_kext_by_bundle_path("IO80211ElCap.kext/Contents/PlugIns/AppleAirPortBrcm43224.kext")["Enabled"] = True
|
||||||
elif smbios_data.smbios_dictionary[self.model]["Wireless Model"] == device_probe.Atheros.Chipsets.AirPortAtheros40:
|
elif smbios_data.smbios_dictionary[self.model]["Wireless Model"] == device_probe.Atheros.Chipsets.AirPortAtheros40:
|
||||||
print("- Enabling Atheros Networking Support")
|
logging.info("- Enabling Atheros Networking Support")
|
||||||
support.build_support(self.model, self.constants, self.config).enable_kext("corecaptureElCap.kext", self.constants.corecaptureelcap_version, self.constants.corecaptureelcap_path)
|
support.build_support(self.model, self.constants, self.config).enable_kext("corecaptureElCap.kext", self.constants.corecaptureelcap_version, self.constants.corecaptureelcap_path)
|
||||||
support.build_support(self.model, self.constants, self.config).enable_kext("IO80211ElCap.kext", self.constants.io80211elcap_version, self.constants.io80211elcap_path)
|
support.build_support(self.model, self.constants, self.config).enable_kext("IO80211ElCap.kext", self.constants.io80211elcap_version, self.constants.io80211elcap_path)
|
||||||
support.build_support(self.model, self.constants, self.config).get_kext_by_bundle_path("IO80211ElCap.kext/Contents/PlugIns/AirPortAtheros40.kext")["Enabled"] = True
|
support.build_support(self.model, self.constants, self.config).get_kext_by_bundle_path("IO80211ElCap.kext/Contents/PlugIns/AirPortAtheros40.kext")["Enabled"] = True
|
||||||
@@ -92,7 +94,7 @@ class build_wireless:
|
|||||||
if support.build_support(self.model, self.constants, self.config).get_kext_by_bundle_path("AirportBrcmFixup.kext")["Enabled"] is False:
|
if support.build_support(self.model, self.constants, self.config).get_kext_by_bundle_path("AirportBrcmFixup.kext")["Enabled"] is False:
|
||||||
return
|
return
|
||||||
|
|
||||||
print("- Enabling Wake on WLAN support")
|
logging.info("- Enabling Wake on WLAN support")
|
||||||
self.config["NVRAM"]["Add"]["7C436110-AB2A-4BBB-A880-FE41995C9F82"]["boot-args"] += f" -brcmfxwowl"
|
self.config["NVRAM"]["Add"]["7C436110-AB2A-4BBB-A880-FE41995C9F82"]["boot-args"] += f" -brcmfxwowl"
|
||||||
|
|
||||||
|
|
||||||
@@ -103,10 +105,10 @@ class build_wireless:
|
|||||||
support.build_support(self.model, self.constants, self.config).get_kext_by_bundle_path("AirportBrcmFixup.kext/Contents/PlugIns/AirPortBrcmNIC_Injector.kext")["Enabled"] = True
|
support.build_support(self.model, self.constants, self.config).get_kext_by_bundle_path("AirportBrcmFixup.kext/Contents/PlugIns/AirPortBrcmNIC_Injector.kext")["Enabled"] = True
|
||||||
if not self.constants.custom_model and self.computer.wifi and self.computer.wifi.pci_path:
|
if not self.constants.custom_model and self.computer.wifi and self.computer.wifi.pci_path:
|
||||||
arpt_path = self.computer.wifi.pci_path
|
arpt_path = self.computer.wifi.pci_path
|
||||||
print(f"- Found ARPT device at {arpt_path}")
|
logging.info(f"- Found ARPT device at {arpt_path}")
|
||||||
else:
|
else:
|
||||||
if not self.model in smbios_data.smbios_dictionary:
|
if not self.model in smbios_data.smbios_dictionary:
|
||||||
print("No known PCI pathing for this model")
|
logging.info("No known PCI pathing for this model")
|
||||||
return
|
return
|
||||||
if "nForce Chipset" in smbios_data.smbios_dictionary[self.model]:
|
if "nForce Chipset" in smbios_data.smbios_dictionary[self.model]:
|
||||||
# Nvidia chipsets all have the same path to ARPT
|
# Nvidia chipsets all have the same path to ARPT
|
||||||
@@ -122,8 +124,8 @@ class build_wireless:
|
|||||||
# Assumes we have a laptop with Intel chipset
|
# Assumes we have a laptop with Intel chipset
|
||||||
# iMac11,x-12,x also apply
|
# iMac11,x-12,x also apply
|
||||||
arpt_path = "PciRoot(0x0)/Pci(0x1C,0x1)/Pci(0x0,0x0)"
|
arpt_path = "PciRoot(0x0)/Pci(0x1C,0x1)/Pci(0x0,0x0)"
|
||||||
print(f"- Using known ARPT Path: {arpt_path}")
|
logging.info(f"- Using known ARPT Path: {arpt_path}")
|
||||||
|
|
||||||
if not self.constants.custom_model and self.computer.wifi and self.constants.validate is False and self.computer.wifi.country_code:
|
if not self.constants.custom_model and self.computer.wifi and self.constants.validate is False and self.computer.wifi.country_code:
|
||||||
print(f"- Applying fake ID for WiFi, setting Country Code: {self.computer.wifi.country_code}")
|
logging.info(f"- Applying fake ID for WiFi, setting Country Code: {self.computer.wifi.country_code}")
|
||||||
self.config["DeviceProperties"]["Add"][arpt_path] = {"brcmfx-country": self.computer.wifi.country_code}
|
self.config["DeviceProperties"]["Add"][arpt_path] = {"brcmfx-country": self.computer.wifi.country_code}
|
||||||
@@ -5,6 +5,7 @@ from resources import constants, utilities
|
|||||||
from resources.build import support
|
from resources.build import support
|
||||||
|
|
||||||
import binascii
|
import binascii
|
||||||
|
import logging
|
||||||
|
|
||||||
|
|
||||||
class build_security:
|
class build_security:
|
||||||
@@ -20,51 +21,53 @@ class build_security:
|
|||||||
if self.constants.sip_status is False or self.constants.custom_sip_value:
|
if self.constants.sip_status is False or self.constants.custom_sip_value:
|
||||||
# Work-around 12.3 bug where Electron apps no longer launch with SIP lowered
|
# Work-around 12.3 bug where Electron apps no longer launch with SIP lowered
|
||||||
# Unknown whether this is intended behavior or not, revisit with 12.4
|
# Unknown whether this is intended behavior or not, revisit with 12.4
|
||||||
print("- Adding ipc_control_port_options=0 to boot-args")
|
logging.info("- Adding ipc_control_port_options=0 to boot-args")
|
||||||
self.config["NVRAM"]["Add"]["7C436110-AB2A-4BBB-A880-FE41995C9F82"]["boot-args"] += " ipc_control_port_options=0"
|
self.config["NVRAM"]["Add"]["7C436110-AB2A-4BBB-A880-FE41995C9F82"]["boot-args"] += " ipc_control_port_options=0"
|
||||||
# Adds AutoPkgInstaller for Automatic OpenCore-Patcher installation
|
# Adds AutoPkgInstaller for Automatic OpenCore-Patcher installation
|
||||||
# Only install if running the GUI (AutoPkg-Assets.pkg requires the GUI)
|
# Only install if running the GUI (AutoPkg-Assets.pkg requires the GUI)
|
||||||
if self.constants.wxpython_variant is True:
|
if self.constants.wxpython_variant is True:
|
||||||
support.build_support(self.model, self.constants, self.config).enable_kext("AutoPkgInstaller.kext", self.constants.autopkg_version, self.constants.autopkg_path)
|
support.build_support(self.model, self.constants, self.config).enable_kext("AutoPkgInstaller.kext", self.constants.autopkg_version, self.constants.autopkg_path)
|
||||||
if self.constants.custom_sip_value:
|
if self.constants.custom_sip_value:
|
||||||
print(f"- Setting SIP value to: {self.constants.custom_sip_value}")
|
logging.info(f"- Setting SIP value to: {self.constants.custom_sip_value}")
|
||||||
self.config["NVRAM"]["Add"]["7C436110-AB2A-4BBB-A880-FE41995C9F82"]["csr-active-config"] = utilities.string_to_hex(self.constants.custom_sip_value.lstrip("0x"))
|
self.config["NVRAM"]["Add"]["7C436110-AB2A-4BBB-A880-FE41995C9F82"]["csr-active-config"] = utilities.string_to_hex(self.constants.custom_sip_value.lstrip("0x"))
|
||||||
elif self.constants.sip_status is False:
|
elif self.constants.sip_status is False:
|
||||||
print("- Set SIP to allow Root Volume patching")
|
logging.info("- Set SIP to allow Root Volume patching")
|
||||||
self.config["NVRAM"]["Add"]["7C436110-AB2A-4BBB-A880-FE41995C9F82"]["csr-active-config"] = binascii.unhexlify("03080000")
|
self.config["NVRAM"]["Add"]["7C436110-AB2A-4BBB-A880-FE41995C9F82"]["csr-active-config"] = binascii.unhexlify("03080000")
|
||||||
|
|
||||||
# apfs.kext has an undocumented boot-arg that allows FileVault usage on broken APFS seals (-arv_allow_fv)
|
# apfs.kext has an undocumented boot-arg that allows FileVault usage on broken APFS seals (-arv_allow_fv)
|
||||||
# This is however hidden behind kern.development, thus we patch _apfs_filevault_allowed to always return true
|
# This is however hidden behind kern.development, thus we patch _apfs_filevault_allowed to always return true
|
||||||
# Note this function was added in 11.3 (20E232, 20.4), older builds do not support this (ie. 11.2.3)
|
# Note this function was added in 11.3 (20E232, 20.4), older builds do not support this (ie. 11.2.3)
|
||||||
print("- Allowing FileVault on Root Patched systems")
|
logging.info("- Allowing FileVault on Root Patched systems")
|
||||||
support.build_support(self.model, self.constants, self.config).get_item_by_kv(self.config["Kernel"]["Patch"], "Comment", "Force FileVault on Broken Seal")["Enabled"] = True
|
support.build_support(self.model, self.constants, self.config).get_item_by_kv(self.config["Kernel"]["Patch"], "Comment", "Force FileVault on Broken Seal")["Enabled"] = True
|
||||||
# Lets us check in sys_patch.py if config supports FileVault
|
# Lets us check in sys_patch.py if config supports FileVault
|
||||||
self.config["NVRAM"]["Add"]["4D1FDA02-38C7-4A6A-9CC6-4BCCA8B30102"]["OCLP-Settings"] += " -allow_fv"
|
self.config["NVRAM"]["Add"]["4D1FDA02-38C7-4A6A-9CC6-4BCCA8B30102"]["OCLP-Settings"] += " -allow_fv"
|
||||||
|
|
||||||
# Patch KC UUID panics due to RSR installation
|
# Patch KC UUID panics due to RSR installation
|
||||||
# - Ref: https://github.com/dortania/OpenCore-Legacy-Patcher/issues/1019
|
# - Ref: https://github.com/dortania/OpenCore-Legacy-Patcher/issues/1019
|
||||||
print("- Enabling KC UUID mismatch patch")
|
logging.info("- Enabling KC UUID mismatch patch")
|
||||||
self.config["NVRAM"]["Add"]["7C436110-AB2A-4BBB-A880-FE41995C9F82"]["boot-args"] += " -nokcmismatchpanic"
|
self.config["NVRAM"]["Add"]["7C436110-AB2A-4BBB-A880-FE41995C9F82"]["boot-args"] += " -nokcmismatchpanic"
|
||||||
support.build_support(self.model, self.constants, self.config).enable_kext("RSRHelper.kext", self.constants.rsrhelper_version, self.constants.rsrhelper_path)
|
support.build_support(self.model, self.constants, self.config).enable_kext("RSRHelper.kext", self.constants.rsrhelper_version, self.constants.rsrhelper_path)
|
||||||
|
|
||||||
if self.constants.disable_cs_lv is True:
|
if self.constants.disable_cs_lv is True:
|
||||||
print("- Disabling Library Validation")
|
|
||||||
# In Ventura, LV patch broke. For now, add AMFI arg
|
# In Ventura, LV patch broke. For now, add AMFI arg
|
||||||
# Before merging into mainline, this needs to be resolved
|
# Before merging into mainline, this needs to be resolved
|
||||||
|
if self.constants.disable_amfi is True:
|
||||||
|
logging.info("- Disabling AMFI")
|
||||||
|
self.config["NVRAM"]["Add"]["7C436110-AB2A-4BBB-A880-FE41995C9F82"]["boot-args"] += " amfi=0x80"
|
||||||
|
else:
|
||||||
|
logging.info("- Disabling Library Validation")
|
||||||
support.build_support(self.model, self.constants, self.config).get_item_by_kv(self.config["Kernel"]["Patch"], "Comment", "Disable Library Validation Enforcement")["Enabled"] = True
|
support.build_support(self.model, self.constants, self.config).get_item_by_kv(self.config["Kernel"]["Patch"], "Comment", "Disable Library Validation Enforcement")["Enabled"] = True
|
||||||
support.build_support(self.model, self.constants, self.config).get_item_by_kv(self.config["Kernel"]["Patch"], "Comment", "Disable _csr_check() in _vnode_check_signature")["Enabled"] = True
|
support.build_support(self.model, self.constants, self.config).get_item_by_kv(self.config["Kernel"]["Patch"], "Comment", "Disable _csr_check() in _vnode_check_signature")["Enabled"] = True
|
||||||
if self.constants.disable_amfi is True:
|
|
||||||
self.config["NVRAM"]["Add"]["7C436110-AB2A-4BBB-A880-FE41995C9F82"]["boot-args"] += " amfi=0x80"
|
|
||||||
self.config["NVRAM"]["Add"]["4D1FDA02-38C7-4A6A-9CC6-4BCCA8B30102"]["OCLP-Settings"] += " -allow_amfi"
|
self.config["NVRAM"]["Add"]["4D1FDA02-38C7-4A6A-9CC6-4BCCA8B30102"]["OCLP-Settings"] += " -allow_amfi"
|
||||||
# CSLVFixup simply patches out __RESTRICT and __restrict out of the Music.app Binary
|
# CSLVFixup simply patches out __RESTRICT and __restrict out of the Music.app Binary
|
||||||
# Ref: https://pewpewthespells.com/blog/blocking_code_injection_on_ios_and_os_x.html
|
# Ref: https://pewpewthespells.com/blog/blocking_code_injection_on_ios_and_os_x.html
|
||||||
support.build_support(self.model, self.constants, self.config).enable_kext("CSLVFixup.kext", self.constants.cslvfixup_version, self.constants.cslvfixup_path)
|
support.build_support(self.model, self.constants, self.config).enable_kext("CSLVFixup.kext", self.constants.cslvfixup_version, self.constants.cslvfixup_path)
|
||||||
|
|
||||||
if self.constants.secure_status is False:
|
if self.constants.secure_status is False:
|
||||||
print("- Disabling SecureBootModel")
|
logging.info("- Disabling SecureBootModel")
|
||||||
self.config["Misc"]["Security"]["SecureBootModel"] = "Disabled"
|
self.config["Misc"]["Security"]["SecureBootModel"] = "Disabled"
|
||||||
if self.constants.force_vmm is True:
|
if self.constants.force_vmm is True:
|
||||||
print("- Forcing VMM patchset to support OTA updates")
|
logging.info("- Forcing VMM patchset to support OTA updates")
|
||||||
support.build_support(self.model, self.constants, self.config).get_item_by_kv(self.config["Kernel"]["Patch"], "Comment", "Reroute kern.hv_vmm_present patch (1)")["Enabled"] = True
|
support.build_support(self.model, self.constants, self.config).get_item_by_kv(self.config["Kernel"]["Patch"], "Comment", "Reroute kern.hv_vmm_present patch (1)")["Enabled"] = True
|
||||||
support.build_support(self.model, self.constants, self.config).get_item_by_kv(self.config["Kernel"]["Patch"], "Comment", "Reroute kern.hv_vmm_present patch (2) Legacy")["Enabled"] = True
|
support.build_support(self.model, self.constants, self.config).get_item_by_kv(self.config["Kernel"]["Patch"], "Comment", "Reroute kern.hv_vmm_present patch (2) Legacy")["Enabled"] = True
|
||||||
support.build_support(self.model, self.constants, self.config).get_item_by_kv(self.config["Kernel"]["Patch"], "Comment", "Reroute kern.hv_vmm_present patch (2) Ventura")["Enabled"] = True
|
support.build_support(self.model, self.constants, self.config).get_item_by_kv(self.config["Kernel"]["Patch"], "Comment", "Reroute kern.hv_vmm_present patch (2) Ventura")["Enabled"] = True
|
||||||
|
|||||||
@@ -5,7 +5,7 @@ from resources import constants, utilities, generate_smbios
|
|||||||
from resources.build import support
|
from resources.build import support
|
||||||
from data import smbios_data, cpu_data, model_array
|
from data import smbios_data, cpu_data, model_array
|
||||||
|
|
||||||
import subprocess, plistlib, binascii, uuid, ast
|
import subprocess, plistlib, binascii, uuid, ast, logging
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
|
|
||||||
class build_smbios:
|
class build_smbios:
|
||||||
@@ -19,26 +19,26 @@ class build_smbios:
|
|||||||
if self.constants.allow_oc_everywhere is False or self.constants.allow_native_spoofs is True:
|
if self.constants.allow_oc_everywhere is False or self.constants.allow_native_spoofs is True:
|
||||||
if self.constants.serial_settings == "None":
|
if self.constants.serial_settings == "None":
|
||||||
# Credit to Parrotgeek1 for boot.efi and hv_vmm_present patch sets
|
# Credit to Parrotgeek1 for boot.efi and hv_vmm_present patch sets
|
||||||
print("- Enabling Board ID exemption patch")
|
logging.info("- Enabling Board ID exemption patch")
|
||||||
support.build_support(self.model, self.constants, self.config).get_item_by_kv(self.config["Booter"]["Patch"], "Comment", "Skip Board ID check")["Enabled"] = True
|
support.build_support(self.model, self.constants, self.config).get_item_by_kv(self.config["Booter"]["Patch"], "Comment", "Skip Board ID check")["Enabled"] = True
|
||||||
|
|
||||||
print("- Enabling VMM exemption patch")
|
logging.info("- Enabling VMM exemption patch")
|
||||||
support.build_support(self.model, self.constants, self.config).get_item_by_kv(self.config["Kernel"]["Patch"], "Comment", "Reroute kern.hv_vmm_present patch (1)")["Enabled"] = True
|
support.build_support(self.model, self.constants, self.config).get_item_by_kv(self.config["Kernel"]["Patch"], "Comment", "Reroute kern.hv_vmm_present patch (1)")["Enabled"] = True
|
||||||
support.build_support(self.model, self.constants, self.config).get_item_by_kv(self.config["Kernel"]["Patch"], "Comment", "Reroute kern.hv_vmm_present patch (2) Legacy")["Enabled"] = True
|
support.build_support(self.model, self.constants, self.config).get_item_by_kv(self.config["Kernel"]["Patch"], "Comment", "Reroute kern.hv_vmm_present patch (2) Legacy")["Enabled"] = True
|
||||||
support.build_support(self.model, self.constants, self.config).get_item_by_kv(self.config["Kernel"]["Patch"], "Comment", "Reroute kern.hv_vmm_present patch (2) Ventura")["Enabled"] = True
|
support.build_support(self.model, self.constants, self.config).get_item_by_kv(self.config["Kernel"]["Patch"], "Comment", "Reroute kern.hv_vmm_present patch (2) Ventura")["Enabled"] = True
|
||||||
else:
|
else:
|
||||||
print("- Enabling SMC exemption patch")
|
logging.info("- Enabling SMC exemption patch")
|
||||||
support.build_support(self.model, self.constants, self.config).get_item_by_kv(self.config["Kernel"]["Patch"], "Identifier", "com.apple.driver.AppleSMC")["Enabled"] = True
|
support.build_support(self.model, self.constants, self.config).get_item_by_kv(self.config["Kernel"]["Patch"], "Identifier", "com.apple.driver.AppleSMC")["Enabled"] = True
|
||||||
support.build_support(self.model, self.constants, self.config).enable_kext("SMC-Spoof.kext", self.constants.smcspoof_version, self.constants.smcspoof_path)
|
support.build_support(self.model, self.constants, self.config).enable_kext("SMC-Spoof.kext", self.constants.smcspoof_version, self.constants.smcspoof_path)
|
||||||
|
|
||||||
if self.constants.serial_settings in ["Moderate", "Advanced"]:
|
if self.constants.serial_settings in ["Moderate", "Advanced"]:
|
||||||
print("- Enabling USB Rename Patches")
|
logging.info("- Enabling USB Rename Patches")
|
||||||
support.build_support(self.model, self.constants, self.config).get_item_by_kv(self.config["ACPI"]["Patch"], "Comment", "XHC1 to SHC1")["Enabled"] = True
|
support.build_support(self.model, self.constants, self.config).get_item_by_kv(self.config["ACPI"]["Patch"], "Comment", "XHC1 to SHC1")["Enabled"] = True
|
||||||
support.build_support(self.model, self.constants, self.config).get_item_by_kv(self.config["ACPI"]["Patch"], "Comment", "EHC1 to EH01")["Enabled"] = True
|
support.build_support(self.model, self.constants, self.config).get_item_by_kv(self.config["ACPI"]["Patch"], "Comment", "EHC1 to EH01")["Enabled"] = True
|
||||||
support.build_support(self.model, self.constants, self.config).get_item_by_kv(self.config["ACPI"]["Patch"], "Comment", "EHC2 to EH02")["Enabled"] = True
|
support.build_support(self.model, self.constants, self.config).get_item_by_kv(self.config["ACPI"]["Patch"], "Comment", "EHC2 to EH02")["Enabled"] = True
|
||||||
|
|
||||||
if self.model == self.constants.override_smbios:
|
if self.model == self.constants.override_smbios:
|
||||||
print("- Adding -no_compat_check")
|
logging.info("- Adding -no_compat_check")
|
||||||
self.config["NVRAM"]["Add"]["7C436110-AB2A-4BBB-A880-FE41995C9F82"]["boot-args"] += " -no_compat_check"
|
self.config["NVRAM"]["Add"]["7C436110-AB2A-4BBB-A880-FE41995C9F82"]["boot-args"] += " -no_compat_check"
|
||||||
|
|
||||||
|
|
||||||
@@ -47,20 +47,20 @@ class build_smbios:
|
|||||||
|
|
||||||
if self.constants.override_smbios == "Default":
|
if self.constants.override_smbios == "Default":
|
||||||
if self.constants.serial_settings != "None":
|
if self.constants.serial_settings != "None":
|
||||||
print("- Setting macOS Monterey Supported SMBIOS")
|
logging.info("- Setting macOS Monterey Supported SMBIOS")
|
||||||
if self.constants.allow_native_spoofs is True:
|
if self.constants.allow_native_spoofs is True:
|
||||||
spoofed_model = self.model
|
spoofed_model = self.model
|
||||||
else:
|
else:
|
||||||
spoofed_model = generate_smbios.set_smbios_model_spoof(self.model)
|
spoofed_model = generate_smbios.set_smbios_model_spoof(self.model)
|
||||||
else:
|
else:
|
||||||
spoofed_model = self.constants.override_smbios
|
spoofed_model = self.constants.override_smbios
|
||||||
print(f"- Using Model ID: {spoofed_model}")
|
logging.info(f"- Using Model ID: {spoofed_model}")
|
||||||
|
|
||||||
spoofed_board = ""
|
spoofed_board = ""
|
||||||
if spoofed_model in smbios_data.smbios_dictionary:
|
if spoofed_model in smbios_data.smbios_dictionary:
|
||||||
if "Board ID" in smbios_data.smbios_dictionary[spoofed_model]:
|
if "Board ID" in smbios_data.smbios_dictionary[spoofed_model]:
|
||||||
spoofed_board = smbios_data.smbios_dictionary[spoofed_model]["Board ID"]
|
spoofed_board = smbios_data.smbios_dictionary[spoofed_model]["Board ID"]
|
||||||
print(f"- Using Board ID: {spoofed_board}")
|
logging.info(f"- Using Board ID: {spoofed_board}")
|
||||||
|
|
||||||
self.spoofed_model = spoofed_model
|
self.spoofed_model = spoofed_model
|
||||||
self.spoofed_board = spoofed_board
|
self.spoofed_board = spoofed_board
|
||||||
@@ -69,13 +69,13 @@ class build_smbios:
|
|||||||
self.config["#Revision"]["Spoofed-Model"] = f"{self.spoofed_model} - {self.constants.serial_settings}"
|
self.config["#Revision"]["Spoofed-Model"] = f"{self.spoofed_model} - {self.constants.serial_settings}"
|
||||||
|
|
||||||
if self.constants.serial_settings == "Moderate":
|
if self.constants.serial_settings == "Moderate":
|
||||||
print("- Using Moderate SMBIOS patching")
|
logging.info("- Using Moderate SMBIOS patching")
|
||||||
self.moderate_serial_patch()
|
self.moderate_serial_patch()
|
||||||
elif self.constants.serial_settings == "Advanced":
|
elif self.constants.serial_settings == "Advanced":
|
||||||
print("- Using Advanced SMBIOS patching")
|
logging.info("- Using Advanced SMBIOS patching")
|
||||||
self.advanced_serial_patch()
|
self.advanced_serial_patch()
|
||||||
elif self.constants.serial_settings == "Minimal":
|
elif self.constants.serial_settings == "Minimal":
|
||||||
print("- Using Minimal SMBIOS patching")
|
logging.info("- Using Minimal SMBIOS patching")
|
||||||
self.spoofed_model = self.model
|
self.spoofed_model = self.model
|
||||||
self.minimal_serial_patch()
|
self.minimal_serial_patch()
|
||||||
else:
|
else:
|
||||||
@@ -87,12 +87,12 @@ class build_smbios:
|
|||||||
# Note 1: Only apply if system is UEFI 1.2, this is generally Ivy Bridge and older
|
# Note 1: Only apply if system is UEFI 1.2, this is generally Ivy Bridge and older
|
||||||
# Note 2: Flipping 'UEFI -> ProtocolOverrides -> DataHub' will break hibernation
|
# Note 2: Flipping 'UEFI -> ProtocolOverrides -> DataHub' will break hibernation
|
||||||
if (smbios_data.smbios_dictionary[self.model]["CPU Generation"] <= cpu_data.cpu_data.ivy_bridge.value and self.model):
|
if (smbios_data.smbios_dictionary[self.model]["CPU Generation"] <= cpu_data.cpu_data.ivy_bridge.value and self.model):
|
||||||
print("- Detected UEFI 1.2 or older Mac, updating BoardProduct")
|
logging.info("- Detected UEFI 1.2 or older Mac, updating BoardProduct")
|
||||||
self.config["PlatformInfo"]["DataHub"]["BoardProduct"] = self.spoofed_board
|
self.config["PlatformInfo"]["DataHub"]["BoardProduct"] = self.spoofed_board
|
||||||
self.config["PlatformInfo"]["UpdateDataHub"] = True
|
self.config["PlatformInfo"]["UpdateDataHub"] = True
|
||||||
|
|
||||||
if self.constants.custom_serial_number != "" and self.constants.custom_board_serial_number != "":
|
if self.constants.custom_serial_number != "" and self.constants.custom_board_serial_number != "":
|
||||||
print("- Adding custom serial numbers")
|
logging.info("- Adding custom serial numbers")
|
||||||
self.config["PlatformInfo"]["Automatic"] = True
|
self.config["PlatformInfo"]["Automatic"] = True
|
||||||
self.config["PlatformInfo"]["UpdateDataHub"] = True
|
self.config["PlatformInfo"]["UpdateDataHub"] = True
|
||||||
self.config["PlatformInfo"]["UpdateNVRAM"] = True
|
self.config["PlatformInfo"]["UpdateNVRAM"] = True
|
||||||
@@ -156,7 +156,7 @@ class build_smbios:
|
|||||||
if self.model == "MacBookPro6,2":
|
if self.model == "MacBookPro6,2":
|
||||||
# Force G State to not exceed moderate state
|
# Force G State to not exceed moderate state
|
||||||
# Ref: https://github.com/fabioiop/MBP-2010-GPU-Panic-fix
|
# Ref: https://github.com/fabioiop/MBP-2010-GPU-Panic-fix
|
||||||
print("- Patching G State for MacBookPro6,2")
|
logging.info("- Patching G State for MacBookPro6,2")
|
||||||
for gpu in ["Vendor10deDevice0a34", "Vendor10deDevice0a29"]:
|
for gpu in ["Vendor10deDevice0a34", "Vendor10deDevice0a29"]:
|
||||||
agpm_config["IOKitPersonalities"]["AGPM"]["Machines"][self.spoofed_board][gpu]["BoostPState"] = [2, 2, 2, 2]
|
agpm_config["IOKitPersonalities"]["AGPM"]["Machines"][self.spoofed_board][gpu]["BoostPState"] = [2, 2, 2, 2]
|
||||||
agpm_config["IOKitPersonalities"]["AGPM"]["Machines"][self.spoofed_board][gpu]["BoostTime"] = [2, 2, 2, 2]
|
agpm_config["IOKitPersonalities"]["AGPM"]["Machines"][self.spoofed_board][gpu]["BoostTime"] = [2, 2, 2, 2]
|
||||||
@@ -183,7 +183,7 @@ class build_smbios:
|
|||||||
fw_feature = generate_smbios.generate_fw_features(self.model, self.constants.custom_model)
|
fw_feature = generate_smbios.generate_fw_features(self.model, self.constants.custom_model)
|
||||||
# fw_feature = self.patch_firmware_feature()
|
# fw_feature = self.patch_firmware_feature()
|
||||||
fw_feature = hex(fw_feature).lstrip("0x").rstrip("L").strip()
|
fw_feature = hex(fw_feature).lstrip("0x").rstrip("L").strip()
|
||||||
print(f"- Setting Firmware Feature: {fw_feature}")
|
logging.info(f"- Setting Firmware Feature: {fw_feature}")
|
||||||
fw_feature = utilities.string_to_hex(fw_feature)
|
fw_feature = utilities.string_to_hex(fw_feature)
|
||||||
|
|
||||||
# FirmwareFeatures
|
# FirmwareFeatures
|
||||||
@@ -216,7 +216,7 @@ class build_smbios:
|
|||||||
self.config["PlatformInfo"]["UpdateDataHub"] = True
|
self.config["PlatformInfo"]["UpdateDataHub"] = True
|
||||||
|
|
||||||
if self.constants.custom_serial_number != "" and self.constants.custom_board_serial_number != "":
|
if self.constants.custom_serial_number != "" and self.constants.custom_board_serial_number != "":
|
||||||
print("- Adding custom serial numbers")
|
logging.info("- Adding custom serial numbers")
|
||||||
sn = self.constants.custom_serial_number
|
sn = self.constants.custom_serial_number
|
||||||
mlb = self.constants.custom_board_serial_number
|
mlb = self.constants.custom_board_serial_number
|
||||||
|
|
||||||
@@ -237,7 +237,7 @@ class build_smbios:
|
|||||||
if self.constants.custom_cpu_model == 0 or self.constants.custom_cpu_model == 1:
|
if self.constants.custom_cpu_model == 0 or self.constants.custom_cpu_model == 1:
|
||||||
self.config["PlatformInfo"]["Generic"]["ProcessorType"] = 1537
|
self.config["PlatformInfo"]["Generic"]["ProcessorType"] = 1537
|
||||||
if self.constants.custom_serial_number != "" and self.constants.custom_board_serial_number != "":
|
if self.constants.custom_serial_number != "" and self.constants.custom_board_serial_number != "":
|
||||||
print("- Adding custom serial numbers")
|
logging.info("- Adding custom serial numbers")
|
||||||
self.config["PlatformInfo"]["Generic"]["SystemSerialNumber"] = self.constants.custom_serial_number
|
self.config["PlatformInfo"]["Generic"]["SystemSerialNumber"] = self.constants.custom_serial_number
|
||||||
self.config["PlatformInfo"]["Generic"]["MLB"] = self.constants.custom_board_serial_number
|
self.config["PlatformInfo"]["Generic"]["MLB"] = self.constants.custom_board_serial_number
|
||||||
self.config["NVRAM"]["Add"]["4D1FDA02-38C7-4A6A-9CC6-4BCCA8B30102"]["OCLP-Spoofed-SN"] = self.constants.custom_serial_number
|
self.config["NVRAM"]["Add"]["4D1FDA02-38C7-4A6A-9CC6-4BCCA8B30102"]["OCLP-Spoofed-SN"] = self.constants.custom_serial_number
|
||||||
|
|||||||
@@ -5,6 +5,7 @@ from resources import constants, device_probe, utilities
|
|||||||
from resources.build import support
|
from resources.build import support
|
||||||
from data import model_array, smbios_data, cpu_data
|
from data import model_array, smbios_data, cpu_data
|
||||||
|
|
||||||
|
import logging
|
||||||
|
|
||||||
class build_storage:
|
class build_storage:
|
||||||
|
|
||||||
@@ -31,11 +32,11 @@ class build_storage:
|
|||||||
for controller in sata_devices:
|
for controller in sata_devices:
|
||||||
# https://linux-hardware.org/?id=pci:1179-010b-1b4b-9183
|
# https://linux-hardware.org/?id=pci:1179-010b-1b4b-9183
|
||||||
if controller.vendor_id == 0x1179 and controller.device_id == 0x010b:
|
if controller.vendor_id == 0x1179 and controller.device_id == 0x010b:
|
||||||
print("- Enabling AHCI SSD patch")
|
logging.info("- Enabling AHCI SSD patch")
|
||||||
support.build_support(self.model, self.constants, self.config).enable_kext("MonteAHCIPort.kext", self.constants.monterey_ahci_version, self.constants.monterey_ahci_path)
|
support.build_support(self.model, self.constants, self.config).enable_kext("MonteAHCIPort.kext", self.constants.monterey_ahci_version, self.constants.monterey_ahci_path)
|
||||||
break
|
break
|
||||||
elif self.model in ["MacBookAir6,1", "MacBookAir6,2"]:
|
elif self.model in ["MacBookAir6,1", "MacBookAir6,2"]:
|
||||||
print("- Enabling AHCI SSD patch")
|
logging.info("- Enabling AHCI SSD patch")
|
||||||
support.build_support(self.model, self.constants, self.config).enable_kext("MonteAHCIPort.kext", self.constants.monterey_ahci_version, self.constants.monterey_ahci_path)
|
support.build_support(self.model, self.constants, self.config).enable_kext("MonteAHCIPort.kext", self.constants.monterey_ahci_version, self.constants.monterey_ahci_path)
|
||||||
|
|
||||||
# ThirdPartyDrives Check
|
# ThirdPartyDrives Check
|
||||||
@@ -48,11 +49,11 @@ class build_storage:
|
|||||||
if drive in smbios_data.smbios_dictionary[self.model]["Stock Storage"]:
|
if drive in smbios_data.smbios_dictionary[self.model]["Stock Storage"]:
|
||||||
if not self.constants.custom_model:
|
if not self.constants.custom_model:
|
||||||
if self.computer.third_party_sata_ssd is True:
|
if self.computer.third_party_sata_ssd is True:
|
||||||
print("- Adding SATA Hibernation Patch")
|
logging.info("- Adding SATA Hibernation Patch")
|
||||||
self.config["Kernel"]["Quirks"]["ThirdPartyDrives"] = True
|
self.config["Kernel"]["Quirks"]["ThirdPartyDrives"] = True
|
||||||
break
|
break
|
||||||
else:
|
else:
|
||||||
print("- Adding SATA Hibernation Patch")
|
logging.info("- Adding SATA Hibernation Patch")
|
||||||
self.config["Kernel"]["Quirks"]["ThirdPartyDrives"] = True
|
self.config["Kernel"]["Quirks"]["ThirdPartyDrives"] = True
|
||||||
break
|
break
|
||||||
|
|
||||||
@@ -73,29 +74,29 @@ class build_storage:
|
|||||||
# Use Innie's same logic:
|
# Use Innie's same logic:
|
||||||
# https://github.com/cdf/Innie/blob/v1.3.0/Innie/Innie.cpp#L90-L97
|
# https://github.com/cdf/Innie/blob/v1.3.0/Innie/Innie.cpp#L90-L97
|
||||||
for i, controller in enumerate(self.computer.storage):
|
for i, controller in enumerate(self.computer.storage):
|
||||||
print(f"- Fixing PCIe Storage Controller ({i + 1}) reporting")
|
logging.info(f"- Fixing PCIe Storage Controller ({i + 1}) reporting")
|
||||||
if controller.pci_path:
|
if controller.pci_path:
|
||||||
self.config["DeviceProperties"]["Add"][controller.pci_path] = {"built-in": 1}
|
self.config["DeviceProperties"]["Add"][controller.pci_path] = {"built-in": 1}
|
||||||
else:
|
else:
|
||||||
print(f"- Failed to find Device path for PCIe Storage Controller {i}, falling back to Innie")
|
logging.info(f"- Failed to find Device path for PCIe Storage Controller {i}, falling back to Innie")
|
||||||
support.build_support(self.model, self.constants, self.config).enable_kext("Innie.kext", self.constants.innie_version, self.constants.innie_path)
|
support.build_support(self.model, self.constants, self.config).enable_kext("Innie.kext", self.constants.innie_version, self.constants.innie_path)
|
||||||
|
|
||||||
if not self.constants.custom_model and self.constants.allow_nvme_fixing is True:
|
if not self.constants.custom_model and self.constants.allow_nvme_fixing is True:
|
||||||
nvme_devices = [i for i in self.computer.storage if isinstance(i, device_probe.NVMeController)]
|
nvme_devices = [i for i in self.computer.storage if isinstance(i, device_probe.NVMeController)]
|
||||||
for i, controller in enumerate(nvme_devices):
|
for i, controller in enumerate(nvme_devices):
|
||||||
print(f"- Found 3rd Party NVMe SSD ({i + 1}): {utilities.friendly_hex(controller.vendor_id)}:{utilities.friendly_hex(controller.device_id)}")
|
logging.info(f"- Found 3rd Party NVMe SSD ({i + 1}): {utilities.friendly_hex(controller.vendor_id)}:{utilities.friendly_hex(controller.device_id)}")
|
||||||
self.config["#Revision"][f"Hardware-NVMe-{i}"] = f"{utilities.friendly_hex(controller.vendor_id)}:{utilities.friendly_hex(controller.device_id)}"
|
self.config["#Revision"][f"Hardware-NVMe-{i}"] = f"{utilities.friendly_hex(controller.vendor_id)}:{utilities.friendly_hex(controller.device_id)}"
|
||||||
|
|
||||||
# Disable Bit 0 (L0s), enable Bit 1 (L1)
|
# Disable Bit 0 (L0s), enable Bit 1 (L1)
|
||||||
nvme_aspm = (controller.aspm & (~0b11)) | 0b10
|
nvme_aspm = (controller.aspm & (~0b11)) | 0b10
|
||||||
|
|
||||||
if controller.pci_path:
|
if controller.pci_path:
|
||||||
print(f"- Found NVMe ({i}) at {controller.pci_path}")
|
logging.info(f"- Found NVMe ({i}) at {controller.pci_path}")
|
||||||
self.config["DeviceProperties"]["Add"].setdefault(controller.pci_path, {})["pci-aspm-default"] = nvme_aspm
|
self.config["DeviceProperties"]["Add"].setdefault(controller.pci_path, {})["pci-aspm-default"] = nvme_aspm
|
||||||
self.config["DeviceProperties"]["Add"][controller.pci_path.rpartition("/")[0]] = {"pci-aspm-default": nvme_aspm}
|
self.config["DeviceProperties"]["Add"][controller.pci_path.rpartition("/")[0]] = {"pci-aspm-default": nvme_aspm}
|
||||||
else:
|
else:
|
||||||
if "-nvmefaspm" not in self.config["NVRAM"]["Add"]["7C436110-AB2A-4BBB-A880-FE41995C9F82"]["boot-args"]:
|
if "-nvmefaspm" not in self.config["NVRAM"]["Add"]["7C436110-AB2A-4BBB-A880-FE41995C9F82"]["boot-args"]:
|
||||||
print("- Falling back to -nvmefaspm")
|
logging.info("- Falling back to -nvmefaspm")
|
||||||
self.config["NVRAM"]["Add"]["7C436110-AB2A-4BBB-A880-FE41995C9F82"]["boot-args"] += " -nvmefaspm"
|
self.config["NVRAM"]["Add"]["7C436110-AB2A-4BBB-A880-FE41995C9F82"]["boot-args"] += " -nvmefaspm"
|
||||||
|
|
||||||
if (controller.vendor_id != 0x144D and controller.device_id != 0xA804):
|
if (controller.vendor_id != 0x144D and controller.device_id != 0xA804):
|
||||||
@@ -132,5 +133,5 @@ class build_storage:
|
|||||||
|
|
||||||
def trim_handling(self):
|
def trim_handling(self):
|
||||||
if self.constants.apfs_trim_timeout is False:
|
if self.constants.apfs_trim_timeout is False:
|
||||||
print(f"- Disabling APFS TRIM timeout")
|
logging.info(f"- Disabling APFS TRIM timeout")
|
||||||
self.config["Kernel"]["Quirks"]["SetApfsTrimTimeout"] = 0
|
self.config["Kernel"]["Quirks"]["SetApfsTrimTimeout"] = 0
|
||||||
@@ -5,6 +5,7 @@ from resources import constants, utilities
|
|||||||
|
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
import shutil, plistlib, subprocess, zipfile
|
import shutil, plistlib, subprocess, zipfile
|
||||||
|
import logging
|
||||||
|
|
||||||
class build_support:
|
class build_support:
|
||||||
|
|
||||||
@@ -27,7 +28,7 @@ class build_support:
|
|||||||
def get_kext_by_bundle_path(self, bundle_path):
|
def get_kext_by_bundle_path(self, bundle_path):
|
||||||
kext = self.get_item_by_kv(self.config["Kernel"]["Add"], "BundlePath", bundle_path)
|
kext = self.get_item_by_kv(self.config["Kernel"]["Add"], "BundlePath", bundle_path)
|
||||||
if not kext:
|
if not kext:
|
||||||
print(f"- Could not find kext {bundle_path}!")
|
logging.info(f"- Could not find kext {bundle_path}!")
|
||||||
raise IndexError
|
raise IndexError
|
||||||
return kext
|
return kext
|
||||||
|
|
||||||
@@ -35,7 +36,7 @@ class build_support:
|
|||||||
def get_efi_binary_by_path(self, bundle_path, entry_location, efi_type):
|
def get_efi_binary_by_path(self, bundle_path, entry_location, efi_type):
|
||||||
efi_binary = self.get_item_by_kv(self.config[entry_location][efi_type], "Path", bundle_path)
|
efi_binary = self.get_item_by_kv(self.config[entry_location][efi_type], "Path", bundle_path)
|
||||||
if not efi_binary:
|
if not efi_binary:
|
||||||
print(f"- Could not find {efi_type}: {bundle_path}!")
|
logging.info(f"- Could not find {efi_type}: {bundle_path}!")
|
||||||
raise IndexError
|
raise IndexError
|
||||||
return efi_binary
|
return efi_binary
|
||||||
|
|
||||||
@@ -50,7 +51,7 @@ class build_support:
|
|||||||
if kext["Enabled"] is True:
|
if kext["Enabled"] is True:
|
||||||
return
|
return
|
||||||
|
|
||||||
print(f"- Adding {kext_name} {kext_version}")
|
logging.info(f"- Adding {kext_name} {kext_version}")
|
||||||
shutil.copy(kext_path, self.constants.kexts_path)
|
shutil.copy(kext_path, self.constants.kexts_path)
|
||||||
kext["Enabled"] = True
|
kext["Enabled"] = True
|
||||||
|
|
||||||
@@ -63,27 +64,27 @@ class build_support:
|
|||||||
# sign.command checks for the existence of '/usr/bin/strings' however does not verify whether it's executable
|
# sign.command checks for the existence of '/usr/bin/strings' however does not verify whether it's executable
|
||||||
# sign.command will continue to run and create an unbootable OpenCore.efi due to the missing strings binary
|
# sign.command will continue to run and create an unbootable OpenCore.efi due to the missing strings binary
|
||||||
# macOS has dummy binaries that just reroute to the actual binaries after you install Xcode's Command Line Tools
|
# macOS has dummy binaries that just reroute to the actual binaries after you install Xcode's Command Line Tools
|
||||||
print("- Missing Command Line tools, skipping Vault for saftey reasons")
|
logging.info("- Missing Command Line tools, skipping Vault for saftey reasons")
|
||||||
print("- Install via 'xcode-select --install' and rerun OCLP if you wish to vault this config")
|
logging.info("- Install via 'xcode-select --install' and rerun OCLP if you wish to vault this config")
|
||||||
return
|
return
|
||||||
|
|
||||||
print("- Vaulting EFI")
|
logging.info("- Vaulting EFI")
|
||||||
subprocess.run([str(self.constants.vault_path), f"{self.constants.oc_folder}/"], stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
|
subprocess.run([str(self.constants.vault_path), f"{self.constants.oc_folder}/"], stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
|
||||||
|
|
||||||
|
|
||||||
def validate_pathing(self):
|
def validate_pathing(self):
|
||||||
# Verify whether all files are accounted for on-disk
|
# Verify whether all files are accounted for on-disk
|
||||||
# This ensures that OpenCore won't hit a critical error and fail to boot
|
# This ensures that OpenCore won't hit a critical error and fail to boot
|
||||||
print("- Validating generated config")
|
logging.info("- Validating generated config")
|
||||||
if not Path(self.constants.opencore_release_folder / Path("EFI/OC/config.plist")):
|
if not Path(self.constants.opencore_release_folder / Path("EFI/OC/config.plist")):
|
||||||
print("- OpenCore config file missing!!!")
|
logging.info("- OpenCore config file missing!!!")
|
||||||
raise Exception("OpenCore config file missing")
|
raise Exception("OpenCore config file missing")
|
||||||
|
|
||||||
config_plist = plistlib.load(Path(self.constants.opencore_release_folder / Path("EFI/OC/config.plist")).open("rb"))
|
config_plist = plistlib.load(Path(self.constants.opencore_release_folder / Path("EFI/OC/config.plist")).open("rb"))
|
||||||
|
|
||||||
for acpi in config_plist["ACPI"]["Add"]:
|
for acpi in config_plist["ACPI"]["Add"]:
|
||||||
if not Path(self.constants.opencore_release_folder / Path("EFI/OC/ACPI") / Path(acpi["Path"])).exists():
|
if not Path(self.constants.opencore_release_folder / Path("EFI/OC/ACPI") / Path(acpi["Path"])).exists():
|
||||||
print(f" - Missing ACPI Table: {acpi['Path']}")
|
logging.info(f" - Missing ACPI Table: {acpi['Path']}")
|
||||||
raise Exception(f"Missing ACPI Table: {acpi['Path']}")
|
raise Exception(f"Missing ACPI Table: {acpi['Path']}")
|
||||||
|
|
||||||
for kext in config_plist["Kernel"]["Add"]:
|
for kext in config_plist["Kernel"]["Add"]:
|
||||||
@@ -91,40 +92,40 @@ class build_support:
|
|||||||
kext_binary_path = Path(kext_path / Path(kext["ExecutablePath"]))
|
kext_binary_path = Path(kext_path / Path(kext["ExecutablePath"]))
|
||||||
kext_plist_path = Path(kext_path / Path(kext["PlistPath"]))
|
kext_plist_path = Path(kext_path / Path(kext["PlistPath"]))
|
||||||
if not kext_path.exists():
|
if not kext_path.exists():
|
||||||
print(f"- Missing kext: {kext_path}")
|
logging.info(f"- Missing kext: {kext_path}")
|
||||||
raise Exception(f"Missing {kext_path}")
|
raise Exception(f"Missing {kext_path}")
|
||||||
if not kext_binary_path.exists():
|
if not kext_binary_path.exists():
|
||||||
print(f"- Missing {kext['BundlePath']}'s binary: {kext_binary_path}")
|
logging.info(f"- Missing {kext['BundlePath']}'s binary: {kext_binary_path}")
|
||||||
raise Exception(f"Missing {kext_binary_path}")
|
raise Exception(f"Missing {kext_binary_path}")
|
||||||
if not kext_plist_path.exists():
|
if not kext_plist_path.exists():
|
||||||
print(f"- Missing {kext['BundlePath']}'s plist: {kext_plist_path}")
|
logging.info(f"- Missing {kext['BundlePath']}'s plist: {kext_plist_path}")
|
||||||
raise Exception(f"Missing {kext_plist_path}")
|
raise Exception(f"Missing {kext_plist_path}")
|
||||||
|
|
||||||
for tool in config_plist["Misc"]["Tools"]:
|
for tool in config_plist["Misc"]["Tools"]:
|
||||||
if not Path(self.constants.opencore_release_folder / Path("EFI/OC/Tools") / Path(tool["Path"])).exists():
|
if not Path(self.constants.opencore_release_folder / Path("EFI/OC/Tools") / Path(tool["Path"])).exists():
|
||||||
print(f" - Missing tool: {tool['Path']}")
|
logging.info(f" - Missing tool: {tool['Path']}")
|
||||||
raise Exception(f"Missing tool: {tool['Path']}")
|
raise Exception(f"Missing tool: {tool['Path']}")
|
||||||
|
|
||||||
for driver in config_plist["UEFI"]["Drivers"]:
|
for driver in config_plist["UEFI"]["Drivers"]:
|
||||||
if not Path(self.constants.opencore_release_folder / Path("EFI/OC/Drivers") / Path(driver["Path"])).exists():
|
if not Path(self.constants.opencore_release_folder / Path("EFI/OC/Drivers") / Path(driver["Path"])).exists():
|
||||||
print(f" - Missing driver: {driver['Path']}")
|
logging.info(f" - Missing driver: {driver['Path']}")
|
||||||
raise Exception(f"Missing driver: {driver['Path']}")
|
raise Exception(f"Missing driver: {driver['Path']}")
|
||||||
|
|
||||||
# Validating local files
|
# Validating local files
|
||||||
# Report if they have no associated config.plist entry (i.e. they're not being used)
|
# Report if they have no associated config.plist entry (i.e. they're not being used)
|
||||||
for tool_files in Path(self.constants.opencore_release_folder / Path("EFI/OC/Tools")).glob("*"):
|
for tool_files in Path(self.constants.opencore_release_folder / Path("EFI/OC/Tools")).glob("*"):
|
||||||
if tool_files.name not in [x["Path"] for x in config_plist["Misc"]["Tools"]]:
|
if tool_files.name not in [x["Path"] for x in config_plist["Misc"]["Tools"]]:
|
||||||
print(f" - Missing tool from config: {tool_files.name}")
|
logging.info(f" - Missing tool from config: {tool_files.name}")
|
||||||
raise Exception(f"Missing tool from config: {tool_files.name}")
|
raise Exception(f"Missing tool from config: {tool_files.name}")
|
||||||
|
|
||||||
for driver_file in Path(self.constants.opencore_release_folder / Path("EFI/OC/Drivers")).glob("*"):
|
for driver_file in Path(self.constants.opencore_release_folder / Path("EFI/OC/Drivers")).glob("*"):
|
||||||
if driver_file.name not in [x["Path"] for x in config_plist["UEFI"]["Drivers"]]:
|
if driver_file.name not in [x["Path"] for x in config_plist["UEFI"]["Drivers"]]:
|
||||||
print(f"- Found extra driver: {driver_file.name}")
|
logging.info(f"- Found extra driver: {driver_file.name}")
|
||||||
raise Exception(f"Found extra driver: {driver_file.name}")
|
raise Exception(f"Found extra driver: {driver_file.name}")
|
||||||
|
|
||||||
|
|
||||||
def cleanup(self):
|
def cleanup(self):
|
||||||
print("- Cleaning up files")
|
logging.info("- Cleaning up files")
|
||||||
# Remove unused entries
|
# Remove unused entries
|
||||||
entries_to_clean = {
|
entries_to_clean = {
|
||||||
"ACPI": ["Add", "Delete", "Patch"],
|
"ACPI": ["Add", "Delete", "Patch"],
|
||||||
|
|||||||
File diff suppressed because it is too large
Load Diff
@@ -1,31 +1,41 @@
|
|||||||
# Parse Commit Info from binary's info.plist
|
# Parse Commit Info from binary's info.plist
|
||||||
# App Structure:
|
|
||||||
# OpenCore-Patcher.app:
|
|
||||||
# Contents:
|
|
||||||
# MacOS:
|
|
||||||
# OpenCore-Patcher
|
|
||||||
# Info.plist
|
|
||||||
|
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
import plistlib
|
import plistlib
|
||||||
|
|
||||||
class commit_info:
|
class ParseCommitInfo:
|
||||||
|
|
||||||
|
def __init__(self, binary_path: str) -> None:
|
||||||
|
"""
|
||||||
|
Parameters:
|
||||||
|
binary_path (str): Path to binary
|
||||||
|
"""
|
||||||
|
|
||||||
def __init__(self, binary_path):
|
|
||||||
self.binary_path = str(binary_path)
|
self.binary_path = str(binary_path)
|
||||||
self.plist_path = self.convert_binary_path_to_plist_path()
|
self.plist_path = self._convert_binary_path_to_plist_path()
|
||||||
|
|
||||||
|
|
||||||
def convert_binary_path_to_plist_path(self):
|
def _convert_binary_path_to_plist_path(self) -> str or None:
|
||||||
|
"""
|
||||||
|
Resolve Info.plist path from binary path
|
||||||
|
"""
|
||||||
|
|
||||||
if Path(self.binary_path).exists():
|
if Path(self.binary_path).exists():
|
||||||
plist_path = self.binary_path.replace("MacOS/OpenCore-Patcher", "Info.plist")
|
plist_path = self.binary_path.replace("MacOS/OpenCore-Patcher", "Info.plist")
|
||||||
if Path(plist_path).exists() and plist_path.endswith(".plist"):
|
if Path(plist_path).exists() and plist_path.endswith(".plist"):
|
||||||
return plist_path
|
return plist_path
|
||||||
return None
|
return None
|
||||||
|
|
||||||
def generate_commit_info(self):
|
|
||||||
|
def generate_commit_info(self) -> tuple:
|
||||||
|
"""
|
||||||
|
Generate commit info from Info.plist
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
tuple: (Branch, Commit Date, Commit URL)
|
||||||
|
"""
|
||||||
|
|
||||||
if self.plist_path:
|
if self.plist_path:
|
||||||
# print(self.plist_path)
|
|
||||||
plist_info = plistlib.load(Path(self.plist_path).open("rb"))
|
plist_info = plistlib.load(Path(self.plist_path).open("rb"))
|
||||||
if "Github" in plist_info:
|
if "Github" in plist_info:
|
||||||
return (
|
return (
|
||||||
|
|||||||
@@ -1,5 +1,5 @@
|
|||||||
# pylint: disable=multiple-statements
|
# pylint: disable=multiple-statements
|
||||||
# Define Files
|
# Defines versioning, file paths and other settings for the patcher
|
||||||
# Copyright (C) 2020-2023, Dhinak G, Mykola Grymalyuk
|
# Copyright (C) 2020-2023, Dhinak G, Mykola Grymalyuk
|
||||||
|
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
@@ -10,210 +10,216 @@ from data import os_data
|
|||||||
|
|
||||||
|
|
||||||
class Constants:
|
class Constants:
|
||||||
def __init__(self):
|
def __init__(self) -> None:
|
||||||
# Patcher Versioning
|
# Patcher Versioning
|
||||||
self.patcher_version = "0.6.1" # OpenCore-Legacy-Patcher
|
self.patcher_version: str = "0.6.2" # OpenCore-Legacy-Patcher
|
||||||
self.patcher_support_pkg_version = "0.8.2" # PatcherSupportPkg
|
self.patcher_support_pkg_version: str = "0.8.7" # PatcherSupportPkg
|
||||||
self.url_patcher_support_pkg = "https://github.com/dortania/PatcherSupportPkg/releases/download/"
|
self.copyright_date: str = "Copyright © 2020-2023 Dortania"
|
||||||
self.nightly_url_patcher_support_pkg = "https://nightly.link/dortania/PatcherSupportPkg/workflows/build/master/"
|
|
||||||
self.discord_link = "https://discord.gg/rqdPgH8xSN"
|
# URLs
|
||||||
self.guide_link = "https://dortania.github.io/OpenCore-Legacy-Patcher/"
|
self.url_patcher_support_pkg: str = "https://github.com/dortania/PatcherSupportPkg/releases/download/"
|
||||||
self.repo_link = "https://github.com/dortania/OpenCore-Legacy-Patcher"
|
self.discord_link: str = "https://discord.gg/rqdPgH8xSN"
|
||||||
self.repo_link_latest = f"{self.repo_link}/releases/tag/{self.patcher_version}"
|
self.guide_link: str = "https://dortania.github.io/OpenCore-Legacy-Patcher/"
|
||||||
self.copyright_date = "Copyright © 2020-2023 Dortania"
|
self.repo_link: str = "https://github.com/dortania/OpenCore-Legacy-Patcher"
|
||||||
self.installer_pkg_url = f"{self.repo_link}/releases/download/{self.patcher_version}/AutoPkg-Assets.pkg"
|
self.installer_pkg_url: str = f"{self.repo_link}/releases/download/{self.patcher_version}/AutoPkg-Assets.pkg"
|
||||||
self.installer_pkg_url_nightly = "http://nightly.link/dortania/OpenCore-Legacy-Patcher/workflows/build-app-wxpython/main/AutoPkg-Assets.pkg.zip"
|
self.installer_pkg_url_nightly: str = "http://nightly.link/dortania/OpenCore-Legacy-Patcher/workflows/build-app-wxpython/main/AutoPkg-Assets.pkg.zip"
|
||||||
|
|
||||||
# OpenCore Versioning
|
# OpenCore Versioning
|
||||||
# https://github.com/acidanthera/OpenCorePkg
|
# https://github.com/acidanthera/OpenCorePkg
|
||||||
self.opencore_commit = "a753334 - 01-02-2023"
|
self.opencore_commit: str = "e4f0ba1 - 03-06-2023"
|
||||||
self.opencore_version = "0.8.8"
|
self.opencore_version: str = "0.9.0"
|
||||||
|
|
||||||
# Kext Versioning
|
# Kext Versioning
|
||||||
## Acidanthera
|
## Acidanthera
|
||||||
## https://github.com/acidanthera
|
## https://github.com/acidanthera
|
||||||
self.lilu_version = "1.6.3" # Lilu
|
self.lilu_version: str = "1.6.4" # Lilu
|
||||||
self.whatevergreen_version = "1.6.3" # WhateverGreen
|
self.whatevergreen_version: str = "1.6.4" # WhateverGreen
|
||||||
self.airportbcrmfixup_version = "2.1.6" # AirPortBrcmFixup
|
self.whatevergreen_navi_version: str = "1.6.4-Navi" # WhateverGreen (Navi Patch)
|
||||||
self.nvmefix_version = "1.0.9" # NVMeFix
|
self.airportbcrmfixup_version: str = "2.1.6" # AirPortBrcmFixup
|
||||||
self.applealc_version = "1.6.3" # AppleALC
|
self.nvmefix_version: str = "1.1.0" # NVMeFix
|
||||||
self.restrictevents_version = "1.0.9" # RestrictEvents
|
self.applealc_version: str = "1.6.3" # AppleALC
|
||||||
self.featureunlock_version = "1.1.2" # FeatureUnlock
|
self.restrictevents_version: str = "1.0.9" # RestrictEvents
|
||||||
self.debugenhancer_version = "1.0.7" # DebugEnhancer
|
self.featureunlock_version: str = "1.1.4" # FeatureUnlock
|
||||||
self.cpufriend_version = "1.2.6" # CPUFriend
|
self.debugenhancer_version: str = "1.0.7" # DebugEnhancer
|
||||||
self.bluetool_version = "2.6.4" # BlueToolFixup (BrcmPatchRAM)
|
self.cpufriend_version: str = "1.2.6" # CPUFriend
|
||||||
self.cslvfixup_version = "2.6.1" # CSLVFixup
|
self.bluetool_version: str = "2.6.4" # BlueToolFixup (BrcmPatchRAM)
|
||||||
self.autopkg_version = "1.0.1" # AutoPkgInstaller
|
self.cslvfixup_version: str = "2.6.1" # CSLVFixup
|
||||||
self.cryptexfixup_version = "1.0.1" # CryptexFixup
|
self.autopkg_version: str = "1.0.2" # AutoPkgInstaller
|
||||||
|
self.cryptexfixup_version: str = "1.0.1" # CryptexFixup
|
||||||
|
|
||||||
## Apple
|
## Apple
|
||||||
## https://www.apple.com
|
## https://www.apple.com
|
||||||
self.marvel_version = "1.0.1" # MarvelYukonEthernet
|
self.marvel_version: str = "1.0.1" # MarvelYukonEthernet
|
||||||
self.nforce_version = "1.0.1" # nForceEthernet
|
self.nforce_version: str = "1.0.1" # nForceEthernet
|
||||||
self.piixata_version = "1.0.1" # AppleIntelPIIXATA
|
self.piixata_version: str = "1.0.1" # AppleIntelPIIXATA
|
||||||
self.fw_kext = "1.0.1" # IOFireWireFamily
|
self.fw_kext: str = "1.0.1" # IOFireWireFamily
|
||||||
self.apple_trackpad = "1.0.1" # AppleUSBTrackpad
|
self.apple_trackpad: str = "1.0.1" # AppleUSBTrackpad
|
||||||
self.apple_isight_version = "1.0.0" # AppleiSight
|
self.apple_isight_version: str = "1.0.0" # AppleiSight
|
||||||
self.apple_raid_version = "1.0.0" # AppleRAIDCard
|
self.apple_raid_version: str = "1.0.0" # AppleRAIDCard
|
||||||
self.apfs_zlib_version = "12.3.1" # NoAVXFSCompressionTypeZlib
|
self.apfs_zlib_version: str = "12.3.1" # NoAVXFSCompressionTypeZlib
|
||||||
self.apfs_zlib_v2_version = "12.6" # NoAVXFSCompressionTypeZlib (patched with AVXpel)
|
self.apfs_zlib_v2_version: str = "12.6" # NoAVXFSCompressionTypeZlib (patched with AVXpel)
|
||||||
self.multitouch_version = "1.0.0" # AppleUSBMultitouch
|
self.multitouch_version: str = "1.0.0" # AppleUSBMultitouch
|
||||||
self.topcase_version = "1.0.0" # AppleUSBTopCase
|
self.topcase_version: str = "1.0.0" # AppleUSBTopCase
|
||||||
self.intel_82574l_version = "1.0.0" # Intel82574L
|
self.intel_82574l_version: str = "1.0.0" # Intel82574L
|
||||||
self.intel_8254x_version = "1.0.0" # AppleIntel8254XEthernet
|
self.intel_8254x_version: str = "1.0.0" # AppleIntel8254XEthernet
|
||||||
self.apple_usb_11_injector = "1.0.0" # AppleUSBUHCI/OHCI
|
self.apple_usb_11_injector: str = "1.0.0" # AppleUSBUHCI/OHCI
|
||||||
self.aicpupm_version = "1.0.0" # AppleIntelCPUPowerManagement/Client
|
self.aicpupm_version: str = "1.0.0" # AppleIntelCPUPowerManagement/Client
|
||||||
|
|
||||||
## Apple - Dortania Modified
|
## Apple - Dortania Modified
|
||||||
self.bcm570_version = "1.0.2" # CatalinaBCM5701Ethernet
|
self.bcm570_version: str = "1.0.2" # CatalinaBCM5701Ethernet
|
||||||
self.i210_version = "1.0.0" # CatalinaIntelI210Ethernet
|
self.i210_version: str = "1.0.0" # CatalinaIntelI210Ethernet
|
||||||
self.corecaptureelcap_version = "1.0.1" # corecaptureElCap
|
self.corecaptureelcap_version: str = "1.0.1" # corecaptureElCap
|
||||||
self.io80211elcap_version = "2.0.0" # IO80211ElCap
|
self.io80211elcap_version: str = "2.0.0" # IO80211ElCap
|
||||||
self.bigsursdxc_version = "1.0.0" # BigSurSDXC
|
self.bigsursdxc_version: str = "1.0.0" # BigSurSDXC
|
||||||
self.monterey_ahci_version = "1.0.0" # CatalinaAHCI
|
self.monterey_ahci_version: str = "1.0.0" # CatalinaAHCI
|
||||||
|
|
||||||
## Dortania
|
## Dortania
|
||||||
## https://github.com/dortania
|
## https://github.com/dortania
|
||||||
self.backlight_injector_version = "1.1.0" # BacklightInjector
|
self.backlight_injector_version: str = "1.1.0" # BacklightInjector
|
||||||
self.smcspoof_version = "1.0.0" # SMC-Spoof
|
self.smcspoof_version: str = "1.0.0" # SMC-Spoof
|
||||||
self.mce_version = "1.0.0" # AppleMCEReporterDisabler
|
self.mce_version: str = "1.0.0" # AppleMCEReporterDisabler
|
||||||
self.btspoof_version = "1.0.0" # Bluetooth-Spoof
|
self.btspoof_version: str = "1.0.0" # Bluetooth-Spoof
|
||||||
self.aspp_override_version = "1.0.1" # ACPI_SMC_PlatformPlugin Override
|
self.aspp_override_version: str = "1.0.1" # ACPI_SMC_PlatformPlugin Override
|
||||||
self.rsrhelper_version = "1.0.0" # RSRHelper
|
self.rsrhelper_version: str = "1.0.0" # RSRHelper
|
||||||
|
|
||||||
## Syncretic
|
## Syncretic
|
||||||
## https://forums.macrumors.com/members/syncretic.1173816/
|
## https://forums.macrumors.com/members/syncretic.1173816/
|
||||||
## https://github.com/reenigneorcim/latebloom
|
## https://github.com/reenigneorcim/latebloom
|
||||||
self.mousse_version = "0.95-Dortania" # MouSSE
|
self.mousse_version: str = "0.95-Dortania" # MouSSE
|
||||||
self.telemetrap_version = "1.0.0" # telemetrap
|
self.telemetrap_version: str = "1.0.0" # telemetrap
|
||||||
|
|
||||||
## cdf
|
## cdf
|
||||||
## https://github.com/cdf/Innie
|
## https://github.com/cdf/Innie
|
||||||
self.innie_version = "1.3.0" # Innie
|
self.innie_version: str = "1.3.1" # Innie
|
||||||
|
|
||||||
## arter97
|
## arter97
|
||||||
## https://github.com/arter97/SimpleMSR/
|
## https://github.com/arter97/SimpleMSR/
|
||||||
self.simplemsr_version = "1.0.0" # SimpleMSR
|
self.simplemsr_version: str = "1.0.0" # SimpleMSR
|
||||||
|
|
||||||
## blackgate
|
## blackgate
|
||||||
## https://github.com/blackgate/AMDGPUWakeHandler
|
## https://github.com/blackgate/AMDGPUWakeHandler
|
||||||
self.gpu_wake_version = "1.0.0"
|
self.gpu_wake_version: str = "1.0.0"
|
||||||
|
|
||||||
## flagersgit
|
## flagersgit
|
||||||
## https://github.com/flagersgit/KDKlessWorkaround
|
## https://github.com/flagersgit/KDKlessWorkaround
|
||||||
self.kdkless_version = "1.0.0"
|
self.kdkless_version: str = "1.0.0"
|
||||||
|
|
||||||
# Get resource path
|
# Get resource path
|
||||||
self.current_path = Path(__file__).parent.parent.resolve()
|
self.current_path: Path = Path(__file__).parent.parent.resolve()
|
||||||
self.payload_path = self.current_path / Path("payloads")
|
self.payload_path: Path = self.current_path / Path("payloads")
|
||||||
|
|
||||||
# Patcher Settings
|
# Patcher Settings
|
||||||
self.allow_oc_everywhere = False # Set whether Patcher can be run on unsupported Macs
|
## Internal settings
|
||||||
self.gui_mode = False # Determine whether running in a GUI or TUI
|
self.allow_oc_everywhere: bool = False # Set whether Patcher can be run on unsupported Macs
|
||||||
self.disk = "" # Set installation ESP
|
self.gui_mode: bool = False # Determine whether running in a GUI or TUI
|
||||||
self.patch_disk = "" # Set Root Volume to patch
|
self.cli_mode: bool = False # Determine if running in CLI mode
|
||||||
self.validate = False # Enable validation testing for CI
|
self.validate: bool = False # Enable validation testing for CI
|
||||||
self.recovery_status = False # Detect if booted into RecoveryOS
|
self.recovery_status: bool = False # Detect if booted into RecoveryOS
|
||||||
self.launcher_binary = None # Determine launch binary (ie. Python vs PyInstaller)
|
self.ignore_updates: bool = False # Ignore OCLP updates
|
||||||
self.launcher_script = None # Determine launch file (if run via Python)
|
self.wxpython_variant: bool = False # Determine if using wxPython variant
|
||||||
self.ignore_updates = False # Ignore OCLP updates
|
self.has_checked_updates: bool = False # Determine if check for updates has been run
|
||||||
self.wxpython_variant = False # Determine if using wxPython variant
|
self.root_patcher_succeeded: bool = False # Determine if root patcher succeeded
|
||||||
self.unpack_thread = None # Determine if unpack thread finished
|
self.start_build_install: bool = False # Determine if build install should be started
|
||||||
self.cli_mode = False # Determine if running in CLI mode
|
self.host_is_non_metal: bool = False # Determine if host is non-metal (ie. enable UI hacks)
|
||||||
self.should_nuke_kdks = True # Determine if KDKs should be nuked if unused in /L*/D*/KDKs
|
self.needs_to_open_preferences: bool = False # Determine if preferences need to be opened
|
||||||
self.has_checked_updates = False # Determine if check for updates has been run
|
self.host_is_hackintosh: bool = False # Determine if host is Hackintosh
|
||||||
|
self.should_nuke_kdks: bool = True # Determine if KDKs should be nuked if unused in /L*/D*/KDKs
|
||||||
|
self.launcher_binary: str = None # Determine launch binary path (ie. Python vs PyInstaller)
|
||||||
|
self.launcher_script: str = None # Determine launch file path (None if PyInstaller)
|
||||||
|
self.booted_oc_disk: str = None # Determine current disk OCLP booted from
|
||||||
|
self.unpack_thread = None # Determine if unpack thread finished (threading.Thread)
|
||||||
|
|
||||||
|
self.commit_info: tuple = (None, None, None) # Commit info (Branch, Commit Date, Commit URL)
|
||||||
|
|
||||||
## Hardware
|
## Hardware
|
||||||
self.computer: device_probe.Computer = None # type: ignore
|
self.computer: device_probe.Computer = None # type: ignore
|
||||||
self.custom_model: Optional[str] = None
|
self.custom_model: Optional[str] = None
|
||||||
|
|
||||||
## OpenCore Settings
|
## OpenCore Settings
|
||||||
self.opencore_debug = False
|
self.opencore_debug: bool = False # Enable OpenCore debug
|
||||||
self.opencore_build = "RELEASE"
|
self.boot_efi: bool = False # Use EFI/BOOT/BOOTx64.efi vs boot.efi bootstrap
|
||||||
self.showpicker = True # Show or Hide OpenCore's Boot Picker
|
self.showpicker: bool = True # Show or Hide OpenCore's Boot Picker
|
||||||
self.boot_efi = False # Use EFI/BOOT/BOOTx64.efi bootstrap
|
self.nvram_write: bool = True # Write to hardware NVRAM
|
||||||
self.nvram_write = True # Write to hardware NVRAM
|
self.oc_timeout: int = 5 # Set OpenCore timeout
|
||||||
|
self.opencore_build: str = "RELEASE"
|
||||||
|
|
||||||
## Kext Settings
|
## Kext Settings
|
||||||
self.kext_debug = False # Enables Lilu debug and DebugEnhancer
|
self.kext_debug: bool = False # Enables Lilu debug and DebugEnhancer
|
||||||
self.kext_variant = "RELEASE"
|
self.kext_variant: str = "RELEASE"
|
||||||
|
|
||||||
## NVRAM Settings
|
## NVRAM Settings
|
||||||
self.verbose_debug = False # -v
|
self.verbose_debug: bool = False # -v
|
||||||
|
|
||||||
## SMBIOS Settings
|
## SMBIOS Settings
|
||||||
self.custom_cpu_model = 2 # Patch type value
|
self.serial_settings: str = "None" # Set SMBIOS level used
|
||||||
self.custom_cpu_model_value = "" # New CPU name within About This Mac
|
self.override_smbios: str = "Default" # Set SMBIOS model used
|
||||||
self.serial_settings = "None" # Set SMBIOS level used
|
self.allow_native_spoofs: bool = False # Allow native models to recieve spoofs
|
||||||
self.override_smbios = "Default" # Set SMBIOS model used
|
|
||||||
self.allow_native_spoofs = False # Allow native models to recieve spoofs
|
### RestrictEvents CPU renaming
|
||||||
self.custom_serial_number = "" # Set SMBIOS serial number
|
self.custom_cpu_model: int = 2 # Patch type value
|
||||||
self.custom_board_serial_number = "" # Set SMBIOS board serial number
|
self.custom_cpu_model_value: str = "" # New CPU name within About This Mac
|
||||||
|
|
||||||
|
### Serial Number Overrides
|
||||||
|
self.custom_serial_number: str = "" # Set SMBIOS serial number
|
||||||
|
self.custom_board_serial_number: str = "" # Set SMBIOS board serial number
|
||||||
|
|
||||||
## FeatureUnlock Settings
|
## FeatureUnlock Settings
|
||||||
self.fu_status = True # Enable FeatureUnlock
|
self.fu_status: bool = True # Enable FeatureUnlock
|
||||||
self.fu_arguments = None # Set FeatureUnlock arguments
|
self.fu_arguments: str = None # Set FeatureUnlock arguments
|
||||||
|
|
||||||
## Security Settings
|
## Security Settings
|
||||||
self.apecid_support = False # ApECID
|
self.sip_status: bool = True # System Integrity Protection
|
||||||
self.sip_status = True # System Integrity Protection
|
self.secure_status: bool = False # Secure Boot Model
|
||||||
self.secure_status = False # Secure Boot Model
|
self.vault: bool = False # EFI Vault
|
||||||
self.vault = False # EFI Vault
|
self.disable_cs_lv: bool = False # Disable Library validation
|
||||||
self.disable_cs_lv = False # Disable Library validation
|
self.disable_amfi: bool = False # Disable AMFI
|
||||||
self.disable_amfi = False # Disable AMFI
|
|
||||||
|
|
||||||
## OS Settings
|
## OS Settings
|
||||||
self.os_support = 12.0
|
self.os_support: float = 12.0
|
||||||
self.detected_os = 0 # Major Kernel Version
|
self.detected_os: int = 0 # Major Kernel Version
|
||||||
self.detected_os_minor = 0 # Minor Kernel Version
|
self.detected_os_minor: int = 0 # Minor Kernel Version
|
||||||
self.detected_os_build = "" # OS Build
|
self.detected_os_build: str = "" # OS Build
|
||||||
self.detected_os_version = "" # OS Version
|
self.detected_os_version: str = "" # OS Version
|
||||||
|
|
||||||
## Boot Volume Settings
|
## Boot Volume Settings
|
||||||
self.firewire_boot = False # Allow macOS FireWire Boot
|
self.firewire_boot: bool = False # Allow macOS FireWire Boot (kernel)
|
||||||
self.nvme_boot = False # Allow UEFI NVMe Boot
|
self.nvme_boot: bool = False # Allow UEFI NVMe Boot
|
||||||
self.xhci_boot = False
|
self.xhci_boot: bool = False # Allow UEFI XHCI Boot
|
||||||
|
|
||||||
## Graphics Settings
|
## Graphics Settings
|
||||||
self.metal_build = False # Set MXM Build support
|
self.allow_ts2_accel: bool = True # Set TeraScale 2 Acceleration support
|
||||||
self.imac_vendor = "None" # Set MXM GPU vendor
|
self.drm_support: bool = False # Set iMac14,x DRM support
|
||||||
self.imac_model = "" # Set MXM GPU model
|
self.force_nv_web: bool = False # Force Nvidia Web Drivers on Tesla and Kepler
|
||||||
self.drm_support = False # Set iMac14,x DRM support
|
self.force_output_support: bool = False # Force Output support for Mac Pros with PC VBIOS
|
||||||
self.allow_ts2_accel = True # Set TeraScale 2 Acceleration support
|
self.amd_gop_injection: bool = False # Set GOP Injection support
|
||||||
self.force_nv_web = False # Force Nvidia Web Drivers on Tesla and Kepler
|
self.nvidia_kepler_gop_injection: bool = False # Set Kepler GOP Injection support
|
||||||
self.force_output_support = False # Force Output support for Mac Pros with PC VBIOS
|
|
||||||
self.amd_gop_injection = False # Set GOP Injection support
|
|
||||||
self.nvidia_kepler_gop_injection = False # Set Kepler GOP Injection support
|
|
||||||
|
|
||||||
## Miscellaneous
|
### MXM GPU Support
|
||||||
self.disallow_cpufriend = False # Disable CPUFriend
|
self.metal_build: bool = False # Set MXM Build support
|
||||||
self.enable_wake_on_wlan = False # Allow Wake on WLAN for modern Broadcom
|
self.imac_vendor: str = "None" # Set MXM GPU vendor
|
||||||
self.disable_tb = False # Disable Thunderbolt Controller
|
self.imac_model: str = "" # Set MXM GPU model
|
||||||
self.set_alc_usage = True # Set AppleALC usage
|
|
||||||
self.dGPU_switch = False # Set Display GPU Switching for Windows
|
|
||||||
self.force_surplus = False # Force SurPlus patch in newer OSes
|
|
||||||
self.force_latest_psp = False # Force latest PatcherSupportPkg
|
|
||||||
self.disable_msr_power_ctl = False # Disable MSR Power Control (missing battery throttling)
|
|
||||||
self.software_demux = False # Enable Software Demux patch set
|
|
||||||
self.force_vmm = False # Force VMM patch
|
|
||||||
self.custom_sip_value = None # Set custom SIP value
|
|
||||||
self.walkthrough = False # Enable Walkthrough
|
|
||||||
self.disable_connectdrivers = False # Disable ConnectDrivers (hibernation)
|
|
||||||
self.allow_3rd_party_drives = True # Allow ThridPartyDrives quirk
|
|
||||||
self.set_content_caching = False # Set Content Caching
|
|
||||||
self.allow_nvme_fixing = True # Allow NVMe Kernel Space Patches
|
|
||||||
self.disable_xcpm = False # Disable XCPM (X86PlatformPlugin.kext)
|
|
||||||
self.root_patcher_succeeded = False # Determine if root patcher succeeded
|
|
||||||
self.booted_oc_disk = None # Determine current disk OCLP booted from
|
|
||||||
self.start_build_install = False # Determine if build install should be started
|
|
||||||
self.host_is_non_metal = False # Determine if host is non-metal (ie. enable UI hacks)
|
|
||||||
self.needs_to_open_preferences = False # Determine if preferences need to be opened
|
|
||||||
self.host_is_hackintosh = False # Determine if host is Hackintosh
|
|
||||||
self.commit_info = (None, None, None)
|
|
||||||
self.set_vmm_cpuid = False # Set VMM bit inside CPUID
|
|
||||||
self.oc_timeout = 5 # Set OpenCore timeout
|
|
||||||
self.apfs_trim_timeout = True # Set APFS Trim timeout
|
|
||||||
|
|
||||||
|
## Miscellaneous build settings
|
||||||
|
self.disallow_cpufriend: bool = False # Disable CPUFriend
|
||||||
|
self.enable_wake_on_wlan: bool = False # Allow Wake on WLAN for modern Broadcom
|
||||||
|
self.disable_tb: bool = False # Disable Thunderbolt Controller
|
||||||
|
self.dGPU_switch: bool = False # Set Display GPU Switching for Windows
|
||||||
|
self.force_surplus: bool = False # Force SurPlus patch in newer OSes
|
||||||
|
self.force_latest_psp: bool = False # Force latest PatcherSupportPkg
|
||||||
|
self.disable_msr_power_ctl: bool = False # Disable MSR Power Control (missing battery throttling)
|
||||||
|
self.software_demux: bool = False # Enable Software Demux patch set
|
||||||
|
self.force_vmm: bool = False # Force VMM patch
|
||||||
|
self.disable_connectdrivers: bool = False # Disable ConnectDrivers (hibernation)
|
||||||
|
self.set_content_caching: bool = False # Set Content Caching
|
||||||
|
self.disable_xcpm: bool = False # Disable XCPM (X86PlatformPlugin.kext)
|
||||||
|
self.set_vmm_cpuid: bool = False # Set VMM bit inside CPUID
|
||||||
|
self.set_alc_usage: bool = True # Set AppleALC usage
|
||||||
|
self.allow_3rd_party_drives: bool = True # Allow ThridPartyDrives quirk
|
||||||
|
self.allow_nvme_fixing: bool = True # Allow NVMe Kernel Space Patches
|
||||||
|
self.apfs_trim_timeout: bool = True # Set APFS Trim timeout
|
||||||
|
self.custom_sip_value: int = None # Set custom SIP value
|
||||||
|
|
||||||
|
## Non-Metal OS support
|
||||||
self.legacy_accel_support = [
|
self.legacy_accel_support = [
|
||||||
os_data.os_data.big_sur,
|
os_data.os_data.big_sur,
|
||||||
os_data.os_data.monterey,
|
os_data.os_data.monterey,
|
||||||
@@ -281,10 +287,6 @@ class Constants:
|
|||||||
def link_rate_driver_path(self):
|
def link_rate_driver_path(self):
|
||||||
return self.payload_path / Path("Drivers/FixPCIeLinkRate.efi")
|
return self.payload_path / Path("Drivers/FixPCIeLinkRate.efi")
|
||||||
|
|
||||||
@property
|
|
||||||
def list_txt_path(self):
|
|
||||||
return self.payload_path / Path("List.txt")
|
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def installer_sh_path(self):
|
def installer_sh_path(self):
|
||||||
return self.payload_path / Path("Installer.sh")
|
return self.payload_path / Path("Installer.sh")
|
||||||
@@ -302,6 +304,10 @@ class Constants:
|
|||||||
def whatevergreen_path(self):
|
def whatevergreen_path(self):
|
||||||
return self.payload_kexts_path / Path(f"Acidanthera/WhateverGreen-v{self.whatevergreen_version}-{self.kext_variant}.zip")
|
return self.payload_kexts_path / Path(f"Acidanthera/WhateverGreen-v{self.whatevergreen_version}-{self.kext_variant}.zip")
|
||||||
|
|
||||||
|
@property
|
||||||
|
def whatevergreen_navi_path(self):
|
||||||
|
return self.payload_kexts_path / Path(f"Acidanthera/WhateverGreen-v{self.whatevergreen_navi_version}-{self.kext_variant}.zip")
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def airportbcrmfixup_path(self):
|
def airportbcrmfixup_path(self):
|
||||||
return self.payload_kexts_path / Path(f"Acidanthera/AirportBrcmFixup-v{self.airportbcrmfixup_version}-{self.kext_variant}.zip")
|
return self.payload_kexts_path / Path(f"Acidanthera/AirportBrcmFixup-v{self.airportbcrmfixup_version}-{self.kext_variant}.zip")
|
||||||
@@ -456,7 +462,7 @@ class Constants:
|
|||||||
|
|
||||||
@property
|
@property
|
||||||
def innie_path(self):
|
def innie_path(self):
|
||||||
return self.payload_kexts_path / Path(f"Misc/Innie-v{self.innie_version}.zip")
|
return self.payload_kexts_path / Path(f"Misc/Innie-v{self.innie_version}-{self.kext_variant}.zip")
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def simplemsr_path(self):
|
def simplemsr_path(self):
|
||||||
|
|||||||
@@ -1,36 +1,60 @@
|
|||||||
# Generate Default Data
|
# Generate Default Data
|
||||||
from resources import utilities, device_probe, generate_smbios, global_settings
|
|
||||||
from data import smbios_data, cpu_data, os_data
|
|
||||||
import subprocess
|
import subprocess
|
||||||
|
|
||||||
|
from resources import (
|
||||||
|
utilities,
|
||||||
|
device_probe,
|
||||||
|
generate_smbios,
|
||||||
|
global_settings,
|
||||||
|
constants
|
||||||
|
)
|
||||||
|
from data import (
|
||||||
|
smbios_data,
|
||||||
|
cpu_data,
|
||||||
|
os_data
|
||||||
|
)
|
||||||
|
|
||||||
class generate_defaults:
|
|
||||||
|
|
||||||
def __init__(self, model, host_is_target, settings):
|
class GenerateDefaults:
|
||||||
self.model = model
|
|
||||||
self.constants = settings
|
def __init__(self, model: str, host_is_target: bool, global_constants: constants.Constants) -> None:
|
||||||
self.host_is_target = host_is_target
|
self.constants: constants.Constants = global_constants
|
||||||
|
|
||||||
|
self.model: str = model
|
||||||
|
|
||||||
|
self.host_is_target: bool = host_is_target
|
||||||
|
|
||||||
# Reset Variables
|
# Reset Variables
|
||||||
self.constants.sip_status = True
|
self.constants.sip_status: bool = True
|
||||||
self.constants.secure_status = False
|
self.constants.secure_status: bool = False
|
||||||
self.constants.disable_cs_lv = False
|
self.constants.disable_cs_lv: bool = False
|
||||||
self.constants.disable_amfi = False
|
self.constants.disable_amfi: bool = False
|
||||||
self.constants.fu_status = True
|
self.constants.fu_status: bool = True
|
||||||
self.constants.fu_arguments = None
|
|
||||||
|
|
||||||
self.constants.custom_serial_number = ""
|
self.constants.fu_arguments: str = None
|
||||||
self.constants.custom_board_serial_number = ""
|
|
||||||
|
|
||||||
self.general_probe()
|
self.constants.custom_serial_number: str = ""
|
||||||
self.nvram_probe()
|
self.constants.custom_board_serial_number: str = ""
|
||||||
self.gpu_probe()
|
|
||||||
self.networking_probe()
|
if self.host_is_target is True:
|
||||||
self.misc_hardwares_probe()
|
for gpu in self.constants.computer.gpus:
|
||||||
self.smbios_probe()
|
if gpu.device_id_unspoofed == -1:
|
||||||
|
gpu.device_id_unspoofed = gpu.device_id
|
||||||
|
if gpu.vendor_id_unspoofed == -1:
|
||||||
|
gpu.vendor_id_unspoofed = gpu.vendor_id
|
||||||
|
|
||||||
|
self._general_probe()
|
||||||
|
self._nvram_probe()
|
||||||
|
self._gpu_probe()
|
||||||
|
self._networking_probe()
|
||||||
|
self._misc_hardwares_probe()
|
||||||
|
self._smbios_probe()
|
||||||
|
|
||||||
|
|
||||||
def general_probe(self):
|
def _general_probe(self) -> None:
|
||||||
|
"""
|
||||||
|
General probe for data
|
||||||
|
"""
|
||||||
|
|
||||||
if "Book" in self.model:
|
if "Book" in self.model:
|
||||||
self.constants.set_content_caching = False
|
self.constants.set_content_caching = False
|
||||||
@@ -40,11 +64,11 @@ class generate_defaults:
|
|||||||
if self.model in ["MacBookPro8,2", "MacBookPro8,3"]:
|
if self.model in ["MacBookPro8,2", "MacBookPro8,3"]:
|
||||||
# Users disabling TS2 most likely have a faulty dGPU
|
# Users disabling TS2 most likely have a faulty dGPU
|
||||||
# users can override this in settings
|
# users can override this in settings
|
||||||
ts2_status = global_settings.global_settings().read_property("MacBookPro_TeraScale_2_Accel")
|
ts2_status = global_settings.GlobalEnviromentSettings().read_property("MacBookPro_TeraScale_2_Accel")
|
||||||
if ts2_status is True:
|
if ts2_status is True:
|
||||||
self.constants.allow_ts2_accel = True
|
self.constants.allow_ts2_accel = True
|
||||||
else:
|
else:
|
||||||
global_settings.global_settings().write_property("MacBookPro_TeraScale_2_Accel", False)
|
global_settings.GlobalEnviromentSettings().write_property("MacBookPro_TeraScale_2_Accel", False)
|
||||||
self.constants.allow_ts2_accel = False
|
self.constants.allow_ts2_accel = False
|
||||||
|
|
||||||
if self.model in smbios_data.smbios_dictionary:
|
if self.model in smbios_data.smbios_dictionary:
|
||||||
@@ -61,14 +85,19 @@ class generate_defaults:
|
|||||||
# Check if running in RecoveryOS
|
# Check if running in RecoveryOS
|
||||||
self.constants.recovery_status = utilities.check_recovery()
|
self.constants.recovery_status = utilities.check_recovery()
|
||||||
|
|
||||||
if global_settings.global_settings().read_property("Force_Web_Drivers") is True:
|
if global_settings.GlobalEnviromentSettings().read_property("Force_Web_Drivers") is True:
|
||||||
self.constants.force_nv_web = True
|
self.constants.force_nv_web = True
|
||||||
|
|
||||||
result = global_settings.global_settings().read_property("ShouldNukeKDKs")
|
result = global_settings.GlobalEnviromentSettings().read_property("ShouldNukeKDKs")
|
||||||
if result is False:
|
if result is False:
|
||||||
self.constants.should_nuke_kdks = False
|
self.constants.should_nuke_kdks = False
|
||||||
|
|
||||||
def smbios_probe(self):
|
|
||||||
|
def _smbios_probe(self) -> None:
|
||||||
|
"""
|
||||||
|
SMBIOS specific probe
|
||||||
|
"""
|
||||||
|
|
||||||
if not self.host_is_target:
|
if not self.host_is_target:
|
||||||
if self.model in ["MacPro4,1", "MacPro5,1"]:
|
if self.model in ["MacPro4,1", "MacPro5,1"]:
|
||||||
# Allow H.265 on AMD
|
# Allow H.265 on AMD
|
||||||
@@ -99,7 +128,11 @@ class generate_defaults:
|
|||||||
self.constants.force_vmm = False
|
self.constants.force_vmm = False
|
||||||
|
|
||||||
|
|
||||||
def nvram_probe(self):
|
def _nvram_probe(self) -> None:
|
||||||
|
"""
|
||||||
|
NVRAM specific probe
|
||||||
|
"""
|
||||||
|
|
||||||
if not self.host_is_target:
|
if not self.host_is_target:
|
||||||
return
|
return
|
||||||
|
|
||||||
@@ -120,7 +153,11 @@ class generate_defaults:
|
|||||||
self.constants.custom_cpu_model_value = custom_cpu_model_value.split("%00")[0]
|
self.constants.custom_cpu_model_value = custom_cpu_model_value.split("%00")[0]
|
||||||
|
|
||||||
|
|
||||||
def networking_probe(self):
|
def _networking_probe(self) -> None:
|
||||||
|
"""
|
||||||
|
Networking specific probe
|
||||||
|
"""
|
||||||
|
|
||||||
if self.host_is_target:
|
if self.host_is_target:
|
||||||
if not (
|
if not (
|
||||||
(
|
(
|
||||||
@@ -157,7 +194,11 @@ class generate_defaults:
|
|||||||
self.constants.fu_status = True
|
self.constants.fu_status = True
|
||||||
self.constants.fu_arguments = " -disable_sidecar_mac"
|
self.constants.fu_arguments = " -disable_sidecar_mac"
|
||||||
|
|
||||||
def misc_hardwares_probe(self):
|
|
||||||
|
def _misc_hardwares_probe(self) -> None:
|
||||||
|
"""
|
||||||
|
Misc probe
|
||||||
|
"""
|
||||||
if self.host_is_target:
|
if self.host_is_target:
|
||||||
if self.constants.computer.usb_controllers:
|
if self.constants.computer.usb_controllers:
|
||||||
if self.model in smbios_data.smbios_dictionary:
|
if self.model in smbios_data.smbios_dictionary:
|
||||||
@@ -170,7 +211,11 @@ class generate_defaults:
|
|||||||
break
|
break
|
||||||
|
|
||||||
|
|
||||||
def gpu_probe(self):
|
def _gpu_probe(self) -> None:
|
||||||
|
"""
|
||||||
|
Graphics specific probe
|
||||||
|
"""
|
||||||
|
|
||||||
gpu_dict = []
|
gpu_dict = []
|
||||||
if self.host_is_target:
|
if self.host_is_target:
|
||||||
gpu_dict = self.constants.computer.gpus
|
gpu_dict = self.constants.computer.gpus
|
||||||
@@ -196,14 +241,23 @@ class generate_defaults:
|
|||||||
device_probe.AMD.Archs.Legacy_GCN_8000,
|
device_probe.AMD.Archs.Legacy_GCN_8000,
|
||||||
device_probe.AMD.Archs.Legacy_GCN_9000,
|
device_probe.AMD.Archs.Legacy_GCN_9000,
|
||||||
device_probe.AMD.Archs.Polaris,
|
device_probe.AMD.Archs.Polaris,
|
||||||
|
device_probe.AMD.Archs.Polaris_Spoof,
|
||||||
device_probe.AMD.Archs.Vega,
|
device_probe.AMD.Archs.Vega,
|
||||||
device_probe.AMD.Archs.Navi,
|
device_probe.AMD.Archs.Navi,
|
||||||
]:
|
]:
|
||||||
|
if gpu in [
|
||||||
|
device_probe.Intel.Archs.Ivy_Bridge,
|
||||||
|
device_probe.Intel.Archs.Haswell,
|
||||||
|
device_probe.NVIDIA.Archs.Kepler,
|
||||||
|
]:
|
||||||
|
self.constants.disable_amfi = True
|
||||||
|
|
||||||
if gpu in [
|
if gpu in [
|
||||||
device_probe.AMD.Archs.Legacy_GCN_7000,
|
device_probe.AMD.Archs.Legacy_GCN_7000,
|
||||||
device_probe.AMD.Archs.Legacy_GCN_8000,
|
device_probe.AMD.Archs.Legacy_GCN_8000,
|
||||||
device_probe.AMD.Archs.Legacy_GCN_9000,
|
device_probe.AMD.Archs.Legacy_GCN_9000,
|
||||||
device_probe.AMD.Archs.Polaris,
|
device_probe.AMD.Archs.Polaris,
|
||||||
|
device_probe.AMD.Archs.Polaris_Spoof,
|
||||||
device_probe.AMD.Archs.Vega,
|
device_probe.AMD.Archs.Vega,
|
||||||
device_probe.AMD.Archs.Navi,
|
device_probe.AMD.Archs.Navi,
|
||||||
]:
|
]:
|
||||||
@@ -221,6 +275,7 @@ class generate_defaults:
|
|||||||
# See if system can use the native AMD stack in Ventura
|
# See if system can use the native AMD stack in Ventura
|
||||||
if gpu in [
|
if gpu in [
|
||||||
device_probe.AMD.Archs.Polaris,
|
device_probe.AMD.Archs.Polaris,
|
||||||
|
device_probe.AMD.Archs.Polaris_Spoof,
|
||||||
device_probe.AMD.Archs.Vega,
|
device_probe.AMD.Archs.Vega,
|
||||||
device_probe.AMD.Archs.Navi,
|
device_probe.AMD.Archs.Navi,
|
||||||
]:
|
]:
|
||||||
@@ -252,4 +307,9 @@ class generate_defaults:
|
|||||||
self.constants.disable_cs_lv = True
|
self.constants.disable_cs_lv = True
|
||||||
if os_data.os_data.ventura in self.constants.legacy_accel_support:
|
if os_data.os_data.ventura in self.constants.legacy_accel_support:
|
||||||
# Only disable AMFI if we officially support Ventura
|
# Only disable AMFI if we officially support Ventura
|
||||||
self.constants.disable_amfi = True
|
self.constants.disable_amfi = True
|
||||||
|
|
||||||
|
# Enable BetaBlur if user hasn't disabled it
|
||||||
|
is_blur_enabled = subprocess.run(["defaults", "read", "-g", "Moraea_BlurBeta"], stdout=subprocess.PIPE).stdout.decode("utf-8").strip()
|
||||||
|
if is_blur_enabled in ["false", "0"]:
|
||||||
|
subprocess.run(["defaults", "write", "-g", "Moraea_BlurBeta", "-bool", "true"])
|
||||||
@@ -25,25 +25,46 @@ class CPU:
|
|||||||
class PCIDevice:
|
class PCIDevice:
|
||||||
VENDOR_ID: ClassVar[int] # Default vendor id, for subclasses.
|
VENDOR_ID: ClassVar[int] # Default vendor id, for subclasses.
|
||||||
|
|
||||||
vendor_id: int # The vendor ID of this PCI device
|
vendor_id: int # The vendor ID of this PCI device
|
||||||
device_id: int # The device ID of this PCI device
|
device_id: int # The device ID of this PCI device
|
||||||
class_code: int # The class code of this PCI device - https://pci-ids.ucw.cz/read/PD
|
class_code: int # The class code of this PCI device - https://pci-ids.ucw.cz/read/PD
|
||||||
|
|
||||||
name: Optional[str] = None # Name of IORegistryEntry
|
name: Optional[str] = None # Name of IORegistryEntry
|
||||||
model: Optional[str] = None # model property
|
model: Optional[str] = None # model property
|
||||||
acpi_path: Optional[str] = None # ACPI Device Path
|
acpi_path: Optional[str] = None # ACPI Device Path
|
||||||
pci_path: Optional[str] = None # PCI Device Path
|
pci_path: Optional[str] = None # PCI Device Path
|
||||||
disable_metal: Optional[bool] = False # 'disable-metal' property
|
disable_metal: Optional[bool] = False # 'disable-metal' property
|
||||||
force_compatible: Optional[bool] = False # 'force-compat' property
|
force_compatible: Optional[bool] = False # 'force-compat' property
|
||||||
|
vendor_id_unspoofed: Optional[int] = -1 # Unspoofed vendor ID of this PCI device
|
||||||
|
device_id_unspoofed: Optional[int] = -1 # Unspoofed device ID of this PCI device
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def from_ioregistry(cls, entry: ioreg.io_registry_entry_t, anti_spoof=False):
|
def from_ioregistry(cls, entry: ioreg.io_registry_entry_t, anti_spoof=False):
|
||||||
properties: dict = ioreg.corefoundation_to_native(ioreg.IORegistryEntryCreateCFProperties(entry, None, ioreg.kCFAllocatorDefault, ioreg.kNilOptions)[1]) # type: ignore
|
properties: dict = ioreg.corefoundation_to_native(ioreg.IORegistryEntryCreateCFProperties(entry, None, ioreg.kCFAllocatorDefault, ioreg.kNilOptions)[1]) # type: ignore
|
||||||
if anti_spoof and "IOName" in properties:
|
|
||||||
vendor_id, device_id = (int(i, 16) for i in properties["IOName"][3:].split(","))
|
vendor_id = None
|
||||||
else:
|
device_id = None
|
||||||
|
vendor_id_unspoofed = None
|
||||||
|
device_id_unspoofed = None
|
||||||
|
|
||||||
|
if "IOName" in properties:
|
||||||
|
ioname = properties["IOName"]
|
||||||
|
if type(ioname) is bytes:
|
||||||
|
ioname = ioname.strip(b"\0").decode()
|
||||||
|
|
||||||
|
if ioname.startswith("pci"):
|
||||||
|
vendor_id_unspoofed, device_id_unspoofed = (int(i, 16) for i in ioname[3:].split(","))
|
||||||
|
if anti_spoof:
|
||||||
|
vendor_id = vendor_id_unspoofed
|
||||||
|
device_id = device_id_unspoofed
|
||||||
|
|
||||||
|
if vendor_id is None and device_id is None:
|
||||||
vendor_id, device_id = [int.from_bytes(properties[i][:4], byteorder="little") for i in ["vendor-id", "device-id"]]
|
vendor_id, device_id = [int.from_bytes(properties[i][:4], byteorder="little") for i in ["vendor-id", "device-id"]]
|
||||||
|
|
||||||
|
if vendor_id_unspoofed is None and device_id_unspoofed is None:
|
||||||
|
vendor_id_unspoofed = vendor_id
|
||||||
|
device_id_unspoofed = device_id
|
||||||
|
|
||||||
device = cls(vendor_id, device_id, int.from_bytes(properties["class-code"][:6], byteorder="little"), name=ioreg.io_name_t_to_str(ioreg.IORegistryEntryGetName(entry, None)[1]))
|
device = cls(vendor_id, device_id, int.from_bytes(properties["class-code"][:6], byteorder="little"), name=ioreg.io_name_t_to_str(ioreg.IORegistryEntryGetName(entry, None)[1]))
|
||||||
if "model" in properties:
|
if "model" in properties:
|
||||||
model = properties["model"]
|
model = properties["model"]
|
||||||
@@ -56,6 +77,9 @@ class PCIDevice:
|
|||||||
device.disable_metal = True
|
device.disable_metal = True
|
||||||
if "force-compat" in properties:
|
if "force-compat" in properties:
|
||||||
device.force_compatible = True
|
device.force_compatible = True
|
||||||
|
|
||||||
|
device.vendor_id_unspoofed = vendor_id_unspoofed
|
||||||
|
device.device_id_unspoofed = device_id_unspoofed
|
||||||
device.populate_pci_path(entry)
|
device.populate_pci_path(entry)
|
||||||
return device
|
return device
|
||||||
|
|
||||||
@@ -242,6 +266,7 @@ class AMD(GPU):
|
|||||||
Legacy_GCN_8000 = "Legacy GCN v2"
|
Legacy_GCN_8000 = "Legacy GCN v2"
|
||||||
Legacy_GCN_9000 = "Legacy GCN v3"
|
Legacy_GCN_9000 = "Legacy GCN v3"
|
||||||
Polaris = "Polaris"
|
Polaris = "Polaris"
|
||||||
|
Polaris_Spoof = "Polaris (Spoofed)"
|
||||||
Vega = "Vega"
|
Vega = "Vega"
|
||||||
Navi = "Navi"
|
Navi = "Navi"
|
||||||
Unknown = "Unknown"
|
Unknown = "Unknown"
|
||||||
@@ -263,6 +288,8 @@ class AMD(GPU):
|
|||||||
self.arch = AMD.Archs.TeraScale_2
|
self.arch = AMD.Archs.TeraScale_2
|
||||||
elif self.device_id in pci_data.amd_ids.polaris_ids:
|
elif self.device_id in pci_data.amd_ids.polaris_ids:
|
||||||
self.arch = AMD.Archs.Polaris
|
self.arch = AMD.Archs.Polaris
|
||||||
|
elif self.device_id in pci_data.amd_ids.polaris_spoof_ids:
|
||||||
|
self.arch = AMD.Archs.Polaris_Spoof
|
||||||
elif self.device_id in pci_data.amd_ids.vega_ids:
|
elif self.device_id in pci_data.amd_ids.vega_ids:
|
||||||
self.arch = AMD.Archs.Vega
|
self.arch = AMD.Archs.Vega
|
||||||
elif self.device_id in pci_data.amd_ids.navi_ids:
|
elif self.device_id in pci_data.amd_ids.navi_ids:
|
||||||
|
|||||||
@@ -1,6 +1,7 @@
|
|||||||
from data import smbios_data, os_data, cpu_data
|
from data import smbios_data, os_data, cpu_data
|
||||||
from resources import utilities
|
from resources import utilities
|
||||||
|
|
||||||
|
import logging
|
||||||
|
|
||||||
def set_smbios_model_spoof(model):
|
def set_smbios_model_spoof(model):
|
||||||
try:
|
try:
|
||||||
@@ -60,7 +61,7 @@ def generate_fw_features(model, custom):
|
|||||||
if not custom:
|
if not custom:
|
||||||
firmwarefeature = utilities.get_rom("firmware-features")
|
firmwarefeature = utilities.get_rom("firmware-features")
|
||||||
if not firmwarefeature:
|
if not firmwarefeature:
|
||||||
print("- Failed to find FirmwareFeatures, falling back on defaults")
|
logging.info("- Failed to find FirmwareFeatures, falling back on defaults")
|
||||||
if smbios_data.smbios_dictionary[model]["FirmwareFeatures"] is None:
|
if smbios_data.smbios_dictionary[model]["FirmwareFeatures"] is None:
|
||||||
firmwarefeature = 0
|
firmwarefeature = 0
|
||||||
else:
|
else:
|
||||||
|
|||||||
@@ -5,42 +5,66 @@
|
|||||||
|
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
import plistlib
|
import plistlib
|
||||||
|
import logging
|
||||||
|
import os
|
||||||
|
import subprocess
|
||||||
|
|
||||||
class global_settings:
|
|
||||||
|
|
||||||
def __init__(self):
|
class GlobalEnviromentSettings:
|
||||||
self.file_name = ".com.dortania.opencore-legacy-patcher.plist"
|
"""
|
||||||
self.global_settings_folder = "/Users/Shared"
|
Library for querying and writing global enviroment settings
|
||||||
self.global_settings_plist = f"{self.global_settings_folder}/{self.file_name}"
|
"""
|
||||||
self.generate_settings_file()
|
|
||||||
self.convert_defaults_to_global_settings()
|
|
||||||
|
|
||||||
def generate_settings_file(self):
|
def __init__(self) -> None:
|
||||||
if Path(self.global_settings_plist).exists():
|
self.file_name: str = ".com.dortania.opencore-legacy-patcher.plist"
|
||||||
return
|
self.global_settings_folder: str = "/Users/Shared"
|
||||||
try:
|
self.global_settings_plist: str = f"{self.global_settings_folder}/{self.file_name}"
|
||||||
plistlib.dump({"Developed by Dortania": True,}, Path(self.global_settings_plist).open("wb"))
|
|
||||||
except PermissionError:
|
self._generate_settings_file()
|
||||||
print("- Permission error: Unable to write to global settings file")
|
self._convert_defaults_to_global_settings()
|
||||||
|
self._fix_file_permission()
|
||||||
|
|
||||||
|
|
||||||
|
def read_property(self, property_name: str) -> str or None:
|
||||||
|
"""
|
||||||
|
Reads a property from the global settings file
|
||||||
|
"""
|
||||||
|
|
||||||
def read_property(self, property_name):
|
|
||||||
if Path(self.global_settings_plist).exists():
|
if Path(self.global_settings_plist).exists():
|
||||||
plist = plistlib.load(Path(self.global_settings_plist).open("rb"))
|
plist = plistlib.load(Path(self.global_settings_plist).open("rb"))
|
||||||
if property_name in plist:
|
if property_name in plist:
|
||||||
return plist[property_name]
|
return plist[property_name]
|
||||||
return None
|
return None
|
||||||
|
|
||||||
def write_property(self, property_name, property_value):
|
|
||||||
|
def write_property(self, property_name: str, property_value) -> None:
|
||||||
|
"""
|
||||||
|
Writes a property to the global settings file
|
||||||
|
"""
|
||||||
|
|
||||||
if Path(self.global_settings_plist).exists():
|
if Path(self.global_settings_plist).exists():
|
||||||
plist = plistlib.load(Path(self.global_settings_plist).open("rb"))
|
plist = plistlib.load(Path(self.global_settings_plist).open("rb"))
|
||||||
plist[property_name] = property_value
|
plist[property_name] = property_value
|
||||||
try:
|
try:
|
||||||
plistlib.dump(plist, Path(self.global_settings_plist).open("wb"))
|
plistlib.dump(plist, Path(self.global_settings_plist).open("wb"))
|
||||||
except PermissionError:
|
except PermissionError:
|
||||||
print("- Failed to write to global settings file")
|
logging.info("- Failed to write to global settings file")
|
||||||
|
|
||||||
|
|
||||||
def convert_defaults_to_global_settings(self):
|
def _generate_settings_file(self) -> None:
|
||||||
|
if Path(self.global_settings_plist).exists():
|
||||||
|
return
|
||||||
|
try:
|
||||||
|
plistlib.dump({"Developed by Dortania": True,}, Path(self.global_settings_plist).open("wb"))
|
||||||
|
except PermissionError:
|
||||||
|
logging.info("- Permission error: Unable to write to global settings file")
|
||||||
|
|
||||||
|
|
||||||
|
def _convert_defaults_to_global_settings(self) -> None:
|
||||||
|
"""
|
||||||
|
Converts legacy defaults to global settings
|
||||||
|
"""
|
||||||
|
|
||||||
defaults_path = "~/Library/Preferences/com.dortania.opencore-legacy-patcher.plist"
|
defaults_path = "~/Library/Preferences/com.dortania.opencore-legacy-patcher.plist"
|
||||||
defaults_path = Path(defaults_path).expanduser()
|
defaults_path = Path(defaults_path).expanduser()
|
||||||
|
|
||||||
@@ -52,11 +76,30 @@ class global_settings:
|
|||||||
try:
|
try:
|
||||||
plistlib.dump(global_settings_plist, Path(self.global_settings_plist).open("wb"))
|
plistlib.dump(global_settings_plist, Path(self.global_settings_plist).open("wb"))
|
||||||
except PermissionError:
|
except PermissionError:
|
||||||
print("- Permission error: Unable to write to global settings file")
|
logging.info("- Permission error: Unable to write to global settings file")
|
||||||
return
|
return
|
||||||
|
|
||||||
# delete defaults plist
|
# delete defaults plist
|
||||||
try:
|
try:
|
||||||
Path(defaults_path).unlink()
|
Path(defaults_path).unlink()
|
||||||
except PermissionError:
|
except PermissionError:
|
||||||
print("- Permission error: Unable to delete defaults plist")
|
logging.info("- Permission error: Unable to delete defaults plist")
|
||||||
|
|
||||||
|
|
||||||
|
def _fix_file_permission(self) -> None:
|
||||||
|
"""
|
||||||
|
Fixes file permission for log file
|
||||||
|
|
||||||
|
If OCLP was invoked as root, file permission will only allow root to write to settings file
|
||||||
|
This in turn breaks normal OCLP execution to write to settings file
|
||||||
|
"""
|
||||||
|
|
||||||
|
if os.geteuid() != 0:
|
||||||
|
return
|
||||||
|
|
||||||
|
# Set file permission to allow any user to write to log file
|
||||||
|
result = subprocess.run(["chmod", "777", self.global_settings_plist], capture_output=True)
|
||||||
|
if result.returncode != 0:
|
||||||
|
logging.warning("- Failed to fix settings file permissions:")
|
||||||
|
if result.stderr:
|
||||||
|
logging.warning(result.stderr.decode("utf-8"))
|
||||||
File diff suppressed because it is too large
Load Diff
@@ -23,12 +23,15 @@ class RedirectLabel(object):
|
|||||||
self.out=aWxTextCtrl
|
self.out=aWxTextCtrl
|
||||||
|
|
||||||
def write(self,string):
|
def write(self,string):
|
||||||
if string.endswith("MB/s"):
|
if "MB/s" in string:
|
||||||
self.out.SetLabel(string)
|
self.out.SetLabel(string)
|
||||||
self.out.Centre(wx.HORIZONTAL)
|
self.out.Centre(wx.HORIZONTAL)
|
||||||
wx.GetApp().Yield()
|
wx.GetApp().Yield()
|
||||||
time.sleep(0.01)
|
time.sleep(0.01)
|
||||||
|
|
||||||
|
def fileno(self):
|
||||||
|
return 1
|
||||||
|
|
||||||
def flush(self):
|
def flush(self):
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
|||||||
@@ -6,8 +6,9 @@ import plistlib
|
|||||||
import subprocess
|
import subprocess
|
||||||
import shutil
|
import shutil
|
||||||
import os
|
import os
|
||||||
|
import logging
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
from resources import utilities, constants, tui_helpers
|
from resources import utilities, constants
|
||||||
from data import os_data
|
from data import os_data
|
||||||
|
|
||||||
class tui_disk_installation:
|
class tui_disk_installation:
|
||||||
@@ -74,65 +75,6 @@ class tui_disk_installation:
|
|||||||
return supported_partitions
|
return supported_partitions
|
||||||
|
|
||||||
|
|
||||||
def copy_efi(self):
|
|
||||||
utilities.cls()
|
|
||||||
utilities.header(["Installing OpenCore to Drive"])
|
|
||||||
|
|
||||||
if not self.constants.opencore_release_folder.exists():
|
|
||||||
tui_helpers.TUIOnlyPrint(
|
|
||||||
["Installing OpenCore to Drive"],
|
|
||||||
"Press [Enter] to go back.\n",
|
|
||||||
[
|
|
||||||
"""OpenCore folder missing!
|
|
||||||
Please build OpenCore first!"""
|
|
||||||
],
|
|
||||||
).start()
|
|
||||||
return
|
|
||||||
|
|
||||||
print("\nDisk picker is loading...")
|
|
||||||
|
|
||||||
all_disks = self.list_disks()
|
|
||||||
menu = tui_helpers.TUIMenu(
|
|
||||||
["Select Disk"],
|
|
||||||
"Please select the disk you would like to install OpenCore to: ",
|
|
||||||
in_between=["Missing disks? Ensure they have an EFI or FAT32 partition."],
|
|
||||||
return_number_instead_of_direct_call=True,
|
|
||||||
loop=True,
|
|
||||||
)
|
|
||||||
for disk in all_disks:
|
|
||||||
menu.add_menu_option(f"{disk}: {all_disks[disk]['name']} ({all_disks[disk]['size']})", key=disk[4:])
|
|
||||||
|
|
||||||
response = menu.start()
|
|
||||||
|
|
||||||
if response == -1:
|
|
||||||
return
|
|
||||||
|
|
||||||
disk_identifier = "disk" + response
|
|
||||||
selected_disk = all_disks[disk_identifier]
|
|
||||||
|
|
||||||
menu = tui_helpers.TUIMenu(
|
|
||||||
["Select Partition"],
|
|
||||||
"Please select the partition you would like to install OpenCore to: ",
|
|
||||||
return_number_instead_of_direct_call=True,
|
|
||||||
loop=True,
|
|
||||||
in_between=["Missing partitions? Ensure they are formatted as an EFI or FAT32.", "", "* denotes likely candidate."],
|
|
||||||
)
|
|
||||||
for partition in selected_disk["partitions"]:
|
|
||||||
if selected_disk["partitions"][partition]["fs"] not in ("msdos", "EFI"):
|
|
||||||
continue
|
|
||||||
text = f"{partition}: {selected_disk['partitions'][partition]['name']} ({utilities.human_fmt(selected_disk['partitions'][partition]['size'])})"
|
|
||||||
if selected_disk["partitions"][partition]["type"] == "EFI" or (
|
|
||||||
selected_disk["partitions"][partition]["type"] == "Microsoft Basic Data" and selected_disk["partitions"][partition]["size"] < 1024 * 1024 * 512
|
|
||||||
): # 512 megabytes:
|
|
||||||
text += " *"
|
|
||||||
menu.add_menu_option(text, key=partition[len(disk_identifier) + 1 :])
|
|
||||||
|
|
||||||
response = menu.start()
|
|
||||||
|
|
||||||
if response == -1:
|
|
||||||
return
|
|
||||||
self.install_opencore(f"{disk_identifier}s{response}")
|
|
||||||
|
|
||||||
def install_opencore(self, full_disk_identifier):
|
def install_opencore(self, full_disk_identifier):
|
||||||
def determine_sd_card(media_name):
|
def determine_sd_card(media_name):
|
||||||
# Array filled with common SD Card names
|
# Array filled with common SD Card names
|
||||||
@@ -168,18 +110,13 @@ Please build OpenCore first!"""
|
|||||||
# cancelled prompt
|
# cancelled prompt
|
||||||
return
|
return
|
||||||
else:
|
else:
|
||||||
if self.constants.gui_mode is False:
|
logging.info("An error occurred!")
|
||||||
tui_helpers.TUIOnlyPrint(
|
logging.info(result.stderr.decode())
|
||||||
["Copying OpenCore"], "Press [Enter] to go back.\n", ["An error occurred!"] + result.stderr.decode().split("\n") + [""]
|
|
||||||
).start()
|
|
||||||
else:
|
|
||||||
print("An error occurred!")
|
|
||||||
print(result.stderr.decode())
|
|
||||||
|
|
||||||
# Check if we're in Safe Mode, and if so, tell user FAT32 is unsupported
|
# Check if we're in Safe Mode, and if so, tell user FAT32 is unsupported
|
||||||
if utilities.check_boot_mode() == "safe_boot":
|
if utilities.check_boot_mode() == "safe_boot":
|
||||||
print("\nSafe Mode detected. FAT32 is unsupported by macOS in this mode.")
|
logging.info("\nSafe Mode detected. FAT32 is unsupported by macOS in this mode.")
|
||||||
print("Please disable Safe Mode and try again.")
|
logging.info("Please disable Safe Mode and try again.")
|
||||||
return
|
return
|
||||||
partition_info = plistlib.loads(subprocess.run(f"diskutil info -plist {full_disk_identifier}".split(), stdout=subprocess.PIPE).stdout.decode().strip().encode())
|
partition_info = plistlib.loads(subprocess.run(f"diskutil info -plist {full_disk_identifier}".split(), stdout=subprocess.PIPE).stdout.decode().strip().encode())
|
||||||
parent_disk = partition_info["ParentWholeDisk"]
|
parent_disk = partition_info["ParentWholeDisk"]
|
||||||
@@ -196,65 +133,62 @@ Please build OpenCore first!"""
|
|||||||
|
|
||||||
if mount_path.exists():
|
if mount_path.exists():
|
||||||
if (mount_path / Path("EFI/Microsoft")).exists() and self.constants.gui_mode is False:
|
if (mount_path / Path("EFI/Microsoft")).exists() and self.constants.gui_mode is False:
|
||||||
print("- Found Windows Boot Loader")
|
logging.info("- Found Windows Boot Loader")
|
||||||
print("\nWould you like to continue installing OpenCore?")
|
logging.info("\nWould you like to continue installing OpenCore?")
|
||||||
print("Installing OpenCore onto this drive may make Windows unbootable until OpenCore")
|
logging.info("Installing OpenCore onto this drive may make Windows unbootable until OpenCore")
|
||||||
print("is removed from the partition")
|
logging.info("is removed from the partition")
|
||||||
print("We highly recommend users partition 200MB off their drive with Disk Utility")
|
logging.info("We highly recommend users partition 200MB off their drive with Disk Utility")
|
||||||
print(" Name:\t\t OPENCORE")
|
logging.info(" Name:\t\t OPENCORE")
|
||||||
print(" Format:\t\t FAT32")
|
logging.info(" Format:\t\t FAT32")
|
||||||
print(" Size:\t\t 200MB")
|
logging.info(" Size:\t\t 200MB")
|
||||||
choice = input("\nWould you like to still install OpenCore to this drive?(y/n): ")
|
choice = input("\nWould you like to still install OpenCore to this drive?(y/n): ")
|
||||||
if not choice in ["y", "Y", "Yes", "yes"]:
|
if not choice in ["y", "Y", "Yes", "yes"]:
|
||||||
subprocess.run(["diskutil", "umount", mount_path], stdout=subprocess.PIPE).stdout.decode().strip().encode()
|
subprocess.run(["diskutil", "umount", mount_path], stdout=subprocess.PIPE).stdout.decode().strip().encode()
|
||||||
return False
|
return False
|
||||||
if (mount_path / Path("EFI/OC")).exists():
|
if (mount_path / Path("EFI/OC")).exists():
|
||||||
print("- Removing preexisting EFI/OC folder")
|
logging.info("- Removing preexisting EFI/OC folder")
|
||||||
shutil.rmtree(mount_path / Path("EFI/OC"), onerror=rmtree_handler)
|
shutil.rmtree(mount_path / Path("EFI/OC"), onerror=rmtree_handler)
|
||||||
if (mount_path / Path("System")).exists():
|
if (mount_path / Path("System")).exists():
|
||||||
print("- Removing preexisting System folder")
|
logging.info("- Removing preexisting System folder")
|
||||||
shutil.rmtree(mount_path / Path("System"), onerror=rmtree_handler)
|
shutil.rmtree(mount_path / Path("System"), onerror=rmtree_handler)
|
||||||
if (mount_path / Path("boot.efi")).exists():
|
if (mount_path / Path("boot.efi")).exists():
|
||||||
print("- Removing preexisting boot.efi")
|
logging.info("- Removing preexisting boot.efi")
|
||||||
os.remove(mount_path / Path("boot.efi"))
|
os.remove(mount_path / Path("boot.efi"))
|
||||||
print("- Copying OpenCore onto EFI partition")
|
logging.info("- Copying OpenCore onto EFI partition")
|
||||||
shutil.copytree(self.constants.opencore_release_folder / Path("EFI/OC"), mount_path / Path("EFI/OC"))
|
shutil.copytree(self.constants.opencore_release_folder / Path("EFI/OC"), mount_path / Path("EFI/OC"))
|
||||||
shutil.copytree(self.constants.opencore_release_folder / Path("System"), mount_path / Path("System"))
|
shutil.copytree(self.constants.opencore_release_folder / Path("System"), mount_path / Path("System"))
|
||||||
if Path(self.constants.opencore_release_folder / Path("boot.efi")).exists():
|
if Path(self.constants.opencore_release_folder / Path("boot.efi")).exists():
|
||||||
shutil.copy(self.constants.opencore_release_folder / Path("boot.efi"), mount_path / Path("boot.efi"))
|
shutil.copy(self.constants.opencore_release_folder / Path("boot.efi"), mount_path / Path("boot.efi"))
|
||||||
if self.constants.boot_efi is True:
|
if self.constants.boot_efi is True:
|
||||||
print("- Converting Bootstrap to BOOTx64.efi")
|
logging.info("- Converting Bootstrap to BOOTx64.efi")
|
||||||
if (mount_path / Path("EFI/BOOT")).exists():
|
if (mount_path / Path("EFI/BOOT")).exists():
|
||||||
shutil.rmtree(mount_path / Path("EFI/BOOT"), onerror=rmtree_handler)
|
shutil.rmtree(mount_path / Path("EFI/BOOT"), onerror=rmtree_handler)
|
||||||
Path(mount_path / Path("EFI/BOOT")).mkdir()
|
Path(mount_path / Path("EFI/BOOT")).mkdir()
|
||||||
shutil.move(mount_path / Path("System/Library/CoreServices/boot.efi"), mount_path / Path("EFI/BOOT/BOOTx64.efi"))
|
shutil.move(mount_path / Path("System/Library/CoreServices/boot.efi"), mount_path / Path("EFI/BOOT/BOOTx64.efi"))
|
||||||
shutil.rmtree(mount_path / Path("System"), onerror=rmtree_handler)
|
shutil.rmtree(mount_path / Path("System"), onerror=rmtree_handler)
|
||||||
if determine_sd_card(sd_type) is True:
|
if determine_sd_card(sd_type) is True:
|
||||||
print("- Adding SD Card icon")
|
logging.info("- Adding SD Card icon")
|
||||||
shutil.copy(self.constants.icon_path_sd, mount_path)
|
shutil.copy(self.constants.icon_path_sd, mount_path)
|
||||||
elif ssd_type is True:
|
elif ssd_type is True:
|
||||||
print("- Adding SSD icon")
|
logging.info("- Adding SSD icon")
|
||||||
shutil.copy(self.constants.icon_path_ssd, mount_path)
|
shutil.copy(self.constants.icon_path_ssd, mount_path)
|
||||||
elif disk_type == "USB":
|
elif disk_type == "USB":
|
||||||
print("- Adding External USB Drive icon")
|
logging.info("- Adding External USB Drive icon")
|
||||||
shutil.copy(self.constants.icon_path_external, mount_path)
|
shutil.copy(self.constants.icon_path_external, mount_path)
|
||||||
else:
|
else:
|
||||||
print("- Adding Internal Drive icon")
|
logging.info("- Adding Internal Drive icon")
|
||||||
shutil.copy(self.constants.icon_path_internal, mount_path)
|
shutil.copy(self.constants.icon_path_internal, mount_path)
|
||||||
|
|
||||||
print("- Cleaning install location")
|
logging.info("- Cleaning install location")
|
||||||
if not self.constants.recovery_status:
|
if not self.constants.recovery_status:
|
||||||
print("- Unmounting EFI partition")
|
logging.info("- Unmounting EFI partition")
|
||||||
subprocess.run(["diskutil", "umount", mount_path], stdout=subprocess.PIPE).stdout.decode().strip().encode()
|
subprocess.run(["diskutil", "umount", mount_path], stdout=subprocess.PIPE).stdout.decode().strip().encode()
|
||||||
print("- OpenCore transfer complete")
|
logging.info("- OpenCore transfer complete")
|
||||||
if self.constants.gui_mode is False:
|
if self.constants.gui_mode is False:
|
||||||
print("\nPress [Enter] to continue.\n")
|
logging.info("\nPress [Enter] to continue.\n")
|
||||||
input()
|
input()
|
||||||
else:
|
else:
|
||||||
if self.constants.gui_mode is False:
|
logging.info("EFI failed to mount!")
|
||||||
tui_helpers.TUIOnlyPrint(["Copying OpenCore"], "Press [Enter] to go back.\n", ["EFI failed to mount!"]).start()
|
|
||||||
else:
|
|
||||||
print("EFI failed to mount!")
|
|
||||||
return False
|
return False
|
||||||
return True
|
return True
|
||||||
|
|
||||||
|
|||||||
@@ -1,483 +0,0 @@
|
|||||||
# Creates a macOS Installer
|
|
||||||
from pathlib import Path
|
|
||||||
import plistlib
|
|
||||||
import subprocess
|
|
||||||
import requests
|
|
||||||
import tempfile
|
|
||||||
from resources import utilities, tui_helpers
|
|
||||||
|
|
||||||
def list_local_macOS_installers():
|
|
||||||
# Finds all applicable macOS installers
|
|
||||||
# within a user's /Applications folder
|
|
||||||
# Returns a list of installers
|
|
||||||
application_list = {}
|
|
||||||
|
|
||||||
for application in Path("/Applications").iterdir():
|
|
||||||
# Verify whether application has createinstallmedia
|
|
||||||
try:
|
|
||||||
if (Path("/Applications") / Path(application) / Path("Contents/Resources/createinstallmedia")).exists():
|
|
||||||
plist = plistlib.load((Path("/Applications") / Path(application) / Path("Contents/Info.plist")).open("rb"))
|
|
||||||
try:
|
|
||||||
# Doesn't reflect true OS build, but best to report SDK in the event multiple installers are found with same version
|
|
||||||
app_version = plist["DTPlatformVersion"]
|
|
||||||
clean_name = plist["CFBundleDisplayName"]
|
|
||||||
try:
|
|
||||||
app_sdk = plist["DTSDKBuild"]
|
|
||||||
except KeyError:
|
|
||||||
app_sdk = "Unknown"
|
|
||||||
|
|
||||||
# app_version can sometimes report GM instead of the actual version
|
|
||||||
# This is a workaround to get the actual version
|
|
||||||
if app_version.startswith("GM"):
|
|
||||||
try:
|
|
||||||
app_version = int(app_sdk[:2])
|
|
||||||
if app_version < 20:
|
|
||||||
app_version = f"10.{app_version - 4}"
|
|
||||||
else:
|
|
||||||
app_version = f"{app_version - 9}.0"
|
|
||||||
except ValueError:
|
|
||||||
app_version = "Unknown"
|
|
||||||
# Check if App Version is High Sierra or newer
|
|
||||||
can_add = False
|
|
||||||
if app_version.startswith("10."):
|
|
||||||
app_sub_version = app_version.split(".")[1]
|
|
||||||
if int(app_sub_version) >= 13:
|
|
||||||
can_add = True
|
|
||||||
else:
|
|
||||||
can_add = False
|
|
||||||
else:
|
|
||||||
can_add = True
|
|
||||||
|
|
||||||
# Check SharedSupport.dmg's data
|
|
||||||
results = parse_sharedsupport_version(Path("/Applications") / Path(application)/ Path("Contents/SharedSupport/SharedSupport.dmg"))
|
|
||||||
if results[0] is not None:
|
|
||||||
app_sdk = results[0]
|
|
||||||
if results[1] is not None:
|
|
||||||
app_version = results[1]
|
|
||||||
|
|
||||||
if can_add is True:
|
|
||||||
application_list.update({
|
|
||||||
application: {
|
|
||||||
"Short Name": clean_name,
|
|
||||||
"Version": app_version,
|
|
||||||
"Build": app_sdk,
|
|
||||||
"Path": application,
|
|
||||||
}
|
|
||||||
})
|
|
||||||
except KeyError:
|
|
||||||
pass
|
|
||||||
except PermissionError:
|
|
||||||
pass
|
|
||||||
# Sort Applications by version
|
|
||||||
application_list = {k: v for k, v in sorted(application_list.items(), key=lambda item: item[1]["Version"])}
|
|
||||||
return application_list
|
|
||||||
|
|
||||||
def parse_sharedsupport_version(sharedsupport_path):
|
|
||||||
detected_build = None
|
|
||||||
detected_os = None
|
|
||||||
sharedsupport_path = Path(sharedsupport_path)
|
|
||||||
|
|
||||||
if not sharedsupport_path.exists():
|
|
||||||
return (detected_build, detected_os)
|
|
||||||
|
|
||||||
if not sharedsupport_path.name.endswith(".dmg"):
|
|
||||||
return (detected_build, detected_os)
|
|
||||||
|
|
||||||
|
|
||||||
# Create temporary directory to extract SharedSupport.dmg to
|
|
||||||
with tempfile.TemporaryDirectory() as tmpdir:
|
|
||||||
output = subprocess.run(
|
|
||||||
[
|
|
||||||
"hdiutil", "attach", "-noverify", sharedsupport_path,
|
|
||||||
"-mountpoint", tmpdir,
|
|
||||||
"-nobrowse",
|
|
||||||
],
|
|
||||||
stdout=subprocess.PIPE, stderr=subprocess.STDOUT
|
|
||||||
)
|
|
||||||
if output.returncode != 0:
|
|
||||||
return (detected_build, detected_os)
|
|
||||||
|
|
||||||
ss_info = Path("SFR/com_apple_MobileAsset_SFRSoftwareUpdate/com_apple_MobileAsset_SFRSoftwareUpdate.xml")
|
|
||||||
|
|
||||||
if Path(tmpdir / ss_info).exists():
|
|
||||||
plist = plistlib.load((tmpdir / ss_info).open("rb"))
|
|
||||||
if "Build" in plist["Assets"][0]:
|
|
||||||
detected_build = plist["Assets"][0]["Build"]
|
|
||||||
if "OSVersion" in plist["Assets"][0]:
|
|
||||||
detected_os = plist["Assets"][0]["OSVersion"]
|
|
||||||
|
|
||||||
# Unmount SharedSupport.dmg
|
|
||||||
output = subprocess.run(["hdiutil", "detach", tmpdir], stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
|
|
||||||
|
|
||||||
return (detected_build, detected_os)
|
|
||||||
|
|
||||||
|
|
||||||
def create_installer(installer_path, volume_name):
|
|
||||||
# Creates a macOS installer
|
|
||||||
# Takes a path to the installer and the Volume
|
|
||||||
# Returns boolean on success status
|
|
||||||
|
|
||||||
createinstallmedia_path = Path("/Applications") / Path(installer_path) / Path("Contents/Resources/createinstallmedia")
|
|
||||||
|
|
||||||
# Sanity check in the event the user somehow deleted it between the time we found it and now
|
|
||||||
if (createinstallmedia_path).exists():
|
|
||||||
utilities.cls()
|
|
||||||
utilities.header(["Starting createinstallmedia"])
|
|
||||||
print("This will take some time, recommend making some coffee while you wait\n")
|
|
||||||
utilities.elevated([createinstallmedia_path, "--volume", f"/Volumes/{volume_name}", "--nointeraction"])
|
|
||||||
return True
|
|
||||||
else:
|
|
||||||
print("- Failed to find createinstallmedia")
|
|
||||||
return False
|
|
||||||
|
|
||||||
def download_install_assistant(download_path, ia_link):
|
|
||||||
# Downloads InstallAssistant.pkg
|
|
||||||
if utilities.download_file(ia_link, (Path(download_path) / Path("InstallAssistant.pkg"))):
|
|
||||||
return True
|
|
||||||
return False
|
|
||||||
|
|
||||||
def install_macOS_installer(download_path):
|
|
||||||
print("- Extracting macOS installer from InstallAssistant.pkg\n This may take some time")
|
|
||||||
args = [
|
|
||||||
"osascript",
|
|
||||||
"-e",
|
|
||||||
f'''do shell script "installer -pkg {Path(download_path)}/InstallAssistant.pkg -target /"'''
|
|
||||||
' with prompt "OpenCore Legacy Patcher needs administrator privileges to add InstallAssistant."'
|
|
||||||
" with administrator privileges"
|
|
||||||
" without altering line endings",
|
|
||||||
]
|
|
||||||
|
|
||||||
result = subprocess.run(args,stdout=subprocess.PIPE, stderr=subprocess.PIPE)
|
|
||||||
if result.returncode == 0:
|
|
||||||
print("- InstallAssistant installed")
|
|
||||||
return True
|
|
||||||
else:
|
|
||||||
print("- Failed to install InstallAssistant")
|
|
||||||
print(f" Error Code: {result.returncode}")
|
|
||||||
return False
|
|
||||||
|
|
||||||
def list_downloadable_macOS_installers(download_path, catalog):
|
|
||||||
available_apps = {}
|
|
||||||
if catalog == "DeveloperSeed":
|
|
||||||
link = "https://swscan.apple.com/content/catalogs/others/index-13seed-13-12-10.16-10.15-10.14-10.13-10.12-10.11-10.10-10.9-mountainlion-lion-snowleopard-leopard.merged-1.sucatalog"
|
|
||||||
elif catalog == "PublicSeed":
|
|
||||||
link = "https://swscan.apple.com/content/catalogs/others/index-13beta-13-12-10.16-10.15-10.14-10.13-10.12-10.11-10.10-10.9-mountainlion-lion-snowleopard-leopard.merged-1.sucatalog"
|
|
||||||
else:
|
|
||||||
link = "https://swscan.apple.com/content/catalogs/others/index-13-12-10.16-10.15-10.14-10.13-10.12-10.11-10.10-10.9-mountainlion-lion-snowleopard-leopard.merged-1.sucatalog"
|
|
||||||
|
|
||||||
if utilities.verify_network_connection(link) is True:
|
|
||||||
try:
|
|
||||||
catalog_plist = plistlib.loads(utilities.SESSION.get(link).content)
|
|
||||||
except plistlib.InvalidFileException:
|
|
||||||
return available_apps
|
|
||||||
|
|
||||||
for item in catalog_plist["Products"]:
|
|
||||||
try:
|
|
||||||
# Check if entry has SharedSupport and BuildManifest
|
|
||||||
# Ensures only Big Sur and newer Installers are listed
|
|
||||||
catalog_plist["Products"][item]["ExtendedMetaInfo"]["InstallAssistantPackageIdentifiers"]["SharedSupport"]
|
|
||||||
catalog_plist["Products"][item]["ExtendedMetaInfo"]["InstallAssistantPackageIdentifiers"]["BuildManifest"]
|
|
||||||
|
|
||||||
for bm_package in catalog_plist["Products"][item]["Packages"]:
|
|
||||||
if "Info.plist" in bm_package["URL"] and "InstallInfo.plist" not in bm_package["URL"]:
|
|
||||||
try:
|
|
||||||
build_plist = plistlib.loads(utilities.SESSION.get(bm_package["URL"]).content)
|
|
||||||
except plistlib.InvalidFileException:
|
|
||||||
continue
|
|
||||||
# Ensure Apple Silicon specific Installers are not listed
|
|
||||||
if "VMM-x86_64" not in build_plist["MobileAssetProperties"]["SupportedDeviceModels"]:
|
|
||||||
continue
|
|
||||||
version = build_plist["MobileAssetProperties"]["OSVersion"]
|
|
||||||
build = build_plist["MobileAssetProperties"]["Build"]
|
|
||||||
try:
|
|
||||||
catalog_url = build_plist["MobileAssetProperties"]["BridgeVersionInfo"]["CatalogURL"]
|
|
||||||
if "beta" in catalog_url:
|
|
||||||
catalog_url = "PublicSeed"
|
|
||||||
elif "customerseed" in catalog_url:
|
|
||||||
catalog_url = "CustomerSeed"
|
|
||||||
elif "seed" in catalog_url:
|
|
||||||
catalog_url = "DeveloperSeed"
|
|
||||||
else:
|
|
||||||
catalog_url = "Public"
|
|
||||||
except KeyError:
|
|
||||||
# Assume Public if no catalog URL is found
|
|
||||||
catalog_url = "Public"
|
|
||||||
for ia_package in catalog_plist["Products"][item]["Packages"]:
|
|
||||||
if "InstallAssistant.pkg" in ia_package["URL"]:
|
|
||||||
download_link = ia_package["URL"]
|
|
||||||
size = ia_package["Size"]
|
|
||||||
integrity = ia_package["IntegrityDataURL"]
|
|
||||||
|
|
||||||
available_apps.update({
|
|
||||||
item: {
|
|
||||||
"Version": version,
|
|
||||||
"Build": build,
|
|
||||||
"Link": download_link,
|
|
||||||
"Size": size,
|
|
||||||
"integrity": integrity,
|
|
||||||
"Source": "Apple Inc.",
|
|
||||||
"Variant": catalog_url,
|
|
||||||
}
|
|
||||||
})
|
|
||||||
except KeyError:
|
|
||||||
pass
|
|
||||||
available_apps = {k: v for k, v in sorted(available_apps.items(), key=lambda x: x[1]['Version'])}
|
|
||||||
return available_apps
|
|
||||||
|
|
||||||
def only_list_newest_installers(available_apps):
|
|
||||||
# Takes a dictionary of available installers
|
|
||||||
# Returns a dictionary of only the newest installers
|
|
||||||
# This is used to avoid overwhelming the user with installer options
|
|
||||||
|
|
||||||
# Only strip OSes that we know are supported
|
|
||||||
supported_versions = ["10.13", "10.14", "10.15", "11", "12", "13"]
|
|
||||||
|
|
||||||
for version in supported_versions:
|
|
||||||
remote_version_minor = 0
|
|
||||||
remote_version_security = 0
|
|
||||||
os_builds = []
|
|
||||||
|
|
||||||
# First determine the largest version
|
|
||||||
for ia in available_apps:
|
|
||||||
if available_apps[ia]["Version"].startswith(version):
|
|
||||||
if available_apps[ia]["Variant"] not in ["CustomerSeed", "DeveloperSeed", "PublicSeed"]:
|
|
||||||
remote_version = available_apps[ia]["Version"].split(".")
|
|
||||||
if remote_version[0] == "10":
|
|
||||||
remote_version.pop(0)
|
|
||||||
remote_version.pop(0)
|
|
||||||
else:
|
|
||||||
remote_version.pop(0)
|
|
||||||
if int(remote_version[0]) > remote_version_minor:
|
|
||||||
remote_version_minor = int(remote_version[0])
|
|
||||||
remote_version_security = 0 # Reset as new minor version found
|
|
||||||
if len(remote_version) > 1:
|
|
||||||
if int(remote_version[1]) > remote_version_security:
|
|
||||||
remote_version_security = int(remote_version[1])
|
|
||||||
|
|
||||||
# Now remove all versions that are not the largest
|
|
||||||
for ia in list(available_apps):
|
|
||||||
# Don't use Beta builds to determine latest version
|
|
||||||
if available_apps[ia]["Variant"] in ["CustomerSeed", "DeveloperSeed", "PublicSeed"]:
|
|
||||||
continue
|
|
||||||
|
|
||||||
if available_apps[ia]["Version"].startswith(version):
|
|
||||||
remote_version = available_apps[ia]["Version"].split(".")
|
|
||||||
if remote_version[0] == "10":
|
|
||||||
remote_version.pop(0)
|
|
||||||
remote_version.pop(0)
|
|
||||||
else:
|
|
||||||
remote_version.pop(0)
|
|
||||||
if int(remote_version[0]) < remote_version_minor:
|
|
||||||
available_apps.pop(ia)
|
|
||||||
continue
|
|
||||||
if int(remote_version[0]) == remote_version_minor:
|
|
||||||
if len(remote_version) > 1:
|
|
||||||
if int(remote_version[1]) < remote_version_security:
|
|
||||||
available_apps.pop(ia)
|
|
||||||
continue
|
|
||||||
else:
|
|
||||||
if remote_version_security > 0:
|
|
||||||
available_apps.pop(ia)
|
|
||||||
continue
|
|
||||||
|
|
||||||
# Remove duplicate builds
|
|
||||||
# ex. macOS 12.5.1 has 2 builds in the Software Update Catalog
|
|
||||||
# ref: https://twitter.com/classicii_mrmac/status/1560357471654379522
|
|
||||||
if available_apps[ia]["Build"] in os_builds:
|
|
||||||
available_apps.pop(ia)
|
|
||||||
continue
|
|
||||||
|
|
||||||
os_builds.append(available_apps[ia]["Build"])
|
|
||||||
|
|
||||||
# Final passthrough
|
|
||||||
# Remove Betas if there's a non-beta version available
|
|
||||||
for ia in list(available_apps):
|
|
||||||
if available_apps[ia]["Variant"] in ["CustomerSeed", "DeveloperSeed", "PublicSeed"]:
|
|
||||||
for ia2 in available_apps:
|
|
||||||
if available_apps[ia2]["Version"].split(".")[0] == available_apps[ia]["Version"].split(".")[0] and available_apps[ia2]["Variant"] not in ["CustomerSeed", "DeveloperSeed", "PublicSeed"]:
|
|
||||||
available_apps.pop(ia)
|
|
||||||
break
|
|
||||||
|
|
||||||
return available_apps
|
|
||||||
|
|
||||||
def format_drive(disk_id):
|
|
||||||
# Formats a disk for macOS install
|
|
||||||
# Takes a disk ID
|
|
||||||
# Returns boolean on success status
|
|
||||||
header = f"# Formatting disk{disk_id} for macOS installer #"
|
|
||||||
box_length = len(header)
|
|
||||||
utilities.cls()
|
|
||||||
print("#" * box_length)
|
|
||||||
print(header)
|
|
||||||
print("#" * box_length)
|
|
||||||
print("")
|
|
||||||
#print(f"- Formatting disk{disk_id} for macOS installer")
|
|
||||||
format_process = utilities.elevated(["diskutil", "eraseDisk", "HFS+", "OCLP-Installer", f"disk{disk_id}"])
|
|
||||||
if format_process.returncode == 0:
|
|
||||||
print("- Disk formatted")
|
|
||||||
return True
|
|
||||||
else:
|
|
||||||
print("- Failed to format disk")
|
|
||||||
print(f" Error Code: {format_process.returncode}")
|
|
||||||
input("\nPress Enter to exit")
|
|
||||||
return False
|
|
||||||
|
|
||||||
def select_disk_to_format():
|
|
||||||
utilities.cls()
|
|
||||||
utilities.header(["Installing OpenCore to Drive"])
|
|
||||||
|
|
||||||
print("\nDisk picker is loading...")
|
|
||||||
|
|
||||||
all_disks = {}
|
|
||||||
# TODO: AllDisksAndPartitions is not supported in Snow Leopard and older
|
|
||||||
try:
|
|
||||||
# High Sierra and newer
|
|
||||||
disks = plistlib.loads(subprocess.run("diskutil list -plist physical".split(), stdout=subprocess.PIPE).stdout.decode().strip().encode())
|
|
||||||
except ValueError:
|
|
||||||
# Sierra and older
|
|
||||||
disks = plistlib.loads(subprocess.run("diskutil list -plist".split(), stdout=subprocess.PIPE).stdout.decode().strip().encode())
|
|
||||||
for disk in disks["AllDisksAndPartitions"]:
|
|
||||||
disk_info = plistlib.loads(subprocess.run(f"diskutil info -plist {disk['DeviceIdentifier']}".split(), stdout=subprocess.PIPE).stdout.decode().strip().encode())
|
|
||||||
try:
|
|
||||||
all_disks[disk["DeviceIdentifier"]] = {"identifier": disk_info["DeviceNode"], "name": disk_info["MediaName"], "size": disk_info["TotalSize"], "removable": disk_info["Internal"], "partitions": {}}
|
|
||||||
except KeyError:
|
|
||||||
# Avoid crashing with CDs installed
|
|
||||||
continue
|
|
||||||
menu = tui_helpers.TUIMenu(
|
|
||||||
["Select Disk to write the macOS Installer onto"],
|
|
||||||
"Please select the disk you would like to install OpenCore to: ",
|
|
||||||
in_between=["Missing drives? Verify they are 14GB+ and external (ie. USB)", "", "Ensure all data is backed up on selected drive, entire drive will be erased!"],
|
|
||||||
return_number_instead_of_direct_call=True,
|
|
||||||
loop=True,
|
|
||||||
)
|
|
||||||
for disk in all_disks:
|
|
||||||
# Strip disks that are under 14GB (15,032,385,536 bytes)
|
|
||||||
# createinstallmedia isn't great at detecting if a disk has enough space
|
|
||||||
if not any(all_disks[disk]['size'] > 15032385536 for partition in all_disks[disk]):
|
|
||||||
continue
|
|
||||||
# Strip internal disks as well (avoid user formatting their SSD/HDD)
|
|
||||||
# Ensure user doesn't format their boot drive
|
|
||||||
if not any(all_disks[disk]['removable'] is False for partition in all_disks[disk]):
|
|
||||||
continue
|
|
||||||
menu.add_menu_option(f"{disk}: {all_disks[disk]['name']} ({utilities.human_fmt(all_disks[disk]['size'])})", key=disk[4:])
|
|
||||||
|
|
||||||
response = menu.start()
|
|
||||||
|
|
||||||
if response == -1:
|
|
||||||
return None
|
|
||||||
|
|
||||||
return response
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
def list_disk_to_format():
|
|
||||||
all_disks = {}
|
|
||||||
list_disks = {}
|
|
||||||
# TODO: AllDisksAndPartitions is not supported in Snow Leopard and older
|
|
||||||
try:
|
|
||||||
# High Sierra and newer
|
|
||||||
disks = plistlib.loads(subprocess.run("diskutil list -plist physical".split(), stdout=subprocess.PIPE).stdout.decode().strip().encode())
|
|
||||||
except ValueError:
|
|
||||||
# Sierra and older
|
|
||||||
disks = plistlib.loads(subprocess.run("diskutil list -plist".split(), stdout=subprocess.PIPE).stdout.decode().strip().encode())
|
|
||||||
for disk in disks["AllDisksAndPartitions"]:
|
|
||||||
disk_info = plistlib.loads(subprocess.run(f"diskutil info -plist {disk['DeviceIdentifier']}".split(), stdout=subprocess.PIPE).stdout.decode().strip().encode())
|
|
||||||
try:
|
|
||||||
all_disks[disk["DeviceIdentifier"]] = {"identifier": disk_info["DeviceNode"], "name": disk_info["MediaName"], "size": disk_info["TotalSize"], "removable": disk_info["Internal"], "partitions": {}}
|
|
||||||
except KeyError:
|
|
||||||
# Avoid crashing with CDs installed
|
|
||||||
continue
|
|
||||||
for disk in all_disks:
|
|
||||||
# Strip disks that are under 14GB (15,032,385,536 bytes)
|
|
||||||
# createinstallmedia isn't great at detecting if a disk has enough space
|
|
||||||
if not any(all_disks[disk]['size'] > 15032385536 for partition in all_disks[disk]):
|
|
||||||
continue
|
|
||||||
# Strip internal disks as well (avoid user formatting their SSD/HDD)
|
|
||||||
# Ensure user doesn't format their boot drive
|
|
||||||
if not any(all_disks[disk]['removable'] is False for partition in all_disks[disk]):
|
|
||||||
continue
|
|
||||||
print(f"disk {disk}: {all_disks[disk]['name']} ({utilities.human_fmt(all_disks[disk]['size'])})")
|
|
||||||
list_disks.update({
|
|
||||||
disk: {
|
|
||||||
"identifier": all_disks[disk]["identifier"],
|
|
||||||
"name": all_disks[disk]["name"],
|
|
||||||
"size": all_disks[disk]["size"],
|
|
||||||
}
|
|
||||||
})
|
|
||||||
return list_disks
|
|
||||||
|
|
||||||
# Create global tmp directory
|
|
||||||
tmp_dir = tempfile.TemporaryDirectory()
|
|
||||||
|
|
||||||
def generate_installer_creation_script(tmp_location, installer_path, disk):
|
|
||||||
# Creates installer.sh to be piped to OCLP-Helper and run as admin
|
|
||||||
# Goals:
|
|
||||||
# - Format provided disk as HFS+ GPT
|
|
||||||
# - Run createinstallmedia on provided disk
|
|
||||||
# Implementing this into a single installer.sh script allows us to only call
|
|
||||||
# OCLP-Helper once to avoid nagging the user about permissions
|
|
||||||
|
|
||||||
additional_args = ""
|
|
||||||
script_location = Path(tmp_location) / Path("Installer.sh")
|
|
||||||
|
|
||||||
# Due to a bug in createinstallmedia, running from '/Applications' may sometimes error:
|
|
||||||
# 'Failed to extract AssetData/boot/Firmware/Manifests/InstallerBoot/*'
|
|
||||||
# This affects native Macs as well even when manually invoking createinstallmedia
|
|
||||||
|
|
||||||
# To resolve, we'll copy into our temp directory and run from there
|
|
||||||
|
|
||||||
# Create a new tmp directory
|
|
||||||
# Our current one is a disk image, thus CoW will not work
|
|
||||||
global tmp_dir
|
|
||||||
ia_tmp = tmp_dir.name
|
|
||||||
|
|
||||||
print(f"Creating temporary directory at {ia_tmp}")
|
|
||||||
# Delete all files in tmp_dir
|
|
||||||
for file in Path(ia_tmp).glob("*"):
|
|
||||||
subprocess.run(["rm", "-rf", str(file)])
|
|
||||||
|
|
||||||
# Copy installer to tmp (use CoW to avoid extra disk writes)
|
|
||||||
args = ["cp", "-cR", installer_path, ia_tmp]
|
|
||||||
if utilities.check_filesystem_type() != "apfs":
|
|
||||||
# HFS+ disks do not support CoW
|
|
||||||
args[1] = "-R"
|
|
||||||
# Ensure we have enough space for the duplication
|
|
||||||
space_available = utilities.get_free_space()
|
|
||||||
space_needed = Path(ia_tmp).stat().st_size
|
|
||||||
if space_available < space_needed:
|
|
||||||
print("Not enough free space to create installer.sh")
|
|
||||||
print(f"{utilities.human_fmt(space_available)} available, {utilities.human_fmt(space_needed)} required")
|
|
||||||
return False
|
|
||||||
subprocess.run(args)
|
|
||||||
|
|
||||||
# Adjust installer_path to point to the copied installer
|
|
||||||
installer_path = Path(ia_tmp) / Path(Path(installer_path).name)
|
|
||||||
if not Path(installer_path).exists():
|
|
||||||
print(f"Failed to copy installer to {ia_tmp}")
|
|
||||||
return False
|
|
||||||
|
|
||||||
createinstallmedia_path = str(Path(installer_path) / Path("Contents/Resources/createinstallmedia"))
|
|
||||||
plist_path = str(Path(installer_path) / Path("Contents/Info.plist"))
|
|
||||||
if Path(plist_path).exists():
|
|
||||||
plist = plistlib.load(Path(plist_path).open("rb"))
|
|
||||||
if "DTPlatformVersion" in plist:
|
|
||||||
platform_version = plist["DTPlatformVersion"]
|
|
||||||
platform_version = platform_version.split(".")[0]
|
|
||||||
if platform_version[0] == "10":
|
|
||||||
if int(platform_version[1]) < 13:
|
|
||||||
additional_args = f" --applicationpath '{installer_path}'"
|
|
||||||
|
|
||||||
if script_location.exists():
|
|
||||||
script_location.unlink()
|
|
||||||
script_location.touch()
|
|
||||||
|
|
||||||
with script_location.open("w") as script:
|
|
||||||
script.write(f'''#!/bin/bash
|
|
||||||
erase_disk='diskutil eraseDisk HFS+ OCLP-Installer {disk}'
|
|
||||||
if $erase_disk; then
|
|
||||||
"{createinstallmedia_path}" --volume /Volumes/OCLP-Installer --nointeraction{additional_args}
|
|
||||||
fi
|
|
||||||
''')
|
|
||||||
if Path(script_location).exists():
|
|
||||||
return True
|
|
||||||
return False
|
|
||||||
@@ -5,6 +5,7 @@
|
|||||||
|
|
||||||
import binascii
|
import binascii
|
||||||
import hashlib
|
import hashlib
|
||||||
|
import logging
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
|
|
||||||
CHUNK_LENGTH = 4 + 32
|
CHUNK_LENGTH = 4 + 32
|
||||||
@@ -42,10 +43,10 @@ def chunk(file_path, chunklist, verbose):
|
|||||||
for chunk in chunks:
|
for chunk in chunks:
|
||||||
status = hashlib.sha256(f.read(chunk["length"])).digest()
|
status = hashlib.sha256(f.read(chunk["length"])).digest()
|
||||||
if not status == chunk["checksum"]:
|
if not status == chunk["checksum"]:
|
||||||
print(
|
logging.info(
|
||||||
f"Chunk {chunks.index(chunk) + 1} checksum status FAIL: chunk sum {binascii.hexlify(chunk['checksum']).decode()}, calculated sum {binascii.hexlify(status).decode()}")
|
f"Chunk {chunks.index(chunk) + 1} checksum status FAIL: chunk sum {binascii.hexlify(chunk['checksum']).decode()}, calculated sum {binascii.hexlify(status).decode()}")
|
||||||
return False
|
return False
|
||||||
elif verbose:
|
elif verbose:
|
||||||
print(
|
logging.info(
|
||||||
f"Chunk {chunks.index(chunk) + 1} checksum status success")
|
f"Chunk {chunks.index(chunk) + 1} checksum status success")
|
||||||
return True
|
return True
|
||||||
@@ -1,292 +1,665 @@
|
|||||||
# Kernel Debug Kit downloader
|
# Module for parsing and determining best Kernel Debug Kit for host OS
|
||||||
|
# Copyright (C) 2022-2023, Dhinak G, Mykola Grymalyuk
|
||||||
|
|
||||||
import datetime
|
import datetime
|
||||||
import re
|
|
||||||
import urllib.parse
|
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
from typing import cast
|
from typing import cast
|
||||||
|
import tempfile
|
||||||
|
import plistlib
|
||||||
|
|
||||||
import packaging.version
|
import packaging.version
|
||||||
import requests
|
import requests
|
||||||
|
|
||||||
import subprocess
|
import subprocess
|
||||||
|
import os
|
||||||
|
|
||||||
from resources import utilities
|
import logging
|
||||||
from resources.constants import Constants
|
|
||||||
|
from resources import utilities, network_handler, constants
|
||||||
|
from data import os_data
|
||||||
|
|
||||||
|
KDK_INSTALL_PATH: str = "/Library/Developer/KDKs"
|
||||||
|
KDK_INFO_PLIST: str = "KDKInfo.plist"
|
||||||
|
KDK_API_LINK: str = "https://raw.githubusercontent.com/dortania/KdkSupportPkg/gh-pages/manifest.json"
|
||||||
|
|
||||||
|
KDK_ASSET_LIST: list = None
|
||||||
|
|
||||||
|
|
||||||
class kernel_debug_kit_handler:
|
class KernelDebugKitObject:
|
||||||
def __init__(self, constants: Constants):
|
"""
|
||||||
self.constants = constants
|
Library for querying and downloading Kernel Debug Kits (KDK) for macOS
|
||||||
|
|
||||||
def get_available_kdks(self):
|
Usage:
|
||||||
KDK_API_LINK = "https://kdk-api.dhinak.net/v1"
|
>>> kdk_object = KernelDebugKitObject(constants, host_build, host_version)
|
||||||
|
|
||||||
print("- Fetching available KDKs")
|
>>> if kdk_object.success:
|
||||||
|
|
||||||
|
>>> # Query whether a KDK is already installed
|
||||||
|
>>> if kdk_object.kdk_already_installed:
|
||||||
|
>>> # Use the installed KDK
|
||||||
|
>>> kdk_path = kdk_object.kdk_installed_path
|
||||||
|
|
||||||
|
>>> else:
|
||||||
|
>>> # Get DownloadObject for the KDK
|
||||||
|
>>> # See network_handler.py's DownloadObject documentation for usage
|
||||||
|
>>> kdk_download_object = kdk_object.retrieve_download()
|
||||||
|
|
||||||
|
>>> # Once downloaded, recommend verifying KDK's checksum
|
||||||
|
>>> valid = kdk_object.validate_kdk_checksum()
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self, global_constants: constants.Constants, host_build: str, host_version: str, ignore_installed: bool = False, passive: bool = False) -> None:
|
||||||
|
self.constants: constants.Constants = global_constants
|
||||||
|
|
||||||
|
self.host_build: str = host_build # ex. 20A5384c
|
||||||
|
self.host_version: str = host_version # ex. 11.0.1
|
||||||
|
|
||||||
|
self.passive: bool = passive # Don't perform actions requiring elevated privileges
|
||||||
|
|
||||||
|
self.ignore_installed: bool = ignore_installed # If True, will ignore any installed KDKs and download the latest
|
||||||
|
self.kdk_already_installed: bool = False
|
||||||
|
|
||||||
|
self.kdk_installed_path: str = ""
|
||||||
|
|
||||||
|
self.kdk_url: str = ""
|
||||||
|
self.kdk_url_build: str = ""
|
||||||
|
self.kdk_url_version: str = ""
|
||||||
|
|
||||||
|
self.kdk_url_expected_size: int = 0
|
||||||
|
|
||||||
|
self.kdk_url_is_exactly_match: bool = False
|
||||||
|
|
||||||
|
self.kdk_closest_match_url: str = ""
|
||||||
|
self.kdk_closest_match_url_build: str = ""
|
||||||
|
self.kdk_closest_match_url_version: str = ""
|
||||||
|
|
||||||
|
self.kdk_closest_match_url_expected_size: int = 0
|
||||||
|
|
||||||
|
self.success: bool = False
|
||||||
|
|
||||||
|
self.error_msg: str = ""
|
||||||
|
|
||||||
|
self._get_latest_kdk()
|
||||||
|
|
||||||
|
|
||||||
|
def _get_remote_kdks(self) -> list or None:
|
||||||
|
"""
|
||||||
|
Fetches a list of available KDKs from the KdkSupportPkg API
|
||||||
|
Additionally caches the list for future use, avoiding extra API calls
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
list: A list of KDKs, sorted by version and date if available. Returns None if the API is unreachable
|
||||||
|
"""
|
||||||
|
|
||||||
|
global KDK_ASSET_LIST
|
||||||
|
|
||||||
|
logging.info("- Pulling KDK list from KdkSupportPkg API")
|
||||||
|
if KDK_ASSET_LIST:
|
||||||
|
return KDK_ASSET_LIST
|
||||||
|
|
||||||
try:
|
try:
|
||||||
results = utilities.SESSION.get(KDK_API_LINK, headers={"User-Agent": f"OCLP/{self.constants.patcher_version}"}, timeout=10)
|
results = network_handler.SESSION.get(
|
||||||
|
KDK_API_LINK,
|
||||||
|
headers={
|
||||||
|
"User-Agent": f"OCLP/{self.constants.patcher_version}"
|
||||||
|
},
|
||||||
|
timeout=5
|
||||||
|
)
|
||||||
except (requests.exceptions.Timeout, requests.exceptions.TooManyRedirects, requests.exceptions.ConnectionError):
|
except (requests.exceptions.Timeout, requests.exceptions.TooManyRedirects, requests.exceptions.ConnectionError):
|
||||||
print("- Could not contact KDK API")
|
logging.info("- Could not contact KDK API")
|
||||||
return None
|
return None
|
||||||
|
|
||||||
if results.status_code != 200:
|
if results.status_code != 200:
|
||||||
print("- Could not fetch KDK list")
|
logging.info("- Could not fetch KDK list")
|
||||||
return None
|
return None
|
||||||
|
|
||||||
return sorted(results.json(), key=lambda x: (packaging.version.parse(x["version"]), datetime.datetime.fromisoformat(x["date"])), reverse=True)
|
KDK_ASSET_LIST = sorted(results.json(), key=lambda x: (packaging.version.parse(x["version"]), datetime.datetime.fromisoformat(x["date"])), reverse=True)
|
||||||
|
|
||||||
def get_closest_match_legacy(self, host_version: str, host_build: str):
|
return KDK_ASSET_LIST
|
||||||
# Get the closest match to the provided version
|
|
||||||
# KDKs are generally a few days late, so we'll rely on N-1 matching
|
|
||||||
|
|
||||||
# Note: AppleDB is manually updated, so this is not a perfect solution
|
|
||||||
|
|
||||||
OS_DATABASE_LINK = "https://api.appledb.dev/main.json"
|
def _get_latest_kdk(self, host_build: str = None, host_version: str = None) -> None:
|
||||||
VERSION_PATTERN = re.compile(r"\d+\.\d+(\.\d+)?")
|
"""
|
||||||
|
Fetches the latest KDK for the current macOS version
|
||||||
|
|
||||||
parsed_host_version = cast(packaging.version.Version, packaging.version.parse(host_version))
|
Parameters:
|
||||||
|
host_build (str, optional): The build version of the current macOS version.
|
||||||
|
If empty, will use the host_build from the class. Defaults to None.
|
||||||
|
host_version (str, optional): The version of the current macOS version.
|
||||||
|
If empty, will use the host_version from the class. Defaults to None.
|
||||||
|
"""
|
||||||
|
|
||||||
print(f"- Checking closest match for: {host_version} build {host_build}")
|
if host_build is None and host_version is None:
|
||||||
|
host_build = self.host_build
|
||||||
|
host_version = self.host_version
|
||||||
|
|
||||||
try:
|
parsed_version = cast(packaging.version.Version, packaging.version.parse(host_version))
|
||||||
results = utilities.SESSION.get(OS_DATABASE_LINK)
|
|
||||||
except (requests.exceptions.Timeout, requests.exceptions.TooManyRedirects, requests.exceptions.ConnectionError):
|
|
||||||
print("- Could not contact AppleDB")
|
|
||||||
return None, "", ""
|
|
||||||
|
|
||||||
if results.status_code != 200:
|
if os_data.os_conversion.os_to_kernel(str(parsed_version.major)) < os_data.os_data.ventura:
|
||||||
print("- Could not fetch database")
|
self.error_msg = "KDKs are not required for macOS Monterey or older"
|
||||||
return None, "", ""
|
logging.warning(f"- {self.error_msg}")
|
||||||
|
return
|
||||||
|
|
||||||
macos_builds = [i for i in results.json()["ios"] if i["osType"] == "macOS"]
|
self.kdk_installed_path = self._local_kdk_installed()
|
||||||
# If the version is borked, put it at the bottom of the list
|
if self.kdk_installed_path:
|
||||||
# Would omit it, but can't do that in this lambda
|
logging.info(f"- KDK already installed ({Path(self.kdk_installed_path).name}), skipping")
|
||||||
macos_builds.sort(key=lambda x: (packaging.version.parse(VERSION_PATTERN.match(x["version"]).group() if VERSION_PATTERN.match(x["version"]) else "0.0.0"), datetime.datetime.fromisoformat(x["released"] if x["released"] != "" else "1984-01-01")), reverse=True) # type: ignore
|
self.kdk_already_installed = True
|
||||||
|
self.success = True
|
||||||
|
return
|
||||||
|
|
||||||
# Iterate through, find build that is closest to the host version
|
remote_kdk_version = self._get_remote_kdks()
|
||||||
# Use date to determine which is closest
|
|
||||||
for build_info in macos_builds:
|
|
||||||
if build_info["osType"] == "macOS":
|
|
||||||
raw_version = VERSION_PATTERN.match(build_info["version"])
|
|
||||||
if not raw_version:
|
|
||||||
# Skip if version is borked
|
|
||||||
continue
|
|
||||||
version = cast(packaging.version.Version, packaging.version.parse(raw_version.group()))
|
|
||||||
build = build_info["build"]
|
|
||||||
if build == host_build:
|
|
||||||
# Skip, as we want the next closest match
|
|
||||||
continue
|
|
||||||
elif version <= parsed_host_version and version.major == parsed_host_version.major and version.minor == parsed_host_version.minor:
|
|
||||||
# The KDK list is already sorted by date then version, so the first match is the closest
|
|
||||||
print(f"- Closest match: {version} build {build}")
|
|
||||||
return self.generate_kdk_link(str(version), build), str(version), build
|
|
||||||
|
|
||||||
print("- Could not find a match")
|
if remote_kdk_version is None:
|
||||||
return None, "", ""
|
logging.warning("- Failed to fetch KDK list, falling back to local KDK matching")
|
||||||
|
|
||||||
def generate_kdk_link(self, version: str, build: str):
|
# First check if a KDK matching the current macOS version is installed
|
||||||
return f"https://download.developer.apple.com/macOS/Kernel_Debug_Kit_{version}_build_{build}/Kernel_Debug_Kit_{version}_build_{build}.dmg"
|
# ex. 13.0.1 vs 13.0
|
||||||
|
loose_version = f"{parsed_version.major}.{parsed_version.minor}"
|
||||||
|
logging.info(f"- Checking for KDKs loosely matching {loose_version}")
|
||||||
|
self.kdk_installed_path = self._local_kdk_installed(match=loose_version, check_version=True)
|
||||||
|
if self.kdk_installed_path:
|
||||||
|
logging.info(f"- Found matching KDK: {Path(self.kdk_installed_path).name}")
|
||||||
|
self.kdk_already_installed = True
|
||||||
|
self.success = True
|
||||||
|
return
|
||||||
|
|
||||||
def verify_apple_developer_portal(self, link):
|
older_version = f"{parsed_version.major}.{parsed_version.minor - 1 if parsed_version.minor > 0 else 0}"
|
||||||
# Determine whether Apple Developer Portal is up
|
logging.info(f"- Checking for KDKs matching {older_version}")
|
||||||
# and if the requested file is available
|
self.kdk_installed_path = self._local_kdk_installed(match=older_version, check_version=True)
|
||||||
|
if self.kdk_installed_path:
|
||||||
|
logging.info(f"- Found matching KDK: {Path(self.kdk_installed_path).name}")
|
||||||
|
self.kdk_already_installed = True
|
||||||
|
self.success = True
|
||||||
|
return
|
||||||
|
|
||||||
# Returns following:
|
logging.warning(f"- Couldn't find KDK matching {host_version} or {older_version}, please install one manually")
|
||||||
# 0: Portal is up and file is available
|
|
||||||
# 1: Portal is up but file is not available
|
|
||||||
# 2: Portal is down
|
|
||||||
# 3: Network error
|
|
||||||
|
|
||||||
if utilities.verify_network_connection("https://developerservices2.apple.com/services/download") is False:
|
self.error_msg = f"Could not contact KdkSupportPkg API, and no KDK matching {host_version} ({host_build}) or {older_version} was installed.\nPlease ensure you have a network connection or manually install a KDK."
|
||||||
print("- Could not connect to the network")
|
|
||||||
return 3
|
|
||||||
|
|
||||||
TOKEN_URL_BASE = "https://developerservices2.apple.com/services/download"
|
return
|
||||||
remote_path = urllib.parse.urlparse(link).path
|
|
||||||
token_url = urllib.parse.urlunparse(urllib.parse.urlparse(TOKEN_URL_BASE)._replace(query=urllib.parse.urlencode({"path": remote_path})))
|
|
||||||
|
|
||||||
try:
|
for kdk in remote_kdk_version:
|
||||||
response = utilities.SESSION.get(token_url, timeout=5)
|
kdk_version = cast(packaging.version.Version, packaging.version.parse(kdk["version"]))
|
||||||
except (requests.exceptions.Timeout, requests.exceptions.TooManyRedirects, requests.exceptions.ConnectionError):
|
if (kdk["build"] == host_build):
|
||||||
print("- Could not contact Apple download servers")
|
self.kdk_url = kdk["url"]
|
||||||
return 2
|
self.kdk_url_build = kdk["build"]
|
||||||
|
self.kdk_url_version = kdk["version"]
|
||||||
|
self.kdk_url_expected_size = kdk["fileSize"]
|
||||||
|
self.kdk_url_is_exactly_match = True
|
||||||
|
break
|
||||||
|
if kdk_version <= parsed_version and kdk_version.major == parsed_version.major and (kdk_version.minor in range(parsed_version.minor - 1, parsed_version.minor + 1)):
|
||||||
|
# The KDK list is already sorted by version then date, so the first match is the closest
|
||||||
|
self.kdk_closest_match_url = kdk["url"]
|
||||||
|
self.kdk_closest_match_url_build = kdk["build"]
|
||||||
|
self.kdk_closest_match_url_version = kdk["version"]
|
||||||
|
self.kdk_closest_match_url_expected_size = kdk["fileSize"]
|
||||||
|
self.kdk_url_is_exactly_match = False
|
||||||
|
break
|
||||||
|
|
||||||
try:
|
if self.kdk_url == "":
|
||||||
response.raise_for_status()
|
if self.kdk_closest_match_url == "":
|
||||||
except requests.exceptions.HTTPError:
|
logging.warning(f"- No KDKs found for {host_build} ({host_version})")
|
||||||
if response.status_code == 400 and "The path specified is invalid" in response.text:
|
self.error_msg = f"No KDKs found for {host_build} ({host_version})"
|
||||||
print("- File does not exist on Apple download servers")
|
return
|
||||||
return 1
|
logging.info(f"- No direct match found for {host_build}, falling back to closest match")
|
||||||
else:
|
logging.info(f"- Closest Match: {self.kdk_closest_match_url_build} ({self.kdk_closest_match_url_version})")
|
||||||
print("- Could not request download authorization from Apple download servers")
|
|
||||||
return 2
|
|
||||||
return 0
|
|
||||||
|
|
||||||
def download_kdk(self, version: str, build: str):
|
self.kdk_url = self.kdk_closest_match_url
|
||||||
detected_build = build
|
self.kdk_url_build = self.kdk_closest_match_url_build
|
||||||
|
self.kdk_url_version = self.kdk_closest_match_url_version
|
||||||
if self.is_kdk_installed(detected_build) is True:
|
self.kdk_url_expected_size = self.kdk_closest_match_url_expected_size
|
||||||
print("- KDK is already installed")
|
|
||||||
self.remove_unused_kdks(exclude_builds=[detected_build])
|
|
||||||
return True, "", detected_build
|
|
||||||
|
|
||||||
download_link = None
|
|
||||||
closest_match_download_link = None
|
|
||||||
closest_version = ""
|
|
||||||
closest_build = ""
|
|
||||||
|
|
||||||
kdk_list = self.get_available_kdks()
|
|
||||||
|
|
||||||
parsed_version = cast(packaging.version.Version, packaging.version.parse(version))
|
|
||||||
|
|
||||||
if kdk_list:
|
|
||||||
for kdk in kdk_list:
|
|
||||||
kdk_version = cast(packaging.version.Version, packaging.version.parse(kdk["version"]))
|
|
||||||
if kdk["build"] == build:
|
|
||||||
download_link = kdk["url"]
|
|
||||||
elif not closest_match_download_link and kdk_version <= parsed_version and kdk_version.major == parsed_version.major and (kdk_version.minor == parsed_version.minor or kdk_version.minor == parsed_version.minor - 1):
|
|
||||||
# The KDK list is already sorted by date then version, so the first match is the closest
|
|
||||||
closest_match_download_link = kdk["url"]
|
|
||||||
closest_version = kdk["version"]
|
|
||||||
closest_build = kdk["build"]
|
|
||||||
else:
|
else:
|
||||||
print("- Could not fetch KDK list, falling back to brute force")
|
logging.info(f"- Direct match found for {host_build} ({host_version})")
|
||||||
download_link = self.generate_kdk_link(version, build)
|
|
||||||
closest_match_download_link, closest_version, closest_build = self.get_closest_match_legacy(version, build)
|
|
||||||
|
|
||||||
print(f"- Checking for KDK matching macOS {version} build {build}")
|
|
||||||
# download_link is None if no matching KDK is found, so we'll fall back to the closest match
|
|
||||||
result = self.verify_apple_developer_portal(download_link) if download_link else 1
|
|
||||||
if result == 0:
|
|
||||||
print("- Downloading KDK")
|
|
||||||
elif result == 1:
|
|
||||||
print("- Could not find KDK, finding closest match")
|
|
||||||
|
|
||||||
if self.is_kdk_installed(closest_build) is True:
|
# Check if this KDK is already installed
|
||||||
print(f"- Closet Build ({closest_build}) already installed")
|
self.kdk_installed_path = self._local_kdk_installed(match=self.kdk_url_build)
|
||||||
self.remove_unused_kdks(exclude_builds=[detected_build, closest_build])
|
if self.kdk_installed_path:
|
||||||
return True, "", closest_build
|
logging.info(f"- KDK already installed ({Path(self.kdk_installed_path).name}), skipping")
|
||||||
|
self.kdk_already_installed = True
|
||||||
|
self.success = True
|
||||||
|
return
|
||||||
|
|
||||||
if closest_match_download_link is None:
|
logging.info("- Following KDK is recommended:")
|
||||||
msg = "Could not find KDK for host, nor closest match"
|
logging.info(f"- KDK Build: {self.kdk_url_build}")
|
||||||
print(f"- {msg}")
|
logging.info(f"- KDK Version: {self.kdk_url_version}")
|
||||||
return False, msg, ""
|
logging.info(f"- KDK URL: {self.kdk_url}")
|
||||||
|
|
||||||
print(f"- Closest match: {closest_version} build {closest_build}")
|
self.success = True
|
||||||
result = self.verify_apple_developer_portal(closest_match_download_link)
|
|
||||||
|
|
||||||
if result == 0:
|
|
||||||
print("- Downloading KDK")
|
|
||||||
download_link = closest_match_download_link
|
|
||||||
elif result == 1:
|
|
||||||
msg = "Could not find KDK for host on Apple's servers, nor closest match"
|
|
||||||
print(f"- {msg}")
|
|
||||||
return False, msg, ""
|
|
||||||
elif result == 2:
|
|
||||||
msg = "Could not contact Apple download servers"
|
|
||||||
download_link = self.kdk_backup_site(closest_build)
|
|
||||||
if download_link is None:
|
|
||||||
msg += " and could not find a backup copy online"
|
|
||||||
print(f"- {msg}")
|
|
||||||
return False, msg, ""
|
|
||||||
else:
|
|
||||||
msg = "Unknown error"
|
|
||||||
print(f"- {msg}")
|
|
||||||
return False, msg, ""
|
|
||||||
elif result == 2:
|
|
||||||
msg = "Could not contact Apple download servers"
|
|
||||||
download_link = self.kdk_backup_site(build)
|
|
||||||
if download_link is None:
|
|
||||||
msg += " and could not find a backup copy online"
|
|
||||||
print(f"- {msg}")
|
|
||||||
return False, msg, ""
|
|
||||||
elif result == 3:
|
|
||||||
msg = "Failed to connect to the internet"
|
|
||||||
print(f"- {msg}")
|
|
||||||
return False, msg, ""
|
|
||||||
|
|
||||||
if "github" in download_link:
|
def retrieve_download(self, override_path: str = "") -> network_handler.DownloadObject or None:
|
||||||
result = utilities.download_file(download_link, self.constants.kdk_download_path)
|
"""
|
||||||
else:
|
Returns a DownloadObject for the KDK
|
||||||
result = utilities.download_apple_developer_portal(download_link, self.constants.kdk_download_path)
|
|
||||||
|
|
||||||
if result:
|
Parameters:
|
||||||
result = subprocess.run(["hdiutil", "verify", self.constants.kdk_download_path], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
|
override_path (str): Override the default download path
|
||||||
if result.returncode != 0:
|
|
||||||
print(f"Error: Kernel Debug Kit checksum verification failed!")
|
|
||||||
print(f"Output: {result.stderr}")
|
|
||||||
msg = "Kernel Debug Kit checksum verification failed, please try again.\n\nIf this continues to fail, ensure you're downloading on a stable network connection (ie. Ethernet)"
|
|
||||||
print(f"- {msg}")
|
|
||||||
return False, msg, ""
|
|
||||||
self.remove_unused_kdks(exclude_builds=[detected_build, closest_build])
|
|
||||||
return True, "", detected_build
|
|
||||||
msg = "Failed to download KDK"
|
|
||||||
print(f"- {msg}")
|
|
||||||
return False, msg, ""
|
|
||||||
|
|
||||||
def is_kdk_installed(self, build):
|
Returns:
|
||||||
kexts_to_check = [
|
DownloadObject: DownloadObject for the KDK, None if no download required
|
||||||
|
"""
|
||||||
|
|
||||||
|
self.success = False
|
||||||
|
self.error_msg = ""
|
||||||
|
|
||||||
|
if self.kdk_already_installed:
|
||||||
|
logging.info("- No download required, KDK already installed")
|
||||||
|
self.success = True
|
||||||
|
return None
|
||||||
|
|
||||||
|
if self.kdk_url == "":
|
||||||
|
self.error_msg = "Could not retrieve KDK catalog, no KDK to download"
|
||||||
|
logging.error(self.error_msg)
|
||||||
|
return None
|
||||||
|
|
||||||
|
logging.info(f"- Returning DownloadObject for KDK: {Path(self.kdk_url).name}")
|
||||||
|
self.success = True
|
||||||
|
|
||||||
|
kdk_download_path = self.constants.kdk_download_path if override_path == "" else Path(override_path)
|
||||||
|
kdk_plist_path = Path(f"{kdk_download_path.parent}/{KDK_INFO_PLIST}") if override_path == "" else Path(f"{Path(override_path).parent}/{KDK_INFO_PLIST}")
|
||||||
|
|
||||||
|
self._generate_kdk_info_plist(kdk_plist_path)
|
||||||
|
return network_handler.DownloadObject(self.kdk_url, kdk_download_path)
|
||||||
|
|
||||||
|
|
||||||
|
def _generate_kdk_info_plist(self, plist_path: str) -> None:
|
||||||
|
"""
|
||||||
|
Generates a KDK Info.plist
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
plist_path = Path(plist_path)
|
||||||
|
if plist_path.exists():
|
||||||
|
plist_path.unlink()
|
||||||
|
|
||||||
|
kdk_dict = {
|
||||||
|
"build": self.kdk_url_build,
|
||||||
|
"version": self.kdk_url_version,
|
||||||
|
}
|
||||||
|
|
||||||
|
try:
|
||||||
|
plist_path.touch()
|
||||||
|
plistlib.dump(kdk_dict, plist_path.open("wb"), sort_keys=False)
|
||||||
|
except Exception as e:
|
||||||
|
logging.error(f"- Failed to generate KDK Info.plist: {e}")
|
||||||
|
|
||||||
|
|
||||||
|
def _local_kdk_valid(self, kdk_path: Path) -> bool:
|
||||||
|
"""
|
||||||
|
Validates provided KDK, ensure no corruption
|
||||||
|
|
||||||
|
The reason for this is due to macOS deleting files from the KDK during OS updates,
|
||||||
|
similar to how Install macOS.app is deleted during OS updates
|
||||||
|
|
||||||
|
Uses Apple's pkg receipt system to verify the original contents of the KDK
|
||||||
|
|
||||||
|
Parameters:
|
||||||
|
kdk_path (Path): Path to KDK
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
bool: True if valid, False if invalid
|
||||||
|
"""
|
||||||
|
|
||||||
|
if not Path(f"{kdk_path}/System/Library/CoreServices/SystemVersion.plist").exists():
|
||||||
|
logging.info(f"- Corrupted KDK found ({kdk_path.name}), removing due to missing SystemVersion.plist")
|
||||||
|
self._remove_kdk(kdk_path)
|
||||||
|
return False
|
||||||
|
|
||||||
|
# Get build from KDK
|
||||||
|
kdk_plist_data = plistlib.load(Path(f"{kdk_path}/System/Library/CoreServices/SystemVersion.plist").open("rb"))
|
||||||
|
if "ProductBuildVersion" not in kdk_plist_data:
|
||||||
|
logging.info(f"- Corrupted KDK found ({kdk_path.name}), removing due to missing ProductBuildVersion")
|
||||||
|
self._remove_kdk(kdk_path)
|
||||||
|
return False
|
||||||
|
|
||||||
|
kdk_build = kdk_plist_data["ProductBuildVersion"]
|
||||||
|
|
||||||
|
# Check pkg receipts for this build, will give a canonical list if all files that should be present
|
||||||
|
result = subprocess.run(["pkgutil", "--files", f"com.apple.pkg.KDK.{kdk_build}"], capture_output=True)
|
||||||
|
if result.returncode != 0:
|
||||||
|
# If pkg receipt is missing, we'll fallback to legacy validation
|
||||||
|
logging.info(f"- pkg receipt missing for {kdk_path.name}, falling back to legacy validation")
|
||||||
|
return self._local_kdk_valid_legacy(kdk_path)
|
||||||
|
|
||||||
|
# Go through each line of the pkg receipt and ensure it exists
|
||||||
|
for line in result.stdout.decode("utf-8").splitlines():
|
||||||
|
if not line.startswith("System/Library/Extensions"):
|
||||||
|
continue
|
||||||
|
if not Path(f"{kdk_path}/{line}").exists():
|
||||||
|
logging.info(f"- Corrupted KDK found ({kdk_path.name}), removing due to missing file: {line}")
|
||||||
|
self._remove_kdk(kdk_path)
|
||||||
|
return False
|
||||||
|
|
||||||
|
return True
|
||||||
|
|
||||||
|
|
||||||
|
def _local_kdk_valid_legacy(self, kdk_path: Path) -> bool:
|
||||||
|
"""
|
||||||
|
Legacy variant of validating provided KDK
|
||||||
|
Uses best guess of files that should be present
|
||||||
|
This should ideally never be invoked, but used as a fallback
|
||||||
|
|
||||||
|
Parameters:
|
||||||
|
kdk_path (Path): Path to KDK
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
bool: True if valid, False if invalid
|
||||||
|
"""
|
||||||
|
|
||||||
|
KEXT_CATALOG = [
|
||||||
"System.kext/PlugIns/Libkern.kext/Libkern",
|
"System.kext/PlugIns/Libkern.kext/Libkern",
|
||||||
"apfs.kext/Contents/MacOS/apfs",
|
"apfs.kext/Contents/MacOS/apfs",
|
||||||
"IOUSBHostFamily.kext/Contents/MacOS/IOUSBHostFamily",
|
"IOUSBHostFamily.kext/Contents/MacOS/IOUSBHostFamily",
|
||||||
"AMDRadeonX6000.kext/Contents/MacOS/AMDRadeonX6000",
|
"AMDRadeonX6000.kext/Contents/MacOS/AMDRadeonX6000",
|
||||||
]
|
]
|
||||||
|
|
||||||
if Path("/Library/Developer/KDKs").exists():
|
for kext in KEXT_CATALOG:
|
||||||
for file in Path("/Library/Developer/KDKs").iterdir():
|
if not Path(f"{kdk_path}/System/Library/Extensions/{kext}").exists():
|
||||||
if file.is_dir():
|
logging.info(f"- Corrupted KDK found, removing due to missing: {kdk_path}/System/Library/Extensions/{kext}")
|
||||||
if file.name.endswith(f"{build}.kdk"):
|
self._remove_kdk(kdk_path)
|
||||||
for kext in kexts_to_check:
|
return False
|
||||||
if not Path(f"{file}/System/Library/Extensions/{kext}").exists():
|
|
||||||
print(f"- Corrupted KDK found, removing due to missing: {file}/System/Library/Extensions/{kext}")
|
return True
|
||||||
utilities.elevated(["rm", "-rf", file], stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
|
|
||||||
return False
|
|
||||||
return True
|
def _local_kdk_installed(self, match: str = None, check_version: bool = False) -> str or None:
|
||||||
return False
|
"""
|
||||||
|
Checks if KDK matching build is installed
|
||||||
|
If so, validates it has not been corrupted
|
||||||
|
|
||||||
|
Parameters:
|
||||||
|
match (str): string to match against (ex. build or version)
|
||||||
|
check_version (bool): If True, match against version, otherwise match against build
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
str: Path to KDK if valid, None if not
|
||||||
|
"""
|
||||||
|
|
||||||
|
if self.ignore_installed is True:
|
||||||
|
return None
|
||||||
|
|
||||||
|
if match is None:
|
||||||
|
if check_version:
|
||||||
|
match = self.host_version
|
||||||
|
else:
|
||||||
|
match = self.host_build
|
||||||
|
|
||||||
|
if not Path(KDK_INSTALL_PATH).exists():
|
||||||
|
return None
|
||||||
|
|
||||||
|
for kdk_folder in Path(KDK_INSTALL_PATH).iterdir():
|
||||||
|
if not kdk_folder.is_dir():
|
||||||
|
continue
|
||||||
|
if check_version:
|
||||||
|
if match not in kdk_folder.name:
|
||||||
|
continue
|
||||||
|
else:
|
||||||
|
if not kdk_folder.name.endswith(f"{match}.kdk"):
|
||||||
|
continue
|
||||||
|
|
||||||
|
if self._local_kdk_valid(kdk_folder):
|
||||||
|
return kdk_folder
|
||||||
|
|
||||||
|
# If we can't find a KDK, next check if there's a backup present
|
||||||
|
# Check for KDK packages in the same directory as the KDK
|
||||||
|
for kdk_pkg in Path(KDK_INSTALL_PATH).iterdir():
|
||||||
|
if kdk_pkg.is_dir():
|
||||||
|
continue
|
||||||
|
if not kdk_pkg.name.endswith(".pkg"):
|
||||||
|
continue
|
||||||
|
if check_version:
|
||||||
|
if match not in kdk_pkg.name:
|
||||||
|
continue
|
||||||
|
else:
|
||||||
|
if not kdk_pkg.name.endswith(f"{match}.pkg"):
|
||||||
|
continue
|
||||||
|
|
||||||
|
logging.info(f"- Found KDK backup: {kdk_pkg.name}")
|
||||||
|
if self.passive is False:
|
||||||
|
logging.info("- Attempting KDK restoration")
|
||||||
|
if KernelDebugKitUtilities().install_kdk_pkg(kdk_pkg):
|
||||||
|
logging.info("- Successfully restored KDK")
|
||||||
|
return self._local_kdk_installed(match=match, check_version=check_version)
|
||||||
|
else:
|
||||||
|
# When in passive mode, we're just checking if a KDK could be restored
|
||||||
|
logging.info("- KDK restoration skipped, running in passive mode")
|
||||||
|
return kdk_pkg
|
||||||
|
|
||||||
|
return None
|
||||||
|
|
||||||
|
|
||||||
|
def _remove_kdk(self, kdk_path: str) -> None:
|
||||||
|
"""
|
||||||
|
Removes provided KDK
|
||||||
|
|
||||||
|
Parameters:
|
||||||
|
kdk_path (str): Path to KDK
|
||||||
|
"""
|
||||||
|
|
||||||
|
if self.passive is True:
|
||||||
|
return
|
||||||
|
|
||||||
|
if os.getuid() != 0:
|
||||||
|
logging.warning("- Cannot remove KDK, not running as root")
|
||||||
|
return
|
||||||
|
|
||||||
|
if not Path(kdk_path).exists():
|
||||||
|
logging.warning(f"- KDK does not exist: {kdk_path}")
|
||||||
|
return
|
||||||
|
|
||||||
|
rm_args = ["rm", "-rf" if Path(kdk_path).is_dir() else "-f", kdk_path]
|
||||||
|
|
||||||
|
result = utilities.elevated(rm_args, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
|
||||||
|
if result.returncode != 0:
|
||||||
|
logging.warning(f"- Failed to remove KDK: {kdk_path}")
|
||||||
|
logging.warning(f"- {result.stdout.decode('utf-8')}")
|
||||||
|
return
|
||||||
|
|
||||||
|
logging.info(f"- Successfully removed KDK: {kdk_path}")
|
||||||
|
|
||||||
|
|
||||||
|
def _remove_unused_kdks(self, exclude_builds: list = None) -> None:
|
||||||
|
"""
|
||||||
|
Removes KDKs that are not in use
|
||||||
|
|
||||||
|
Parameters:
|
||||||
|
exclude_builds (list, optional): Builds to exclude from removal.
|
||||||
|
If None, defaults to host and closest match builds.
|
||||||
|
"""
|
||||||
|
|
||||||
|
if self.passive is True:
|
||||||
|
return
|
||||||
|
|
||||||
|
if exclude_builds is None:
|
||||||
|
exclude_builds = [
|
||||||
|
self.kdk_url_build,
|
||||||
|
self.kdk_closest_match_url_build,
|
||||||
|
]
|
||||||
|
|
||||||
def remove_unused_kdks(self, exclude_builds=[]):
|
|
||||||
if self.constants.should_nuke_kdks is False:
|
if self.constants.should_nuke_kdks is False:
|
||||||
return
|
return
|
||||||
|
|
||||||
if not Path("/Library/Developer/KDKs").exists():
|
if not Path(KDK_INSTALL_PATH).exists():
|
||||||
return
|
return
|
||||||
|
|
||||||
if exclude_builds == []:
|
logging.info("- Cleaning unused KDKs")
|
||||||
return
|
for kdk_folder in Path(KDK_INSTALL_PATH).iterdir():
|
||||||
|
if kdk_folder.name.endswith(".kdk") or kdk_folder.name.endswith(".pkg"):
|
||||||
print("- Cleaning unused KDKs")
|
should_remove = True
|
||||||
for kdk_folder in Path("/Library/Developer/KDKs").iterdir():
|
for build in exclude_builds:
|
||||||
if kdk_folder.is_dir():
|
if build != "":
|
||||||
if kdk_folder.name.endswith(".kdk"):
|
|
||||||
should_remove = True
|
|
||||||
for build in exclude_builds:
|
|
||||||
if build != "" and kdk_folder.name.endswith(f"{build}.kdk"):
|
|
||||||
should_remove = False
|
|
||||||
break
|
|
||||||
if should_remove is False:
|
|
||||||
continue
|
continue
|
||||||
print(f" - Removing {kdk_folder.name}")
|
if kdk_folder.name.endswith(f"_{build}.kdk") or kdk_folder.name.endswith(f"_{build}.pkg"):
|
||||||
utilities.elevated(["rm", "-rf", kdk_folder], stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
|
should_remove = False
|
||||||
|
break
|
||||||
|
if should_remove is False:
|
||||||
|
continue
|
||||||
|
self._remove_kdk(kdk_folder)
|
||||||
|
|
||||||
|
|
||||||
def kdk_backup_site(self, build):
|
def validate_kdk_checksum(self, kdk_dmg_path: str = None) -> bool:
|
||||||
KDK_MIRROR_REPOSITORY = "https://api.github.com/repos/dortania/KdkSupportPkg/releases"
|
"""
|
||||||
|
Validates KDK DMG checksum
|
||||||
|
|
||||||
# Check if tag exists
|
Parameters:
|
||||||
catalog = requests.get(KDK_MIRROR_REPOSITORY)
|
kdk_dmg_path (str, optional): Path to KDK DMG. Defaults to None.
|
||||||
if catalog.status_code != 200:
|
|
||||||
print(f"- Could not contact KDK mirror repository")
|
|
||||||
return None
|
|
||||||
|
|
||||||
catalog = catalog.json()
|
Returns:
|
||||||
|
bool: True if valid, False if invalid
|
||||||
|
"""
|
||||||
|
|
||||||
for release in catalog:
|
self.success = False
|
||||||
if release["tag_name"] == build:
|
self.error_msg = ""
|
||||||
print(f"- Found KDK mirror for build: {build}")
|
|
||||||
for asset in release["assets"]:
|
|
||||||
if asset["name"].endswith(".dmg"):
|
|
||||||
return asset["browser_download_url"]
|
|
||||||
|
|
||||||
print(f"- Could not find KDK mirror for build {build}")
|
if kdk_dmg_path is None:
|
||||||
return None
|
kdk_dmg_path = self.constants.kdk_download_path
|
||||||
|
|
||||||
|
if not Path(kdk_dmg_path).exists():
|
||||||
|
logging.error(f"KDK DMG does not exist: {kdk_dmg_path}")
|
||||||
|
return False
|
||||||
|
|
||||||
|
# TODO: should we use the checksum from the API?
|
||||||
|
result = subprocess.run(["hdiutil", "verify", self.constants.kdk_download_path], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
|
||||||
|
if result.returncode != 0:
|
||||||
|
logging.info("- Error: Kernel Debug Kit checksum verification failed!")
|
||||||
|
logging.info(f"- Output: {result.stderr.decode('utf-8')}")
|
||||||
|
msg = "Kernel Debug Kit checksum verification failed, please try again.\n\nIf this continues to fail, ensure you're downloading on a stable network connection (ie. Ethernet)"
|
||||||
|
logging.info(f"- {msg}")
|
||||||
|
|
||||||
|
self.error_msg = msg
|
||||||
|
|
||||||
|
self._remove_unused_kdks()
|
||||||
|
|
||||||
|
self.success = True
|
||||||
|
|
||||||
|
|
||||||
|
class KernelDebugKitUtilities:
|
||||||
|
"""
|
||||||
|
Utilities for KDK handling
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self) -> None:
    # Stateless utility class: all methods take explicit arguments,
    # so there is nothing to initialize here.
    pass
|
||||||
|
|
||||||
|
|
||||||
|
def install_kdk_pkg(self, kdk_path: Path) -> bool:
    """
    Installs provided KDK packages

    Parameters:
        kdk_path (Path): Path to KDK package

    Returns:
        bool: True if successful, False if not
    """

    # 'installer -target /' writes into /Library/Developer/KDKs, so root is mandatory
    if os.getuid() != 0:
        logging.warning("- Cannot install KDK, not running as root")
        return False

    logging.info(f"- Installing KDK package: {kdk_path.name}")
    logging.info(" - This may take a while...")

    # TODO: Check whether enough disk space is available

    install_command = ["installer", "-pkg", kdk_path, "-target", "/"]
    result = utilities.elevated(install_command, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
    if result.returncode == 0:
        return True

    # stderr is merged into stdout above, so result.stderr is normally None;
    # the extra check guards against the helper behaving differently.
    logging.info("- Failed to install KDK:")
    logging.info(result.stdout.decode('utf-8'))
    if result.stderr:
        logging.info(result.stderr.decode('utf-8'))
    return False
|
||||||
|
|
||||||
|
|
||||||
|
def install_kdk_dmg(self, kdk_path: Path) -> bool:
    """
    Installs provided KDK disk image

    Parameters:
        kdk_path (Path): Path to KDK disk image

    Returns:
        bool: True if successful, False if not
    """

    # The embedded package install requires root; bail out early otherwise
    if os.getuid() != 0:
        logging.warning("- Cannot install KDK, not running as root")
        return False

    logging.info("- Extracting downloaded KDK disk image")
    with tempfile.TemporaryDirectory() as mount_point:
        attach_command = ["hdiutil", "attach", kdk_path, "-mountpoint", mount_point, "-nobrowse"]
        result = subprocess.run(attach_command, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
        if result.returncode != 0:
            logging.info("- Failed to mount KDK:")
            logging.info(result.stdout.decode('utf-8'))
            return False

        # The DMG is expected to hold a single installer package at its root
        kdk_pkg_path = Path(mount_point) / "KernelDebugKit.pkg"
        if not kdk_pkg_path.exists():
            logging.warning("- Failed to find KDK package in DMG, likely corrupted!!!")
            self._unmount_disk_image(mount_point)
            return False

        if self.install_kdk_pkg(kdk_pkg_path) is False:
            self._unmount_disk_image(mount_point)
            return False

        # Keep a copy of the package next to the DMG's Info.plist for reuse
        self._create_backup(kdk_pkg_path, kdk_path.parent / KDK_INFO_PLIST)
        self._unmount_disk_image(mount_point)

    logging.info("- Successfully installed KDK")
    return True
|
||||||
|
|
||||||
|
def _unmount_disk_image(self, mount_point) -> None:
    """
    Unmounts provided disk image silently

    Parameters:
        mount_point (Path): Path to mount point
    """

    detach_command = ["hdiutil", "detach", mount_point]
    # Output is captured and discarded; a failed detach is non-fatal here.
    subprocess.run(detach_command, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
|
||||||
|
|
||||||
|
|
||||||
|
def _create_backup(self, kdk_path: Path, kdk_info_plist: Path) -> None:
|
||||||
|
"""
|
||||||
|
Creates a backup of the KDK
|
||||||
|
|
||||||
|
Parameters:
|
||||||
|
kdk_path (Path): Path to KDK
|
||||||
|
kdk_info_plist (Path): Path to KDK Info.plist
|
||||||
|
"""
|
||||||
|
|
||||||
|
if not kdk_path.exists():
|
||||||
|
logging.warning("- KDK does not exist, cannot create backup")
|
||||||
|
return
|
||||||
|
if not kdk_info_plist.exists():
|
||||||
|
logging.warning("- KDK Info.plist does not exist, cannot create backup")
|
||||||
|
return
|
||||||
|
|
||||||
|
kdk_info_dict = plistlib.load(kdk_info_plist.open("rb"))
|
||||||
|
|
||||||
|
if 'version' not in kdk_info_dict or 'build' not in kdk_info_dict:
|
||||||
|
logging.warning("- Malformed KDK Info.plist provided, cannot create backup")
|
||||||
|
return
|
||||||
|
|
||||||
|
if os.getuid() != 0:
|
||||||
|
logging.warning("- Cannot create KDK backup, not running as root")
|
||||||
|
return
|
||||||
|
|
||||||
|
kdk_dst_name = f"KDK_{kdk_info_dict['version']}_{kdk_info_dict['build']}.pkg"
|
||||||
|
kdk_dst_path = Path(f"{KDK_INSTALL_PATH}/{kdk_dst_name}")
|
||||||
|
|
||||||
|
logging.info(f"- Creating backup: {kdk_dst_name}")
|
||||||
|
if kdk_dst_path.exists():
|
||||||
|
logging.info("- Backup already exists, skipping")
|
||||||
|
return
|
||||||
|
|
||||||
|
result = utilities.elevated(["cp", "-R", kdk_path, kdk_dst_path], stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
|
||||||
|
if result.returncode != 0:
|
||||||
|
logging.info("- Failed to create KDK backup:")
|
||||||
|
logging.info(result.stdout.decode('utf-8'))
|
||||||
175
resources/logging_handler.py
Normal file
175
resources/logging_handler.py
Normal file
@@ -0,0 +1,175 @@
|
|||||||
|
import logging
|
||||||
|
import sys
|
||||||
|
import threading
|
||||||
|
import os
|
||||||
|
import subprocess
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
|
||||||
|
class InitializeLoggingSupport:
    """
    Initialize logging framework for program

    Primary responsibilities:
    - Determine where to store log file
    - Clean log file if it's near the max file size
    - Initialize logging framework configuration
    - Implement custom traceback handler
    - Implement error handling for file write

    Usage:
    >>> from resources.logging_handler import InitializeLoggingSupport
    >>> InitializeLoggingSupport()

    FOR DEVELOPERS:
    - Do not invoke logging until after '_attempt_initialize_logging_configuration()' has been invoked
    """

    def __init__(self) -> None:
        # File name only; full path is resolved in _initialize_logging_path()
        self.log_filename: str = "OpenCore-Patcher.log"
        self.log_filepath: Path = None  # NOTE(review): effectively 'Path | None'

        # Saved so __del__ / _restore_original_excepthook() can undo our hooks.
        # NOTE(review): the 'sys'/'threading' annotations name module objects,
        # not callable types — left untouched here as code must stay identical.
        self.original_excepthook: sys = sys.excepthook
        self.original_thread_excepthook: threading = threading.excepthook

        self.max_file_size: int = 1024 * 1024 * 10  # 10 MB
        self.file_size_redline: int = 1024 * 1024 * 9  # 9 MB, when to start cleaning log file

        # Order matters: path must exist before cleaning/configuring,
        # and logging must be configured before the traceback hooks use it.
        self._initialize_logging_path()
        self._clean_log_file()
        self._attempt_initialize_logging_configuration()
        self._implement_custom_traceback_handler()
        self._fix_file_permission()

    def __del__(self) -> None:
        # Best-effort restore of interpreter-default exception hooks
        self._restore_original_excepthook()

    def _initialize_logging_path(self) -> None:
        """
        Initialize logging framework storage path
        """

        self.log_filepath = Path(f"~/Library/Logs/{self.log_filename}").expanduser()

        if not self.log_filepath.parent.exists():
            # Likely in an installer environment, store in /Users/Shared
            self.log_filepath = Path("/Users/Shared") / self.log_filename

        # print() is used here because logging is not configured yet
        print("- Initializing logging framework...")
        print(f" - Log file: {self.log_filepath}")

    def _clean_log_file(self) -> None:
        """
        Determine if log file should be cleaned

        We check if we're near the max file size, and if so, we clean the log file
        """

        if not self.log_filepath.exists():
            return

        if self.log_filepath.stat().st_size < self.file_size_redline:
            return

        # Check if backup log file exists
        # (with_suffix replaces ".log" -> ".old.log", i.e. "OpenCore-Patcher.old.log")
        backup_log_filepath = self.log_filepath.with_suffix(".old.log")
        try:
            if backup_log_filepath.exists():
                backup_log_filepath.unlink()

            # Rename current log file to backup log file
            self.log_filepath.rename(backup_log_filepath)
        except Exception as e:
            # Non-fatal: logging will simply keep appending to the large file
            print(f"- Failed to clean log file: {e}")

    def _fix_file_permission(self) -> None:
        """
        Fixes file permission for log file

        If OCLP was invoked as root, file permission will only allow root to write to log file
        This in turn breaks normal OCLP execution to write to log file
        """

        # Only needed when running as root; a normal user already owns the file
        if os.geteuid() != 0:
            return

        result = subprocess.run(["chmod", "777", self.log_filepath], capture_output=True)
        if result.returncode != 0:
            print(f"- Failed to fix log file permissions")
            if result.stderr:
                print(result.stderr.decode("utf-8"))

    def _initialize_logging_configuration(self, log_to_file: bool = True) -> None:
        """
        Initialize logging framework configuration

        StreamHandler's format is used to mimic the default behavior of print()
        While FileHandler's format is for more in-depth logging

        Parameters:
            log_to_file (bool): Whether to log to file or not

        """

        logging.basicConfig(
            level=logging.NOTSET,
            format="%(asctime)s - %(filename)s (%(lineno)d): %(message)s",
            handlers=[
                logging.StreamHandler(stream = sys.stdout),
                # NullHandler keeps the handler list two long so the [1]
                # indexing below stays valid when file logging is disabled
                logging.FileHandler(self.log_filepath) if log_to_file is True else logging.NullHandler()
            ],
        )
        logging.getLogger().setLevel(logging.INFO)
        # Console output mimics a plain print(); the file keeps the verbose format
        logging.getLogger().handlers[0].setFormatter(logging.Formatter("%(message)s"))
        # NOTE(review): plain FileHandler never reads 'maxBytes' — only
        # RotatingFileHandler honors it — so this line is effectively a no-op.
        # Size control is actually done manually by _clean_log_file().
        logging.getLogger().handlers[1].maxBytes = self.max_file_size

    def _attempt_initialize_logging_configuration(self) -> None:
        """
        Attempt to initialize logging framework configuration

        If we fail to initialize the logging framework, we will disable logging to file
        """

        try:
            self._initialize_logging_configuration()
        except Exception as e:
            # e.g. the log path is not writable; fall back to console-only logging
            print(f"- Failed to initialize logging framework: {e}")
            print("- Retrying without logging to file...")
            self._initialize_logging_configuration(log_to_file=False)

    def _implement_custom_traceback_handler(self) -> None:
        """
        Reroute traceback to logging module
        """

        def custom_excepthook(type, value, tb) -> None:
            """
            Reroute traceback in main thread to logging module
            """
            logging.error("Uncaught exception in main thread", exc_info=(type, value, tb))

        def custom_thread_excepthook(args) -> None:
            """
            Reroute traceback in spawned thread to logging module
            """
            # NOTE(review): 'args' is a threading.ExceptHookArgs 4-tuple
            # (exc_type, exc_value, exc_traceback, thread), while exc_info
            # conventionally takes a 3-tuple — confirm formatted output is sane.
            logging.error("Uncaught exception in spawned thread", exc_info=(args))

        sys.excepthook = custom_excepthook
        threading.excepthook = custom_thread_excepthook

    def _restore_original_excepthook(self) -> None:
        """
        Restore original traceback handlers
        """

        sys.excepthook = self.original_excepthook
        threading.excepthook = self.original_thread_excepthook
|
||||||
615
resources/macos_installer_handler.py
Normal file
615
resources/macos_installer_handler.py
Normal file
@@ -0,0 +1,615 @@
|
|||||||
|
# Handler for macOS installers, both local and remote
|
||||||
|
|
||||||
|
from pathlib import Path
|
||||||
|
import plistlib
|
||||||
|
import subprocess
|
||||||
|
import tempfile
|
||||||
|
import enum
|
||||||
|
import logging
|
||||||
|
|
||||||
|
from data import os_data
|
||||||
|
from resources import network_handler, utilities
|
||||||
|
|
||||||
|
|
||||||
|
APPLICATION_SEARCH_PATH: str = "/Applications"
|
||||||
|
SFR_SOFTWARE_UPDATE_PATH: str = "SFR/com_apple_MobileAsset_SFRSoftwareUpdate/com_apple_MobileAsset_SFRSoftwareUpdate.xml"
|
||||||
|
|
||||||
|
CATALOG_URL_BASE: str = "https://swscan.apple.com/content/catalogs/others/index"
|
||||||
|
CATALOG_URL_EXTENSION: str = "13-12-10.16-10.15-10.14-10.13-10.12-10.11-10.10-10.9-mountainlion-lion-snowleopard-leopard.merged-1.sucatalog"
|
||||||
|
CATALOG_URL_VERSION: str = "13"
|
||||||
|
|
||||||
|
tmp_dir = tempfile.TemporaryDirectory()
|
||||||
|
|
||||||
|
|
||||||
|
class InstallerCreation():
|
||||||
|
|
||||||
|
def __init__(self) -> None:
    # Stateless; every method operates on explicit arguments only.
    pass
|
||||||
|
|
||||||
|
|
||||||
|
def install_macOS_installer(self, download_path: str) -> bool:
    """
    Installs InstallAssistant.pkg

    Parameters:
        download_path (str): Path to InstallAssistant.pkg

    Returns:
        bool: True if successful, False otherwise
    """

    logging.info("- Extracting macOS installer from InstallAssistant.pkg\n This may take some time")

    # Route through osascript so macOS presents a native admin-privileges prompt
    shell_command = f'installer -pkg {Path(download_path)}/InstallAssistant.pkg -target /'
    applescript = (
        f'do shell script "{shell_command}"'
        ' with prompt "OpenCore Legacy Patcher needs administrator privileges to add InstallAssistant."'
        " with administrator privileges"
        " without altering line endings"
    )

    result = subprocess.run(["osascript", "-e", applescript], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    if result.returncode != 0:
        logging.info("- Failed to install InstallAssistant")
        logging.info(f" Error Code: {result.returncode}")
        return False

    logging.info("- InstallAssistant installed")
    return True
|
||||||
|
|
||||||
|
|
||||||
|
def generate_installer_creation_script(self, tmp_location: str, installer_path: str, disk: str) -> bool:
    """
    Creates installer.sh to be piped to OCLP-Helper and run as admin

    Script includes:
    - Format provided disk as HFS+ GPT
    - Run createinstallmedia on provided disk

    Implementing this into a single installer.sh script allows us to only call
    OCLP-Helper once to avoid nagging the user about permissions

    Parameters:
        tmp_location (str): Path to temporary directory
        installer_path (str): Path to the 'Install macOS ....app' bundle
        disk (str): Disk to install to

    Returns:
        bool: True if successful, False otherwise
    """

    additional_args = ""
    script_location = Path(tmp_location) / Path("Installer.sh")

    # Due to a bug in createinstallmedia, running from '/Applications' may sometimes error:
    # 'Failed to extract AssetData/boot/Firmware/Manifests/InstallerBoot/*'
    # This affects native Macs as well even when manually invoking createinstallmedia

    # To resolve, we'll copy into our temp directory and run from there

    # Create a new tmp directory
    # Our current one is a disk image, thus CoW will not work
    global tmp_dir
    ia_tmp = tmp_dir.name

    logging.info(f"Creating temporary directory at {ia_tmp}")
    # Delete all files in tmp_dir left over from a previous run
    for file in Path(ia_tmp).glob("*"):
        subprocess.run(["rm", "-rf", str(file)])

    # Copy installer to tmp (use CoW to avoid extra disk writes)
    args = ["cp", "-cR", installer_path, ia_tmp]
    if utilities.check_filesystem_type() != "apfs":
        # HFS+ disks do not support CoW
        args[1] = "-R"

    # Ensure we have enough space for the duplication
    # BUGFIX: the original stat'ed the (near-empty) tmp directory itself,
    # which reports only the directory entry size; sum the installer
    # bundle's actual file sizes instead.
    space_available = utilities.get_free_space()
    space_needed = sum(f.stat().st_size for f in Path(installer_path).glob("**/*") if f.is_file())
    if space_available < space_needed:
        logging.info("Not enough free space to create installer.sh")
        logging.info(f"{utilities.human_fmt(space_available)} available, {utilities.human_fmt(space_needed)} required")
        return False

    subprocess.run(args)

    # Adjust installer_path to point to the copied installer
    installer_path = Path(ia_tmp) / Path(Path(installer_path).name)
    if not Path(installer_path).exists():
        logging.info(f"Failed to copy installer to {ia_tmp}")
        return False

    createinstallmedia_path = str(Path(installer_path) / Path("Contents/Resources/createinstallmedia"))
    plist_path = str(Path(installer_path) / Path("Contents/Info.plist"))
    if Path(plist_path).exists():
        plist = plistlib.load(Path(plist_path).open("rb"))
        if "DTPlatformVersion" in plist:
            # createinstallmedia on macOS 10.12 and older requires --applicationpath
            # BUGFIX: the original collapsed the version to its major component
            # and then indexed characters ("10"[0] == "1"), so the comparison
            # against "10" could never match and the flag was never added.
            version_parts = plist["DTPlatformVersion"].split(".")
            if version_parts[0] == "10" and len(version_parts) > 1:
                if int(version_parts[1]) < 13:
                    additional_args = f" --applicationpath '{installer_path}'"

    if script_location.exists():
        script_location.unlink()
    script_location.touch()

    with script_location.open("w") as script:
        script.write(f'''#!/bin/bash
erase_disk='diskutil eraseDisk HFS+ OCLP-Installer {disk}'
if $erase_disk; then
    "{createinstallmedia_path}" --volume /Volumes/OCLP-Installer --nointeraction{additional_args}
fi
''')
    if Path(script_location).exists():
        return True
    return False
|
||||||
|
|
||||||
|
|
||||||
|
def list_disk_to_format(self) -> dict:
    """
    List applicable disks for macOS installer creation
    Only lists disks that are:
    - 14GB or larger
    - External

    Current limitations:
    - Does not support PCIe based SD cards readers

    Returns:
        dict: Dictionary of disks ({identifier: {"identifier", "name", "size"}})
    """

    all_disks:  dict = {}
    list_disks: dict = {}

    # TODO: AllDisksAndPartitions is not supported in Snow Leopard and older
    try:
        # High Sierra and newer
        disks = plistlib.loads(subprocess.run("diskutil list -plist physical".split(), stdout=subprocess.PIPE).stdout.decode().strip().encode())
    except ValueError:
        # Sierra and older: the 'physical' filter is unsupported, so diskutil's
        # output is not a parseable plist and plistlib raises ValueError
        disks = plistlib.loads(subprocess.run("diskutil list -plist".split(), stdout=subprocess.PIPE).stdout.decode().strip().encode())

    for disk in disks["AllDisksAndPartitions"]:
        disk_info = plistlib.loads(subprocess.run(f"diskutil info -plist {disk['DeviceIdentifier']}".split(), stdout=subprocess.PIPE).stdout.decode().strip().encode())
        try:
            all_disks[disk["DeviceIdentifier"]] = {
                "identifier": disk_info["DeviceNode"],
                "name":       disk_info["MediaName"],
                "size":       disk_info["TotalSize"],
                # diskutil's 'Internal' flag: True for built-in drives.
                # (Previously stored under the misleading key "removable".)
                "internal":   disk_info["Internal"],
                "partitions": {},
            }
        except KeyError:
            # Avoid crashing with CDs installed (missing MediaName/TotalSize keys)
            continue

    for disk_id, disk_entry in all_disks.items():
        # Strip disks that are under 14GB (15,032,385,536 bytes)
        # createinstallmedia isn't great at detecting if a disk has enough space
        # (Previously wrapped in a no-op any() over the entry's own keys.)
        if disk_entry["size"] <= 15032385536:
            continue
        # Strip internal disks as well (avoid user formatting their SSD/HDD)
        # Ensure user doesn't format their boot drive
        if disk_entry["internal"] is not False:
            continue

        logging.info(f"disk {disk_id}: {disk_entry['name']} ({utilities.human_fmt(disk_entry['size'])})")
        list_disks.update({
            disk_id: {
                "identifier": disk_entry["identifier"],
                "name": disk_entry["name"],
                "size": disk_entry["size"],
            }
        })

    return list_disks
|
||||||
|
|
||||||
|
|
||||||
|
class SeedType(enum.IntEnum):
    """
    Enum for catalog types

    Variants:
        DeveloperSeed: Developer Beta (Part of the Apple Developer Program)
        PublicSeed: Public Beta
        CustomerSeed: AppleSeed Program (Generally mirrors DeveloperSeed)
        PublicRelease: Public Release
    """
    # Each variant maps to a different swscan.apple.com catalog URL,
    # see RemoteInstallerCatalog._construct_catalog_url()
    DeveloperSeed: int = 0
    PublicSeed: int = 1
    CustomerSeed: int = 2
    PublicRelease: int = 3
|
||||||
|
|
||||||
|
|
||||||
|
class RemoteInstallerCatalog:
|
||||||
|
"""
|
||||||
|
Parses Apple's Software Update catalog and finds all macOS installers.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self, seed_override: SeedType = SeedType.PublicRelease) -> None:
    # Software Update catalog URL for the requested seed (beta) channel
    self.catalog_url: str = self._construct_catalog_url(seed_override)

    # All installers found in the catalog (network fetch happens here),
    # then a filtered view keeping only the newest build per OS version
    self.available_apps: dict = self._parse_catalog()
    self.available_apps_latest: dict = self._list_newest_installers_only()
|
||||||
|
|
||||||
|
|
||||||
|
def _construct_catalog_url(self, seed_type: SeedType) -> str:
    """
    Constructs the catalog URL based on the seed type

    Parameters:
        seed_type (SeedType): The seed type to use

    Returns:
        str: The catalog URL
    """

    # Seed channels insert a "<version><channel>-" segment; public release
    # uses the plain merged catalog with no version segment.
    seed_segments = {
        SeedType.DeveloperSeed: f"{CATALOG_URL_VERSION}seed-",
        SeedType.PublicSeed:    f"{CATALOG_URL_VERSION}beta-",
        SeedType.CustomerSeed:  f"{CATALOG_URL_VERSION}customerseed-",
    }
    segment = seed_segments.get(seed_type, "")
    return f"{CATALOG_URL_BASE}-{segment}{CATALOG_URL_EXTENSION}"
|
||||||
|
|
||||||
|
|
||||||
|
def _fetch_catalog(self) -> dict:
    """
    Fetches the catalog from Apple's servers

    Returns:
        dict: The catalog as a dictionary; empty on network or parse failure
    """

    # Bail out early if the catalog host is unreachable
    if network_handler.NetworkUtilities(self.catalog_url).verify_network_connection() is False:
        return {}

    try:
        raw_catalog = network_handler.SESSION.get(self.catalog_url).content
        return plistlib.loads(raw_catalog)
    except plistlib.InvalidFileException:
        return {}
|
||||||
|
|
||||||
|
def _parse_catalog(self) -> dict:
    """
    Parses the catalog and returns a dictionary of available installers

    Returns:
        dict: Dictionary of available installers, sorted by version
              ({product_id: {"Version", "Build", "Link", "Size",
                             "integrity", "Source", "Variant"}})
    """
    available_apps: dict = {}

    catalog: dict = self._fetch_catalog()
    if not catalog:
        return available_apps

    if "Products" not in catalog:
        return available_apps

    for product in catalog["Products"]:
        product_info = catalog["Products"][product]
        if "ExtendedMetaInfo" not in product_info:
            continue
        if "Packages" not in product_info:
            continue
        if "InstallAssistantPackageIdentifiers" not in product_info["ExtendedMetaInfo"]:
            continue
        ia_identifiers = product_info["ExtendedMetaInfo"]["InstallAssistantPackageIdentifiers"]
        if "SharedSupport" not in ia_identifiers:
            continue
        if "BuildManifest" not in ia_identifiers:
            continue

        for bm_package in product_info["Packages"]:
            # BUGFIX: verify the key exists before indexing it
            if "URL" not in bm_package:
                continue
            if "Info.plist" not in bm_package["URL"]:
                continue
            if "InstallInfo.plist" in bm_package["URL"]:
                continue

            try:
                build_plist = plistlib.loads(network_handler.SESSION.get(bm_package["URL"]).content)
            except plistlib.InvalidFileException:
                continue

            if "MobileAssetProperties" not in build_plist:
                continue
            asset_properties = build_plist["MobileAssetProperties"]
            if "SupportedDeviceModels" not in asset_properties:
                continue
            if "OSVersion" not in asset_properties:
                continue
            if "Build" not in asset_properties:
                continue

            # Ensure Apple Silicon specific Installers are not listed
            if "VMM-x86_64" not in asset_properties["SupportedDeviceModels"]:
                continue

            version = asset_properties["OSVersion"]
            build = asset_properties["Build"]

            # Derive the beta/release variant from the Bridge catalog URL
            try:
                catalog_url = asset_properties["BridgeVersionInfo"]["CatalogURL"]
                if "beta" in catalog_url:
                    catalog_url = "PublicSeed"
                elif "customerseed" in catalog_url:
                    catalog_url = "CustomerSeed"
                elif "seed" in catalog_url:
                    catalog_url = "DeveloperSeed"
                else:
                    catalog_url = "Public"
            except KeyError:
                # Assume Public if no catalog URL is found
                catalog_url = "Public"

            download_link = None
            integrity = None
            size = None

            for ia_package in product_info["Packages"]:
                # BUGFIX: check key presence BEFORE indexing ia_package["URL"]
                # (original indexed first, risking KeyError)
                if "URL" not in ia_package:
                    continue
                if "InstallAssistant.pkg" not in ia_package["URL"]:
                    continue
                if "IntegrityDataURL" not in ia_package:
                    continue

                download_link = ia_package["URL"]
                integrity = ia_package["IntegrityDataURL"]
                # BUGFIX: original unconditionally read ia_package["Size"]
                # right after defaulting to 0, raising KeyError when absent
                size = ia_package.get("Size", 0)

            # BUGFIX: original wrote `any([...]) is None`, which is always
            # False since any() returns a bool — incomplete entries were
            # never filtered out. Skip products missing any required field.
            if None in (version, build, download_link, size, integrity):
                continue

            available_apps.update({
                product: {
                    "Version": version,
                    "Build": build,
                    "Link": download_link,
                    "Size": size,
                    "integrity": integrity,
                    "Source": "Apple Inc.",
                    "Variant": catalog_url,
                }
            })

    available_apps = {k: v for k, v in sorted(available_apps.items(), key=lambda x: x[1]['Version'])}
    return available_apps
|
||||||
|
|
||||||
|
|
||||||
|
def _list_newest_installers_only(self) -> dict:
    """
    Returns a dictionary of the newest macOS installers only.
    Primarily used to avoid overwhelming the user with a list of
    installers that are not the newest version.

    Returns:
        dict: A dictionary of the newest macOS installers only.
    """

    if self.available_apps is None:
        return {}

    # Work on a shallow copy; entries are pruned in place below
    newest_apps: dict = self.available_apps.copy()
    supported_versions = ["10.13", "10.14", "10.15", "11", "12", "13"]


    for version in supported_versions:
        # Highest minor/security components seen among non-beta builds
        remote_version_minor = 0
        remote_version_security = 0
        os_builds = []

        # First determine the largest version
        for ia in newest_apps:
            if newest_apps[ia]["Version"].startswith(version):
                if newest_apps[ia]["Variant"] not in ["CustomerSeed", "DeveloperSeed", "PublicSeed"]:
                    # Drop the leading "10.x" (two components) or "<major>"
                    # (one component) so index [0] is the next version part.
                    # NOTE(review): a bare "10.13" version string would leave
                    # an empty list here and raise IndexError — confirm the
                    # catalog always carries at least one more component.
                    remote_version = newest_apps[ia]["Version"].split(".")
                    if remote_version[0] == "10":
                        remote_version.pop(0)
                        remote_version.pop(0)
                    else:
                        remote_version.pop(0)
                    if int(remote_version[0]) > remote_version_minor:
                        remote_version_minor = int(remote_version[0])
                        remote_version_security = 0 # Reset as new minor version found
                    if len(remote_version) > 1:
                        if int(remote_version[1]) > remote_version_security:
                            remote_version_security = int(remote_version[1])

        # Now remove all versions that are not the largest
        # (iterate over a snapshot since entries are popped mid-loop)
        for ia in list(newest_apps):
            # Don't use Beta builds to determine latest version
            if newest_apps[ia]["Variant"] in ["CustomerSeed", "DeveloperSeed", "PublicSeed"]:
                continue

            if newest_apps[ia]["Version"].startswith(version):
                remote_version = newest_apps[ia]["Version"].split(".")
                if remote_version[0] == "10":
                    remote_version.pop(0)
                    remote_version.pop(0)
                else:
                    remote_version.pop(0)
                if int(remote_version[0]) < remote_version_minor:
                    newest_apps.pop(ia)
                    continue
                if int(remote_version[0]) == remote_version_minor:
                    if len(remote_version) > 1:
                        if int(remote_version[1]) < remote_version_security:
                            newest_apps.pop(ia)
                            continue
                    else:
                        # No security component: older than any x.y.z release
                        if remote_version_security > 0:
                            newest_apps.pop(ia)
                            continue

                # Remove duplicate builds
                # ex. macOS 12.5.1 has 2 builds in the Software Update Catalog
                # ref: https://twitter.com/classicii_mrmac/status/1560357471654379522
                if newest_apps[ia]["Build"] in os_builds:
                    newest_apps.pop(ia)
                    continue

                os_builds.append(newest_apps[ia]["Build"])

    # Final passthrough
    # Remove Betas if there's a non-beta version available
    # (pop-then-break keeps the inner iteration safe after mutation)
    for ia in list(newest_apps):
        if newest_apps[ia]["Variant"] in ["CustomerSeed", "DeveloperSeed", "PublicSeed"]:
            for ia2 in newest_apps:
                if newest_apps[ia2]["Version"].split(".")[0] == newest_apps[ia]["Version"].split(".")[0] and newest_apps[ia2]["Variant"] not in ["CustomerSeed", "DeveloperSeed", "PublicSeed"]:
                    newest_apps.pop(ia)
                    break

    return newest_apps
|
||||||
|
|
||||||
|
|
||||||
|
class LocalInstallerCatalog:
|
||||||
|
"""
|
||||||
|
Finds all macOS installers on the local machine.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self) -> None:
    # Mapping of installer app name -> metadata, discovered by scanning
    # /Applications for bundles containing createinstallmedia
    self.available_apps: dict = self._list_local_macOS_installers()
|
||||||
|
|
||||||
|
|
||||||
|
def _list_local_macOS_installers(self) -> dict:
|
||||||
|
"""
|
||||||
|
Searches for macOS installers in /Applications
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
dict: A dictionary of macOS installers found on the local machine.
|
||||||
|
|
||||||
|
Example:
|
||||||
|
"Install macOS Big Sur Beta.app": {
|
||||||
|
"Short Name": "Big Sur Beta",
|
||||||
|
"Version": "11.0",
|
||||||
|
"Build": "20A5343i",
|
||||||
|
"Path": "/Applications/Install macOS Big Sur Beta.app",
|
||||||
|
},
|
||||||
|
etc...
|
||||||
|
"""
|
||||||
|
|
||||||
|
application_list: dict = {}
|
||||||
|
|
||||||
|
for application in Path(APPLICATION_SEARCH_PATH).iterdir():
|
||||||
|
# Certain Microsoft Applications have strange permissions disabling us from reading them
|
||||||
|
try:
|
||||||
|
if not (Path(APPLICATION_SEARCH_PATH) / Path(application) / Path("Contents/Resources/createinstallmedia")).exists():
|
||||||
|
continue
|
||||||
|
|
||||||
|
if not (Path(APPLICATION_SEARCH_PATH) / Path(application) / Path("Contents/Info.plist")).exists():
|
||||||
|
continue
|
||||||
|
except PermissionError:
|
||||||
|
continue
|
||||||
|
|
||||||
|
try:
|
||||||
|
application_info_plist = plistlib.load((Path(APPLICATION_SEARCH_PATH) / Path(application) / Path("Contents/Info.plist")).open("rb"))
|
||||||
|
except (PermissionError, TypeError, plistlib.InvalidFileException):
|
||||||
|
continue
|
||||||
|
|
||||||
|
if "DTPlatformVersion" not in application_info_plist:
|
||||||
|
continue
|
||||||
|
if "CFBundleDisplayName" not in application_info_plist:
|
||||||
|
continue
|
||||||
|
|
||||||
|
app_version = application_info_plist["DTPlatformVersion"]
|
||||||
|
clean_name = application_info_plist["CFBundleDisplayName"]
|
||||||
|
|
||||||
|
if "DTSDKBuild" in application_info_plist:
|
||||||
|
app_sdk = application_info_plist["DTSDKBuild"]
|
||||||
|
else:
|
||||||
|
app_sdk = "Unknown"
|
||||||
|
|
||||||
|
# app_version can sometimes report GM instead of the actual version
|
||||||
|
# This is a workaround to get the actual version
|
||||||
|
if app_version.startswith("GM"):
|
||||||
|
try:
|
||||||
|
app_version = int(app_sdk[:2])
|
||||||
|
if app_version < 20:
|
||||||
|
app_version = f"10.{app_version - 4}"
|
||||||
|
else:
|
||||||
|
app_version = f"{app_version - 9}.0"
|
||||||
|
except ValueError:
|
||||||
|
app_version = "Unknown"
|
||||||
|
|
||||||
|
# Check if App Version is High Sierra or newer
|
||||||
|
if os_data.os_conversion.os_to_kernel(app_version) < os_data.os_data.high_sierra:
|
||||||
|
continue
|
||||||
|
|
||||||
|
results = self._parse_sharedsupport_version(Path(APPLICATION_SEARCH_PATH) / Path(application)/ Path("Contents/SharedSupport/SharedSupport.dmg"))
|
||||||
|
if results[0] is not None:
|
||||||
|
app_sdk = results[0]
|
||||||
|
if results[1] is not None:
|
||||||
|
app_version = results[1]
|
||||||
|
|
||||||
|
application_list.update({
|
||||||
|
application: {
|
||||||
|
"Short Name": clean_name,
|
||||||
|
"Version": app_version,
|
||||||
|
"Build": app_sdk,
|
||||||
|
"Path": application,
|
||||||
|
}
|
||||||
|
})
|
||||||
|
|
||||||
|
# Sort Applications by version
|
||||||
|
application_list = {k: v for k, v in sorted(application_list.items(), key=lambda item: item[1]["Version"])}
|
||||||
|
return application_list
|
||||||
|
|
||||||
|
|
||||||
|
def _parse_sharedsupport_version(self, sharedsupport_path: Path) -> tuple:
|
||||||
|
"""
|
||||||
|
Determine true version of macOS installer by parsing SharedSupport.dmg
|
||||||
|
This is required due to Info.plist reporting the application version, not the OS version
|
||||||
|
|
||||||
|
Parameters:
|
||||||
|
sharedsupport_path (Path): Path to SharedSupport.dmg
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
tuple: Tuple containing the build and OS version
|
||||||
|
"""
|
||||||
|
|
||||||
|
detected_build: str = None
|
||||||
|
detected_os: str = None
|
||||||
|
|
||||||
|
if not sharedsupport_path.exists():
|
||||||
|
return (detected_build, detected_os)
|
||||||
|
|
||||||
|
if not sharedsupport_path.name.endswith(".dmg"):
|
||||||
|
return (detected_build, detected_os)
|
||||||
|
|
||||||
|
|
||||||
|
# Create temporary directory to extract SharedSupport.dmg to
|
||||||
|
with tempfile.TemporaryDirectory() as tmpdir:
|
||||||
|
|
||||||
|
output = subprocess.run(
|
||||||
|
[
|
||||||
|
"hdiutil", "attach", "-noverify", sharedsupport_path,
|
||||||
|
"-mountpoint", tmpdir,
|
||||||
|
"-nobrowse",
|
||||||
|
],
|
||||||
|
stdout=subprocess.PIPE, stderr=subprocess.STDOUT
|
||||||
|
)
|
||||||
|
|
||||||
|
if output.returncode != 0:
|
||||||
|
return (detected_build, detected_os)
|
||||||
|
|
||||||
|
ss_info = Path(SFR_SOFTWARE_UPDATE_PATH)
|
||||||
|
|
||||||
|
if Path(tmpdir / ss_info).exists():
|
||||||
|
plist = plistlib.load((tmpdir / ss_info).open("rb"))
|
||||||
|
if "Assets" in plist:
|
||||||
|
if "Build" in plist["Assets"][0]:
|
||||||
|
detected_build = plist["Assets"][0]["Build"]
|
||||||
|
if "OSVersion" in plist["Assets"][0]:
|
||||||
|
detected_os = plist["Assets"][0]["OSVersion"]
|
||||||
|
|
||||||
|
# Unmount SharedSupport.dmg
|
||||||
|
subprocess.run(["hdiutil", "detach", tmpdir], stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
|
||||||
|
|
||||||
|
return (detected_build, detected_os)
|
||||||
@@ -1,41 +1,70 @@
|
|||||||
# Copyright (C) 2020-2022, Dhinak G, Mykola Grymalyuk
|
# Copyright (C) 2020-2022, Dhinak G, Mykola Grymalyuk
|
||||||
|
|
||||||
import subprocess
|
|
||||||
import sys
|
import sys
|
||||||
from pathlib import Path
|
|
||||||
import time
|
import time
|
||||||
|
import logging
|
||||||
import threading
|
import threading
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
from resources.gui import gui_main
|
||||||
|
from resources import (
|
||||||
|
constants,
|
||||||
|
utilities,
|
||||||
|
device_probe,
|
||||||
|
os_probe,
|
||||||
|
defaults,
|
||||||
|
arguments,
|
||||||
|
reroute_payloads,
|
||||||
|
commit_info,
|
||||||
|
logging_handler
|
||||||
|
)
|
||||||
|
|
||||||
from resources import cli_menu, constants, utilities, device_probe, os_probe, defaults, arguments, install, tui_helpers, reroute_payloads, commit_info
|
|
||||||
from resources.build import build
|
|
||||||
from data import model_array
|
|
||||||
|
|
||||||
class OpenCoreLegacyPatcher:
|
class OpenCoreLegacyPatcher:
|
||||||
def __init__(self, launch_gui=False):
|
"""
|
||||||
print("- Loading...")
|
Initial entry point for starting OpenCore Legacy Patcher
|
||||||
self.constants = constants.Constants()
|
"""
|
||||||
self.constants.wxpython_variant = launch_gui
|
|
||||||
self.generate_base_data()
|
|
||||||
if utilities.check_cli_args() is None:
|
|
||||||
if launch_gui is True:
|
|
||||||
utilities.disable_cls()
|
|
||||||
from resources.gui import gui_main
|
|
||||||
gui_main.wx_python_gui(self.constants).main_menu(None)
|
|
||||||
else:
|
|
||||||
self.main_menu()
|
|
||||||
|
|
||||||
def generate_base_data(self):
|
def __init__(self) -> None:
|
||||||
self.constants.detected_os = os_probe.detect_kernel_major()
|
logging_handler.InitializeLoggingSupport()
|
||||||
self.constants.detected_os_minor = os_probe.detect_kernel_minor()
|
|
||||||
self.constants.detected_os_build = os_probe.detect_os_build()
|
self.constants: constants.Constants = constants.Constants()
|
||||||
self.constants.detected_os_version = os_probe.detect_os_version()
|
|
||||||
|
self.constants.wxpython_variant: bool = True
|
||||||
|
|
||||||
|
logging.info(f"- Loading OpenCore Legacy Patcher v{self.constants.patcher_version}...")
|
||||||
|
|
||||||
|
self._generate_base_data()
|
||||||
|
|
||||||
|
if utilities.check_cli_args() is None:
|
||||||
|
gui_main.wx_python_gui(self.constants).main_menu(None)
|
||||||
|
|
||||||
|
|
||||||
|
def _generate_base_data(self) -> None:
|
||||||
|
"""
|
||||||
|
Generate base data required for the patcher to run
|
||||||
|
"""
|
||||||
|
|
||||||
|
# Generate OS data
|
||||||
|
os_data = os_probe.OSProbe()
|
||||||
|
self.constants.detected_os = os_data.detect_kernel_major()
|
||||||
|
self.constants.detected_os_minor = os_data.detect_kernel_minor()
|
||||||
|
self.constants.detected_os_build = os_data.detect_os_build()
|
||||||
|
self.constants.detected_os_version = os_data.detect_os_version()
|
||||||
|
|
||||||
|
# Generate computer data
|
||||||
self.constants.computer = device_probe.Computer.probe()
|
self.constants.computer = device_probe.Computer.probe()
|
||||||
self.constants.recovery_status = utilities.check_recovery()
|
|
||||||
self.computer = self.constants.computer
|
self.computer = self.constants.computer
|
||||||
self.constants.booted_oc_disk = utilities.find_disk_off_uuid(utilities.clean_device_path(self.computer.opencore_path))
|
self.constants.booted_oc_disk = utilities.find_disk_off_uuid(utilities.clean_device_path(self.computer.opencore_path))
|
||||||
if self.constants.computer.firmware_vendor:
|
if self.constants.computer.firmware_vendor:
|
||||||
if self.constants.computer.firmware_vendor != "Apple":
|
if self.constants.computer.firmware_vendor != "Apple":
|
||||||
self.constants.host_is_hackintosh = True
|
self.constants.host_is_hackintosh = True
|
||||||
|
|
||||||
|
# Generate environment data
|
||||||
|
self.constants.recovery_status = utilities.check_recovery()
|
||||||
|
utilities.disable_cls()
|
||||||
|
|
||||||
|
# Generate binary data
|
||||||
launcher_script = None
|
launcher_script = None
|
||||||
launcher_binary = sys.executable
|
launcher_binary = sys.executable
|
||||||
if "python" in launcher_binary:
|
if "python" in launcher_binary:
|
||||||
@@ -45,83 +74,40 @@ class OpenCoreLegacyPatcher:
|
|||||||
launcher_script = launcher_script.replace("/resources/main.py", "/OpenCore-Patcher-GUI.command")
|
launcher_script = launcher_script.replace("/resources/main.py", "/OpenCore-Patcher-GUI.command")
|
||||||
self.constants.launcher_binary = launcher_binary
|
self.constants.launcher_binary = launcher_binary
|
||||||
self.constants.launcher_script = launcher_script
|
self.constants.launcher_script = launcher_script
|
||||||
self.constants.unpack_thread = threading.Thread(target=reroute_payloads.reroute_payloads(self.constants).setup_tmp_disk_image)
|
|
||||||
self.constants.unpack_thread.start()
|
|
||||||
self.constants.commit_info = commit_info.commit_info(self.constants.launcher_binary).generate_commit_info()
|
|
||||||
|
|
||||||
# Now that we have commit info, update nightly link
|
# Initialize working directory
|
||||||
|
self.constants.unpack_thread = threading.Thread(target=reroute_payloads.RoutePayloadDiskImage, args=(self.constants,))
|
||||||
|
self.constants.unpack_thread.start()
|
||||||
|
|
||||||
|
# Generate commit info
|
||||||
|
self.constants.commit_info = commit_info.ParseCommitInfo(self.constants.launcher_binary).generate_commit_info()
|
||||||
if self.constants.commit_info[0] not in ["Running from source", "Built from source"]:
|
if self.constants.commit_info[0] not in ["Running from source", "Built from source"]:
|
||||||
|
# Now that we have commit info, update nightly link
|
||||||
branch = self.constants.commit_info[0]
|
branch = self.constants.commit_info[0]
|
||||||
branch = branch.replace("refs/heads/", "")
|
branch = branch.replace("refs/heads/", "")
|
||||||
self.constants.installer_pkg_url_nightly = self.constants.installer_pkg_url_nightly.replace("main", branch)
|
self.constants.installer_pkg_url_nightly = self.constants.installer_pkg_url_nightly.replace("main", branch)
|
||||||
|
|
||||||
defaults.generate_defaults(self.computer.real_model, True, self.constants)
|
# Generate defaults
|
||||||
|
defaults.GenerateDefaults(self.computer.real_model, True, self.constants)
|
||||||
|
|
||||||
if utilities.check_cli_args() is not None:
|
if utilities.check_cli_args() is None:
|
||||||
print("- Detected arguments, switching to CLI mode")
|
logging.info(f"- No arguments present, loading {'GUI' if self.constants.wxpython_variant is True else 'TUI'} mode")
|
||||||
self.constants.gui_mode = True # Assumes no user interaction is required
|
return
|
||||||
ignore_args = ["--auto_patch", "--gui_patch", "--gui_unpatch"]
|
|
||||||
if not any(x in sys.argv for x in ignore_args):
|
|
||||||
self.constants.current_path = Path.cwd()
|
|
||||||
self.constants.cli_mode = True
|
|
||||||
if getattr(sys, "frozen", False) and hasattr(sys, "_MEIPASS"):
|
|
||||||
print("- Rerouting payloads location")
|
|
||||||
self.constants.payload_path = sys._MEIPASS / Path("payloads")
|
|
||||||
ignore_args = ignore_args.pop(0)
|
|
||||||
if not any(x in sys.argv for x in ignore_args):
|
|
||||||
while self.constants.unpack_thread.is_alive():
|
|
||||||
time.sleep(0.1)
|
|
||||||
arguments.arguments().parse_arguments(self.constants)
|
|
||||||
else:
|
|
||||||
print(f"- No arguments present, loading {'GUI' if self.constants.wxpython_variant is True else 'TUI'} mode")
|
|
||||||
|
|
||||||
|
logging.info("- Detected arguments, switching to CLI mode")
|
||||||
|
self.constants.gui_mode = True # Assumes no user interaction is required
|
||||||
|
|
||||||
def main_menu(self):
|
ignore_args = ["--auto_patch", "--gui_patch", "--gui_unpatch"]
|
||||||
response = None
|
if not any(x in sys.argv for x in ignore_args):
|
||||||
while not (response and response == -1):
|
self.constants.current_path = Path.cwd()
|
||||||
title = [
|
self.constants.cli_mode = True
|
||||||
f"OpenCore Legacy Patcher v{self.constants.patcher_version}",
|
if getattr(sys, "frozen", False) and hasattr(sys, "_MEIPASS"):
|
||||||
f"Selected Model: {self.constants.custom_model or self.computer.real_model}",
|
logging.info("- Rerouting payloads location")
|
||||||
]
|
self.constants.payload_path = sys._MEIPASS / Path("payloads")
|
||||||
|
ignore_args = ignore_args.pop(0)
|
||||||
|
|
||||||
if (self.constants.custom_model or self.computer.real_model) not in model_array.SupportedSMBIOS and self.constants.allow_oc_everywhere is False:
|
if not any(x in sys.argv for x in ignore_args):
|
||||||
in_between = [
|
while self.constants.unpack_thread.is_alive():
|
||||||
"Your model is not supported by this patcher for running unsupported OSes!",
|
time.sleep(0.1)
|
||||||
"",
|
|
||||||
'If you plan to create the USB for another machine, please select the \n"Change Model" option in the menu.',
|
|
||||||
"",
|
|
||||||
'If you want to run OCLP on a native Mac, please toggle \n"Allow OpenCore on native Models" in settings',
|
|
||||||
]
|
|
||||||
elif not self.constants.custom_model and self.computer.real_model == "iMac7,1" and "SSE4.1" not in self.computer.cpu.flags:
|
|
||||||
in_between = [
|
|
||||||
"Your model requires a CPU upgrade to a CPU supporting SSE4.1+ to be supported by this patcher!",
|
|
||||||
"",
|
|
||||||
f'If you plan to create the USB for another {self.computer.real_model} with SSE4.1+, please select the "Change Model" option in the menu.',
|
|
||||||
]
|
|
||||||
elif self.constants.custom_model == "iMac7,1":
|
|
||||||
in_between = ["This model is supported", "However please ensure the CPU has been upgraded to support SSE4.1+"]
|
|
||||||
else:
|
|
||||||
in_between = ["This model is supported"]
|
|
||||||
|
|
||||||
menu = tui_helpers.TUIMenu(title, "Please select an option: ", in_between=in_between, auto_number=True, top_level=True)
|
arguments.arguments(self.constants)
|
||||||
|
|
||||||
options = (
|
|
||||||
[["Build OpenCore", build.build_opencore(self.constants.custom_model or self.constants.computer.real_model, self.constants).build_opencore]]
|
|
||||||
if ((self.constants.custom_model or self.computer.real_model) in model_array.SupportedSMBIOS) or self.constants.allow_oc_everywhere is True
|
|
||||||
else []
|
|
||||||
) + [
|
|
||||||
["Install OpenCore to USB/internal drive", install.tui_disk_installation(self.constants).copy_efi],
|
|
||||||
["Post-Install Volume Patch", cli_menu.MenuOptions(self.constants.custom_model or self.computer.real_model, self.constants).PatchVolume],
|
|
||||||
["Change Model", cli_menu.MenuOptions(self.constants.custom_model or self.computer.real_model, self.constants).change_model],
|
|
||||||
["Patcher Settings", cli_menu.MenuOptions(self.constants.custom_model or self.computer.real_model, self.constants).patcher_settings],
|
|
||||||
["Installer Creation", cli_menu.MenuOptions(self.constants.custom_model or self.computer.real_model, self.constants).download_macOS],
|
|
||||||
["Credits", cli_menu.MenuOptions(self.constants.custom_model or self.computer.real_model, self.constants).credits],
|
|
||||||
]
|
|
||||||
|
|
||||||
for option in options:
|
|
||||||
menu.add_menu_option(option[0], function=option[1])
|
|
||||||
|
|
||||||
response = menu.start()
|
|
||||||
|
|
||||||
if getattr(sys, "frozen", False) and self.constants.recovery_status is False:
|
|
||||||
subprocess.run("""osascript -e 'tell application "Terminal" to close first window' & exit""", shell=True)
|
|
||||||
|
|||||||
388
resources/network_handler.py
Normal file
388
resources/network_handler.py
Normal file
@@ -0,0 +1,388 @@
|
|||||||
|
# Library dedicated to Network Handling tasks including downloading files
|
||||||
|
# Primarily based around the DownloadObject class, which provides a simple
|
||||||
|
# object for libraries to query download progress and status
|
||||||
|
# Copyright (C) 2023, Mykola Grymalyuk
|
||||||
|
|
||||||
|
import time
|
||||||
|
import requests
|
||||||
|
import threading
|
||||||
|
import logging
|
||||||
|
import enum
|
||||||
|
import hashlib
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
from resources import utilities
|
||||||
|
|
||||||
|
SESSION = requests.Session()
|
||||||
|
|
||||||
|
|
||||||
|
class DownloadStatus(enum.Enum):
|
||||||
|
"""
|
||||||
|
Enum for download status
|
||||||
|
"""
|
||||||
|
|
||||||
|
INACTIVE: str = "Inactive"
|
||||||
|
DOWNLOADING: str = "Downloading"
|
||||||
|
ERROR: str = "Error"
|
||||||
|
COMPLETE: str = "Complete"
|
||||||
|
|
||||||
|
|
||||||
|
class NetworkUtilities:
|
||||||
|
"""
|
||||||
|
Utilities for network related tasks, primarily used for downloading files
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self, url: str = None) -> None:
|
||||||
|
self.url: str = url
|
||||||
|
|
||||||
|
if self.url is None:
|
||||||
|
self.url = "https://github.com"
|
||||||
|
|
||||||
|
|
||||||
|
def verify_network_connection(self) -> bool:
|
||||||
|
"""
|
||||||
|
Verifies that the network is available
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
bool: True if network is available, False otherwise
|
||||||
|
"""
|
||||||
|
|
||||||
|
try:
|
||||||
|
requests.head(self.url, timeout=5, allow_redirects=True)
|
||||||
|
return True
|
||||||
|
except (
|
||||||
|
requests.exceptions.Timeout,
|
||||||
|
requests.exceptions.TooManyRedirects,
|
||||||
|
requests.exceptions.ConnectionError,
|
||||||
|
requests.exceptions.HTTPError
|
||||||
|
):
|
||||||
|
return False
|
||||||
|
|
||||||
|
def validate_link(self) -> bool:
|
||||||
|
"""
|
||||||
|
Check for 404 error
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
bool: True if link is valid, False otherwise
|
||||||
|
"""
|
||||||
|
try:
|
||||||
|
response = SESSION.head(self.url, timeout=5, allow_redirects=True)
|
||||||
|
if response.status_code == 404:
|
||||||
|
return False
|
||||||
|
else:
|
||||||
|
return True
|
||||||
|
except (
|
||||||
|
requests.exceptions.Timeout,
|
||||||
|
requests.exceptions.TooManyRedirects,
|
||||||
|
requests.exceptions.ConnectionError,
|
||||||
|
requests.exceptions.HTTPError
|
||||||
|
):
|
||||||
|
return False
|
||||||
|
|
||||||
|
|
||||||
|
class DownloadObject:
|
||||||
|
"""
|
||||||
|
Object for downloading files from the network
|
||||||
|
|
||||||
|
Usage:
|
||||||
|
>>> download_object = DownloadObject(url, path)
|
||||||
|
>>> download_object.download(display_progress=True)
|
||||||
|
|
||||||
|
>>> if download_object.is_active():
|
||||||
|
>>> print(download_object.get_percent())
|
||||||
|
|
||||||
|
>>> if not download_object.download_complete:
|
||||||
|
>>> print("Download failed")
|
||||||
|
|
||||||
|
>>> print("Download complete"")
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self, url: str, path: str) -> None:
|
||||||
|
self.url: str = url
|
||||||
|
self.status: str = DownloadStatus.INACTIVE
|
||||||
|
self.error_msg: str = ""
|
||||||
|
self.filename: str = self._get_filename()
|
||||||
|
|
||||||
|
self.filepath: Path = Path(path)
|
||||||
|
|
||||||
|
self.total_file_size: float = 0.0
|
||||||
|
self.downloaded_file_size: float = 0.0
|
||||||
|
self.start_time: float = time.time()
|
||||||
|
|
||||||
|
self.error: bool = False
|
||||||
|
self.should_stop: bool = False
|
||||||
|
self.download_complete: bool = False
|
||||||
|
self.has_network: bool = NetworkUtilities(self.url).verify_network_connection()
|
||||||
|
|
||||||
|
self.active_thread: threading.Thread = None
|
||||||
|
|
||||||
|
self.should_checksum: bool = False
|
||||||
|
|
||||||
|
self.checksum = None
|
||||||
|
self._checksum_storage: hash = None
|
||||||
|
|
||||||
|
if self.has_network:
|
||||||
|
self._populate_file_size()
|
||||||
|
|
||||||
|
|
||||||
|
def __del__(self) -> None:
|
||||||
|
self.stop()
|
||||||
|
|
||||||
|
|
||||||
|
def download(self, display_progress: bool = False, spawn_thread: bool = True, verify_checksum: bool = False) -> None:
|
||||||
|
"""
|
||||||
|
Download the file
|
||||||
|
|
||||||
|
Spawns a thread to download the file, so that the main thread can continue
|
||||||
|
Note sleep is disabled while the download is active
|
||||||
|
|
||||||
|
Parameters:
|
||||||
|
display_progress (bool): Display progress in console
|
||||||
|
spawn_thread (bool): Spawn a thread to download the file, otherwise download in the current thread
|
||||||
|
verify_checksum (bool): Calculate checksum of downloaded file if True
|
||||||
|
|
||||||
|
"""
|
||||||
|
self.status = DownloadStatus.DOWNLOADING
|
||||||
|
logging.info(f"- Starting download: {self.filename}")
|
||||||
|
if spawn_thread:
|
||||||
|
if self.active_thread:
|
||||||
|
logging.error("- Download already in progress")
|
||||||
|
return
|
||||||
|
self.should_checksum = verify_checksum
|
||||||
|
self.active_thread = threading.Thread(target=self._download, args=(display_progress,))
|
||||||
|
self.active_thread.start()
|
||||||
|
return
|
||||||
|
|
||||||
|
self.should_checksum = verify_checksum
|
||||||
|
self._download(display_progress)
|
||||||
|
|
||||||
|
|
||||||
|
def download_simple(self, verify_checksum: bool = False) -> str or bool:
|
||||||
|
"""
|
||||||
|
Alternative to download(), mimics utilities.py's old download_file() function
|
||||||
|
|
||||||
|
Parameters:
|
||||||
|
verify_checksum (bool): Return checksum of downloaded file if True
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
If verify_checksum is True, returns the checksum of the downloaded file
|
||||||
|
Otherwise, returns True if download was successful, False otherwise
|
||||||
|
"""
|
||||||
|
|
||||||
|
if verify_checksum:
|
||||||
|
self.should_checksum = True
|
||||||
|
self.checksum = hashlib.sha256()
|
||||||
|
|
||||||
|
self.download(spawn_thread=False)
|
||||||
|
|
||||||
|
if not self.download_complete:
|
||||||
|
return False
|
||||||
|
|
||||||
|
return self.checksum.hexdigest() if self.checksum else True
|
||||||
|
|
||||||
|
|
||||||
|
def _get_filename(self) -> str:
|
||||||
|
"""
|
||||||
|
Get the filename from the URL
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
str: Filename
|
||||||
|
"""
|
||||||
|
|
||||||
|
return Path(self.url).name
|
||||||
|
|
||||||
|
|
||||||
|
def _populate_file_size(self) -> None:
|
||||||
|
"""
|
||||||
|
Get the file size of the file to be downloaded
|
||||||
|
|
||||||
|
If unable to get file size, set to zero
|
||||||
|
"""
|
||||||
|
|
||||||
|
try:
|
||||||
|
result = SESSION.head(self.url, allow_redirects=True, timeout=5)
|
||||||
|
if 'Content-Length' in result.headers:
|
||||||
|
self.total_file_size = float(result.headers['Content-Length'])
|
||||||
|
else:
|
||||||
|
raise Exception("Content-Length missing from headers")
|
||||||
|
except Exception as e:
|
||||||
|
logging.error(f"- Error determining file size {self.url}: {str(e)}")
|
||||||
|
logging.error("- Assuming file size is 0")
|
||||||
|
self.total_file_size = 0.0
|
||||||
|
|
||||||
|
|
||||||
|
def _update_checksum(self, chunk: bytes) -> None:
|
||||||
|
"""
|
||||||
|
Update checksum with new chunk
|
||||||
|
|
||||||
|
Parameters:
|
||||||
|
chunk (bytes): Chunk to update checksum with
|
||||||
|
"""
|
||||||
|
self._checksum_storage.update(chunk)
|
||||||
|
|
||||||
|
|
||||||
|
def _prepare_working_directory(self, path: Path) -> bool:
|
||||||
|
"""
|
||||||
|
Validates working enviroment, including free space and removing existing files
|
||||||
|
|
||||||
|
Parameters:
|
||||||
|
path (str): Path to the file
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
bool: True if successful, False if not
|
||||||
|
"""
|
||||||
|
|
||||||
|
try:
|
||||||
|
if Path(path).exists():
|
||||||
|
logging.info(f"- Deleting existing file: {path}")
|
||||||
|
Path(path).unlink()
|
||||||
|
return True
|
||||||
|
|
||||||
|
if not Path(path).parent.exists():
|
||||||
|
logging.info(f"- Creating directory: {Path(path).parent}")
|
||||||
|
Path(path).parent.mkdir(parents=True, exist_ok=True)
|
||||||
|
|
||||||
|
available_space = utilities.get_free_space(Path(path).parent)
|
||||||
|
if self.total_file_size > available_space:
|
||||||
|
msg = f"- Not enough free space to download {self.filename}, need {utilities.human_fmt(self.total_file_size)}, have {utilities.human_fmt(available_space)}"
|
||||||
|
logging.error(msg)
|
||||||
|
raise Exception(msg)
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
self.error = True
|
||||||
|
self.error_msg = str(e)
|
||||||
|
self.status = DownloadStatus.ERROR
|
||||||
|
logging.error(f"- Error preparing working directory {path}: {self.error_msg}")
|
||||||
|
return False
|
||||||
|
|
||||||
|
return True
|
||||||
|
|
||||||
|
|
||||||
|
def _download(self, display_progress: bool = False) -> None:
|
||||||
|
"""
|
||||||
|
Download the file
|
||||||
|
|
||||||
|
Libraries should invoke download() instead of this method
|
||||||
|
|
||||||
|
Parameters:
|
||||||
|
display_progress (bool): Display progress in console
|
||||||
|
"""
|
||||||
|
|
||||||
|
utilities.disable_sleep_while_running()
|
||||||
|
|
||||||
|
try:
|
||||||
|
if not self.has_network:
|
||||||
|
raise Exception("No network connection")
|
||||||
|
|
||||||
|
if self._prepare_working_directory(self.filepath) is False:
|
||||||
|
raise Exception(self.error_msg)
|
||||||
|
|
||||||
|
response = SESSION.get(self.url, stream=True, timeout=10)
|
||||||
|
|
||||||
|
with open(self.filepath, 'wb') as file:
|
||||||
|
for i, chunk in enumerate(response.iter_content(1024 * 1024 * 4)):
|
||||||
|
if self.should_stop:
|
||||||
|
raise Exception("Download stopped")
|
||||||
|
if chunk:
|
||||||
|
file.write(chunk)
|
||||||
|
self.downloaded_file_size += len(chunk)
|
||||||
|
if self.should_checksum:
|
||||||
|
self._update_checksum(chunk)
|
||||||
|
if display_progress and i % 100:
|
||||||
|
# Don't use logging here, as we'll be spamming the log file
|
||||||
|
if self.total_file_size == 0.0:
|
||||||
|
print(f"- Downloaded {utilities.human_fmt(self.downloaded_file_size)} of {self.filename}")
|
||||||
|
else:
|
||||||
|
print(f"- Downloaded {self.get_percent():.2f}% of {self.filename} ({utilities.human_fmt(self.get_speed())}/s) ({self.get_time_remaining():.2f} seconds remaining)")
|
||||||
|
self.download_complete = True
|
||||||
|
logging.info(f"- Download complete: {self.filename}")
|
||||||
|
logging.info("- Stats:")
|
||||||
|
logging.info(f"- Downloaded size: {utilities.human_fmt(self.downloaded_file_size)}")
|
||||||
|
logging.info(f"- Time elapsed: {(time.time() - self.start_time):.2f} seconds")
|
||||||
|
logging.info(f"- Speed: {utilities.human_fmt(self.downloaded_file_size / (time.time() - self.start_time))}/s")
|
||||||
|
logging.info(f"- Location: {self.filepath}")
|
||||||
|
except Exception as e:
|
||||||
|
self.error = True
|
||||||
|
self.error_msg = str(e)
|
||||||
|
self.status = DownloadStatus.ERROR
|
||||||
|
logging.error(f"- Error downloading {self.url}: {self.error_msg}")
|
||||||
|
|
||||||
|
self.status = DownloadStatus.COMPLETE
|
||||||
|
utilities.enable_sleep_after_running()
|
||||||
|
|
||||||
|
|
||||||
|
def get_percent(self) -> float:
|
||||||
|
"""
|
||||||
|
Query the download percent
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
float: The download percent, or -1 if unknown
|
||||||
|
"""
|
||||||
|
|
||||||
|
if self.total_file_size == 0.0:
|
||||||
|
logging.error("- File size is 0, cannot calculate percent")
|
||||||
|
return -1
|
||||||
|
return self.downloaded_file_size / self.total_file_size * 100
|
||||||
|
|
||||||
|
|
||||||
|
def get_speed(self) -> float:
|
||||||
|
"""
|
||||||
|
Query the download speed
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
float: The download speed in bytes per second
|
||||||
|
"""
|
||||||
|
|
||||||
|
return self.downloaded_file_size / (time.time() - self.start_time)
|
||||||
|
|
||||||
|
|
||||||
|
def get_time_remaining(self) -> float:
|
||||||
|
"""
|
||||||
|
Query the time remaining for the download
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
float: The time remaining in seconds, or -1 if unknown
|
||||||
|
"""
|
||||||
|
|
||||||
|
if self.total_file_size == 0.0:
|
||||||
|
logging.error("- File size is 0, cannot calculate time remaining")
|
||||||
|
return -1
|
||||||
|
return (self.total_file_size - self.downloaded_file_size) / self.get_speed()
|
||||||
|
|
||||||
|
|
||||||
|
def get_file_size(self) -> float:
|
||||||
|
"""
|
||||||
|
Query the file size of the file to be downloaded
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
float: The file size in bytes, or 0.0 if unknown
|
||||||
|
"""
|
||||||
|
|
||||||
|
return self.total_file_size
|
||||||
|
|
||||||
|
|
||||||
|
def is_active(self) -> bool:
|
||||||
|
"""
|
||||||
|
Query if the download is active
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
boolean: True if active, False if completed, failed, stopped, or inactive
|
||||||
|
"""
|
||||||
|
|
||||||
|
if self.status == DownloadStatus.DOWNLOADING:
|
||||||
|
return True
|
||||||
|
return False
|
||||||
|
|
||||||
|
|
||||||
|
def stop(self) -> None:
|
||||||
|
"""
|
||||||
|
Stop the download
|
||||||
|
|
||||||
|
If the download is active, this function will hold the thread until stopped
|
||||||
|
"""
|
||||||
|
|
||||||
|
self.should_stop = True
|
||||||
|
if self.active_thread:
|
||||||
|
while self.active_thread.is_alive():
|
||||||
|
time.sleep(1)
|
||||||
@@ -5,31 +5,77 @@ import subprocess
|
|||||||
import plistlib
|
import plistlib
|
||||||
|
|
||||||
|
|
||||||
def detect_kernel_major():
|
class OSProbe:
|
||||||
# Return Major Kernel Version
|
"""
|
||||||
# Example Output: 21 (integer)
|
Library for querying OS information specific to macOS
|
||||||
return int(platform.uname().release.partition(".")[0])
|
"""
|
||||||
|
|
||||||
|
def __init__(self) -> None:
|
||||||
|
self.uname_data = platform.uname()
|
||||||
|
|
||||||
|
|
||||||
def detect_kernel_minor():
|
def detect_kernel_major(self) -> int:
|
||||||
# Return Minor Kernel Version
|
"""
|
||||||
# Example Output: 1 (integer)
|
Detect the booted major kernel version
|
||||||
return int(platform.uname().release.partition(".")[2].partition(".")[0])
|
|
||||||
|
Returns:
|
||||||
|
int: Major kernel version (ex. 21, from 21.1.0)
|
||||||
|
"""
|
||||||
|
|
||||||
|
return int(self.uname_data.release.partition(".")[0])
|
||||||
|
|
||||||
|
|
||||||
def detect_os_version():
|
def detect_kernel_minor(self) -> int:
|
||||||
# Return OS version
|
"""
|
||||||
# Example Output: 12.0 (string)
|
Detect the booted minor kernel version
|
||||||
return subprocess.run("sw_vers -productVersion".split(), stdout=subprocess.PIPE).stdout.decode().strip()
|
|
||||||
|
Returns:
|
||||||
|
int: Minor kernel version (ex. 1, from 21.1.0)
|
||||||
|
"""
|
||||||
|
|
||||||
|
return int(self.uname_data.release.partition(".")[2].partition(".")[0])
|
||||||
|
|
||||||
|
|
||||||
def detect_os_build():
|
def detect_os_version(self) -> str:
|
||||||
# Return OS build
|
"""
|
||||||
# Example Output: 21A5522h (string)
|
Detect the booted OS version
|
||||||
|
|
||||||
# With macOS 13.2, Apple implemented the Rapid Security Response system which
|
Returns:
|
||||||
# will change the reported build to the RSR version and not the original host
|
str: OS version (ex. 12.0)
|
||||||
# To get the proper versions:
|
"""
|
||||||
# - Host: /System/Library/CoreServices/SystemVersion.plist
|
|
||||||
# - RSR: /System/Volumes/Preboot/Cryptexes/OS/System/Library/CoreServices/SystemVersion.plist
|
result = subprocess.run(["sw_vers", "-productVersion"], stdout=subprocess.PIPE)
|
||||||
return plistlib.load(open("/System/Library/CoreServices/SystemVersion.plist", "rb"))["ProductBuildVersion"]
|
if result.returncode != 0:
|
||||||
|
raise RuntimeError("Failed to detect OS version")
|
||||||
|
|
||||||
|
return result.stdout.decode().strip()
|
||||||
|
|
||||||
|
|
||||||
|
def detect_os_build(self, rsr: bool = False) -> str:
|
||||||
|
"""
|
||||||
|
Detect the booted OS build
|
||||||
|
|
||||||
|
Implementation note:
|
||||||
|
With macOS 13.2, Apple implemented the Rapid Security Response system which
|
||||||
|
will change the reported build to the RSR version and not the original host
|
||||||
|
|
||||||
|
To get the proper versions:
|
||||||
|
- Host: /System/Library/CoreServices/SystemVersion.plist
|
||||||
|
- RSR: /System/Volumes/Preboot/Cryptexes/OS/System/Library/CoreServices/SystemVersion.plist
|
||||||
|
|
||||||
|
|
||||||
|
Parameters:
|
||||||
|
rsr (bool): Whether to use the RSR version of the build
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
str: OS build (ex. 21A5522h)
|
||||||
|
"""
|
||||||
|
|
||||||
|
file_path = "/System/Library/CoreServices/SystemVersion.plist"
|
||||||
|
if rsr is True:
|
||||||
|
file_path = f"/System/Volumes/Preboot/Cryptexes/OS{file_path}"
|
||||||
|
|
||||||
|
try:
|
||||||
|
return plistlib.load(open(file_path, "rb"))["ProductBuildVersion"]
|
||||||
|
except Exception as e:
|
||||||
|
raise RuntimeError(f"Failed to detect OS build: {e}")
|
||||||
|
|||||||
@@ -7,22 +7,33 @@ from pathlib import Path
|
|||||||
import subprocess
|
import subprocess
|
||||||
import tempfile
|
import tempfile
|
||||||
import atexit
|
import atexit
|
||||||
|
import logging
|
||||||
|
|
||||||
class reroute_payloads:
|
from resources import constants
|
||||||
def __init__(self, constants):
|
|
||||||
self.constants = constants
|
class RoutePayloadDiskImage:
|
||||||
|
|
||||||
|
def __init__(self, global_constants: constants.Constants) -> None:
|
||||||
|
self.constants: constants.Constants = global_constants
|
||||||
|
|
||||||
|
self._setup_tmp_disk_image()
|
||||||
|
|
||||||
|
|
||||||
|
def _setup_tmp_disk_image(self) -> None:
|
||||||
|
"""
|
||||||
|
Initialize temp directory and mount payloads.dmg
|
||||||
|
Create overlay for patcher to write to
|
||||||
|
|
||||||
|
Currently only applicable for GUI variant and not running from source
|
||||||
|
"""
|
||||||
|
|
||||||
def setup_tmp_disk_image(self):
|
|
||||||
# Create a temp directory to mount the payloads.dmg
|
|
||||||
# Then reroute r/w to this new temp directory
|
|
||||||
# Currently only applicable for GUI variant
|
|
||||||
if self.constants.wxpython_variant is True and not self.constants.launcher_script:
|
if self.constants.wxpython_variant is True and not self.constants.launcher_script:
|
||||||
print("- Running in Binary GUI mode, switching to tmp directory")
|
logging.info("- Running in Binary GUI mode, switching to tmp directory")
|
||||||
self.temp_dir = tempfile.TemporaryDirectory()
|
self.temp_dir = tempfile.TemporaryDirectory()
|
||||||
print(f"- New payloads location: {self.temp_dir.name}")
|
logging.info(f"- New payloads location: {self.temp_dir.name}")
|
||||||
print("- Creating payloads directory")
|
logging.info("- Creating payloads directory")
|
||||||
Path(self.temp_dir.name / Path("payloads")).mkdir(parents=True, exist_ok=True)
|
Path(self.temp_dir.name / Path("payloads")).mkdir(parents=True, exist_ok=True)
|
||||||
self.unmount_active_dmgs(unmount_all_active=False)
|
self._unmount_active_dmgs(unmount_all_active=False)
|
||||||
output = subprocess.run(
|
output = subprocess.run(
|
||||||
[
|
[
|
||||||
"hdiutil", "attach", "-noverify", f"{self.constants.payload_path}.dmg",
|
"hdiutil", "attach", "-noverify", f"{self.constants.payload_path}.dmg",
|
||||||
@@ -34,19 +45,28 @@ class reroute_payloads:
|
|||||||
stdout=subprocess.PIPE, stderr=subprocess.STDOUT
|
stdout=subprocess.PIPE, stderr=subprocess.STDOUT
|
||||||
)
|
)
|
||||||
if output.returncode == 0:
|
if output.returncode == 0:
|
||||||
print("- Mounted payloads.dmg")
|
logging.info("- Mounted payloads.dmg")
|
||||||
self.constants.current_path = Path(self.temp_dir.name)
|
self.constants.current_path = Path(self.temp_dir.name)
|
||||||
self.constants.payload_path = Path(self.temp_dir.name) / Path("payloads")
|
self.constants.payload_path = Path(self.temp_dir.name) / Path("payloads")
|
||||||
atexit.register(self.unmount_active_dmgs, unmount_all_active=False)
|
atexit.register(self._unmount_active_dmgs, unmount_all_active=False)
|
||||||
else:
|
else:
|
||||||
print("- Failed to mount payloads.dmg")
|
logging.info("- Failed to mount payloads.dmg")
|
||||||
print(f"Output: {output.stdout.decode()}")
|
logging.info(f"Output: {output.stdout.decode()}")
|
||||||
print(f"Return Code: {output.returncode}")
|
logging.info(f"Return Code: {output.returncode}")
|
||||||
|
|
||||||
|
|
||||||
|
def _unmount_active_dmgs(self, unmount_all_active=True) -> None:
|
||||||
|
"""
|
||||||
|
Unmounts disk images associated with OCLP
|
||||||
|
|
||||||
|
Finds all DMGs that are mounted, and forcefully unmount them
|
||||||
|
If our disk image was previously mounted, we need to unmount it to use again
|
||||||
|
This can happen if we crash during a previous secession, however 'atexit' class should hopefully avoid this
|
||||||
|
|
||||||
|
Parameters:
|
||||||
|
unmount_all_active (bool): If True, unmount all active DMGs, otherwise only unmount our own DMG
|
||||||
|
"""
|
||||||
|
|
||||||
def unmount_active_dmgs(self, unmount_all_active=True):
|
|
||||||
# Find all DMGs that are mounted, and forcefully unmount them
|
|
||||||
# If our disk image was previously mounted, we need to unmount it to use again
|
|
||||||
# This can happen if we crash during a previous secession, however 'atexit' class should hopefully avoid this
|
|
||||||
dmg_info = subprocess.run(["hdiutil", "info", "-plist"], stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
|
dmg_info = subprocess.run(["hdiutil", "info", "-plist"], stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
|
||||||
dmg_info = plistlib.loads(dmg_info.stdout)
|
dmg_info = plistlib.loads(dmg_info.stdout)
|
||||||
|
|
||||||
@@ -56,13 +76,13 @@ class reroute_payloads:
|
|||||||
# Check that only our personal payloads.dmg is unmounted
|
# Check that only our personal payloads.dmg is unmounted
|
||||||
if "shadow-path" in image:
|
if "shadow-path" in image:
|
||||||
if self.temp_dir.name in image["shadow-path"]:
|
if self.temp_dir.name in image["shadow-path"]:
|
||||||
print("- Unmounting personal payloads.dmg")
|
logging.info("- Unmounting personal payloads.dmg")
|
||||||
subprocess.run(
|
subprocess.run(
|
||||||
["hdiutil", "detach", image["system-entities"][0]["dev-entry"], "-force"],
|
["hdiutil", "detach", image["system-entities"][0]["dev-entry"], "-force"],
|
||||||
stdout=subprocess.PIPE, stderr=subprocess.STDOUT
|
stdout=subprocess.PIPE, stderr=subprocess.STDOUT
|
||||||
)
|
)
|
||||||
else:
|
else:
|
||||||
print(f"- Unmounting payloads.dmg at: {image['system-entities'][0]['dev-entry']}")
|
logging.info(f"- Unmounting payloads.dmg at: {image['system-entities'][0]['dev-entry']}")
|
||||||
subprocess.run(
|
subprocess.run(
|
||||||
["hdiutil", "detach", image["system-entities"][0]["dev-entry"], "-force"],
|
["hdiutil", "detach", image["system-entities"][0]["dev-entry"], "-force"],
|
||||||
stdout=subprocess.PIPE, stderr=subprocess.STDOUT
|
stdout=subprocess.PIPE, stderr=subprocess.STDOUT
|
||||||
|
|||||||
155
resources/run.py
155
resources/run.py
@@ -1,155 +0,0 @@
|
|||||||
# Module for running processes with real time output
|
|
||||||
# Written by CorpNewt
|
|
||||||
# Source: https://github.com/corpnewt/pymodules/blob/884c3de15b6a2570afde52fe8a14a3e946ffb18a/run.py
|
|
||||||
|
|
||||||
import sys, subprocess, time, threading, shlex
|
|
||||||
try:
|
|
||||||
from Queue import Queue, Empty
|
|
||||||
except:
|
|
||||||
from queue import Queue, Empty
|
|
||||||
|
|
||||||
ON_POSIX = 'posix' in sys.builtin_module_names
|
|
||||||
|
|
||||||
class Run:
|
|
||||||
|
|
||||||
def __init__(self):
|
|
||||||
return
|
|
||||||
|
|
||||||
def _read_output(self, pipe, q):
|
|
||||||
try:
|
|
||||||
for line in iter(lambda: pipe.read(1), b''):
|
|
||||||
q.put(line)
|
|
||||||
except ValueError:
|
|
||||||
pass
|
|
||||||
pipe.close()
|
|
||||||
|
|
||||||
def _create_thread(self, output):
|
|
||||||
# Creates a new queue and thread object to watch based on the output pipe sent
|
|
||||||
q = Queue()
|
|
||||||
t = threading.Thread(target=self._read_output, args=(output, q))
|
|
||||||
t.daemon = True
|
|
||||||
return (q,t)
|
|
||||||
|
|
||||||
def _stream_output(self, comm, shell = False):
|
|
||||||
output = error = ""
|
|
||||||
p = None
|
|
||||||
try:
|
|
||||||
if shell and type(comm) is list:
|
|
||||||
comm = " ".join(shlex.quote(x) for x in comm)
|
|
||||||
if not shell and type(comm) is str:
|
|
||||||
comm = shlex.split(comm)
|
|
||||||
p = subprocess.Popen(comm, shell=shell, stdout=subprocess.PIPE, stderr=subprocess.PIPE, bufsize=0, universal_newlines=True, close_fds=ON_POSIX)
|
|
||||||
# Setup the stdout thread/queue
|
|
||||||
q,t = self._create_thread(p.stdout)
|
|
||||||
qe,te = self._create_thread(p.stderr)
|
|
||||||
# Start both threads
|
|
||||||
t.start()
|
|
||||||
te.start()
|
|
||||||
|
|
||||||
while True:
|
|
||||||
c = z = ""
|
|
||||||
try: c = q.get_nowait()
|
|
||||||
except Empty: pass
|
|
||||||
else:
|
|
||||||
sys.stdout.write(c)
|
|
||||||
output += c
|
|
||||||
sys.stdout.flush()
|
|
||||||
try: z = qe.get_nowait()
|
|
||||||
except Empty: pass
|
|
||||||
else:
|
|
||||||
sys.stderr.write(z)
|
|
||||||
error += z
|
|
||||||
sys.stderr.flush()
|
|
||||||
if not c==z=="": continue # Keep going until empty
|
|
||||||
# No output - see if still running
|
|
||||||
p.poll()
|
|
||||||
if p.returncode != None:
|
|
||||||
# Subprocess ended
|
|
||||||
break
|
|
||||||
# No output, but subprocess still running - stall for 20ms
|
|
||||||
time.sleep(0.02)
|
|
||||||
|
|
||||||
o, e = p.communicate()
|
|
||||||
return (output+o, error+e, p.returncode)
|
|
||||||
except:
|
|
||||||
if p:
|
|
||||||
try: o, e = p.communicate()
|
|
||||||
except: o = e = ""
|
|
||||||
return (output+o, error+e, p.returncode)
|
|
||||||
return ("", "Command not found!", 1)
|
|
||||||
|
|
||||||
def _decode(self, value, encoding="utf-8", errors="ignore"):
|
|
||||||
# Helper method to only decode if bytes type
|
|
||||||
if sys.version_info >= (3,0) and isinstance(value, bytes):
|
|
||||||
return value.decode(encoding,errors)
|
|
||||||
return value
|
|
||||||
|
|
||||||
def _run_command(self, comm, shell = False):
|
|
||||||
c = None
|
|
||||||
try:
|
|
||||||
if shell and type(comm) is list:
|
|
||||||
comm = " ".join(shlex.quote(x) for x in comm)
|
|
||||||
if not shell and type(comm) is str:
|
|
||||||
comm = shlex.split(comm)
|
|
||||||
p = subprocess.Popen(comm, shell=shell, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
|
|
||||||
c = p.communicate()
|
|
||||||
except:
|
|
||||||
if c == None:
|
|
||||||
return ("", "Command not found!", 1)
|
|
||||||
return (self._decode(c[0]), self._decode(c[1]), p.returncode)
|
|
||||||
|
|
||||||
def run(self, command_list, leave_on_fail = False):
|
|
||||||
# Command list should be an array of dicts
|
|
||||||
if type(command_list) is dict:
|
|
||||||
# We only have one command
|
|
||||||
command_list = [command_list]
|
|
||||||
output_list = []
|
|
||||||
for comm in command_list:
|
|
||||||
args = comm.get("args", [])
|
|
||||||
shell = comm.get("shell", False)
|
|
||||||
stream = comm.get("stream", False)
|
|
||||||
sudo = comm.get("sudo", False)
|
|
||||||
stdout = comm.get("stdout", False)
|
|
||||||
stderr = comm.get("stderr", False)
|
|
||||||
mess = comm.get("message", None)
|
|
||||||
show = comm.get("show", False)
|
|
||||||
|
|
||||||
if not mess == None:
|
|
||||||
print(mess)
|
|
||||||
|
|
||||||
if not len(args):
|
|
||||||
# nothing to process
|
|
||||||
continue
|
|
||||||
if sudo:
|
|
||||||
# Check if we have sudo
|
|
||||||
out = self._run_command(["which", "sudo"])
|
|
||||||
if "sudo" in out[0]:
|
|
||||||
# Can sudo
|
|
||||||
if type(args) is list:
|
|
||||||
args.insert(0, out[0].replace("\n", "")) # add to start of list
|
|
||||||
elif type(args) is str:
|
|
||||||
args = out[0].replace("\n", "") + " " + args # add to start of string
|
|
||||||
|
|
||||||
if show:
|
|
||||||
print(" ".join(args))
|
|
||||||
|
|
||||||
if stream:
|
|
||||||
# Stream it!
|
|
||||||
out = self._stream_output(args, shell)
|
|
||||||
else:
|
|
||||||
# Just run and gather output
|
|
||||||
out = self._run_command(args, shell)
|
|
||||||
if stdout and len(out[0]):
|
|
||||||
print(out[0])
|
|
||||||
if stderr and len(out[1]):
|
|
||||||
print(out[1])
|
|
||||||
# Append output
|
|
||||||
output_list.append(out)
|
|
||||||
# Check for errors
|
|
||||||
if leave_on_fail and out[2] != 0:
|
|
||||||
# Got an error - leave
|
|
||||||
break
|
|
||||||
if len(output_list) == 1:
|
|
||||||
# We only ran one command - just return that output
|
|
||||||
return output_list[0]
|
|
||||||
return output_list
|
|
||||||
@@ -37,17 +37,18 @@ import shutil
|
|||||||
import subprocess
|
import subprocess
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
from datetime import datetime
|
from datetime import datetime
|
||||||
|
import logging
|
||||||
|
|
||||||
from resources import constants, utilities, kdk_handler
|
from resources import constants, utilities, kdk_handler
|
||||||
from resources.sys_patch import sys_patch_download, sys_patch_detect, sys_patch_auto, sys_patch_helpers
|
from resources.sys_patch import sys_patch_detect, sys_patch_auto, sys_patch_helpers
|
||||||
|
|
||||||
from data import os_data
|
from data import os_data
|
||||||
|
|
||||||
|
|
||||||
class PatchSysVolume:
|
class PatchSysVolume:
|
||||||
def __init__(self, model, versions, hardware_details=None):
|
def __init__(self, model: str, global_constants: constants.Constants, hardware_details: list = None):
|
||||||
self.model = model
|
self.model = model
|
||||||
self.constants: constants.Constants() = versions
|
self.constants: constants.Constants = global_constants
|
||||||
self.computer = self.constants.computer
|
self.computer = self.constants.computer
|
||||||
self.root_mount_path = None
|
self.root_mount_path = None
|
||||||
self.root_supports_snapshot = utilities.check_if_root_is_apfs_snapshot()
|
self.root_supports_snapshot = utilities.check_if_root_is_apfs_snapshot()
|
||||||
@@ -60,9 +61,9 @@ class PatchSysVolume:
|
|||||||
# GUI will detect hardware patches before starting PatchSysVolume()
|
# GUI will detect hardware patches before starting PatchSysVolume()
|
||||||
# However the TUI will not, so allow for data to be passed in manually avoiding multiple calls
|
# However the TUI will not, so allow for data to be passed in manually avoiding multiple calls
|
||||||
if hardware_details is None:
|
if hardware_details is None:
|
||||||
hardware_details = sys_patch_detect.detect_root_patch(self.computer.real_model, self.constants).detect_patch_set()
|
hardware_details = sys_patch_detect.DetectRootPatch(self.computer.real_model, self.constants).detect_patch_set()
|
||||||
self.hardware_details = hardware_details
|
self.hardware_details = hardware_details
|
||||||
self.init_pathing(custom_root_mount_path=None, custom_data_mount_path=None)
|
self._init_pathing(custom_root_mount_path=None, custom_data_mount_path=None)
|
||||||
|
|
||||||
self.skip_root_kmutil_requirement = self.hardware_details["Settings: Supports Auxiliary Cache"]
|
self.skip_root_kmutil_requirement = self.hardware_details["Settings: Supports Auxiliary Cache"]
|
||||||
|
|
||||||
@@ -71,7 +72,15 @@ class PatchSysVolume:
|
|||||||
if Path(self.constants.payload_local_binaries_root_path).exists():
|
if Path(self.constants.payload_local_binaries_root_path).exists():
|
||||||
shutil.rmtree(self.constants.payload_local_binaries_root_path)
|
shutil.rmtree(self.constants.payload_local_binaries_root_path)
|
||||||
|
|
||||||
def init_pathing(self, custom_root_mount_path=None, custom_data_mount_path=None):
|
def _init_pathing(self, custom_root_mount_path: Path = None, custom_data_mount_path: Path = None):
|
||||||
|
"""
|
||||||
|
Initializes the pathing for root volume patching
|
||||||
|
|
||||||
|
Parameters:
|
||||||
|
custom_root_mount_path (Path): Custom path to mount the root volume
|
||||||
|
custom_data_mount_path (Path): Custom path to mount the data volume
|
||||||
|
|
||||||
|
"""
|
||||||
if custom_root_mount_path and custom_data_mount_path:
|
if custom_root_mount_path and custom_data_mount_path:
|
||||||
self.mount_location = custom_root_mount_path
|
self.mount_location = custom_root_mount_path
|
||||||
self.data_mount_location = custom_data_mount_path
|
self.data_mount_location = custom_data_mount_path
|
||||||
@@ -82,50 +91,84 @@ class PatchSysVolume:
|
|||||||
else:
|
else:
|
||||||
self.mount_location = ""
|
self.mount_location = ""
|
||||||
self.mount_location_data = ""
|
self.mount_location_data = ""
|
||||||
|
|
||||||
self.mount_extensions = f"{self.mount_location}/System/Library/Extensions"
|
self.mount_extensions = f"{self.mount_location}/System/Library/Extensions"
|
||||||
self.mount_application_support = f"{self.mount_location_data}/Library/Application Support"
|
self.mount_application_support = f"{self.mount_location_data}/Library/Application Support"
|
||||||
|
|
||||||
|
|
||||||
def mount_root_vol(self):
|
def _mount_root_vol(self):
|
||||||
# Returns boolean if Root Volume is available
|
# Returns boolean if Root Volume is available
|
||||||
self.root_mount_path = utilities.get_disk_path()
|
self.root_mount_path = utilities.get_disk_path()
|
||||||
if self.root_mount_path.startswith("disk"):
|
if self.root_mount_path.startswith("disk"):
|
||||||
print(f"- Found Root Volume at: {self.root_mount_path}")
|
logging.info(f"- Found Root Volume at: {self.root_mount_path}")
|
||||||
if Path(self.mount_extensions).exists():
|
if Path(self.mount_extensions).exists():
|
||||||
print("- Root Volume is already mounted")
|
logging.info("- Root Volume is already mounted")
|
||||||
return True
|
return True
|
||||||
else:
|
else:
|
||||||
if self.root_supports_snapshot is True:
|
if self.root_supports_snapshot is True:
|
||||||
print("- Mounting APFS Snapshot as writable")
|
logging.info("- Mounting APFS Snapshot as writable")
|
||||||
result = utilities.elevated(["mount", "-o", "nobrowse", "-t", "apfs", f"/dev/{self.root_mount_path}", self.mount_location], stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
|
result = utilities.elevated(["mount", "-o", "nobrowse", "-t", "apfs", f"/dev/{self.root_mount_path}", self.mount_location], stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
|
||||||
if result.returncode == 0:
|
if result.returncode == 0:
|
||||||
print(f"- Mounted APFS Snapshot as writable at: {self.mount_location}")
|
logging.info(f"- Mounted APFS Snapshot as writable at: {self.mount_location}")
|
||||||
if Path(self.mount_extensions).exists():
|
if Path(self.mount_extensions).exists():
|
||||||
print("- Successfully mounted the Root Volume")
|
logging.info("- Successfully mounted the Root Volume")
|
||||||
return True
|
return True
|
||||||
else:
|
else:
|
||||||
print("- Root Volume appears to have unmounted unexpectedly")
|
logging.info("- Root Volume appears to have unmounted unexpectedly")
|
||||||
else:
|
else:
|
||||||
print("- Unable to mount APFS Snapshot as writable")
|
logging.info("- Unable to mount APFS Snapshot as writable")
|
||||||
print("Reason for mount failure:")
|
logging.info("Reason for mount failure:")
|
||||||
print(result.stdout.decode().strip())
|
logging.info(result.stdout.decode().strip())
|
||||||
return False
|
return False
|
||||||
|
|
||||||
def merge_kdk_with_root(self, save_hid_cs=False):
|
|
||||||
|
def _merge_kdk_with_root(self, save_hid_cs=False):
|
||||||
if self.skip_root_kmutil_requirement is True:
|
if self.skip_root_kmutil_requirement is True:
|
||||||
return
|
return
|
||||||
if self.constants.detected_os < os_data.os_data.ventura:
|
if self.constants.detected_os < os_data.os_data.ventura:
|
||||||
return
|
return
|
||||||
|
|
||||||
downloaded_kdk = None
|
if self.constants.kdk_download_path.exists():
|
||||||
kdk_path = sys_patch_helpers.sys_patch_helpers(self.constants).determine_kdk_present(match_closest=False)
|
if kdk_handler.KernelDebugKitUtilities().install_kdk_dmg(self.constants.kdk_download_path) is False:
|
||||||
if kdk_path is None:
|
logging.info("Failed to install KDK")
|
||||||
if not self.constants.kdk_download_path.exists():
|
raise Exception("Failed to install KDK")
|
||||||
kdk_result, error_msg, downloaded_kdk = kdk_handler.kernel_debug_kit_handler(self.constants).download_kdk(self.constants.detected_os_version, self.constants.detected_os_build)
|
|
||||||
if kdk_result is False:
|
kdk_obj = kdk_handler.KernelDebugKitObject(self.constants, self.constants.detected_os_build, self.constants.detected_os_version)
|
||||||
raise Exception(f"Unable to download KDK: {error_msg}")
|
if kdk_obj.success is False:
|
||||||
sys_patch_helpers.sys_patch_helpers(self.constants).install_kdk()
|
logging.info(f"Unable to get KDK info: {kdk_obj.error_msg}")
|
||||||
kdk_path = sys_patch_helpers.sys_patch_helpers(self.constants).determine_kdk_present(match_closest=True, override_build=downloaded_kdk)
|
raise Exception(f"Unable to get KDK info: {kdk_obj.error_msg}")
|
||||||
|
|
||||||
|
if kdk_obj.kdk_already_installed is False:
|
||||||
|
|
||||||
|
kdk_download_obj = kdk_obj.retrieve_download()
|
||||||
|
if not kdk_download_obj:
|
||||||
|
logging.info(f"Could not retrieve KDK: {kdk_obj.error_msg}")
|
||||||
|
|
||||||
|
# Hold thread until download is complete
|
||||||
|
kdk_download_obj.download(spawn_thread=False)
|
||||||
|
|
||||||
|
if kdk_download_obj.download_complete is False:
|
||||||
|
error_msg = kdk_download_obj.error_msg
|
||||||
|
logging.info(f"Could not download KDK: {error_msg}")
|
||||||
|
raise Exception(f"Could not download KDK: {error_msg}")
|
||||||
|
|
||||||
|
if kdk_obj.validate_kdk_checksum() is False:
|
||||||
|
logging.info(f"KDK checksum validation failed: {kdk_obj.error_msg}")
|
||||||
|
raise Exception(f"KDK checksum validation failed: {kdk_obj.error_msg}")
|
||||||
|
|
||||||
|
kdk_handler.KernelDebugKitUtilities().install_kdk_dmg(self.constants.kdk_download_path)
|
||||||
|
# re-init kdk_obj to get the new kdk_installed_path
|
||||||
|
kdk_obj = kdk_handler.KernelDebugKitObject(self.constants, self.constants.detected_os_build, self.constants.detected_os_version)
|
||||||
|
if kdk_obj.success is False:
|
||||||
|
logging.info(f"Unable to get KDK info: {kdk_obj.error_msg}")
|
||||||
|
raise Exception(f"Unable to get KDK info: {kdk_obj.error_msg}")
|
||||||
|
|
||||||
|
if kdk_obj.kdk_already_installed is False:
|
||||||
|
# We shouldn't get here, but just in case
|
||||||
|
logging.warning(f"KDK was not installed, but should have been: {kdk_obj.error_msg}")
|
||||||
|
raise Exception("KDK was not installed, but should have been: {kdk_obj.error_msg}")
|
||||||
|
|
||||||
|
kdk_path = Path(kdk_obj.kdk_installed_path) if kdk_obj.kdk_installed_path != "" else None
|
||||||
|
|
||||||
oclp_plist = Path("/System/Library/CoreServices/OpenCore-Legacy-Patcher.plist")
|
oclp_plist = Path("/System/Library/CoreServices/OpenCore-Legacy-Patcher.plist")
|
||||||
if (Path(self.mount_location) / Path("System/Library/Extensions/System.kext/PlugIns/Libkern.kext/Libkern")).exists() and oclp_plist.exists():
|
if (Path(self.mount_location) / Path("System/Library/Extensions/System.kext/PlugIns/Libkern.kext/Libkern")).exists() and oclp_plist.exists():
|
||||||
@@ -135,25 +178,25 @@ class PatchSysVolume:
|
|||||||
oclp_plist_data = plistlib.load(open(oclp_plist, "rb"))
|
oclp_plist_data = plistlib.load(open(oclp_plist, "rb"))
|
||||||
if "Kernel Debug Kit Used" in oclp_plist_data:
|
if "Kernel Debug Kit Used" in oclp_plist_data:
|
||||||
if oclp_plist_data["Kernel Debug Kit Used"] == str(kdk_path):
|
if oclp_plist_data["Kernel Debug Kit Used"] == str(kdk_path):
|
||||||
print("- Matching KDK determined to already be merged, skipping")
|
logging.info("- Matching KDK determined to already be merged, skipping")
|
||||||
return
|
return
|
||||||
except:
|
except:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
if kdk_path is None:
|
if kdk_path is None:
|
||||||
print(f"- Unable to find Kernel Debug Kit: {downloaded_kdk}")
|
logging.info(f"- Unable to find Kernel Debug Kit")
|
||||||
raise Exception("Unable to find Kernel Debug Kit")
|
raise Exception("Unable to find Kernel Debug Kit")
|
||||||
self.kdk_path = kdk_path
|
self.kdk_path = kdk_path
|
||||||
print(f"- Found KDK at: {kdk_path}")
|
logging.info(f"- Found KDK at: {kdk_path}")
|
||||||
|
|
||||||
# Due to some IOHIDFamily oddities, we need to ensure their CodeSignature is retained
|
# Due to some IOHIDFamily oddities, we need to ensure their CodeSignature is retained
|
||||||
cs_path = Path(self.mount_location) / Path("System/Library/Extensions/IOHIDFamily.kext/Contents/PlugIns/IOHIDEventDriver.kext/Contents/_CodeSignature")
|
cs_path = Path(self.mount_location) / Path("System/Library/Extensions/IOHIDFamily.kext/Contents/PlugIns/IOHIDEventDriver.kext/Contents/_CodeSignature")
|
||||||
if save_hid_cs is True and cs_path.exists():
|
if save_hid_cs is True and cs_path.exists():
|
||||||
print("- Backing up IOHIDEventDriver CodeSignature")
|
logging.info("- Backing up IOHIDEventDriver CodeSignature")
|
||||||
# Note it's a folder, not a file
|
# Note it's a folder, not a file
|
||||||
utilities.elevated(["cp", "-r", cs_path, f"{self.constants.payload_path}/IOHIDEventDriver_CodeSignature.bak"], stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
|
utilities.elevated(["cp", "-r", cs_path, f"{self.constants.payload_path}/IOHIDEventDriver_CodeSignature.bak"], stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
|
||||||
|
|
||||||
print("- Merging KDK with Root Volume")
|
logging.info(f"- Merging KDK with Root Volume: {kdk_path.name}")
|
||||||
utilities.elevated(
|
utilities.elevated(
|
||||||
# Only merge '/System/Library/Extensions'
|
# Only merge '/System/Library/Extensions'
|
||||||
# 'Kernels' and 'KernelSupport' is wasted space for root patching (we don't care above dev kernels)
|
# 'Kernels' and 'KernelSupport' is wasted space for root patching (we don't care above dev kernels)
|
||||||
@@ -163,52 +206,52 @@ class PatchSysVolume:
|
|||||||
# During reversing, we found that kmutil uses this path to determine whether the KDK was successfully merged
|
# During reversing, we found that kmutil uses this path to determine whether the KDK was successfully merged
|
||||||
# Best to verify now before we cause any damage
|
# Best to verify now before we cause any damage
|
||||||
if not (Path(self.mount_location) / Path("System/Library/Extensions/System.kext/PlugIns/Libkern.kext/Libkern")).exists():
|
if not (Path(self.mount_location) / Path("System/Library/Extensions/System.kext/PlugIns/Libkern.kext/Libkern")).exists():
|
||||||
print("- Failed to merge KDK with Root Volume")
|
logging.info("- Failed to merge KDK with Root Volume")
|
||||||
raise Exception("Failed to merge KDK with Root Volume")
|
raise Exception("Failed to merge KDK with Root Volume")
|
||||||
print("- Successfully merged KDK with Root Volume")
|
logging.info("- Successfully merged KDK with Root Volume")
|
||||||
|
|
||||||
# Restore IOHIDEventDriver CodeSignature
|
# Restore IOHIDEventDriver CodeSignature
|
||||||
if save_hid_cs is True and Path(f"{self.constants.payload_path}/IOHIDEventDriver_CodeSignature.bak").exists():
|
if save_hid_cs is True and Path(f"{self.constants.payload_path}/IOHIDEventDriver_CodeSignature.bak").exists():
|
||||||
print("- Restoring IOHIDEventDriver CodeSignature")
|
logging.info("- Restoring IOHIDEventDriver CodeSignature")
|
||||||
if not cs_path.exists():
|
if not cs_path.exists():
|
||||||
print(" - CodeSignature folder missing, creating")
|
logging.info(" - CodeSignature folder missing, creating")
|
||||||
utilities.elevated(["mkdir", "-p", cs_path], stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
|
utilities.elevated(["mkdir", "-p", cs_path], stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
|
||||||
utilities.elevated(["cp", "-r", f"{self.constants.payload_path}/IOHIDEventDriver_CodeSignature.bak", cs_path], stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
|
utilities.elevated(["cp", "-r", f"{self.constants.payload_path}/IOHIDEventDriver_CodeSignature.bak", cs_path], stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
|
||||||
utilities.elevated(["rm", "-rf", f"{self.constants.payload_path}/IOHIDEventDriver_CodeSignature.bak"], stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
|
utilities.elevated(["rm", "-rf", f"{self.constants.payload_path}/IOHIDEventDriver_CodeSignature.bak"], stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
|
||||||
|
|
||||||
|
|
||||||
def unpatch_root_vol(self):
|
def _unpatch_root_vol(self):
|
||||||
if self.constants.detected_os > os_data.os_data.catalina and self.root_supports_snapshot is True:
|
if self.constants.detected_os > os_data.os_data.catalina and self.root_supports_snapshot is True:
|
||||||
print("- Reverting to last signed APFS snapshot")
|
logging.info("- Reverting to last signed APFS snapshot")
|
||||||
result = utilities.elevated(["bless", "--mount", self.mount_location, "--bootefi", "--last-sealed-snapshot"], stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
|
result = utilities.elevated(["bless", "--mount", self.mount_location, "--bootefi", "--last-sealed-snapshot"], stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
|
||||||
if result.returncode != 0:
|
if result.returncode != 0:
|
||||||
print("- Unable to revert root volume patches")
|
logging.info("- Unable to revert root volume patches")
|
||||||
print("Reason for unpatch Failure:")
|
logging.info("Reason for unpatch Failure:")
|
||||||
print(result.stdout.decode())
|
logging.info(result.stdout.decode())
|
||||||
print("- Failed to revert snapshot via Apple's 'bless' command")
|
logging.info("- Failed to revert snapshot via Apple's 'bless' command")
|
||||||
else:
|
else:
|
||||||
self.clean_skylight_plugins()
|
self._clean_skylight_plugins()
|
||||||
self.delete_nonmetal_enforcement()
|
self._delete_nonmetal_enforcement()
|
||||||
self.clean_auxiliary_kc()
|
self._clean_auxiliary_kc()
|
||||||
self.constants.root_patcher_succeeded = True
|
self.constants.root_patcher_succeeded = True
|
||||||
print("- Unpatching complete")
|
logging.info("- Unpatching complete")
|
||||||
print("\nPlease reboot the machine for patches to take effect")
|
logging.info("\nPlease reboot the machine for patches to take effect")
|
||||||
|
|
||||||
def rebuild_snapshot(self):
|
def _rebuild_snapshot(self):
|
||||||
if self.rebuild_kernel_collection() is True:
|
if self._rebuild_kernel_collection() is True:
|
||||||
self.update_preboot_kernel_cache()
|
self.update_preboot_kernel_cache()
|
||||||
self.rebuild_dyld_shared_cache()
|
self._rebuild_dyld_shared_cache()
|
||||||
if self.create_new_apfs_snapshot() is True:
|
if self._create_new_apfs_snapshot() is True:
|
||||||
print("- Patching complete")
|
logging.info("- Patching complete")
|
||||||
print("\nPlease reboot the machine for patches to take effect")
|
logging.info("\nPlease reboot the machine for patches to take effect")
|
||||||
if self.needs_kmutil_exemptions is True:
|
if self.needs_kmutil_exemptions is True:
|
||||||
print("Note: Apple will require you to open System Preferences -> Security to allow the new kernel extensions to be loaded")
|
logging.info("Note: Apple will require you to open System Preferences -> Security to allow the new kernel extensions to be loaded")
|
||||||
self.constants.root_patcher_succeeded = True
|
self.constants.root_patcher_succeeded = True
|
||||||
if self.constants.gui_mode is False:
|
if self.constants.gui_mode is False:
|
||||||
input("\nPress [ENTER] to continue")
|
input("\nPress [ENTER] to continue")
|
||||||
|
|
||||||
def rebuild_kernel_collection(self):
|
def _rebuild_kernel_collection(self):
|
||||||
print("- Rebuilding Kernel Cache (This may take some time)")
|
logging.info("- Rebuilding Kernel Cache (This may take some time)")
|
||||||
if self.constants.detected_os > os_data.os_data.catalina:
|
if self.constants.detected_os > os_data.os_data.catalina:
|
||||||
# Base Arguments
|
# Base Arguments
|
||||||
args = ["kmutil", "install"]
|
args = ["kmutil", "install"]
|
||||||
@@ -255,7 +298,7 @@ class PatchSysVolume:
|
|||||||
if self.needs_kmutil_exemptions is True:
|
if self.needs_kmutil_exemptions is True:
|
||||||
# When installing to '/Library/Extensions', following args skip kext consent
|
# When installing to '/Library/Extensions', following args skip kext consent
|
||||||
# prompt in System Preferences when SIP's disabled
|
# prompt in System Preferences when SIP's disabled
|
||||||
print(" (You will get a prompt by System Preferences, ignore for now)")
|
logging.info(" (You will get a prompt by System Preferences, ignore for now)")
|
||||||
args.append("--no-authentication")
|
args.append("--no-authentication")
|
||||||
args.append("--no-authorization")
|
args.append("--no-authorization")
|
||||||
else:
|
else:
|
||||||
@@ -271,11 +314,11 @@ class PatchSysVolume:
|
|||||||
# - will return 31 on 'No binaries or codeless kexts were provided'
|
# - will return 31 on 'No binaries or codeless kexts were provided'
|
||||||
# - will return -10 if the volume is missing (ie. unmounted by another process)
|
# - will return -10 if the volume is missing (ie. unmounted by another process)
|
||||||
if result.returncode != 0 or (self.constants.detected_os < os_data.os_data.catalina and "KernelCache ID" not in result.stdout.decode()):
|
if result.returncode != 0 or (self.constants.detected_os < os_data.os_data.catalina and "KernelCache ID" not in result.stdout.decode()):
|
||||||
print("- Unable to build new kernel cache")
|
logging.info("- Unable to build new kernel cache")
|
||||||
print(f"\nReason for Patch Failure ({result.returncode}):")
|
logging.info(f"\nReason for Patch Failure ({result.returncode}):")
|
||||||
print(result.stdout.decode())
|
logging.info(result.stdout.decode())
|
||||||
print("")
|
logging.info("")
|
||||||
print("\nPlease reboot the machine to avoid potential issues rerunning the patcher")
|
logging.info("\nPlease reboot the machine to avoid potential issues rerunning the patcher")
|
||||||
if self.constants.gui_mode is False:
|
if self.constants.gui_mode is False:
|
||||||
input("Press [ENTER] to continue")
|
input("Press [ENTER] to continue")
|
||||||
return False
|
return False
|
||||||
@@ -284,27 +327,27 @@ class PatchSysVolume:
|
|||||||
# Force rebuild the Auxiliary KC
|
# Force rebuild the Auxiliary KC
|
||||||
result = utilities.elevated(["killall", "syspolicyd", "kernelmanagerd"], stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
|
result = utilities.elevated(["killall", "syspolicyd", "kernelmanagerd"], stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
|
||||||
if result.returncode != 0:
|
if result.returncode != 0:
|
||||||
print("- Unable to remove kernel extension policy files")
|
logging.info("- Unable to remove kernel extension policy files")
|
||||||
print(f"\nReason for Patch Failure ({result.returncode}):")
|
logging.info(f"\nReason for Patch Failure ({result.returncode}):")
|
||||||
print(result.stdout.decode())
|
logging.info(result.stdout.decode())
|
||||||
print("")
|
logging.info("")
|
||||||
print("\nPlease reboot the machine to avoid potential issues rerunning the patcher")
|
logging.info("\nPlease reboot the machine to avoid potential issues rerunning the patcher")
|
||||||
if self.constants.gui_mode is False:
|
if self.constants.gui_mode is False:
|
||||||
input("Press [ENTER] to continue")
|
input("Press [ENTER] to continue")
|
||||||
return False
|
return False
|
||||||
|
|
||||||
for file in ["KextPolicy", "KextPolicy-shm", "KextPolicy-wal"]:
|
for file in ["KextPolicy", "KextPolicy-shm", "KextPolicy-wal"]:
|
||||||
self.remove_file("/private/var/db/SystemPolicyConfiguration/", file)
|
self._remove_file("/private/var/db/SystemPolicyConfiguration/", file)
|
||||||
else:
|
else:
|
||||||
# Install RSRHelper utility to handle desynced KCs
|
# Install RSRHelper utility to handle desynced KCs
|
||||||
sys_patch_helpers.sys_patch_helpers(self.constants).install_rsr_repair_binary()
|
sys_patch_helpers.SysPatchHelpers(self.constants).install_rsr_repair_binary()
|
||||||
|
|
||||||
print("- Successfully built new kernel cache")
|
logging.info("- Successfully built new kernel cache")
|
||||||
return True
|
return True
|
||||||
|
|
||||||
def create_new_apfs_snapshot(self):
|
def _create_new_apfs_snapshot(self):
|
||||||
if self.root_supports_snapshot is True:
|
if self.root_supports_snapshot is True:
|
||||||
print("- Creating new APFS snapshot")
|
logging.info("- Creating new APFS snapshot")
|
||||||
bless = utilities.elevated(
|
bless = utilities.elevated(
|
||||||
[
|
[
|
||||||
"bless",
|
"bless",
|
||||||
@@ -313,46 +356,47 @@ class PatchSysVolume:
|
|||||||
], stdout=subprocess.PIPE, stderr=subprocess.STDOUT
|
], stdout=subprocess.PIPE, stderr=subprocess.STDOUT
|
||||||
)
|
)
|
||||||
if bless.returncode != 0:
|
if bless.returncode != 0:
|
||||||
print("- Unable to create new snapshot")
|
logging.info("- Unable to create new snapshot")
|
||||||
print("Reason for snapshot failure:")
|
logging.info("Reason for snapshot failure:")
|
||||||
print(bless.stdout.decode())
|
logging.info(bless.stdout.decode())
|
||||||
if "Can't use last-sealed-snapshot or create-snapshot on non system volume" in bless.stdout.decode():
|
if "Can't use last-sealed-snapshot or create-snapshot on non system volume" in bless.stdout.decode():
|
||||||
print("- This is an APFS bug with Monterey and newer! Perform a clean installation to ensure your APFS volume is built correctly")
|
logging.info("- This is an APFS bug with Monterey and newer! Perform a clean installation to ensure your APFS volume is built correctly")
|
||||||
return False
|
return False
|
||||||
self.unmount_drive()
|
self._unmount_drive()
|
||||||
return True
|
return True
|
||||||
|
|
||||||
def unmount_drive(self):
|
def _unmount_drive(self):
|
||||||
print("- Unmounting Root Volume (Don't worry if this fails)")
|
logging.info("- Unmounting Root Volume (Don't worry if this fails)")
|
||||||
utilities.elevated(["diskutil", "unmount", self.root_mount_path], stdout=subprocess.PIPE).stdout.decode().strip().encode()
|
utilities.elevated(["diskutil", "unmount", self.root_mount_path], stdout=subprocess.PIPE).stdout.decode().strip().encode()
|
||||||
|
|
||||||
def rebuild_dyld_shared_cache(self):
|
def _rebuild_dyld_shared_cache(self):
|
||||||
if self.constants.detected_os <= os_data.os_data.catalina:
|
if self.constants.detected_os > os_data.os_data.catalina:
|
||||||
print("- Rebuilding dyld shared cache")
|
return
|
||||||
utilities.process_status(utilities.elevated(["update_dyld_shared_cache", "-root", f"{self.mount_location}/"]))
|
logging.info("- Rebuilding dyld shared cache")
|
||||||
|
utilities.process_status(utilities.elevated(["update_dyld_shared_cache", "-root", f"{self.mount_location}/"]))
|
||||||
|
|
||||||
def update_preboot_kernel_cache(self):
|
def update_preboot_kernel_cache(self):
|
||||||
if self.constants.detected_os == os_data.os_data.catalina:
|
if self.constants.detected_os == os_data.os_data.catalina:
|
||||||
print("- Rebuilding preboot kernel cache")
|
logging.info("- Rebuilding preboot kernel cache")
|
||||||
utilities.process_status(utilities.elevated(["kcditto"], stdout=subprocess.PIPE, stderr=subprocess.STDOUT))
|
utilities.process_status(utilities.elevated(["kcditto"], stdout=subprocess.PIPE, stderr=subprocess.STDOUT))
|
||||||
|
|
||||||
def clean_skylight_plugins(self):
|
def _clean_skylight_plugins(self):
|
||||||
if (Path(self.mount_application_support) / Path("SkyLightPlugins/")).exists():
|
if (Path(self.mount_application_support) / Path("SkyLightPlugins/")).exists():
|
||||||
print("- Found SkylightPlugins folder, removing old plugins")
|
logging.info("- Found SkylightPlugins folder, removing old plugins")
|
||||||
utilities.process_status(utilities.elevated(["rm", "-Rf", f"{self.mount_application_support}/SkyLightPlugins"], stdout=subprocess.PIPE, stderr=subprocess.STDOUT))
|
utilities.process_status(utilities.elevated(["rm", "-Rf", f"{self.mount_application_support}/SkyLightPlugins"], stdout=subprocess.PIPE, stderr=subprocess.STDOUT))
|
||||||
utilities.process_status(utilities.elevated(["mkdir", f"{self.mount_application_support}/SkyLightPlugins"], stdout=subprocess.PIPE, stderr=subprocess.STDOUT))
|
utilities.process_status(utilities.elevated(["mkdir", f"{self.mount_application_support}/SkyLightPlugins"], stdout=subprocess.PIPE, stderr=subprocess.STDOUT))
|
||||||
else:
|
else:
|
||||||
print("- Creating SkylightPlugins folder")
|
logging.info("- Creating SkylightPlugins folder")
|
||||||
utilities.process_status(utilities.elevated(["mkdir", "-p", f"{self.mount_application_support}/SkyLightPlugins/"], stdout=subprocess.PIPE, stderr=subprocess.STDOUT))
|
utilities.process_status(utilities.elevated(["mkdir", "-p", f"{self.mount_application_support}/SkyLightPlugins/"], stdout=subprocess.PIPE, stderr=subprocess.STDOUT))
|
||||||
|
|
||||||
def delete_nonmetal_enforcement(self):
|
def _delete_nonmetal_enforcement(self):
|
||||||
for arg in ["useMetal", "useIOP"]:
|
for arg in ["useMetal", "useIOP"]:
|
||||||
result = subprocess.run(["defaults", "read", "/Library/Preferences/com.apple.CoreDisplay", arg], stdout=subprocess.PIPE).stdout.decode("utf-8").strip()
|
result = subprocess.run(["defaults", "read", "/Library/Preferences/com.apple.CoreDisplay", arg], stdout=subprocess.PIPE, stderr=subprocess.DEVNULL).stdout.decode("utf-8").strip()
|
||||||
if result in ["0", "false", "1", "true"]:
|
if result in ["0", "false", "1", "true"]:
|
||||||
print(f"- Removing non-Metal Enforcement Preference: {arg}")
|
logging.info(f"- Removing non-Metal Enforcement Preference: {arg}")
|
||||||
utilities.elevated(["defaults", "delete", "/Library/Preferences/com.apple.CoreDisplay", arg])
|
utilities.elevated(["defaults", "delete", "/Library/Preferences/com.apple.CoreDisplay", arg])
|
||||||
|
|
||||||
def clean_auxiliary_kc(self):
|
def _clean_auxiliary_kc(self):
|
||||||
# When reverting root volume patches, the AuxKC will still retain the UUID
|
# When reverting root volume patches, the AuxKC will still retain the UUID
|
||||||
# it was built against. Thus when Boot/SysKC are reverted, Aux will break
|
# it was built against. Thus when Boot/SysKC are reverted, Aux will break
|
||||||
# To resolve this, delete all installed kexts in /L*/E* and rebuild the AuxKC
|
# To resolve this, delete all installed kexts in /L*/E* and rebuild the AuxKC
|
||||||
@@ -360,7 +404,7 @@ class PatchSysVolume:
|
|||||||
if self.constants.detected_os < os_data.os_data.big_sur:
|
if self.constants.detected_os < os_data.os_data.big_sur:
|
||||||
return
|
return
|
||||||
|
|
||||||
print("- Cleaning Auxiliary Kernel Collection")
|
logging.info("- Cleaning Auxiliary Kernel Collection")
|
||||||
oclp_path = "/System/Library/CoreServices/OpenCore-Legacy-Patcher.plist"
|
oclp_path = "/System/Library/CoreServices/OpenCore-Legacy-Patcher.plist"
|
||||||
if Path(oclp_path).exists():
|
if Path(oclp_path).exists():
|
||||||
oclp_plist_data = plistlib.load(Path(oclp_path).open("rb"))
|
oclp_plist_data = plistlib.load(Path(oclp_path).open("rb"))
|
||||||
@@ -373,7 +417,7 @@ class PatchSysVolume:
|
|||||||
for file in oclp_plist_data[key]["Install"][location]:
|
for file in oclp_plist_data[key]["Install"][location]:
|
||||||
if not file.endswith(".kext"):
|
if not file.endswith(".kext"):
|
||||||
continue
|
continue
|
||||||
self.remove_file("/Library/Extensions", file)
|
self._remove_file("/Library/Extensions", file)
|
||||||
|
|
||||||
# Handle situations where users migrated from older OSes with a lot of garbage in /L*/E*
|
# Handle situations where users migrated from older OSes with a lot of garbage in /L*/E*
|
||||||
# ex. Nvidia Web Drivers, NetUSB, dosdude1's patches, etc.
|
# ex. Nvidia Web Drivers, NetUSB, dosdude1's patches, etc.
|
||||||
@@ -388,7 +432,7 @@ class PatchSysVolume:
|
|||||||
for file in Path("/Library/Extensions").glob("*.kext"):
|
for file in Path("/Library/Extensions").glob("*.kext"):
|
||||||
try:
|
try:
|
||||||
if datetime.fromtimestamp(file.stat().st_mtime) < datetime(2021, 10, 1):
|
if datetime.fromtimestamp(file.stat().st_mtime) < datetime(2021, 10, 1):
|
||||||
print(f" - Relocating {file.name} kext to {relocation_path}")
|
logging.info(f" - Relocating {file.name} kext to {relocation_path}")
|
||||||
if Path(relocation_path) / Path(file.name).exists():
|
if Path(relocation_path) / Path(file.name).exists():
|
||||||
utilities.elevated(["rm", "-Rf", relocation_path / Path(file.name)])
|
utilities.elevated(["rm", "-Rf", relocation_path / Path(file.name)])
|
||||||
utilities.elevated(["mv", file, relocation_path])
|
utilities.elevated(["mv", file, relocation_path])
|
||||||
@@ -397,17 +441,17 @@ class PatchSysVolume:
|
|||||||
# ex. Symlinks pointing to symlinks pointing to dead files
|
# ex. Symlinks pointing to symlinks pointing to dead files
|
||||||
pass
|
pass
|
||||||
|
|
||||||
def write_patchset(self, patchset):
|
def _write_patchset(self, patchset):
|
||||||
destination_path = f"{self.mount_location}/System/Library/CoreServices"
|
destination_path = f"{self.mount_location}/System/Library/CoreServices"
|
||||||
file_name = "OpenCore-Legacy-Patcher.plist"
|
file_name = "OpenCore-Legacy-Patcher.plist"
|
||||||
destination_path_file = f"{destination_path}/{file_name}"
|
destination_path_file = f"{destination_path}/{file_name}"
|
||||||
if sys_patch_helpers.sys_patch_helpers(self.constants).generate_patchset_plist(patchset, file_name, self.kdk_path):
|
if sys_patch_helpers.SysPatchHelpers(self.constants).generate_patchset_plist(patchset, file_name, self.kdk_path):
|
||||||
print("- Writing patchset information to Root Volume")
|
logging.info("- Writing patchset information to Root Volume")
|
||||||
if Path(destination_path_file).exists():
|
if Path(destination_path_file).exists():
|
||||||
utilities.process_status(utilities.elevated(["rm", destination_path_file], stdout=subprocess.PIPE, stderr=subprocess.STDOUT))
|
utilities.process_status(utilities.elevated(["rm", destination_path_file], stdout=subprocess.PIPE, stderr=subprocess.STDOUT))
|
||||||
utilities.process_status(utilities.elevated(["cp", f"{self.constants.payload_path}/{file_name}", destination_path], stdout=subprocess.PIPE, stderr=subprocess.STDOUT))
|
utilities.process_status(utilities.elevated(["cp", f"{self.constants.payload_path}/{file_name}", destination_path], stdout=subprocess.PIPE, stderr=subprocess.STDOUT))
|
||||||
|
|
||||||
def add_auxkc_support(self, install_file, source_folder_path, install_patch_directory, destination_folder_path):
|
def _add_auxkc_support(self, install_file, source_folder_path, install_patch_directory, destination_folder_path):
|
||||||
# In macOS Ventura, KDKs are required to build new Boot and System KCs
|
# In macOS Ventura, KDKs are required to build new Boot and System KCs
|
||||||
# However for some patch sets, we're able to use the Auxiliary KCs with '/Library/Extensions'
|
# However for some patch sets, we're able to use the Auxiliary KCs with '/Library/Extensions'
|
||||||
|
|
||||||
@@ -429,7 +473,7 @@ class PatchSysVolume:
|
|||||||
|
|
||||||
updated_install_location = str(self.mount_location_data) + "/Library/Extensions"
|
updated_install_location = str(self.mount_location_data) + "/Library/Extensions"
|
||||||
|
|
||||||
print(f" - Adding AuxKC support to {install_file}")
|
logging.info(f" - Adding AuxKC support to {install_file}")
|
||||||
plist_path = Path(Path(source_folder_path) / Path(install_file) / Path("Contents/Info.plist"))
|
plist_path = Path(Path(source_folder_path) / Path(install_file) / Path("Contents/Info.plist"))
|
||||||
plist_data = plistlib.load((plist_path).open("rb"))
|
plist_data = plistlib.load((plist_path).open("rb"))
|
||||||
|
|
||||||
@@ -443,11 +487,11 @@ class PatchSysVolume:
|
|||||||
plist_data["OSBundleRequired"] = "Auxiliary"
|
plist_data["OSBundleRequired"] = "Auxiliary"
|
||||||
plistlib.dump(plist_data, plist_path.open("wb"))
|
plistlib.dump(plist_data, plist_path.open("wb"))
|
||||||
|
|
||||||
self.check_kexts_needs_authentication(install_file)
|
self._check_kexts_needs_authentication(install_file)
|
||||||
|
|
||||||
return updated_install_location
|
return updated_install_location
|
||||||
|
|
||||||
def check_kexts_needs_authentication(self, kext_name):
|
def _check_kexts_needs_authentication(self, kext_name):
|
||||||
# Verify whether the user needs to authenticate in System Preferences
|
# Verify whether the user needs to authenticate in System Preferences
|
||||||
# Specifically under 'private/var/db/KernelManagement/AuxKC/CurrentAuxKC/com.apple.kcgen.instructions.plist'
|
# Specifically under 'private/var/db/KernelManagement/AuxKC/CurrentAuxKC/com.apple.kcgen.instructions.plist'
|
||||||
# ["kextsToBuild"][i]:
|
# ["kextsToBuild"][i]:
|
||||||
@@ -466,38 +510,38 @@ class PatchSysVolume:
|
|||||||
except PermissionError:
|
except PermissionError:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
print(f" - {kext_name} requires authentication in System Preferences")
|
logging.info(f" - {kext_name} requires authentication in System Preferences")
|
||||||
self.constants.needs_to_open_preferences = True # Notify in GUI to open System Preferences
|
self.constants.needs_to_open_preferences = True # Notify in GUI to open System Preferences
|
||||||
|
|
||||||
def patch_root_vol(self):
|
def _patch_root_vol(self):
|
||||||
print(f"- Running patches for {self.model}")
|
logging.info(f"- Running patches for {self.model}")
|
||||||
if self.patch_set_dictionary != {}:
|
if self.patch_set_dictionary != {}:
|
||||||
self.execute_patchset(self.patch_set_dictionary)
|
self._execute_patchset(self.patch_set_dictionary)
|
||||||
else:
|
else:
|
||||||
self.execute_patchset(sys_patch_detect.detect_root_patch(self.computer.real_model, self.constants).generate_patchset(self.hardware_details))
|
self._execute_patchset(sys_patch_detect.DetectRootPatch(self.computer.real_model, self.constants).generate_patchset(self.hardware_details))
|
||||||
|
|
||||||
if self.constants.wxpython_variant is True and self.constants.detected_os >= os_data.os_data.big_sur:
|
if self.constants.wxpython_variant is True and self.constants.detected_os >= os_data.os_data.big_sur:
|
||||||
sys_patch_auto.AutomaticSysPatch(self.constants).install_auto_patcher_launch_agent()
|
sys_patch_auto.AutomaticSysPatch(self.constants).install_auto_patcher_launch_agent()
|
||||||
|
|
||||||
self.rebuild_snapshot()
|
self._rebuild_snapshot()
|
||||||
|
|
||||||
def execute_patchset(self, required_patches):
|
def _execute_patchset(self, required_patches):
|
||||||
source_files_path = str(self.constants.payload_local_binaries_root_path)
|
source_files_path = str(self.constants.payload_local_binaries_root_path)
|
||||||
self.preflight_checks(required_patches, source_files_path)
|
self._preflight_checks(required_patches, source_files_path)
|
||||||
for patch in required_patches:
|
for patch in required_patches:
|
||||||
print("- Installing Patchset: " + patch)
|
logging.info("- Installing Patchset: " + patch)
|
||||||
if "Remove" in required_patches[patch]:
|
if "Remove" in required_patches[patch]:
|
||||||
for remove_patch_directory in required_patches[patch]["Remove"]:
|
for remove_patch_directory in required_patches[patch]["Remove"]:
|
||||||
print("- Remove Files at: " + remove_patch_directory)
|
logging.info("- Remove Files at: " + remove_patch_directory)
|
||||||
for remove_patch_file in required_patches[patch]["Remove"][remove_patch_directory]:
|
for remove_patch_file in required_patches[patch]["Remove"][remove_patch_directory]:
|
||||||
destination_folder_path = str(self.mount_location) + remove_patch_directory
|
destination_folder_path = str(self.mount_location) + remove_patch_directory
|
||||||
self.remove_file(destination_folder_path, remove_patch_file)
|
self._remove_file(destination_folder_path, remove_patch_file)
|
||||||
|
|
||||||
|
|
||||||
for method_install in ["Install", "Install Non-Root"]:
|
for method_install in ["Install", "Install Non-Root"]:
|
||||||
if method_install in required_patches[patch]:
|
if method_install in required_patches[patch]:
|
||||||
for install_patch_directory in list(required_patches[patch][method_install]):
|
for install_patch_directory in list(required_patches[patch][method_install]):
|
||||||
print(f"- Handling Installs in: {install_patch_directory}")
|
logging.info(f"- Handling Installs in: {install_patch_directory}")
|
||||||
for install_file in list(required_patches[patch][method_install][install_patch_directory]):
|
for install_file in list(required_patches[patch][method_install][install_patch_directory]):
|
||||||
source_folder_path = source_files_path + "/" + required_patches[patch][method_install][install_patch_directory][install_file] + install_patch_directory
|
source_folder_path = source_files_path + "/" + required_patches[patch][method_install][install_patch_directory][install_file] + install_patch_directory
|
||||||
if method_install == "Install":
|
if method_install == "Install":
|
||||||
@@ -505,10 +549,10 @@ class PatchSysVolume:
|
|||||||
else:
|
else:
|
||||||
if install_patch_directory == "/Library/Extensions":
|
if install_patch_directory == "/Library/Extensions":
|
||||||
self.needs_kmutil_exemptions = True
|
self.needs_kmutil_exemptions = True
|
||||||
self.check_kexts_needs_authentication(install_file)
|
self._check_kexts_needs_authentication(install_file)
|
||||||
destination_folder_path = str(self.mount_location_data) + install_patch_directory
|
destination_folder_path = str(self.mount_location_data) + install_patch_directory
|
||||||
|
|
||||||
updated_destination_folder_path = self.add_auxkc_support(install_file, source_folder_path, install_patch_directory, destination_folder_path)
|
updated_destination_folder_path = self._add_auxkc_support(install_file, source_folder_path, install_patch_directory, destination_folder_path)
|
||||||
|
|
||||||
if destination_folder_path != updated_destination_folder_path:
|
if destination_folder_path != updated_destination_folder_path:
|
||||||
# Update required_patches to reflect the new destination folder path
|
# Update required_patches to reflect the new destination folder path
|
||||||
@@ -519,37 +563,37 @@ class PatchSysVolume:
|
|||||||
|
|
||||||
destination_folder_path = updated_destination_folder_path
|
destination_folder_path = updated_destination_folder_path
|
||||||
|
|
||||||
self.install_new_file(source_folder_path, destination_folder_path, install_file)
|
self._install_new_file(source_folder_path, destination_folder_path, install_file)
|
||||||
|
|
||||||
if "Processes" in required_patches[patch]:
|
if "Processes" in required_patches[patch]:
|
||||||
for process in required_patches[patch]["Processes"]:
|
for process in required_patches[patch]["Processes"]:
|
||||||
# Some processes need sudo, however we cannot directly call sudo in some scenarios
|
# Some processes need sudo, however we cannot directly call sudo in some scenarios
|
||||||
# Instead, call elevated funtion if string's boolean is True
|
# Instead, call elevated funtion if string's boolean is True
|
||||||
if required_patches[patch]["Processes"][process] is True:
|
if required_patches[patch]["Processes"][process] is True:
|
||||||
print(f"- Running Process as Root:\n{process}")
|
logging.info(f"- Running Process as Root:\n{process}")
|
||||||
utilities.process_status(utilities.elevated(process.split(" "), stdout=subprocess.PIPE, stderr=subprocess.STDOUT))
|
utilities.process_status(utilities.elevated(process.split(" "), stdout=subprocess.PIPE, stderr=subprocess.STDOUT))
|
||||||
else:
|
else:
|
||||||
print(f"- Running Process:\n{process}")
|
logging.info(f"- Running Process:\n{process}")
|
||||||
utilities.process_status(subprocess.run(process, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, shell=True))
|
utilities.process_status(subprocess.run(process, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, shell=True))
|
||||||
if any(x in required_patches for x in ["AMD Legacy GCN", "AMD Legacy Polaris", "AMD Legacy Vega"]):
|
if any(x in required_patches for x in ["AMD Legacy GCN", "AMD Legacy Polaris", "AMD Legacy Vega"]):
|
||||||
sys_patch_helpers.sys_patch_helpers(self.constants).disable_window_server_caching()
|
sys_patch_helpers.SysPatchHelpers(self.constants).disable_window_server_caching()
|
||||||
if any(x in required_patches for x in ["Intel Ivy Bridge", "Intel Haswell"]):
|
if any(x in required_patches for x in ["Intel Ivy Bridge", "Intel Haswell"]):
|
||||||
sys_patch_helpers.sys_patch_helpers(self.constants).remove_news_widgets()
|
sys_patch_helpers.SysPatchHelpers(self.constants).remove_news_widgets()
|
||||||
self.write_patchset(required_patches)
|
self._write_patchset(required_patches)
|
||||||
|
|
||||||
def preflight_checks(self, required_patches, source_files_path):
|
def _preflight_checks(self, required_patches, source_files_path):
|
||||||
print("- Running Preflight Checks before patching")
|
logging.info("- Running Preflight Checks before patching")
|
||||||
|
|
||||||
# Make sure old SkyLight plugins aren't being used
|
# Make sure old SkyLight plugins aren't being used
|
||||||
self.clean_skylight_plugins()
|
self._clean_skylight_plugins()
|
||||||
# Make sure non-Metal Enforcement preferences are not present
|
# Make sure non-Metal Enforcement preferences are not present
|
||||||
self.delete_nonmetal_enforcement()
|
self._delete_nonmetal_enforcement()
|
||||||
# Make sure we clean old kexts in /L*/E* that are not in the patchset
|
# Make sure we clean old kexts in /L*/E* that are not in the patchset
|
||||||
self.clean_auxiliary_kc()
|
self._clean_auxiliary_kc()
|
||||||
|
|
||||||
# Make sure SNB kexts are compatible with the host
|
# Make sure SNB kexts are compatible with the host
|
||||||
if "Intel Sandy Bridge" in required_patches:
|
if "Intel Sandy Bridge" in required_patches:
|
||||||
sys_patch_helpers.sys_patch_helpers(self.constants).snb_board_id_patch(source_files_path)
|
sys_patch_helpers.SysPatchHelpers(self.constants).snb_board_id_patch(source_files_path)
|
||||||
|
|
||||||
for patch in required_patches:
|
for patch in required_patches:
|
||||||
# Check if all files are present
|
# Check if all files are present
|
||||||
@@ -565,53 +609,53 @@ class PatchSysVolume:
|
|||||||
should_save_cs = False
|
should_save_cs = False
|
||||||
if "Legacy USB 1.1" in required_patches:
|
if "Legacy USB 1.1" in required_patches:
|
||||||
should_save_cs = True
|
should_save_cs = True
|
||||||
self.merge_kdk_with_root(save_hid_cs=should_save_cs)
|
self._merge_kdk_with_root(save_hid_cs=should_save_cs)
|
||||||
|
|
||||||
print("- Finished Preflight, starting patching")
|
logging.info("- Finished Preflight, starting patching")
|
||||||
|
|
||||||
def install_new_file(self, source_folder, destination_folder, file_name):
|
def _install_new_file(self, source_folder, destination_folder, file_name):
|
||||||
# .frameworks are merged
|
# .frameworks are merged
|
||||||
# .kexts and .apps are deleted and replaced
|
# .kexts and .apps are deleted and replaced
|
||||||
file_name_str = str(file_name)
|
file_name_str = str(file_name)
|
||||||
|
|
||||||
if not Path(destination_folder).exists():
|
if not Path(destination_folder).exists():
|
||||||
print(f" - Skipping {file_name}, cannot locate {source_folder}")
|
logging.info(f" - Skipping {file_name}, cannot locate {source_folder}")
|
||||||
return
|
return
|
||||||
|
|
||||||
if file_name_str.endswith(".framework"):
|
if file_name_str.endswith(".framework"):
|
||||||
# merge with rsync
|
# merge with rsync
|
||||||
print(f" - Installing: {file_name}")
|
logging.info(f" - Installing: {file_name}")
|
||||||
utilities.elevated(["rsync", "-r", "-i", "-a", f"{source_folder}/{file_name}", f"{destination_folder}/"], stdout=subprocess.PIPE)
|
utilities.elevated(["rsync", "-r", "-i", "-a", f"{source_folder}/{file_name}", f"{destination_folder}/"], stdout=subprocess.PIPE)
|
||||||
self.fix_permissions(destination_folder + "/" + file_name)
|
self._fix_permissions(destination_folder + "/" + file_name)
|
||||||
elif Path(source_folder + "/" + file_name_str).is_dir():
|
elif Path(source_folder + "/" + file_name_str).is_dir():
|
||||||
# Applicable for .kext, .app, .plugin, .bundle, all of which are directories
|
# Applicable for .kext, .app, .plugin, .bundle, all of which are directories
|
||||||
if Path(destination_folder + "/" + file_name).exists():
|
if Path(destination_folder + "/" + file_name).exists():
|
||||||
print(f" - Found existing {file_name}, overwriting...")
|
logging.info(f" - Found existing {file_name}, overwriting...")
|
||||||
utilities.process_status(utilities.elevated(["rm", "-R", f"{destination_folder}/{file_name}"], stdout=subprocess.PIPE, stderr=subprocess.STDOUT))
|
utilities.process_status(utilities.elevated(["rm", "-R", f"{destination_folder}/{file_name}"], stdout=subprocess.PIPE, stderr=subprocess.STDOUT))
|
||||||
else:
|
else:
|
||||||
print(f" - Installing: {file_name}")
|
logging.info(f" - Installing: {file_name}")
|
||||||
utilities.process_status(utilities.elevated(["cp", "-R", f"{source_folder}/{file_name}", destination_folder], stdout=subprocess.PIPE, stderr=subprocess.STDOUT))
|
utilities.process_status(utilities.elevated(["cp", "-R", f"{source_folder}/{file_name}", destination_folder], stdout=subprocess.PIPE, stderr=subprocess.STDOUT))
|
||||||
self.fix_permissions(destination_folder + "/" + file_name)
|
self._fix_permissions(destination_folder + "/" + file_name)
|
||||||
else:
|
else:
|
||||||
# Assume it's an individual file, replace as normal
|
# Assume it's an individual file, replace as normal
|
||||||
if Path(destination_folder + "/" + file_name).exists():
|
if Path(destination_folder + "/" + file_name).exists():
|
||||||
print(f" - Found existing {file_name}, overwriting...")
|
logging.info(f" - Found existing {file_name}, overwriting...")
|
||||||
utilities.process_status(utilities.elevated(["rm", f"{destination_folder}/{file_name}"], stdout=subprocess.PIPE, stderr=subprocess.STDOUT))
|
utilities.process_status(utilities.elevated(["rm", f"{destination_folder}/{file_name}"], stdout=subprocess.PIPE, stderr=subprocess.STDOUT))
|
||||||
else:
|
else:
|
||||||
print(f" - Installing: {file_name}")
|
logging.info(f" - Installing: {file_name}")
|
||||||
utilities.process_status(utilities.elevated(["cp", f"{source_folder}/{file_name}", destination_folder], stdout=subprocess.PIPE, stderr=subprocess.STDOUT))
|
utilities.process_status(utilities.elevated(["cp", f"{source_folder}/{file_name}", destination_folder], stdout=subprocess.PIPE, stderr=subprocess.STDOUT))
|
||||||
self.fix_permissions(destination_folder + "/" + file_name)
|
self._fix_permissions(destination_folder + "/" + file_name)
|
||||||
|
|
||||||
def remove_file(self, destination_folder, file_name):
|
def _remove_file(self, destination_folder, file_name):
|
||||||
if Path(destination_folder + "/" + file_name).exists():
|
if Path(destination_folder + "/" + file_name).exists():
|
||||||
print(f" - Removing: {file_name}")
|
logging.info(f" - Removing: {file_name}")
|
||||||
if Path(destination_folder + "/" + file_name).is_dir():
|
if Path(destination_folder + "/" + file_name).is_dir():
|
||||||
utilities.process_status(utilities.elevated(["rm", "-R", f"{destination_folder}/{file_name}"], stdout=subprocess.PIPE, stderr=subprocess.STDOUT))
|
utilities.process_status(utilities.elevated(["rm", "-R", f"{destination_folder}/{file_name}"], stdout=subprocess.PIPE, stderr=subprocess.STDOUT))
|
||||||
else:
|
else:
|
||||||
utilities.process_status(utilities.elevated(["rm", f"{destination_folder}/{file_name}"], stdout=subprocess.PIPE, stderr=subprocess.STDOUT))
|
utilities.process_status(utilities.elevated(["rm", f"{destination_folder}/{file_name}"], stdout=subprocess.PIPE, stderr=subprocess.STDOUT))
|
||||||
|
|
||||||
|
|
||||||
def fix_permissions(self, destination_file):
|
def _fix_permissions(self, destination_file):
|
||||||
chmod_args = ["chmod", "-Rf", "755", destination_file]
|
chmod_args = ["chmod", "-Rf", "755", destination_file]
|
||||||
chown_args = ["chown", "-Rf", "root:wheel", destination_file]
|
chown_args = ["chown", "-Rf", "root:wheel", destination_file]
|
||||||
if not Path(destination_file).is_dir():
|
if not Path(destination_file).is_dir():
|
||||||
@@ -622,89 +666,65 @@ class PatchSysVolume:
|
|||||||
utilities.process_status(utilities.elevated(chown_args, stdout=subprocess.PIPE, stderr=subprocess.STDOUT))
|
utilities.process_status(utilities.elevated(chown_args, stdout=subprocess.PIPE, stderr=subprocess.STDOUT))
|
||||||
|
|
||||||
|
|
||||||
def check_files(self):
|
def _check_files(self):
|
||||||
if Path(self.constants.payload_local_binaries_root_path).exists():
|
if Path(self.constants.payload_local_binaries_root_path).exists():
|
||||||
print("- Found local Apple Binaries")
|
logging.info("- Local PatcherSupportPkg resources available, continuing...")
|
||||||
if self.constants.gui_mode is False:
|
return True
|
||||||
patch_input = input("Would you like to redownload?(y/n): ")
|
|
||||||
if patch_input in {"y", "Y", "yes", "Yes"}:
|
|
||||||
shutil.rmtree(Path(self.constants.payload_local_binaries_root_path))
|
|
||||||
output = self.download_files()
|
|
||||||
else:
|
|
||||||
output = True
|
|
||||||
else:
|
|
||||||
output = self.download_files()
|
|
||||||
else:
|
|
||||||
output = self.download_files()
|
|
||||||
return output
|
|
||||||
|
|
||||||
def download_files(self):
|
if Path(self.constants.payload_local_binaries_root_path_zip).exists():
|
||||||
if self.constants.cli_mode is True:
|
logging.info("- Local PatcherSupportPkg resources available, unzipping...")
|
||||||
download_result, link = sys_patch_download.grab_patcher_support_pkg(self.constants).download_files()
|
logging.info("- Unzipping binaries...")
|
||||||
else:
|
|
||||||
download_result = True
|
|
||||||
link = sys_patch_download.grab_patcher_support_pkg(self.constants).generate_pkg_link()
|
|
||||||
|
|
||||||
if download_result and self.constants.payload_local_binaries_root_path_zip.exists():
|
|
||||||
print("- Unzipping binaries...")
|
|
||||||
utilities.process_status(subprocess.run(["ditto", "-V", "-x", "-k", "--sequesterRsrc", "--rsrc", self.constants.payload_local_binaries_root_path_zip, self.constants.payload_path], stdout=subprocess.PIPE, stderr=subprocess.STDOUT))
|
utilities.process_status(subprocess.run(["ditto", "-V", "-x", "-k", "--sequesterRsrc", "--rsrc", self.constants.payload_local_binaries_root_path_zip, self.constants.payload_path], stdout=subprocess.PIPE, stderr=subprocess.STDOUT))
|
||||||
print("- Binaries downloaded to:")
|
return True
|
||||||
print(self.constants.payload_path)
|
|
||||||
return self.constants.payload_local_binaries_root_path
|
logging.info("- PatcherSupportPkg resources missing, Patcher likely corrupted!!!")
|
||||||
else:
|
return False
|
||||||
if self.constants.gui_mode is True:
|
|
||||||
print("- Download failed, please verify the below link work:")
|
|
||||||
print(link)
|
|
||||||
print("\nIf you continue to have issues, try using the Offline builds")
|
|
||||||
print("located on Github next to the other builds")
|
|
||||||
else:
|
|
||||||
input("\nPress enter to continue")
|
|
||||||
return None
|
|
||||||
|
|
||||||
# Entry Function
|
# Entry Function
|
||||||
def start_patch(self):
|
def start_patch(self):
|
||||||
print("- Starting Patch Process")
|
logging.info("- Starting Patch Process")
|
||||||
print(f"- Determining Required Patch set for Darwin {self.constants.detected_os}")
|
logging.info(f"- Determining Required Patch set for Darwin {self.constants.detected_os}")
|
||||||
self.patch_set_dictionary = sys_patch_detect.detect_root_patch(self.computer.real_model, self.constants).generate_patchset(self.hardware_details)
|
self.patch_set_dictionary = sys_patch_detect.DetectRootPatch(self.computer.real_model, self.constants).generate_patchset(self.hardware_details)
|
||||||
|
|
||||||
if self.patch_set_dictionary == {}:
|
if self.patch_set_dictionary == {}:
|
||||||
change_menu = None
|
change_menu = None
|
||||||
print("- No Root Patches required for your machine!")
|
logging.info("- No Root Patches required for your machine!")
|
||||||
if self.constants.gui_mode is False:
|
if self.constants.gui_mode is False:
|
||||||
input("\nPress [ENTER] to return to the main menu: ")
|
input("\nPress [ENTER] to return to the main menu: ")
|
||||||
elif self.constants.gui_mode is False:
|
elif self.constants.gui_mode is False:
|
||||||
change_menu = input("Would you like to continue with Root Volume Patching?(y/n): ")
|
change_menu = input("Would you like to continue with Root Volume Patching?(y/n): ")
|
||||||
else:
|
else:
|
||||||
change_menu = "y"
|
change_menu = "y"
|
||||||
print("- Continuing root patching")
|
logging.info("- Continuing root patching")
|
||||||
if change_menu in ["y", "Y"]:
|
if change_menu in ["y", "Y"]:
|
||||||
print("- Verifying whether Root Patching possible")
|
logging.info("- Verifying whether Root Patching possible")
|
||||||
if sys_patch_detect.detect_root_patch(self.computer.real_model, self.constants).verify_patch_allowed(print_errors=not self.constants.wxpython_variant) is True:
|
if sys_patch_detect.DetectRootPatch(self.computer.real_model, self.constants).verify_patch_allowed(print_errors=not self.constants.wxpython_variant) is True:
|
||||||
print("- Patcher is capable of patching")
|
logging.info("- Patcher is capable of patching")
|
||||||
if self.check_files():
|
if self._check_files():
|
||||||
if self.mount_root_vol() is True:
|
if self._mount_root_vol() is True:
|
||||||
self.patch_root_vol()
|
self._patch_root_vol()
|
||||||
if self.constants.gui_mode is False:
|
if self.constants.gui_mode is False:
|
||||||
input("\nPress [ENTER] to return to the main menu")
|
input("\nPress [ENTER] to return to the main menu")
|
||||||
else:
|
else:
|
||||||
print("- Recommend rebooting the machine and trying to patch again")
|
logging.info("- Recommend rebooting the machine and trying to patch again")
|
||||||
if self.constants.gui_mode is False:
|
if self.constants.gui_mode is False:
|
||||||
input("- Press [ENTER] to exit: ")
|
input("- Press [ENTER] to exit: ")
|
||||||
elif self.constants.gui_mode is False:
|
elif self.constants.gui_mode is False:
|
||||||
input("\nPress [ENTER] to return to the main menu: ")
|
input("\nPress [ENTER] to return to the main menu: ")
|
||||||
|
|
||||||
else:
|
else:
|
||||||
print("- Returning to main menu")
|
logging.info("- Returning to main menu")
|
||||||
|
|
||||||
def start_unpatch(self):
|
def start_unpatch(self):
|
||||||
print("- Starting Unpatch Process")
|
logging.info("- Starting Unpatch Process")
|
||||||
if sys_patch_detect.detect_root_patch(self.computer.real_model, self.constants).verify_patch_allowed(print_errors=True) is True:
|
if sys_patch_detect.DetectRootPatch(self.computer.real_model, self.constants).verify_patch_allowed(print_errors=True) is True:
|
||||||
if self.mount_root_vol() is True:
|
if self._mount_root_vol() is True:
|
||||||
self.unpatch_root_vol()
|
self._unpatch_root_vol()
|
||||||
if self.constants.gui_mode is False:
|
if self.constants.gui_mode is False:
|
||||||
input("\nPress [ENTER] to return to the main menu")
|
input("\nPress [ENTER] to return to the main menu")
|
||||||
else:
|
else:
|
||||||
print("- Recommend rebooting the machine and trying to patch again")
|
logging.info("- Recommend rebooting the machine and trying to patch again")
|
||||||
if self.constants.gui_mode is False:
|
if self.constants.gui_mode is False:
|
||||||
input("- Press [ENTER] to exit: ")
|
input("- Press [ENTER] to exit: ")
|
||||||
elif self.constants.gui_mode is False:
|
elif self.constants.gui_mode is False:
|
||||||
|
|||||||
@@ -1,60 +1,74 @@
|
|||||||
# Auto Patching's main purpose is to try and tell the user they're missing root patches
|
|
||||||
# New users may not realize OS updates remove our patches, so we try and run when nessasary
|
|
||||||
# Conditions for running:
|
|
||||||
# - Verify running GUI (TUI users can write their own scripts)
|
|
||||||
# - Verify the Snapshot Seal is intact (if not, assume user is running patches)
|
|
||||||
# - Verify this model needs patching (if not, assume user upgraded hardware and OCLP was not removed)
|
|
||||||
# - Verify there are no updates for OCLP (ensure we have the latest patch sets)
|
|
||||||
# If all these tests pass, start Root Patcher
|
|
||||||
# Copyright (C) 2022, Mykola Grymalyuk
|
# Copyright (C) 2022, Mykola Grymalyuk
|
||||||
|
|
||||||
from pathlib import Path
|
|
||||||
import plistlib
|
import plistlib
|
||||||
import subprocess
|
import subprocess
|
||||||
import webbrowser
|
import webbrowser
|
||||||
from resources import utilities, updates, global_settings
|
import logging
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
from resources import utilities, updates, global_settings, network_handler, constants
|
||||||
from resources.sys_patch import sys_patch_detect
|
from resources.sys_patch import sys_patch_detect
|
||||||
from resources.gui import gui_main
|
from resources.gui import gui_main
|
||||||
|
|
||||||
class AutomaticSysPatch:
|
|
||||||
|
|
||||||
def __init__(self, constants):
|
class AutomaticSysPatch:
|
||||||
self.constants = constants
|
"""
|
||||||
|
Library of functions for launch agent, including automatic patching
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self, global_constants: constants.Constants):
|
||||||
|
self.constants: constants.Constants = global_constants
|
||||||
|
|
||||||
|
|
||||||
def start_auto_patch(self):
|
def start_auto_patch(self):
|
||||||
print("- Starting Automatic Patching")
|
"""
|
||||||
|
Initiates automatic patching
|
||||||
|
|
||||||
|
Auto Patching's main purpose is to try and tell the user they're missing root patches
|
||||||
|
New users may not realize OS updates remove our patches, so we try and run when nessasary
|
||||||
|
|
||||||
|
Conditions for running:
|
||||||
|
- Verify running GUI (TUI users can write their own scripts)
|
||||||
|
- Verify the Snapshot Seal is intact (if not, assume user is running patches)
|
||||||
|
- Verify this model needs patching (if not, assume user upgraded hardware and OCLP was not removed)
|
||||||
|
- Verify there are no updates for OCLP (ensure we have the latest patch sets)
|
||||||
|
|
||||||
|
If all these tests pass, start Root Patcher
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
logging.info("- Starting Automatic Patching")
|
||||||
if self.constants.wxpython_variant is False:
|
if self.constants.wxpython_variant is False:
|
||||||
print("- Auto Patch option is not supported on TUI, please use GUI")
|
logging.info("- Auto Patch option is not supported on TUI, please use GUI")
|
||||||
return
|
return
|
||||||
|
|
||||||
if utilities.check_seal() is True:
|
if utilities.check_seal() is True:
|
||||||
print("- Detected Snapshot seal intact, detecting patches")
|
logging.info("- Detected Snapshot seal intact, detecting patches")
|
||||||
patches = sys_patch_detect.detect_root_patch(self.constants.computer.real_model, self.constants).detect_patch_set()
|
patches = sys_patch_detect.DetectRootPatch(self.constants.computer.real_model, self.constants).detect_patch_set()
|
||||||
if not any(not patch.startswith("Settings") and not patch.startswith("Validation") and patches[patch] is True for patch in patches):
|
if not any(not patch.startswith("Settings") and not patch.startswith("Validation") and patches[patch] is True for patch in patches):
|
||||||
patches = []
|
patches = []
|
||||||
if patches:
|
if patches:
|
||||||
print("- Detected applicable patches, determining whether possible to patch")
|
logging.info("- Detected applicable patches, determining whether possible to patch")
|
||||||
if patches["Validation: Patching Possible"] is False:
|
if patches["Validation: Patching Possible"] is False:
|
||||||
print("- Cannot run patching")
|
logging.info("- Cannot run patching")
|
||||||
return
|
return
|
||||||
|
|
||||||
print("- Determined patching is possible, checking for OCLP updates")
|
logging.info("- Determined patching is possible, checking for OCLP updates")
|
||||||
patch_string = ""
|
patch_string = ""
|
||||||
for patch in patches:
|
for patch in patches:
|
||||||
if patches[patch] is True and not patch.startswith("Settings") and not patch.startswith("Validation"):
|
if patches[patch] is True and not patch.startswith("Settings") and not patch.startswith("Validation"):
|
||||||
patch_string += f"- {patch}\n"
|
patch_string += f"- {patch}\n"
|
||||||
# Check for updates
|
# Check for updates
|
||||||
dict = updates.check_binary_updates(self.constants).check_binary_updates()
|
dict = updates.CheckBinaryUpdates(self.constants).check_binary_updates()
|
||||||
if not dict:
|
if not dict:
|
||||||
print("- No new binaries found on Github, proceeding with patching")
|
logging.info("- No new binaries found on Github, proceeding with patching")
|
||||||
if self.constants.launcher_script is None:
|
if self.constants.launcher_script is None:
|
||||||
args_string = f"'{self.constants.launcher_binary}' --gui_patch"
|
args_string = f"'{self.constants.launcher_binary}' --gui_patch"
|
||||||
else:
|
else:
|
||||||
args_string = f"{self.constants.launcher_binary} {self.constants.launcher_script} --gui_patch"
|
args_string = f"{self.constants.launcher_binary} {self.constants.launcher_script} --gui_patch"
|
||||||
|
|
||||||
warning_str = ""
|
warning_str = ""
|
||||||
if utilities.verify_network_connection("https://api.github.com/repos/dortania/OpenCore-Legacy-Patcher/releases/latest") is False:
|
if network_handler.NetworkUtilities("https://api.github.com/repos/dortania/OpenCore-Legacy-Patcher/releases/latest").verify_network_connection() is False:
|
||||||
warning_str = f"""\n\nWARNING: We're unable to verify whether there are any new releases of OpenCore Legacy Patcher on Github. Be aware that you may be using an outdated version for this OS. If you're unsure, verify on Github that OpenCore Legacy Patcher {self.constants.patcher_version} is the latest official release"""
|
warning_str = f"""\n\nWARNING: We're unable to verify whether there are any new releases of OpenCore Legacy Patcher on Github. Be aware that you may be using an outdated version for this OS. If you're unsure, verify on Github that OpenCore Legacy Patcher {self.constants.patcher_version} is the latest official release"""
|
||||||
|
|
||||||
args = [
|
args = [
|
||||||
@@ -87,7 +101,7 @@ class AutomaticSysPatch:
|
|||||||
for key in dict:
|
for key in dict:
|
||||||
version = dict[key]["Version"]
|
version = dict[key]["Version"]
|
||||||
github_link = dict[key]["Github Link"]
|
github_link = dict[key]["Github Link"]
|
||||||
print(f"- Found new version: {version}")
|
logging.info(f"- Found new version: {version}")
|
||||||
|
|
||||||
# launch osascript to ask user if they want to apply the update
|
# launch osascript to ask user if they want to apply the update
|
||||||
# if yes, open the link in the default browser
|
# if yes, open the link in the default browser
|
||||||
@@ -108,30 +122,39 @@ class AutomaticSysPatch:
|
|||||||
|
|
||||||
return
|
return
|
||||||
else:
|
else:
|
||||||
print("- No patches detected")
|
logging.info("- No patches detected")
|
||||||
else:
|
else:
|
||||||
print("- Detected Snapshot seal not intact, skipping")
|
logging.info("- Detected Snapshot seal not intact, skipping")
|
||||||
|
|
||||||
if self.determine_if_versions_match() is False:
|
if self._determine_if_versions_match():
|
||||||
self.determine_if_boot_matches()
|
self._determine_if_boot_matches()
|
||||||
|
|
||||||
|
|
||||||
def determine_if_versions_match(self):
|
def _determine_if_versions_match(self):
|
||||||
print("- Checking booted vs installed OCLP Build")
|
"""
|
||||||
|
Determine if the booted version of OCLP matches the installed version
|
||||||
|
|
||||||
|
ie. Installed app is 0.2.0, but EFI version is 0.1.0
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
bool: True if versions match, False if not
|
||||||
|
"""
|
||||||
|
|
||||||
|
logging.info("- Checking booted vs installed OCLP Build")
|
||||||
if self.constants.computer.oclp_version is None:
|
if self.constants.computer.oclp_version is None:
|
||||||
print("- Booted version not found")
|
logging.info("- Booted version not found")
|
||||||
return False
|
return True
|
||||||
|
|
||||||
if self.constants.computer.oclp_version == self.constants.patcher_version:
|
if self.constants.computer.oclp_version == self.constants.patcher_version:
|
||||||
print("- Versions match")
|
logging.info("- Versions match")
|
||||||
return False
|
return True
|
||||||
|
|
||||||
# Check if installed version is newer than booted version
|
# Check if installed version is newer than booted version
|
||||||
if updates.check_binary_updates(self.constants).check_if_build_newer(
|
if updates.CheckBinaryUpdates(self.constants)._check_if_build_newer(
|
||||||
self.constants.computer.oclp_version.split("."), self.constants.patcher_version.split(".")
|
self.constants.computer.oclp_version.split("."), self.constants.patcher_version.split(".")
|
||||||
) is True:
|
) is True:
|
||||||
print("- Installed version is newer than booted version")
|
logging.info("- Installed version is newer than booted version")
|
||||||
return False
|
return True
|
||||||
|
|
||||||
args = [
|
args = [
|
||||||
"osascript",
|
"osascript",
|
||||||
@@ -145,45 +168,52 @@ class AutomaticSysPatch:
|
|||||||
stderr=subprocess.STDOUT
|
stderr=subprocess.STDOUT
|
||||||
)
|
)
|
||||||
if output.returncode == 0:
|
if output.returncode == 0:
|
||||||
print("- Launching GUI's Build/Install menu")
|
logging.info("- Launching GUI's Build/Install menu")
|
||||||
self.constants.start_build_install = True
|
self.constants.start_build_install = True
|
||||||
gui_main.wx_python_gui(self.constants).main_menu(None)
|
gui_main.wx_python_gui(self.constants).main_menu(None)
|
||||||
|
|
||||||
return True
|
return False
|
||||||
|
|
||||||
def determine_if_boot_matches(self):
|
|
||||||
# Goal of this function is to determine whether the user
|
|
||||||
# is using a USB drive to Boot OpenCore but macOS does not
|
|
||||||
# reside on the same drive as the USB.
|
|
||||||
|
|
||||||
# If we determine them to be mismatched, notify the user
|
def _determine_if_boot_matches(self):
|
||||||
# and ask if they want to install to install to disk
|
"""
|
||||||
|
Determine if the boot drive matches the macOS drive
|
||||||
|
ie. Booted from USB, but macOS is on internal disk
|
||||||
|
|
||||||
print("- Determining if macOS drive matches boot drive")
|
Goal of this function is to determine whether the user
|
||||||
should_notify = global_settings.global_settings().read_property("AutoPatch_Notify_Mismatched_Disks")
|
is using a USB drive to Boot OpenCore but macOS does not
|
||||||
|
reside on the same drive as the USB.
|
||||||
|
|
||||||
|
If we determine them to be mismatched, notify the user
|
||||||
|
and ask if they want to install to install to disk.
|
||||||
|
"""
|
||||||
|
|
||||||
|
logging.info("- Determining if macOS drive matches boot drive")
|
||||||
|
|
||||||
|
should_notify = global_settings.GlobalEnviromentSettings().read_property("AutoPatch_Notify_Mismatched_Disks")
|
||||||
if should_notify is False:
|
if should_notify is False:
|
||||||
print("- Skipping due to user preference")
|
logging.info("- Skipping due to user preference")
|
||||||
return
|
return
|
||||||
if self.constants.host_is_hackintosh is True:
|
if self.constants.host_is_hackintosh is True:
|
||||||
print("- Skipping due to hackintosh")
|
logging.info("- Skipping due to hackintosh")
|
||||||
return
|
return
|
||||||
if not self.constants.booted_oc_disk:
|
if not self.constants.booted_oc_disk:
|
||||||
print("- Failed to find disk OpenCore launched from")
|
logging.info("- Failed to find disk OpenCore launched from")
|
||||||
return
|
return
|
||||||
|
|
||||||
root_disk = self.constants.booted_oc_disk.strip("disk")
|
root_disk = self.constants.booted_oc_disk.strip("disk")
|
||||||
root_disk = "disk" + root_disk.split("s")[0]
|
root_disk = "disk" + root_disk.split("s")[0]
|
||||||
|
|
||||||
print(f" - Boot Drive: {self.constants.booted_oc_disk} ({root_disk})")
|
logging.info(f" - Boot Drive: {self.constants.booted_oc_disk} ({root_disk})")
|
||||||
macOS_disk = utilities.get_disk_path()
|
macOS_disk = utilities.get_disk_path()
|
||||||
print(f" - macOS Drive: {macOS_disk}")
|
logging.info(f" - macOS Drive: {macOS_disk}")
|
||||||
physical_stores = utilities.find_apfs_physical_volume(macOS_disk)
|
physical_stores = utilities.find_apfs_physical_volume(macOS_disk)
|
||||||
print(f" - APFS Physical Stores: {physical_stores}")
|
logging.info(f" - APFS Physical Stores: {physical_stores}")
|
||||||
|
|
||||||
disk_match = False
|
disk_match = False
|
||||||
for disk in physical_stores:
|
for disk in physical_stores:
|
||||||
if root_disk in disk:
|
if root_disk in disk:
|
||||||
print(f"- Boot drive matches macOS drive ({disk})")
|
logging.info(f"- Boot drive matches macOS drive ({disk})")
|
||||||
disk_match = True
|
disk_match = True
|
||||||
break
|
break
|
||||||
|
|
||||||
@@ -191,15 +221,15 @@ class AutomaticSysPatch:
|
|||||||
return
|
return
|
||||||
|
|
||||||
# Check if OpenCore is on a USB drive
|
# Check if OpenCore is on a USB drive
|
||||||
print("- Boot Drive does not match macOS drive, checking if OpenCore is on a USB drive")
|
logging.info("- Boot Drive does not match macOS drive, checking if OpenCore is on a USB drive")
|
||||||
|
|
||||||
disk_info = plistlib.loads(subprocess.run(["diskutil", "info", "-plist", root_disk], stdout=subprocess.PIPE).stdout)
|
disk_info = plistlib.loads(subprocess.run(["diskutil", "info", "-plist", root_disk], stdout=subprocess.PIPE).stdout)
|
||||||
try:
|
try:
|
||||||
if disk_info["Ejectable"] is False:
|
if disk_info["Ejectable"] is False:
|
||||||
print("- Boot Disk is not removable, skipping prompt")
|
logging.info("- Boot Disk is not removable, skipping prompt")
|
||||||
return
|
return
|
||||||
|
|
||||||
print("- Boot Disk is ejectable, prompting user to install to internal")
|
logging.info("- Boot Disk is ejectable, prompting user to install to internal")
|
||||||
|
|
||||||
args = [
|
args = [
|
||||||
"osascript",
|
"osascript",
|
||||||
@@ -213,65 +243,72 @@ class AutomaticSysPatch:
|
|||||||
stderr=subprocess.STDOUT
|
stderr=subprocess.STDOUT
|
||||||
)
|
)
|
||||||
if output.returncode == 0:
|
if output.returncode == 0:
|
||||||
print("- Launching GUI's Build/Install menu")
|
logging.info("- Launching GUI's Build/Install menu")
|
||||||
self.constants.start_build_install = True
|
self.constants.start_build_install = True
|
||||||
gui_main.wx_python_gui(self.constants).main_menu(None)
|
gui_main.wx_python_gui(self.constants).main_menu(None)
|
||||||
|
|
||||||
except KeyError:
|
except KeyError:
|
||||||
print("- Unable to determine if boot disk is removable, skipping prompt")
|
logging.info("- Unable to determine if boot disk is removable, skipping prompt")
|
||||||
|
|
||||||
|
|
||||||
def install_auto_patcher_launch_agent(self):
|
def install_auto_patcher_launch_agent(self):
|
||||||
# Installs the following:
|
"""
|
||||||
# - OpenCore-Patcher.app in /Library/Application Support/Dortania/
|
Install the Auto Patcher Launch Agent
|
||||||
# - com.dortania.opencore-legacy-patcher.auto-patch.plist in /Library/LaunchAgents/
|
|
||||||
|
Installs the following:
|
||||||
|
- OpenCore-Patcher.app in /Library/Application Support/Dortania/
|
||||||
|
- com.dortania.opencore-legacy-patcher.auto-patch.plist in /Library/LaunchAgents/
|
||||||
|
|
||||||
|
See start_auto_patch() comments for more info
|
||||||
|
"""
|
||||||
|
|
||||||
if self.constants.launcher_script is not None:
|
if self.constants.launcher_script is not None:
|
||||||
print("- Skipping Auto Patcher Launch Agent, not supported when running from source")
|
logging.info("- Skipping Auto Patcher Launch Agent, not supported when running from source")
|
||||||
return
|
return
|
||||||
|
|
||||||
if self.constants.launcher_binary.startswith("/Library/Application Support/Dortania/"):
|
if self.constants.launcher_binary.startswith("/Library/Application Support/Dortania/"):
|
||||||
print("- Skipping Auto Patcher Launch Agent, already installed")
|
logging.info("- Skipping Auto Patcher Launch Agent, already installed")
|
||||||
return
|
return
|
||||||
|
|
||||||
# Verify our binary isn't located in '/Library/Application Support/Dortania/'
|
# Verify our binary isn't located in '/Library/Application Support/Dortania/'
|
||||||
# As we'd simply be duplicating ourselves
|
# As we'd simply be duplicating ourselves
|
||||||
print("- Installing Auto Patcher Launch Agent")
|
logging.info("- Installing Auto Patcher Launch Agent")
|
||||||
|
|
||||||
if not Path("Library/Application Support/Dortania").exists():
|
if not Path("Library/Application Support/Dortania").exists():
|
||||||
print("- Creating /Library/Application Support/Dortania/")
|
logging.info("- Creating /Library/Application Support/Dortania/")
|
||||||
utilities.process_status(utilities.elevated(["mkdir", "-p", "/Library/Application Support/Dortania"], stdout=subprocess.PIPE, stderr=subprocess.STDOUT))
|
utilities.process_status(utilities.elevated(["mkdir", "-p", "/Library/Application Support/Dortania"], stdout=subprocess.PIPE, stderr=subprocess.STDOUT))
|
||||||
|
|
||||||
print("- Copying OpenCore Patcher to /Library/Application Support/Dortania/")
|
logging.info("- Copying OpenCore Patcher to /Library/Application Support/Dortania/")
|
||||||
if Path("/Library/Application Support/Dortania/OpenCore-Patcher.app").exists():
|
if Path("/Library/Application Support/Dortania/OpenCore-Patcher.app").exists():
|
||||||
print("- Deleting existing OpenCore-Patcher")
|
logging.info("- Deleting existing OpenCore-Patcher")
|
||||||
utilities.process_status(utilities.elevated(["rm", "-R", "/Library/Application Support/Dortania/OpenCore-Patcher.app"], stdout=subprocess.PIPE, stderr=subprocess.STDOUT))
|
utilities.process_status(utilities.elevated(["rm", "-R", "/Library/Application Support/Dortania/OpenCore-Patcher.app"], stdout=subprocess.PIPE, stderr=subprocess.STDOUT))
|
||||||
|
|
||||||
# Strip everything after OpenCore-Patcher.app
|
# Strip everything after OpenCore-Patcher.app
|
||||||
path = str(self.constants.launcher_binary).split("/Contents/MacOS/OpenCore-Patcher")[0]
|
path = str(self.constants.launcher_binary).split("/Contents/MacOS/OpenCore-Patcher")[0]
|
||||||
print(f"- Copying {path} to /Library/Application Support/Dortania/")
|
logging.info(f"- Copying {path} to /Library/Application Support/Dortania/")
|
||||||
utilities.process_status(utilities.elevated(["ditto", path, "/Library/Application Support/Dortania/OpenCore-Patcher.app"], stdout=subprocess.PIPE, stderr=subprocess.STDOUT))
|
utilities.process_status(utilities.elevated(["ditto", path, "/Library/Application Support/Dortania/OpenCore-Patcher.app"], stdout=subprocess.PIPE, stderr=subprocess.STDOUT))
|
||||||
|
|
||||||
if not Path("/Library/Application Support/Dortania/OpenCore-Patcher.app").exists():
|
if not Path("/Library/Application Support/Dortania/OpenCore-Patcher.app").exists():
|
||||||
# Sometimes the binary the user launches may have a suffix (ie. OpenCore-Patcher 3.app)
|
# Sometimes the binary the user launches may have a suffix (ie. OpenCore-Patcher 3.app)
|
||||||
# We'll want to rename it to OpenCore-Patcher.app
|
# We'll want to rename it to OpenCore-Patcher.app
|
||||||
path = path.split("/")[-1]
|
path = path.split("/")[-1]
|
||||||
print(f"- Renaming {path} to OpenCore-Patcher.app")
|
logging.info(f"- Renaming {path} to OpenCore-Patcher.app")
|
||||||
utilities.process_status(utilities.elevated(["mv", f"/Library/Application Support/Dortania/{path}", "/Library/Application Support/Dortania/OpenCore-Patcher.app"], stdout=subprocess.PIPE, stderr=subprocess.STDOUT))
|
utilities.process_status(utilities.elevated(["mv", f"/Library/Application Support/Dortania/{path}", "/Library/Application Support/Dortania/OpenCore-Patcher.app"], stdout=subprocess.PIPE, stderr=subprocess.STDOUT))
|
||||||
|
|
||||||
subprocess.run(["xattr", "-cr", "/Library/Application Support/Dortania/OpenCore-Patcher.app"], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
|
subprocess.run(["xattr", "-cr", "/Library/Application Support/Dortania/OpenCore-Patcher.app"], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
|
||||||
|
|
||||||
# Copy over our launch agent
|
# Copy over our launch agent
|
||||||
print("- Copying auto-patch.plist Launch Agent to /Library/LaunchAgents/")
|
logging.info("- Copying auto-patch.plist Launch Agent to /Library/LaunchAgents/")
|
||||||
if Path("/Library/LaunchAgents/com.dortania.opencore-legacy-patcher.auto-patch.plist").exists():
|
if Path("/Library/LaunchAgents/com.dortania.opencore-legacy-patcher.auto-patch.plist").exists():
|
||||||
print("- Deleting existing auto-patch.plist")
|
logging.info("- Deleting existing auto-patch.plist")
|
||||||
utilities.process_status(utilities.elevated(["rm", "/Library/LaunchAgents/com.dortania.opencore-legacy-patcher.auto-patch.plist"], stdout=subprocess.PIPE, stderr=subprocess.STDOUT))
|
utilities.process_status(utilities.elevated(["rm", "/Library/LaunchAgents/com.dortania.opencore-legacy-patcher.auto-patch.plist"], stdout=subprocess.PIPE, stderr=subprocess.STDOUT))
|
||||||
if not Path("/Library/LaunchAgents/").exists():
|
if not Path("/Library/LaunchAgents/").exists():
|
||||||
print("- Creating /Library/LaunchAgents/")
|
logging.info("- Creating /Library/LaunchAgents/")
|
||||||
utilities.process_status(utilities.elevated(["mkdir", "-p", "/Library/LaunchAgents/"], stdout=subprocess.PIPE, stderr=subprocess.STDOUT))
|
utilities.process_status(utilities.elevated(["mkdir", "-p", "/Library/LaunchAgents/"], stdout=subprocess.PIPE, stderr=subprocess.STDOUT))
|
||||||
utilities.process_status(utilities.elevated(["cp", self.constants.auto_patch_launch_agent_path, "/Library/LaunchAgents/"], stdout=subprocess.PIPE, stderr=subprocess.STDOUT))
|
utilities.process_status(utilities.elevated(["cp", self.constants.auto_patch_launch_agent_path, "/Library/LaunchAgents/"], stdout=subprocess.PIPE, stderr=subprocess.STDOUT))
|
||||||
|
|
||||||
# Set the permissions on the com.dortania.opencore-legacy-patcher.auto-patch.plist
|
# Set the permissions on the com.dortania.opencore-legacy-patcher.auto-patch.plist
|
||||||
print("- Setting permissions on auto-patch.plist")
|
logging.info("- Setting permissions on auto-patch.plist")
|
||||||
utilities.process_status(utilities.elevated(["chmod", "644", "/Library/LaunchAgents/com.dortania.opencore-legacy-patcher.auto-patch.plist"], stdout=subprocess.PIPE, stderr=subprocess.STDOUT))
|
utilities.process_status(utilities.elevated(["chmod", "644", "/Library/LaunchAgents/com.dortania.opencore-legacy-patcher.auto-patch.plist"], stdout=subprocess.PIPE, stderr=subprocess.STDOUT))
|
||||||
utilities.process_status(utilities.elevated(["chown", "root:wheel", "/Library/LaunchAgents/com.dortania.opencore-legacy-patcher.auto-patch.plist"], stdout=subprocess.PIPE, stderr=subprocess.STDOUT))
|
utilities.process_status(utilities.elevated(["chown", "root:wheel", "/Library/LaunchAgents/com.dortania.opencore-legacy-patcher.auto-patch.plist"], stdout=subprocess.PIPE, stderr=subprocess.STDOUT))
|
||||||
|
|
||||||
@@ -279,5 +316,5 @@ class AutomaticSysPatch:
|
|||||||
# Simply an easy way for users to notice the app
|
# Simply an easy way for users to notice the app
|
||||||
# If there's already an alias or exiting app, skip
|
# If there's already an alias or exiting app, skip
|
||||||
if not Path("/Applications/OpenCore-Patcher.app").exists():
|
if not Path("/Applications/OpenCore-Patcher.app").exists():
|
||||||
print("- Making app alias")
|
logging.info("- Making app alias")
|
||||||
utilities.process_status(utilities.elevated(["ln", "-s", "/Library/Application Support/Dortania/OpenCore-Patcher.app", "/Applications/OpenCore-Patcher.app"], stdout=subprocess.PIPE, stderr=subprocess.STDOUT))
|
utilities.process_status(utilities.elevated(["ln", "-s", "/Library/Application Support/Dortania/OpenCore-Patcher.app", "/Applications/OpenCore-Patcher.app"], stdout=subprocess.PIPE, stderr=subprocess.STDOUT))
|
||||||
@@ -3,18 +3,39 @@
|
|||||||
# Used when supplying data to sys_patch.py
|
# Used when supplying data to sys_patch.py
|
||||||
# Copyright (C) 2020-2022, Dhinak G, Mykola Grymalyuk
|
# Copyright (C) 2020-2022, Dhinak G, Mykola Grymalyuk
|
||||||
|
|
||||||
from resources import constants, device_probe, utilities, amfi_detect
|
import plistlib
|
||||||
from resources.sys_patch import sys_patch_helpers
|
import logging
|
||||||
from data import model_array, os_data, sip_data, sys_patch_dict, smbios_data, cpu_data
|
|
||||||
|
|
||||||
import py_sip_xnu
|
import py_sip_xnu
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
import plistlib
|
|
||||||
|
|
||||||
class detect_root_patch:
|
from resources import (
|
||||||
def __init__(self, model, versions):
|
constants,
|
||||||
self.model = model
|
device_probe,
|
||||||
self.constants: constants.Constants() = versions
|
utilities,
|
||||||
|
amfi_detect,
|
||||||
|
network_handler,
|
||||||
|
kdk_handler
|
||||||
|
)
|
||||||
|
from data import (
|
||||||
|
model_array,
|
||||||
|
os_data,
|
||||||
|
sip_data,
|
||||||
|
sys_patch_dict,
|
||||||
|
smbios_data,
|
||||||
|
cpu_data
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class DetectRootPatch:
|
||||||
|
"""
|
||||||
|
Library for querying root volume patches applicable for booted system
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self, model: str, global_constants: constants.Constants):
|
||||||
|
self.model: str = model
|
||||||
|
|
||||||
|
self.constants: constants.Constants = global_constants
|
||||||
|
|
||||||
self.computer = self.constants.computer
|
self.computer = self.constants.computer
|
||||||
|
|
||||||
# GPU Patch Detection
|
# GPU Patch Detection
|
||||||
@@ -62,19 +83,24 @@ class detect_root_patch:
|
|||||||
self.missing_nv_web_opengl = False
|
self.missing_nv_web_opengl = False
|
||||||
self.missing_nv_compat = False
|
self.missing_nv_compat = False
|
||||||
|
|
||||||
def detect_gpus(self):
|
|
||||||
|
def _detect_gpus(self):
|
||||||
|
"""
|
||||||
|
Query GPUs and set flags for applicable patches
|
||||||
|
"""
|
||||||
|
|
||||||
gpus = self.constants.computer.gpus
|
gpus = self.constants.computer.gpus
|
||||||
non_metal_os = os_data.os_data.catalina
|
non_metal_os = os_data.os_data.catalina
|
||||||
for i, gpu in enumerate(gpus):
|
for i, gpu in enumerate(gpus):
|
||||||
if gpu.class_code and gpu.class_code != 0xFFFFFFFF:
|
if gpu.class_code and gpu.class_code != 0xFFFFFFFF:
|
||||||
print(f"- Found GPU ({i}): {utilities.friendly_hex(gpu.vendor_id)}:{utilities.friendly_hex(gpu.device_id)}")
|
logging.info(f"- Found GPU ({i}): {utilities.friendly_hex(gpu.vendor_id)}:{utilities.friendly_hex(gpu.device_id)}")
|
||||||
if gpu.arch in [device_probe.NVIDIA.Archs.Tesla] and self.constants.force_nv_web is False:
|
if gpu.arch in [device_probe.NVIDIA.Archs.Tesla] and self.constants.force_nv_web is False:
|
||||||
if self.constants.detected_os > non_metal_os:
|
if self.constants.detected_os > non_metal_os:
|
||||||
self.nvidia_tesla = True
|
self.nvidia_tesla = True
|
||||||
self.amfi_must_disable = True
|
self.amfi_must_disable = True
|
||||||
if os_data.os_data.ventura in self.constants.legacy_accel_support:
|
if os_data.os_data.ventura in self.constants.legacy_accel_support:
|
||||||
self.amfi_shim_bins = True
|
self.amfi_shim_bins = True
|
||||||
self.legacy_keyboard_backlight = self.check_legacy_keyboard_backlight()
|
self.legacy_keyboard_backlight = self._check_legacy_keyboard_backlight()
|
||||||
self.requires_root_kc = True
|
self.requires_root_kc = True
|
||||||
elif gpu.arch == device_probe.NVIDIA.Archs.Kepler and self.constants.force_nv_web is False:
|
elif gpu.arch == device_probe.NVIDIA.Archs.Kepler and self.constants.force_nv_web is False:
|
||||||
if self.constants.detected_os > os_data.os_data.big_sur:
|
if self.constants.detected_os > os_data.os_data.big_sur:
|
||||||
@@ -94,6 +120,8 @@ class detect_root_patch:
|
|||||||
self.supports_metal = True
|
self.supports_metal = True
|
||||||
if self.constants.detected_os >= os_data.os_data.ventura:
|
if self.constants.detected_os >= os_data.os_data.ventura:
|
||||||
self.amfi_must_disable = True
|
self.amfi_must_disable = True
|
||||||
|
if (self.constants.detected_os == os_data.os_data.ventura and self.constants.detected_os_minor >= 4) or self.constants.detected_os > os_data.os_data.ventura:
|
||||||
|
self.amfi_shim_bins = True
|
||||||
elif gpu.arch in [
|
elif gpu.arch in [
|
||||||
device_probe.NVIDIA.Archs.Fermi,
|
device_probe.NVIDIA.Archs.Fermi,
|
||||||
device_probe.NVIDIA.Archs.Kepler,
|
device_probe.NVIDIA.Archs.Kepler,
|
||||||
@@ -164,7 +192,7 @@ class detect_root_patch:
|
|||||||
self.amfi_must_disable = True
|
self.amfi_must_disable = True
|
||||||
if os_data.os_data.ventura in self.constants.legacy_accel_support:
|
if os_data.os_data.ventura in self.constants.legacy_accel_support:
|
||||||
self.amfi_shim_bins = True
|
self.amfi_shim_bins = True
|
||||||
self.legacy_keyboard_backlight = self.check_legacy_keyboard_backlight()
|
self.legacy_keyboard_backlight = self._check_legacy_keyboard_backlight()
|
||||||
self.requires_root_kc = True
|
self.requires_root_kc = True
|
||||||
elif gpu.arch == device_probe.Intel.Archs.Sandy_Bridge:
|
elif gpu.arch == device_probe.Intel.Archs.Sandy_Bridge:
|
||||||
if self.constants.detected_os > non_metal_os:
|
if self.constants.detected_os > non_metal_os:
|
||||||
@@ -172,18 +200,22 @@ class detect_root_patch:
|
|||||||
self.amfi_must_disable = True
|
self.amfi_must_disable = True
|
||||||
if os_data.os_data.ventura in self.constants.legacy_accel_support:
|
if os_data.os_data.ventura in self.constants.legacy_accel_support:
|
||||||
self.amfi_shim_bins = True
|
self.amfi_shim_bins = True
|
||||||
self.legacy_keyboard_backlight = self.check_legacy_keyboard_backlight()
|
self.legacy_keyboard_backlight = self._check_legacy_keyboard_backlight()
|
||||||
self.requires_root_kc = True
|
self.requires_root_kc = True
|
||||||
elif gpu.arch == device_probe.Intel.Archs.Ivy_Bridge:
|
elif gpu.arch == device_probe.Intel.Archs.Ivy_Bridge:
|
||||||
if self.constants.detected_os > os_data.os_data.big_sur:
|
if self.constants.detected_os > os_data.os_data.big_sur:
|
||||||
self.ivy_gpu = True
|
self.ivy_gpu = True
|
||||||
if self.constants.detected_os >= os_data.os_data.ventura:
|
if self.constants.detected_os >= os_data.os_data.ventura:
|
||||||
self.amfi_must_disable = True
|
self.amfi_must_disable = True
|
||||||
|
if (self.constants.detected_os == os_data.os_data.ventura and self.constants.detected_os_minor >= 4) or self.constants.detected_os > os_data.os_data.ventura:
|
||||||
|
self.amfi_shim_bins = True
|
||||||
self.supports_metal = True
|
self.supports_metal = True
|
||||||
elif gpu.arch == device_probe.Intel.Archs.Haswell:
|
elif gpu.arch == device_probe.Intel.Archs.Haswell:
|
||||||
if self.constants.detected_os > os_data.os_data.monterey:
|
if self.constants.detected_os > os_data.os_data.monterey:
|
||||||
self.haswell_gpu = True
|
self.haswell_gpu = True
|
||||||
self.amfi_must_disable = True
|
self.amfi_must_disable = True
|
||||||
|
if (self.constants.detected_os == os_data.os_data.ventura and self.constants.detected_os_minor >= 4) or self.constants.detected_os > os_data.os_data.ventura:
|
||||||
|
self.amfi_shim_bins = True
|
||||||
self.supports_metal = True
|
self.supports_metal = True
|
||||||
elif gpu.arch == device_probe.Intel.Archs.Broadwell:
|
elif gpu.arch == device_probe.Intel.Archs.Broadwell:
|
||||||
if self.constants.detected_os > os_data.os_data.monterey:
|
if self.constants.detected_os > os_data.os_data.monterey:
|
||||||
@@ -219,17 +251,21 @@ class detect_root_patch:
|
|||||||
self.requires_root_kc = True
|
self.requires_root_kc = True
|
||||||
else:
|
else:
|
||||||
if self.requires_root_kc is True:
|
if self.requires_root_kc is True:
|
||||||
self.missing_kdk = not self.check_kdk()
|
self.missing_kdk = not self._check_kdk()
|
||||||
|
|
||||||
self.check_networking_support()
|
self._check_networking_support()
|
||||||
|
|
||||||
|
|
||||||
def check_networking_support(self):
|
def _check_networking_support(self):
|
||||||
# On macOS Ventura, networking support is required to download KDKs.
|
"""
|
||||||
# However for machines such as BCM94322, BCM94328 and Atheros chipsets,
|
Query for network requirement, ex. KDK downloading
|
||||||
# users may only have wifi as their only supported network interface.
|
|
||||||
# Thus we'll allow for KDK-less installs for these machines on first run.
|
On macOS Ventura, networking support is required to download KDKs.
|
||||||
# On subsequent runs, we'll require networking to be enabled.
|
However for machines such as BCM94322, BCM94328 and Atheros chipsets,
|
||||||
|
users may only have wifi as their only supported network interface.
|
||||||
|
Thus we'll allow for KDK-less installs for these machines on first run.
|
||||||
|
On subsequent runs, we'll require networking to be enabled.
|
||||||
|
"""
|
||||||
|
|
||||||
if self.constants.detected_os < os_data.os_data.ventura:
|
if self.constants.detected_os < os_data.os_data.ventura:
|
||||||
return
|
return
|
||||||
@@ -271,7 +307,11 @@ class detect_root_patch:
|
|||||||
self.legacy_keyboard_backlight = False
|
self.legacy_keyboard_backlight = False
|
||||||
|
|
||||||
|
|
||||||
def check_dgpu_status(self):
|
def _check_dgpu_status(self):
|
||||||
|
"""
|
||||||
|
Query whether system has an active dGPU
|
||||||
|
"""
|
||||||
|
|
||||||
dgpu = self.constants.computer.dgpu
|
dgpu = self.constants.computer.dgpu
|
||||||
if dgpu:
|
if dgpu:
|
||||||
if dgpu.class_code and dgpu.class_code == 0xFFFFFFFF:
|
if dgpu.class_code and dgpu.class_code == 0xFFFFFFFF:
|
||||||
@@ -280,25 +320,45 @@ class detect_root_patch:
|
|||||||
return True
|
return True
|
||||||
return False
|
return False
|
||||||
|
|
||||||
def detect_demux(self):
|
|
||||||
|
def _detect_demux(self):
|
||||||
|
"""
|
||||||
|
Query whether system has been demuxed (ex. MacBookPro8,2, disabled dGPU)
|
||||||
|
"""
|
||||||
|
|
||||||
# If GFX0 is missing, assume machine was demuxed
|
# If GFX0 is missing, assume machine was demuxed
|
||||||
# -wegnoegpu would also trigger this, so ensure arg is not present
|
# -wegnoegpu would also trigger this, so ensure arg is not present
|
||||||
if not "-wegnoegpu" in (utilities.get_nvram("boot-args", decode=True) or ""):
|
if not "-wegnoegpu" in (utilities.get_nvram("boot-args", decode=True) or ""):
|
||||||
igpu = self.constants.computer.igpu
|
igpu = self.constants.computer.igpu
|
||||||
dgpu = self.check_dgpu_status()
|
dgpu = self._check_dgpu_status()
|
||||||
if igpu and not dgpu:
|
if igpu and not dgpu:
|
||||||
return True
|
return True
|
||||||
return False
|
return False
|
||||||
|
|
||||||
def check_legacy_keyboard_backlight(self):
|
|
||||||
|
def _check_legacy_keyboard_backlight(self):
|
||||||
|
"""
|
||||||
|
Query whether system has a legacy keyboard backlight
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
bool: True if legacy keyboard backlight, False otherwise
|
||||||
|
"""
|
||||||
|
|
||||||
# iMac12,x+ have an 'ACPI0008' device, but it's not a keyboard backlight
|
# iMac12,x+ have an 'ACPI0008' device, but it's not a keyboard backlight
|
||||||
# Best to assume laptops will have a keyboard backlight
|
# Best to assume laptops will have a keyboard backlight
|
||||||
if self.model.startswith("MacBook"):
|
if self.model.startswith("MacBook"):
|
||||||
return self.constants.computer.ambient_light_sensor
|
return self.constants.computer.ambient_light_sensor
|
||||||
return False
|
return False
|
||||||
|
|
||||||
def check_nv_web_nvram(self):
|
|
||||||
# First check boot-args, then dedicated nvram variable
|
def _check_nv_web_nvram(self):
|
||||||
|
"""
|
||||||
|
Query for Nvidia Web Driver property: nvda_drv_vrl or nvda_drv
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
bool: True if property is present, False otherwise
|
||||||
|
"""
|
||||||
|
|
||||||
nv_on = utilities.get_nvram("boot-args", decode=True)
|
nv_on = utilities.get_nvram("boot-args", decode=True)
|
||||||
if nv_on:
|
if nv_on:
|
||||||
if "nvda_drv_vrl=" in nv_on:
|
if "nvda_drv_vrl=" in nv_on:
|
||||||
@@ -308,8 +368,17 @@ class detect_root_patch:
|
|||||||
return True
|
return True
|
||||||
return False
|
return False
|
||||||
|
|
||||||
def check_nv_web_opengl(self):
|
|
||||||
# First check boot-args, then whether property exists on GPU
|
def _check_nv_web_opengl(self):
|
||||||
|
"""
|
||||||
|
Query for Nvidia Web Driver property: ngfxgl
|
||||||
|
|
||||||
|
Verify Web Drivers will run in OpenGL mode
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
bool: True if property is present, False otherwise
|
||||||
|
"""
|
||||||
|
|
||||||
nv_on = utilities.get_nvram("boot-args", decode=True)
|
nv_on = utilities.get_nvram("boot-args", decode=True)
|
||||||
if nv_on:
|
if nv_on:
|
||||||
if "ngfxgl=" in nv_on:
|
if "ngfxgl=" in nv_on:
|
||||||
@@ -320,8 +389,17 @@ class detect_root_patch:
|
|||||||
return True
|
return True
|
||||||
return False
|
return False
|
||||||
|
|
||||||
def check_nv_compat(self):
|
|
||||||
# Check for 'nv_web' in boot-args, then whether property exists on GPU
|
def _check_nv_compat(self):
|
||||||
|
"""
|
||||||
|
Query for Nvidia Web Driver property: ngfxcompat
|
||||||
|
|
||||||
|
Verify Web Drivers will skip NVDAStartupWeb compatibility check
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
bool: True if property is present, False otherwise
|
||||||
|
"""
|
||||||
|
|
||||||
nv_on = utilities.get_nvram("boot-args", decode=True)
|
nv_on = utilities.get_nvram("boot-args", decode=True)
|
||||||
if nv_on:
|
if nv_on:
|
||||||
if "ngfxcompat=" in nv_on:
|
if "ngfxcompat=" in nv_on:
|
||||||
@@ -332,15 +410,37 @@ class detect_root_patch:
|
|||||||
return True
|
return True
|
||||||
return False
|
return False
|
||||||
|
|
||||||
def check_whatevergreen(self):
|
|
||||||
|
def _check_whatevergreen(self):
|
||||||
|
"""
|
||||||
|
Query whether WhateverGreen.kext is loaded
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
bool: True if loaded, False otherwise
|
||||||
|
"""
|
||||||
|
|
||||||
return utilities.check_kext_loaded("WhateverGreen", self.constants.detected_os)
|
return utilities.check_kext_loaded("WhateverGreen", self.constants.detected_os)
|
||||||
|
|
||||||
def check_kdk(self):
|
|
||||||
if sys_patch_helpers.sys_patch_helpers(self.constants).determine_kdk_present() is None:
|
|
||||||
return False
|
|
||||||
return True
|
|
||||||
|
|
||||||
def check_sip(self):
|
def _check_kdk(self):
|
||||||
|
"""
|
||||||
|
Query whether Kernel Debug Kit is installed
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
bool: True if installed, False otherwise
|
||||||
|
"""
|
||||||
|
|
||||||
|
return kdk_handler.KernelDebugKitObject(self.constants, self.constants.detected_os_build, self.constants.detected_os_version, passive=True).kdk_already_installed
|
||||||
|
|
||||||
|
|
||||||
|
def _check_sip(self):
|
||||||
|
"""
|
||||||
|
Query System Integrity checks required for patching
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
tuple: (list, str, str) of SIP values, SIP hex, SIP error message
|
||||||
|
"""
|
||||||
|
|
||||||
if self.constants.detected_os > os_data.os_data.catalina:
|
if self.constants.detected_os > os_data.os_data.catalina:
|
||||||
if self.nvidia_web is True:
|
if self.nvidia_web is True:
|
||||||
sip = sip_data.system_integrity_protection.root_patch_sip_big_sur_3rd_part_kexts
|
sip = sip_data.system_integrity_protection.root_patch_sip_big_sur_3rd_part_kexts
|
||||||
@@ -366,7 +466,15 @@ class detect_root_patch:
|
|||||||
sip_value = f"For Hackintoshes, please set csr-active-config to '03060000' ({sip_hex})\nFor non-OpenCore Macs, please run 'csrutil disable' in RecoveryOS"
|
sip_value = f"For Hackintoshes, please set csr-active-config to '03060000' ({sip_hex})\nFor non-OpenCore Macs, please run 'csrutil disable' in RecoveryOS"
|
||||||
return (sip, sip_value, sip_hex)
|
return (sip, sip_value, sip_hex)
|
||||||
|
|
||||||
def check_uhci_ohci(self):
|
|
||||||
|
def _check_uhci_ohci(self):
|
||||||
|
"""
|
||||||
|
Query whether host has UHCI/OHCI controllers, and requires USB 1.1 patches
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
bool: True if UHCI/OHCI patches required, False otherwise
|
||||||
|
"""
|
||||||
|
|
||||||
if self.constants.detected_os < os_data.os_data.ventura:
|
if self.constants.detected_os < os_data.os_data.ventura:
|
||||||
return False
|
return False
|
||||||
|
|
||||||
@@ -400,10 +508,19 @@ class detect_root_patch:
|
|||||||
|
|
||||||
return False
|
return False
|
||||||
|
|
||||||
def detect_patch_set(self):
|
|
||||||
self.has_network = utilities.verify_network_connection()
|
|
||||||
|
|
||||||
if self.check_uhci_ohci() is True:
|
# Entry point for patch set detection
|
||||||
|
def detect_patch_set(self):
|
||||||
|
"""
|
||||||
|
Query patch sets required for host
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
dict: Dictionary of patch sets
|
||||||
|
"""
|
||||||
|
|
||||||
|
self.has_network = network_handler.NetworkUtilities().verify_network_connection()
|
||||||
|
|
||||||
|
if self._check_uhci_ohci() is True:
|
||||||
self.legacy_uhci_ohci = True
|
self.legacy_uhci_ohci = True
|
||||||
self.requires_root_kc = True
|
self.requires_root_kc = True
|
||||||
|
|
||||||
@@ -436,12 +553,12 @@ class detect_root_patch:
|
|||||||
if self.constants.detected_os > os_data.os_data.high_sierra:
|
if self.constants.detected_os > os_data.os_data.high_sierra:
|
||||||
if self.model in ["MacBookPro8,2", "MacBookPro8,3"]:
|
if self.model in ["MacBookPro8,2", "MacBookPro8,3"]:
|
||||||
# Ref: https://doslabelectronics.com/Demux.html
|
# Ref: https://doslabelectronics.com/Demux.html
|
||||||
if self.detect_demux() is True:
|
if self._detect_demux() is True:
|
||||||
self.legacy_gmux = True
|
self.legacy_gmux = True
|
||||||
else:
|
else:
|
||||||
self.legacy_gmux = True
|
self.legacy_gmux = True
|
||||||
|
|
||||||
self.detect_gpus()
|
self._detect_gpus()
|
||||||
|
|
||||||
self.root_patch_dict = {
|
self.root_patch_dict = {
|
||||||
"Graphics: Nvidia Tesla": self.nvidia_tesla,
|
"Graphics: Nvidia Tesla": self.nvidia_tesla,
|
||||||
@@ -468,10 +585,11 @@ class detect_root_patch:
|
|||||||
"Settings: Supports Auxiliary Cache": not self.requires_root_kc,
|
"Settings: Supports Auxiliary Cache": not self.requires_root_kc,
|
||||||
"Settings: Kernel Debug Kit missing": self.missing_kdk if self.constants.detected_os >= os_data.os_data.ventura.value else False,
|
"Settings: Kernel Debug Kit missing": self.missing_kdk if self.constants.detected_os >= os_data.os_data.ventura.value else False,
|
||||||
"Validation: Patching Possible": self.verify_patch_allowed(),
|
"Validation: Patching Possible": self.verify_patch_allowed(),
|
||||||
f"Validation: SIP is enabled (Required: {self.check_sip()[2]} or higher)": self.sip_enabled,
|
"Validation: Unpatching Possible": self._verify_unpatch_allowed(),
|
||||||
|
f"Validation: SIP is enabled (Required: {self._check_sip()[2]} or higher)": self.sip_enabled,
|
||||||
f"Validation: Currently Booted SIP: ({hex(py_sip_xnu.SipXnu().get_sip_status().value)})": self.sip_enabled,
|
f"Validation: Currently Booted SIP: ({hex(py_sip_xnu.SipXnu().get_sip_status().value)})": self.sip_enabled,
|
||||||
"Validation: SecureBootModel is enabled": self.sbm_enabled,
|
"Validation: SecureBootModel is enabled": self.sbm_enabled,
|
||||||
f"Validation: {'AMFI' if self.constants.host_is_hackintosh is True or self.get_amfi_level_needed() > 2 else 'Library Validation'} is enabled": self.amfi_enabled if self.amfi_must_disable is True else False,
|
f"Validation: {'AMFI' if self.constants.host_is_hackintosh is True or self._get_amfi_level_needed() > 2 else 'Library Validation'} is enabled": self.amfi_enabled if self.amfi_must_disable is True else False,
|
||||||
"Validation: FileVault is enabled": self.fv_enabled,
|
"Validation: FileVault is enabled": self.fv_enabled,
|
||||||
"Validation: System is dosdude1 patched": self.dosdude_patched,
|
"Validation: System is dosdude1 patched": self.dosdude_patched,
|
||||||
"Validation: WhateverGreen.kext missing": self.missing_whatever_green if self.nvidia_web is True else False,
|
"Validation: WhateverGreen.kext missing": self.missing_whatever_green if self.nvidia_web is True else False,
|
||||||
@@ -483,77 +601,100 @@ class detect_root_patch:
|
|||||||
|
|
||||||
return self.root_patch_dict
|
return self.root_patch_dict
|
||||||
|
|
||||||
def get_amfi_level_needed(self):
|
|
||||||
if self.amfi_must_disable is True:
|
|
||||||
if self.constants.detected_os > os_data.os_data.catalina:
|
|
||||||
if self.constants.detected_os >= os_data.os_data.ventura:
|
|
||||||
if self.amfi_shim_bins is True:
|
|
||||||
# Currently we require AMFI outright disabled
|
|
||||||
# in Ventura to work with shim'd binaries
|
|
||||||
return 3
|
|
||||||
return 1
|
|
||||||
return 0
|
|
||||||
|
|
||||||
def verify_patch_allowed(self, print_errors=False):
|
def _get_amfi_level_needed(self):
|
||||||
sip_dict = self.check_sip()
|
"""
|
||||||
|
Query the AMFI level needed for the patcher to work
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
int: AMFI level needed
|
||||||
|
"""
|
||||||
|
|
||||||
|
if self.amfi_must_disable is False:
|
||||||
|
return amfi_detect.AmfiConfigDetectLevel.NO_CHECK
|
||||||
|
|
||||||
|
if self.constants.detected_os < os_data.os_data.big_sur:
|
||||||
|
return amfi_detect.AmfiConfigDetectLevel.NO_CHECK
|
||||||
|
|
||||||
|
if self.constants.detected_os >= os_data.os_data.ventura:
|
||||||
|
if self.amfi_shim_bins is True:
|
||||||
|
# Currently we require AMFI outright disabled
|
||||||
|
# in Ventura to work with shim'd binaries
|
||||||
|
return amfi_detect.AmfiConfigDetectLevel.ALLOW_ALL
|
||||||
|
|
||||||
|
return amfi_detect.AmfiConfigDetectLevel.LIBRARY_VALIDATION
|
||||||
|
|
||||||
|
|
||||||
|
def verify_patch_allowed(self, print_errors: bool = False):
|
||||||
|
"""
|
||||||
|
Validate that the patcher can be run
|
||||||
|
|
||||||
|
Parameters:
|
||||||
|
print_errors (bool): Print errors to console
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
bool: True if patching is allowed, False otherwise
|
||||||
|
"""
|
||||||
|
|
||||||
|
sip_dict = self._check_sip()
|
||||||
sip = sip_dict[0]
|
sip = sip_dict[0]
|
||||||
sip_value = sip_dict[1]
|
sip_value = sip_dict[1]
|
||||||
|
|
||||||
self.sip_enabled, self.sbm_enabled, self.fv_enabled, self.dosdude_patched = utilities.patching_status(sip, self.constants.detected_os)
|
self.sip_enabled, self.sbm_enabled, self.fv_enabled, self.dosdude_patched = utilities.patching_status(sip, self.constants.detected_os)
|
||||||
self.amfi_enabled = not amfi_detect.amfi_configuration_detection().check_config(self.get_amfi_level_needed())
|
self.amfi_enabled = not amfi_detect.AmfiConfigurationDetection().check_config(self._get_amfi_level_needed())
|
||||||
|
|
||||||
if self.nvidia_web is True:
|
if self.nvidia_web is True:
|
||||||
self.missing_nv_web_nvram = not self.check_nv_web_nvram()
|
self.missing_nv_web_nvram = not self._check_nv_web_nvram()
|
||||||
self.missing_nv_web_opengl = not self.check_nv_web_opengl()
|
self.missing_nv_web_opengl = not self._check_nv_web_opengl()
|
||||||
self.missing_nv_compat = not self.check_nv_compat()
|
self.missing_nv_compat = not self._check_nv_compat()
|
||||||
self.missing_whatever_green = not self.check_whatevergreen()
|
self.missing_whatever_green = not self._check_whatevergreen()
|
||||||
|
|
||||||
if print_errors is True:
|
if print_errors is True:
|
||||||
if self.sip_enabled is True:
|
if self.sip_enabled is True:
|
||||||
print("\nCannot patch! Please disable System Integrity Protection (SIP).")
|
logging.info("\nCannot patch! Please disable System Integrity Protection (SIP).")
|
||||||
print("Disable SIP in Patcher Settings and Rebuild OpenCore\n")
|
logging.info("Disable SIP in Patcher Settings and Rebuild OpenCore\n")
|
||||||
print("Ensure the following bits are set for csr-active-config:")
|
logging.info("Ensure the following bits are set for csr-active-config:")
|
||||||
print("\n".join(sip))
|
logging.info("\n".join(sip))
|
||||||
print(sip_value)
|
logging.info(sip_value)
|
||||||
|
|
||||||
if self.sbm_enabled is True:
|
if self.sbm_enabled is True:
|
||||||
print("\nCannot patch! Please disable Apple Secure Boot.")
|
logging.info("\nCannot patch! Please disable Apple Secure Boot.")
|
||||||
print("Disable SecureBootModel in Patcher Settings and Rebuild OpenCore")
|
logging.info("Disable SecureBootModel in Patcher Settings and Rebuild OpenCore")
|
||||||
print("For Hackintoshes, set SecureBootModel to Disabled")
|
logging.info("For Hackintoshes, set SecureBootModel to Disabled")
|
||||||
|
|
||||||
if self.fv_enabled is True:
|
if self.fv_enabled is True:
|
||||||
print("\nCannot patch! Please disable FileVault.")
|
logging.info("\nCannot patch! Please disable FileVault.")
|
||||||
print("For OCLP Macs, please rebuild your config with 0.2.5 or newer")
|
logging.info("For OCLP Macs, please rebuild your config with 0.2.5 or newer")
|
||||||
print("For others, Go to System Preferences -> Security and disable FileVault")
|
logging.info("For others, Go to System Preferences -> Security and disable FileVault")
|
||||||
|
|
||||||
if self.amfi_enabled is True and self.amfi_must_disable is True:
|
if self.amfi_enabled is True and self.amfi_must_disable is True:
|
||||||
print("\nCannot patch! Please disable AMFI.")
|
logging.info("\nCannot patch! Please disable AMFI.")
|
||||||
print("For Hackintoshes, please add amfi_get_out_of_my_way=1 to boot-args")
|
logging.info("For Hackintoshes, please add amfi_get_out_of_my_way=1 to boot-args")
|
||||||
|
|
||||||
if self.dosdude_patched is True:
|
if self.dosdude_patched is True:
|
||||||
print("\nCannot patch! Detected machine has already been patched by another patcher")
|
logging.info("\nCannot patch! Detected machine has already been patched by another patcher")
|
||||||
print("Please ensure your install is either clean or patched with OpenCore Legacy Patcher")
|
logging.info("Please ensure your install is either clean or patched with OpenCore Legacy Patcher")
|
||||||
|
|
||||||
if self.nvidia_web is True:
|
if self.nvidia_web is True:
|
||||||
if self.missing_nv_web_opengl is True:
|
if self.missing_nv_web_opengl is True:
|
||||||
print("\nCannot patch! Force OpenGL property missing")
|
logging.info("\nCannot patch! Force OpenGL property missing")
|
||||||
print("Please ensure ngfxgl=1 is set in boot-args")
|
logging.info("Please ensure ngfxgl=1 is set in boot-args")
|
||||||
|
|
||||||
if self.missing_nv_compat is True:
|
if self.missing_nv_compat is True:
|
||||||
print("\nCannot patch! Force Nvidia compatibility property missing")
|
logging.info("\nCannot patch! Force Nvidia compatibility property missing")
|
||||||
print("Please ensure ngfxcompat=1 is set in boot-args")
|
logging.info("Please ensure ngfxcompat=1 is set in boot-args")
|
||||||
|
|
||||||
if self.missing_nv_web_nvram is True:
|
if self.missing_nv_web_nvram is True:
|
||||||
print("\nCannot patch! nvda_drv(_vrl) variable missing")
|
logging.info("\nCannot patch! nvda_drv(_vrl) variable missing")
|
||||||
print("Please ensure nvda_drv_vrl=1 is set in boot-args")
|
logging.info("Please ensure nvda_drv_vrl=1 is set in boot-args")
|
||||||
|
|
||||||
if self.missing_whatever_green is True:
|
if self.missing_whatever_green is True:
|
||||||
print("\nCannot patch! WhateverGreen.kext missing")
|
logging.info("\nCannot patch! WhateverGreen.kext missing")
|
||||||
print("Please ensure WhateverGreen.kext is installed")
|
logging.info("Please ensure WhateverGreen.kext is installed")
|
||||||
|
|
||||||
if (not self.has_network) if (self.requires_root_kc and self.missing_kdk and self.constants.detected_os >= os_data.os_data.ventura.value) else False:
|
if (not self.has_network) if (self.requires_root_kc and self.missing_kdk and self.constants.detected_os >= os_data.os_data.ventura.value) else False:
|
||||||
print("\nCannot patch! Network Connection Required")
|
logging.info("\nCannot patch! Network Connection Required")
|
||||||
print("Please ensure you have an active internet connection")
|
logging.info("Please ensure you have an active internet connection")
|
||||||
|
|
||||||
if any(
|
if any(
|
||||||
[
|
[
|
||||||
@@ -574,47 +715,87 @@ class detect_root_patch:
|
|||||||
]
|
]
|
||||||
):
|
):
|
||||||
return False
|
return False
|
||||||
else:
|
|
||||||
return True
|
|
||||||
|
|
||||||
def generate_patchset(self, hardware_details):
|
return True
|
||||||
all_hardware_patchset = sys_patch_dict.SystemPatchDictionary(self.constants.detected_os, self.constants.detected_os_minor, self.constants.legacy_accel_support)
|
|
||||||
required_patches = {}
|
|
||||||
|
def _verify_unpatch_allowed(self):
|
||||||
|
"""
|
||||||
|
Validate that the unpatcher can be run
|
||||||
|
|
||||||
|
Preconditions:
|
||||||
|
Must be called after verify_patch_allowed()
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
bool: True if unpatching is allowed, False otherwise
|
||||||
|
"""
|
||||||
|
|
||||||
|
return not self.sip_enabled
|
||||||
|
|
||||||
|
|
||||||
|
def generate_patchset(self, hardware_details: dict):
|
||||||
|
"""
|
||||||
|
Generate Patchset dictionary for the current system
|
||||||
|
|
||||||
|
Parameters:
|
||||||
|
hardware_details (dict): Dictionary of hardware details generated by detect_patch_set()
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
dict: Dictionary of patches to be applied from sys_patch_dict.py
|
||||||
|
"""
|
||||||
|
|
||||||
|
all_hardware_patchset: dict = sys_patch_dict.SystemPatchDictionary(self.constants.detected_os, self.constants.detected_os_minor, self.constants.legacy_accel_support).patchset_dict
|
||||||
|
required_patches: dict = {}
|
||||||
|
|
||||||
utilities.cls()
|
utilities.cls()
|
||||||
print("- The following patches will be applied:")
|
|
||||||
|
logging.info("- The following patches will be applied:")
|
||||||
|
|
||||||
if hardware_details["Graphics: Intel Ironlake"] is True:
|
if hardware_details["Graphics: Intel Ironlake"] is True:
|
||||||
required_patches.update({"Non-Metal Common": all_hardware_patchset["Graphics"]["Non-Metal Common"]})
|
required_patches.update({"Non-Metal Common": all_hardware_patchset["Graphics"]["Non-Metal Common"]})
|
||||||
required_patches.update({"WebKit Monterey Common": all_hardware_patchset["Graphics"]["WebKit Monterey Common"]})
|
required_patches.update({"WebKit Monterey Common": all_hardware_patchset["Graphics"]["WebKit Monterey Common"]})
|
||||||
required_patches.update({"Intel Ironlake": all_hardware_patchset["Graphics"]["Intel Ironlake"]})
|
required_patches.update({"Intel Ironlake": all_hardware_patchset["Graphics"]["Intel Ironlake"]})
|
||||||
|
|
||||||
if hardware_details["Graphics: Intel Sandy Bridge"] is True:
|
if hardware_details["Graphics: Intel Sandy Bridge"] is True:
|
||||||
required_patches.update({"Non-Metal Common": all_hardware_patchset["Graphics"]["Non-Metal Common"]})
|
required_patches.update({"Non-Metal Common": all_hardware_patchset["Graphics"]["Non-Metal Common"]})
|
||||||
required_patches.update({"High Sierra GVA": all_hardware_patchset["Graphics"]["High Sierra GVA"]})
|
required_patches.update({"High Sierra GVA": all_hardware_patchset["Graphics"]["High Sierra GVA"]})
|
||||||
required_patches.update({"WebKit Monterey Common": all_hardware_patchset["Graphics"]["WebKit Monterey Common"]})
|
required_patches.update({"WebKit Monterey Common": all_hardware_patchset["Graphics"]["WebKit Monterey Common"]})
|
||||||
required_patches.update({"Intel Sandy Bridge": all_hardware_patchset["Graphics"]["Intel Sandy Bridge"]})
|
required_patches.update({"Intel Sandy Bridge": all_hardware_patchset["Graphics"]["Intel Sandy Bridge"]})
|
||||||
|
# Patchset breaks Display Profiles, don't install if primary GPU is AMD
|
||||||
|
if self.constants.computer.real_model not in ["Macmini5,2", "iMac12,1", "iMac12,2"]:
|
||||||
|
required_patches.update({"Non-Metal ColorSync Workaround": all_hardware_patchset["Graphics"]["Non-Metal ColorSync Workaround"]})
|
||||||
|
|
||||||
if hardware_details["Graphics: Intel Ivy Bridge"] is True:
|
if hardware_details["Graphics: Intel Ivy Bridge"] is True:
|
||||||
required_patches.update({"Metal 3802 Common": all_hardware_patchset["Graphics"]["Metal 3802 Common"]})
|
required_patches.update({"Metal 3802 Common": all_hardware_patchset["Graphics"]["Metal 3802 Common"]})
|
||||||
|
required_patches.update({"Metal 3802 Common Extended": all_hardware_patchset["Graphics"]["Metal 3802 Common Extended"]})
|
||||||
required_patches.update({"Catalina GVA": all_hardware_patchset["Graphics"]["Catalina GVA"]})
|
required_patches.update({"Catalina GVA": all_hardware_patchset["Graphics"]["Catalina GVA"]})
|
||||||
required_patches.update({"Monterey OpenCL": all_hardware_patchset["Graphics"]["Monterey OpenCL"]})
|
required_patches.update({"Monterey OpenCL": all_hardware_patchset["Graphics"]["Monterey OpenCL"]})
|
||||||
required_patches.update({"Big Sur OpenCL": all_hardware_patchset["Graphics"]["Big Sur OpenCL"]})
|
required_patches.update({"Big Sur OpenCL": all_hardware_patchset["Graphics"]["Big Sur OpenCL"]})
|
||||||
required_patches.update({"WebKit Monterey Common": all_hardware_patchset["Graphics"]["WebKit Monterey Common"]})
|
required_patches.update({"WebKit Monterey Common": all_hardware_patchset["Graphics"]["WebKit Monterey Common"]})
|
||||||
required_patches.update({"Intel Ivy Bridge": all_hardware_patchset["Graphics"]["Intel Ivy Bridge"]})
|
required_patches.update({"Intel Ivy Bridge": all_hardware_patchset["Graphics"]["Intel Ivy Bridge"]})
|
||||||
|
|
||||||
if hardware_details["Graphics: Intel Haswell"] is True:
|
if hardware_details["Graphics: Intel Haswell"] is True:
|
||||||
required_patches.update({"Metal 3802 Common": all_hardware_patchset["Graphics"]["Metal 3802 Common"]})
|
required_patches.update({"Metal 3802 Common": all_hardware_patchset["Graphics"]["Metal 3802 Common"]})
|
||||||
|
required_patches.update({"Metal 3802 Common Extended": all_hardware_patchset["Graphics"]["Metal 3802 Common Extended"]})
|
||||||
required_patches.update({"Monterey GVA": all_hardware_patchset["Graphics"]["Monterey GVA"]})
|
required_patches.update({"Monterey GVA": all_hardware_patchset["Graphics"]["Monterey GVA"]})
|
||||||
required_patches.update({"Monterey OpenCL": all_hardware_patchset["Graphics"]["Monterey OpenCL"]})
|
required_patches.update({"Monterey OpenCL": all_hardware_patchset["Graphics"]["Monterey OpenCL"]})
|
||||||
required_patches.update({"Intel Haswell": all_hardware_patchset["Graphics"]["Intel Haswell"]})
|
required_patches.update({"Intel Haswell": all_hardware_patchset["Graphics"]["Intel Haswell"]})
|
||||||
|
|
||||||
if hardware_details["Graphics: Intel Broadwell"] is True:
|
if hardware_details["Graphics: Intel Broadwell"] is True:
|
||||||
required_patches.update({"Monterey GVA": all_hardware_patchset["Graphics"]["Monterey GVA"]})
|
required_patches.update({"Monterey GVA": all_hardware_patchset["Graphics"]["Monterey GVA"]})
|
||||||
required_patches.update({"Monterey OpenCL": all_hardware_patchset["Graphics"]["Monterey OpenCL"]})
|
required_patches.update({"Monterey OpenCL": all_hardware_patchset["Graphics"]["Monterey OpenCL"]})
|
||||||
required_patches.update({"Intel Broadwell": all_hardware_patchset["Graphics"]["Intel Broadwell"]})
|
required_patches.update({"Intel Broadwell": all_hardware_patchset["Graphics"]["Intel Broadwell"]})
|
||||||
|
|
||||||
if hardware_details["Graphics: Intel Skylake"] is True:
|
if hardware_details["Graphics: Intel Skylake"] is True:
|
||||||
required_patches.update({"Monterey GVA": all_hardware_patchset["Graphics"]["Monterey GVA"]})
|
required_patches.update({"Monterey GVA": all_hardware_patchset["Graphics"]["Monterey GVA"]})
|
||||||
required_patches.update({"Monterey OpenCL": all_hardware_patchset["Graphics"]["Monterey OpenCL"]})
|
required_patches.update({"Monterey OpenCL": all_hardware_patchset["Graphics"]["Monterey OpenCL"]})
|
||||||
required_patches.update({"Intel Skylake": all_hardware_patchset["Graphics"]["Intel Skylake"]})
|
required_patches.update({"Intel Skylake": all_hardware_patchset["Graphics"]["Intel Skylake"]})
|
||||||
|
|
||||||
if hardware_details["Graphics: Nvidia Tesla"] is True:
|
if hardware_details["Graphics: Nvidia Tesla"] is True:
|
||||||
required_patches.update({"Non-Metal Common": all_hardware_patchset["Graphics"]["Non-Metal Common"]})
|
required_patches.update({"Non-Metal Common": all_hardware_patchset["Graphics"]["Non-Metal Common"]})
|
||||||
required_patches.update({"WebKit Monterey Common": all_hardware_patchset["Graphics"]["WebKit Monterey Common"]})
|
required_patches.update({"WebKit Monterey Common": all_hardware_patchset["Graphics"]["WebKit Monterey Common"]})
|
||||||
required_patches.update({"Nvidia Tesla": all_hardware_patchset["Graphics"]["Nvidia Tesla"]})
|
required_patches.update({"Nvidia Tesla": all_hardware_patchset["Graphics"]["Nvidia Tesla"]})
|
||||||
|
|
||||||
if hardware_details["Graphics: Nvidia Web Drivers"] is True:
|
if hardware_details["Graphics: Nvidia Web Drivers"] is True:
|
||||||
required_patches.update({"Non-Metal Common": all_hardware_patchset["Graphics"]["Non-Metal Common"]})
|
required_patches.update({"Non-Metal Common": all_hardware_patchset["Graphics"]["Non-Metal Common"]})
|
||||||
required_patches.update({"Non-Metal IOAccelerator Common": all_hardware_patchset["Graphics"]["Non-Metal IOAccelerator Common"]})
|
required_patches.update({"Non-Metal IOAccelerator Common": all_hardware_patchset["Graphics"]["Non-Metal IOAccelerator Common"]})
|
||||||
@@ -622,9 +803,10 @@ class detect_root_patch:
|
|||||||
required_patches.update({"WebKit Monterey Common": all_hardware_patchset["Graphics"]["WebKit Monterey Common"]})
|
required_patches.update({"WebKit Monterey Common": all_hardware_patchset["Graphics"]["WebKit Monterey Common"]})
|
||||||
required_patches.update({"Nvidia Web Drivers": all_hardware_patchset["Graphics"]["Nvidia Web Drivers"]})
|
required_patches.update({"Nvidia Web Drivers": all_hardware_patchset["Graphics"]["Nvidia Web Drivers"]})
|
||||||
required_patches.update({"Non-Metal Enforcement": all_hardware_patchset["Graphics"]["Non-Metal Enforcement"]})
|
required_patches.update({"Non-Metal Enforcement": all_hardware_patchset["Graphics"]["Non-Metal Enforcement"]})
|
||||||
|
|
||||||
if hardware_details["Graphics: Nvidia Kepler"] is True:
|
if hardware_details["Graphics: Nvidia Kepler"] is True:
|
||||||
required_patches.update({"Revert Metal Downgrade": all_hardware_patchset["Graphics"]["Revert Metal Downgrade"]})
|
|
||||||
required_patches.update({"Metal 3802 Common": all_hardware_patchset["Graphics"]["Metal 3802 Common"]})
|
required_patches.update({"Metal 3802 Common": all_hardware_patchset["Graphics"]["Metal 3802 Common"]})
|
||||||
|
required_patches.update({"Metal 3802 Common Extended": all_hardware_patchset["Graphics"]["Metal 3802 Common Extended"]})
|
||||||
required_patches.update({"Catalina GVA": all_hardware_patchset["Graphics"]["Catalina GVA"]})
|
required_patches.update({"Catalina GVA": all_hardware_patchset["Graphics"]["Catalina GVA"]})
|
||||||
required_patches.update({"Monterey OpenCL": all_hardware_patchset["Graphics"]["Monterey OpenCL"]})
|
required_patches.update({"Monterey OpenCL": all_hardware_patchset["Graphics"]["Monterey OpenCL"]})
|
||||||
required_patches.update({"Big Sur OpenCL": all_hardware_patchset["Graphics"]["Big Sur OpenCL"]})
|
required_patches.update({"Big Sur OpenCL": all_hardware_patchset["Graphics"]["Big Sur OpenCL"]})
|
||||||
@@ -636,11 +818,13 @@ class detect_root_patch:
|
|||||||
if "Catalina GVA" in required_patches:
|
if "Catalina GVA" in required_patches:
|
||||||
del(required_patches["Catalina GVA"])
|
del(required_patches["Catalina GVA"])
|
||||||
break
|
break
|
||||||
|
|
||||||
if hardware_details["Graphics: AMD TeraScale 1"] is True:
|
if hardware_details["Graphics: AMD TeraScale 1"] is True:
|
||||||
required_patches.update({"Non-Metal Common": all_hardware_patchset["Graphics"]["Non-Metal Common"]})
|
required_patches.update({"Non-Metal Common": all_hardware_patchset["Graphics"]["Non-Metal Common"]})
|
||||||
required_patches.update({"WebKit Monterey Common": all_hardware_patchset["Graphics"]["WebKit Monterey Common"]})
|
required_patches.update({"WebKit Monterey Common": all_hardware_patchset["Graphics"]["WebKit Monterey Common"]})
|
||||||
required_patches.update({"AMD TeraScale Common": all_hardware_patchset["Graphics"]["AMD TeraScale Common"]})
|
required_patches.update({"AMD TeraScale Common": all_hardware_patchset["Graphics"]["AMD TeraScale Common"]})
|
||||||
required_patches.update({"AMD TeraScale 1": all_hardware_patchset["Graphics"]["AMD TeraScale 1"]})
|
required_patches.update({"AMD TeraScale 1": all_hardware_patchset["Graphics"]["AMD TeraScale 1"]})
|
||||||
|
|
||||||
if hardware_details["Graphics: AMD TeraScale 2"] is True:
|
if hardware_details["Graphics: AMD TeraScale 2"] is True:
|
||||||
required_patches.update({"Non-Metal Common": all_hardware_patchset["Graphics"]["Non-Metal Common"]})
|
required_patches.update({"Non-Metal Common": all_hardware_patchset["Graphics"]["Non-Metal Common"]})
|
||||||
required_patches.update({"Non-Metal IOAccelerator Common": all_hardware_patchset["Graphics"]["Non-Metal IOAccelerator Common"]})
|
required_patches.update({"Non-Metal IOAccelerator Common": all_hardware_patchset["Graphics"]["Non-Metal IOAccelerator Common"]})
|
||||||
@@ -651,16 +835,18 @@ class detect_root_patch:
|
|||||||
# TeraScale 2 MacBooks with faulty GPUs are highly prone to crashing with AMDRadeonX3000 attached
|
# TeraScale 2 MacBooks with faulty GPUs are highly prone to crashing with AMDRadeonX3000 attached
|
||||||
# Additionally, AMDRadeonX3000 requires IOAccelerator downgrade which is not installed without 'Non-Metal IOAccelerator Common'
|
# Additionally, AMDRadeonX3000 requires IOAccelerator downgrade which is not installed without 'Non-Metal IOAccelerator Common'
|
||||||
del(required_patches["AMD TeraScale 2"]["Install"]["/System/Library/Extensions"]["AMDRadeonX3000.kext"])
|
del(required_patches["AMD TeraScale 2"]["Install"]["/System/Library/Extensions"]["AMDRadeonX3000.kext"])
|
||||||
|
|
||||||
if hardware_details["Graphics: AMD Legacy GCN"] is True or hardware_details["Graphics: AMD Legacy Polaris"] is True:
|
if hardware_details["Graphics: AMD Legacy GCN"] is True or hardware_details["Graphics: AMD Legacy Polaris"] is True:
|
||||||
required_patches.update({"Revert Metal Downgrade": all_hardware_patchset["Graphics"]["Revert Metal Downgrade"]})
|
|
||||||
required_patches.update({"Monterey GVA": all_hardware_patchset["Graphics"]["Monterey GVA"]})
|
required_patches.update({"Monterey GVA": all_hardware_patchset["Graphics"]["Monterey GVA"]})
|
||||||
required_patches.update({"Monterey OpenCL": all_hardware_patchset["Graphics"]["Monterey OpenCL"]})
|
required_patches.update({"Monterey OpenCL": all_hardware_patchset["Graphics"]["Monterey OpenCL"]})
|
||||||
if hardware_details["Graphics: AMD Legacy GCN"] is True:
|
if hardware_details["Graphics: AMD Legacy GCN"] is True:
|
||||||
required_patches.update({"AMD Legacy GCN": all_hardware_patchset["Graphics"]["AMD Legacy GCN"]})
|
required_patches.update({"AMD Legacy GCN": all_hardware_patchset["Graphics"]["AMD Legacy GCN"]})
|
||||||
else:
|
else:
|
||||||
required_patches.update({"AMD Legacy Polaris": all_hardware_patchset["Graphics"]["AMD Legacy Polaris"]})
|
required_patches.update({"AMD Legacy Polaris": all_hardware_patchset["Graphics"]["AMD Legacy Polaris"]})
|
||||||
|
required_patches.update({"Revert GVA Downgrade": all_hardware_patchset["Graphics"]["Revert GVA Downgrade"]})
|
||||||
if "AVX2" not in self.constants.computer.cpu.leafs:
|
if "AVX2" not in self.constants.computer.cpu.leafs:
|
||||||
required_patches.update({"AMD OpenCL": all_hardware_patchset["Graphics"]["AMD OpenCL"]})
|
required_patches.update({"AMD OpenCL": all_hardware_patchset["Graphics"]["AMD OpenCL"]})
|
||||||
|
|
||||||
if hardware_details["Graphics: AMD Legacy Vega"] is True:
|
if hardware_details["Graphics: AMD Legacy Vega"] is True:
|
||||||
required_patches.update({"Monterey GVA": all_hardware_patchset["Graphics"]["Monterey GVA"]})
|
required_patches.update({"Monterey GVA": all_hardware_patchset["Graphics"]["Monterey GVA"]})
|
||||||
required_patches.update({"Monterey OpenCL": all_hardware_patchset["Graphics"]["Monterey OpenCL"]})
|
required_patches.update({"Monterey OpenCL": all_hardware_patchset["Graphics"]["Monterey OpenCL"]})
|
||||||
@@ -668,20 +854,28 @@ class detect_root_patch:
|
|||||||
required_patches.update({"AMD OpenCL": all_hardware_patchset["Graphics"]["AMD OpenCL"]})
|
required_patches.update({"AMD OpenCL": all_hardware_patchset["Graphics"]["AMD OpenCL"]})
|
||||||
if hardware_details["Graphics: AMD Legacy GCN"] is True:
|
if hardware_details["Graphics: AMD Legacy GCN"] is True:
|
||||||
required_patches.update({"AMD Legacy Vega Extended": all_hardware_patchset["Graphics"]["AMD Legacy Vega Extended"]})
|
required_patches.update({"AMD Legacy Vega Extended": all_hardware_patchset["Graphics"]["AMD Legacy Vega Extended"]})
|
||||||
|
else:
|
||||||
|
required_patches.update({"Revert GVA Downgrade": all_hardware_patchset["Graphics"]["Revert GVA Downgrade"]})
|
||||||
|
|
||||||
if hardware_details["Brightness: Legacy Backlight Control"] is True:
|
if hardware_details["Brightness: Legacy Backlight Control"] is True:
|
||||||
required_patches.update({"Legacy Backlight Control": all_hardware_patchset["Brightness"]["Legacy Backlight Control"]})
|
required_patches.update({"Legacy Backlight Control": all_hardware_patchset["Brightness"]["Legacy Backlight Control"]})
|
||||||
|
|
||||||
if hardware_details["Audio: Legacy Realtek"] is True:
|
if hardware_details["Audio: Legacy Realtek"] is True:
|
||||||
if self.model in ["iMac7,1", "iMac8,1"]:
|
if self.model in ["iMac7,1", "iMac8,1"]:
|
||||||
required_patches.update({"Legacy Realtek": all_hardware_patchset["Audio"]["Legacy Realtek"]})
|
required_patches.update({"Legacy Realtek": all_hardware_patchset["Audio"]["Legacy Realtek"]})
|
||||||
else:
|
else:
|
||||||
required_patches.update({"Legacy Non-GOP": all_hardware_patchset["Audio"]["Legacy Non-GOP"]})
|
required_patches.update({"Legacy Non-GOP": all_hardware_patchset["Audio"]["Legacy Non-GOP"]})
|
||||||
|
|
||||||
if hardware_details["Networking: Legacy Wireless"] is True:
|
if hardware_details["Networking: Legacy Wireless"] is True:
|
||||||
required_patches.update({"Legacy Wireless": all_hardware_patchset["Networking"]["Legacy Wireless"]})
|
required_patches.update({"Legacy Wireless": all_hardware_patchset["Networking"]["Legacy Wireless"]})
|
||||||
required_patches.update({"Legacy Wireless Extended": all_hardware_patchset["Networking"]["Legacy Wireless Extended"]})
|
required_patches.update({"Legacy Wireless Extended": all_hardware_patchset["Networking"]["Legacy Wireless Extended"]})
|
||||||
|
|
||||||
if hardware_details["Miscellaneous: Legacy GMUX"] is True:
|
if hardware_details["Miscellaneous: Legacy GMUX"] is True:
|
||||||
required_patches.update({"Legacy GMUX": all_hardware_patchset["Miscellaneous"]["Legacy GMUX"]})
|
required_patches.update({"Legacy GMUX": all_hardware_patchset["Miscellaneous"]["Legacy GMUX"]})
|
||||||
|
|
||||||
if hardware_details["Miscellaneous: Legacy Keyboard Backlight"] is True:
|
if hardware_details["Miscellaneous: Legacy Keyboard Backlight"] is True:
|
||||||
required_patches.update({"Legacy Keyboard Backlight": all_hardware_patchset["Miscellaneous"]["Legacy Keyboard Backlight"]})
|
required_patches.update({"Legacy Keyboard Backlight": all_hardware_patchset["Miscellaneous"]["Legacy Keyboard Backlight"]})
|
||||||
|
|
||||||
if hardware_details["Miscellaneous: Legacy USB 1.1"] is True:
|
if hardware_details["Miscellaneous: Legacy USB 1.1"] is True:
|
||||||
required_patches.update({"Legacy USB 1.1": all_hardware_patchset["Miscellaneous"]["Legacy USB 1.1"]})
|
required_patches.update({"Legacy USB 1.1": all_hardware_patchset["Miscellaneous"]["Legacy USB 1.1"]})
|
||||||
|
|
||||||
@@ -699,8 +893,8 @@ class detect_root_patch:
|
|||||||
del(required_patches[patch_name])
|
del(required_patches[patch_name])
|
||||||
else:
|
else:
|
||||||
if required_patches[patch_name]["Display Name"]:
|
if required_patches[patch_name]["Display Name"]:
|
||||||
print(f" - {required_patches[patch_name]['Display Name']}")
|
logging.info(f" - {required_patches[patch_name]['Display Name']}")
|
||||||
else:
|
else:
|
||||||
print(" - No patch sets found for booted model")
|
logging.info(" - No patch sets found for booted model")
|
||||||
|
|
||||||
return required_patches
|
return required_patches
|
||||||
@@ -1,32 +0,0 @@
|
|||||||
# Download PatcherSupportPkg for usage with Root Patching
|
|
||||||
# Copyright (C) 2020-2022, Dhinak G, Mykola Grymalyuk
|
|
||||||
|
|
||||||
from resources import utilities
|
|
||||||
from pathlib import Path
|
|
||||||
import shutil
|
|
||||||
|
|
||||||
class grab_patcher_support_pkg:
|
|
||||||
|
|
||||||
def __init__(self, constants):
|
|
||||||
self.constants = constants
|
|
||||||
|
|
||||||
def generate_pkg_link(self):
|
|
||||||
link = f"{self.constants.url_patcher_support_pkg}{self.constants.patcher_support_pkg_version}/Universal-Binaries.zip"
|
|
||||||
return link
|
|
||||||
|
|
||||||
def download_files(self):
|
|
||||||
link = self.generate_pkg_link()
|
|
||||||
if Path(self.constants.payload_local_binaries_root_path).exists():
|
|
||||||
print("- Removing old Root Patcher Payload folder")
|
|
||||||
# Delete folder
|
|
||||||
shutil.rmtree(self.constants.payload_local_binaries_root_path)
|
|
||||||
|
|
||||||
download_result = None
|
|
||||||
if Path(self.constants.payload_local_binaries_root_path_zip).exists():
|
|
||||||
print(f"- Found local Universal-Binaries.zip, skipping download")
|
|
||||||
download_result = True
|
|
||||||
else:
|
|
||||||
print(f"- No local version found, downloading...")
|
|
||||||
download_result = utilities.download_file(link, self.constants.payload_local_binaries_root_path_zip)
|
|
||||||
|
|
||||||
return download_result, link
|
|
||||||
@@ -1,57 +1,86 @@
|
|||||||
# Additional support functions for sys_patch.py
|
# Additional support functions for sys_patch.py
|
||||||
# Copyright (C) 2020-2022, Dhinak G, Mykola Grymalyuk
|
# Copyright (C) 2020-2023, Dhinak G, Mykola Grymalyuk
|
||||||
|
|
||||||
import subprocess
|
|
||||||
import tempfile
|
|
||||||
from data import os_data
|
|
||||||
from resources import generate_smbios, utilities
|
|
||||||
from pathlib import Path
|
|
||||||
from datetime import datetime
|
|
||||||
import plistlib
|
import plistlib
|
||||||
import os
|
import os
|
||||||
|
import logging
|
||||||
|
import subprocess
|
||||||
|
from pathlib import Path
|
||||||
|
from datetime import datetime
|
||||||
|
|
||||||
from resources import constants, bplist
|
from data import os_data
|
||||||
|
from resources import bplist, constants, generate_smbios, utilities
|
||||||
class sys_patch_helpers:
|
|
||||||
|
|
||||||
def __init__(self, constants):
|
|
||||||
self.constants = constants
|
|
||||||
|
|
||||||
|
|
||||||
def snb_board_id_patch(self, source_files_path):
|
class SysPatchHelpers:
|
||||||
# AppleIntelSNBGraphicsFB hard codes the supported Board IDs for Sandy Bridge iGPUs
|
"""
|
||||||
# Because of this, the kext errors out on unsupported systems
|
Library of helper functions for sys_patch.py and related libraries
|
||||||
# This function simply patches in a supported Board ID, using 'determine_best_board_id_for_sandy()'
|
"""
|
||||||
# to supplement the ideal Board ID
|
|
||||||
|
def __init__(self, global_constants: constants.Constants):
|
||||||
|
self.constants: constants.Constants = global_constants
|
||||||
|
|
||||||
|
|
||||||
|
def snb_board_id_patch(self, source_files_path: str):
|
||||||
|
"""
|
||||||
|
Patch AppleIntelSNBGraphicsFB.kext to support unsupported Board IDs
|
||||||
|
|
||||||
|
AppleIntelSNBGraphicsFB hard codes the supported Board IDs for Sandy Bridge iGPUs
|
||||||
|
Because of this, the kext errors out on unsupported systems
|
||||||
|
This function simply patches in a supported Board ID, using 'determine_best_board_id_for_sandy()'
|
||||||
|
to supplement the ideal Board ID
|
||||||
|
|
||||||
|
Parameters:
|
||||||
|
source_files_path (str): Path to the source files
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
source_files_path = str(source_files_path)
|
source_files_path = str(source_files_path)
|
||||||
if self.constants.computer.reported_board_id not in self.constants.sandy_board_id_stock:
|
|
||||||
print(f"- Found unsupported Board ID {self.constants.computer.reported_board_id}, performing AppleIntelSNBGraphicsFB bin patching")
|
|
||||||
board_to_patch = generate_smbios.determine_best_board_id_for_sandy(self.constants.computer.reported_board_id, self.constants.computer.gpus)
|
|
||||||
print(f"- Replacing {board_to_patch} with {self.constants.computer.reported_board_id}")
|
|
||||||
|
|
||||||
board_to_patch_hex = bytes.fromhex(board_to_patch.encode('utf-8').hex())
|
if self.constants.computer.reported_board_id in self.constants.sandy_board_id_stock:
|
||||||
reported_board_hex = bytes.fromhex(self.constants.computer.reported_board_id.encode('utf-8').hex())
|
return
|
||||||
|
|
||||||
if len(board_to_patch_hex) > len(reported_board_hex):
|
logging.info(f"- Found unsupported Board ID {self.constants.computer.reported_board_id}, performing AppleIntelSNBGraphicsFB bin patching")
|
||||||
# Pad the reported Board ID with zeros to match the length of the board to patch
|
|
||||||
reported_board_hex = reported_board_hex + bytes(len(board_to_patch_hex) - len(reported_board_hex))
|
|
||||||
elif len(board_to_patch_hex) < len(reported_board_hex):
|
|
||||||
print(f"- Error: Board ID {self.constants.computer.reported_board_id} is longer than {board_to_patch}")
|
|
||||||
raise Exception("Host's Board ID is longer than the kext's Board ID, cannot patch!!!")
|
|
||||||
|
|
||||||
path = source_files_path + "/10.13.6/System/Library/Extensions/AppleIntelSNBGraphicsFB.kext/Contents/MacOS/AppleIntelSNBGraphicsFB"
|
board_to_patch = generate_smbios.determine_best_board_id_for_sandy(self.constants.computer.reported_board_id, self.constants.computer.gpus)
|
||||||
if Path(path).exists():
|
logging.info(f"- Replacing {board_to_patch} with {self.constants.computer.reported_board_id}")
|
||||||
with open(path, 'rb') as f:
|
|
||||||
data = f.read()
|
board_to_patch_hex = bytes.fromhex(board_to_patch.encode('utf-8').hex())
|
||||||
data = data.replace(board_to_patch_hex, reported_board_hex)
|
reported_board_hex = bytes.fromhex(self.constants.computer.reported_board_id.encode('utf-8').hex())
|
||||||
with open(path, 'wb') as f:
|
|
||||||
f.write(data)
|
if len(board_to_patch_hex) > len(reported_board_hex):
|
||||||
else:
|
# Pad the reported Board ID with zeros to match the length of the board to patch
|
||||||
print(f"- Error: Could not find {path}")
|
reported_board_hex = reported_board_hex + bytes(len(board_to_patch_hex) - len(reported_board_hex))
|
||||||
raise Exception("Failed to find AppleIntelSNBGraphicsFB.kext, cannot patch!!!")
|
elif len(board_to_patch_hex) < len(reported_board_hex):
|
||||||
|
logging.info(f"- Error: Board ID {self.constants.computer.reported_board_id} is longer than {board_to_patch}")
|
||||||
|
raise Exception("Host's Board ID is longer than the kext's Board ID, cannot patch!!!")
|
||||||
|
|
||||||
|
path = source_files_path + "/10.13.6/System/Library/Extensions/AppleIntelSNBGraphicsFB.kext/Contents/MacOS/AppleIntelSNBGraphicsFB"
|
||||||
|
if not Path(path).exists():
|
||||||
|
logging.info(f"- Error: Could not find {path}")
|
||||||
|
raise Exception("Failed to find AppleIntelSNBGraphicsFB.kext, cannot patch!!!")
|
||||||
|
|
||||||
|
with open(path, 'rb') as f:
|
||||||
|
data = f.read()
|
||||||
|
data = data.replace(board_to_patch_hex, reported_board_hex)
|
||||||
|
with open(path, 'wb') as f:
|
||||||
|
f.write(data)
|
||||||
|
|
||||||
|
|
||||||
def generate_patchset_plist(self, patchset, file_name, kdk_used):
|
def generate_patchset_plist(self, patchset: dict, file_name: str, kdk_used: Path):
|
||||||
|
"""
|
||||||
|
Generate patchset file for user reference
|
||||||
|
|
||||||
|
Parameters:
|
||||||
|
patchset (dict): Dictionary of patchset, see sys_patch_detect.py and sys_patch_dict.py
|
||||||
|
file_name (str): Name of the file to write to
|
||||||
|
kdk_used (Path): Path to the KDK used, if any
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
bool: True if successful, False if not
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
source_path = f"{self.constants.payload_path}"
|
source_path = f"{self.constants.payload_path}"
|
||||||
source_path_file = f"{source_path}/{file_name}"
|
source_path_file = f"{source_path}/{file_name}"
|
||||||
|
|
||||||
@@ -67,94 +96,36 @@ class sys_patch_helpers:
|
|||||||
"Kernel Debug Kit Used": f"{kdk_string}",
|
"Kernel Debug Kit Used": f"{kdk_string}",
|
||||||
"OS Version": f"{self.constants.detected_os}.{self.constants.detected_os_minor} ({self.constants.detected_os_build})",
|
"OS Version": f"{self.constants.detected_os}.{self.constants.detected_os_minor} ({self.constants.detected_os_build})",
|
||||||
}
|
}
|
||||||
|
|
||||||
data.update(patchset)
|
data.update(patchset)
|
||||||
|
|
||||||
if Path(source_path_file).exists():
|
if Path(source_path_file).exists():
|
||||||
os.remove(source_path_file)
|
os.remove(source_path_file)
|
||||||
|
|
||||||
# Need to write to a safe location
|
# Need to write to a safe location
|
||||||
plistlib.dump(data, Path(source_path_file).open("wb"), sort_keys=False)
|
plistlib.dump(data, Path(source_path_file).open("wb"), sort_keys=False)
|
||||||
|
|
||||||
if Path(source_path_file).exists():
|
if Path(source_path_file).exists():
|
||||||
return True
|
return True
|
||||||
|
|
||||||
return False
|
return False
|
||||||
|
|
||||||
def install_kdk(self):
|
|
||||||
if not self.constants.kdk_download_path.exists():
|
|
||||||
return
|
|
||||||
|
|
||||||
print(f"- Installing downloaded KDK (this may take a while)")
|
|
||||||
with tempfile.TemporaryDirectory() as mount_point:
|
|
||||||
utilities.process_status(subprocess.run(["hdiutil", "attach", self.constants.kdk_download_path, "-mountpoint", mount_point, "-nobrowse"], stdout=subprocess.PIPE, stderr=subprocess.STDOUT))
|
|
||||||
# Due to a permissions bug in macOS, sometimes the OS will fail on a Read-only file system error
|
|
||||||
# We don't actually need to write inside the KDK DMG, however macOS will do whatever it wants
|
|
||||||
# Thus move the KDK to another location, and run the installer from there
|
|
||||||
kdk_dst_path = Path(f"{self.constants.payload_path}/KernelDebugKit.pkg")
|
|
||||||
if kdk_dst_path.exists():
|
|
||||||
utilities.process_status(utilities.elevated(["rm", kdk_dst_path], stdout=subprocess.PIPE, stderr=subprocess.STDOUT))
|
|
||||||
utilities.process_status(subprocess.run(["cp", f"{mount_point}/KernelDebugKit.pkg", self.constants.payload_path], stdout=subprocess.PIPE, stderr=subprocess.STDOUT))
|
|
||||||
result = utilities.elevated(["installer", "-pkg", kdk_dst_path, "-target", "/"], stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
|
|
||||||
if result.returncode != 0:
|
|
||||||
print("- Failed to install KDK:")
|
|
||||||
print(result.stdout.decode('utf-8'))
|
|
||||||
if result.stderr:
|
|
||||||
print(result.stderr.decode('utf-8'))
|
|
||||||
utilities.elevated(["hdiutil", "detach", mount_point], stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
|
|
||||||
raise Exception("Failed to install KDK")
|
|
||||||
utilities.process_status(utilities.elevated(["rm", kdk_dst_path], stdout=subprocess.PIPE, stderr=subprocess.STDOUT))
|
|
||||||
utilities.elevated(["hdiutil", "detach", mount_point], stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
|
|
||||||
print("- Successfully installed KDK")
|
|
||||||
|
|
||||||
|
|
||||||
def determine_kdk_present(self, match_closest=False, override_build=None):
|
|
||||||
# Check if KDK is present
|
|
||||||
# If 'match_closest' is True, will provide the closest match to the reported KDK
|
|
||||||
|
|
||||||
kdk_array = []
|
|
||||||
|
|
||||||
search_build = self.constants.detected_os_build
|
|
||||||
if override_build:
|
|
||||||
search_build = override_build
|
|
||||||
|
|
||||||
if not Path("/Library/Developer/KDKs").exists():
|
|
||||||
return None
|
|
||||||
|
|
||||||
|
|
||||||
for kdk_folder in Path("/Library/Developer/KDKs").iterdir():
|
|
||||||
if not kdk_folder.name.endswith(".kdk"):
|
|
||||||
continue
|
|
||||||
|
|
||||||
# Ensure direct match
|
|
||||||
if kdk_folder.name.endswith(f"{search_build}.kdk"):
|
|
||||||
# Verify that the KDK is valid
|
|
||||||
if (kdk_folder / Path("System/Library/Extensions/System.kext/PlugIns/Libkern.kext/Libkern")).exists():
|
|
||||||
return kdk_folder
|
|
||||||
if match_closest is True:
|
|
||||||
# ex: KDK_13.0_22A5266r.kdk -> 22A5266r.kdk -> 22A5266r
|
|
||||||
try:
|
|
||||||
build = kdk_folder.name.split("_")[2].split(".")[0]
|
|
||||||
# Don't append if Darwin Major is different
|
|
||||||
if build.startswith(str(self.constants.detected_os)):
|
|
||||||
kdk_array.append(build)
|
|
||||||
except IndexError:
|
|
||||||
pass
|
|
||||||
|
|
||||||
if match_closest is True:
|
|
||||||
result = os_data.os_conversion.find_largest_build(kdk_array)
|
|
||||||
print(f"- Closest KDK match to {search_build}: {result}")
|
|
||||||
for kdk_folder in Path("/Library/Developer/KDKs").iterdir():
|
|
||||||
if kdk_folder.name.endswith(f"{result}.kdk"):
|
|
||||||
# Verify that the KDK is valid
|
|
||||||
if (kdk_folder / Path("System/Library/Extensions/System.kext/PlugIns/Libkern.kext/Libkern")).exists():
|
|
||||||
return kdk_folder
|
|
||||||
return None
|
|
||||||
|
|
||||||
|
|
||||||
def disable_window_server_caching(self):
|
def disable_window_server_caching(self):
|
||||||
# On legacy GCN GPUs, the WindowServer cache generated creates
|
"""
|
||||||
# corrupted Opaque shaders.
|
Disable WindowServer's asset caching
|
||||||
# To work-around this, we disable WindowServer caching
|
|
||||||
# And force macOS into properly generating the Opaque shaders
|
On legacy GCN GPUs, the WindowServer cache generated creates
|
||||||
|
corrupted Opaque shaders.
|
||||||
|
|
||||||
|
To work-around this, we disable WindowServer caching
|
||||||
|
And force macOS into properly generating the Opaque shaders
|
||||||
|
"""
|
||||||
|
|
||||||
if self.constants.detected_os < os_data.os_data.ventura:
|
if self.constants.detected_os < os_data.os_data.ventura:
|
||||||
return
|
return
|
||||||
print("- Disabling WindowServer Caching")
|
|
||||||
|
logging.info("- Disabling WindowServer Caching")
|
||||||
# Invoke via 'bash -c' to resolve pathing
|
# Invoke via 'bash -c' to resolve pathing
|
||||||
utilities.elevated(["bash", "-c", "rm -rf /private/var/folders/*/*/*/WindowServer/com.apple.WindowServer"])
|
utilities.elevated(["bash", "-c", "rm -rf /private/var/folders/*/*/*/WindowServer/com.apple.WindowServer"])
|
||||||
# Disable writing to WindowServer folder
|
# Disable writing to WindowServer folder
|
||||||
@@ -165,38 +136,49 @@ class sys_patch_helpers:
|
|||||||
|
|
||||||
|
|
||||||
def remove_news_widgets(self):
|
def remove_news_widgets(self):
|
||||||
# On Ivy Bridge and Haswell iGPUs, RenderBox will crash the News Widgets in
|
"""
|
||||||
# Notification Centre. To ensure users can access Notifications normally,
|
Remove News Widgets from Notification Centre
|
||||||
# we manually remove all News Widgets
|
|
||||||
|
On Ivy Bridge and Haswell iGPUs, RenderBox will crash the News Widgets in
|
||||||
|
Notification Centre. To ensure users can access Notifications normally,
|
||||||
|
we manually remove all News Widgets
|
||||||
|
"""
|
||||||
|
|
||||||
if self.constants.detected_os < os_data.os_data.ventura:
|
if self.constants.detected_os < os_data.os_data.ventura:
|
||||||
return
|
return
|
||||||
print("- Parsing Notification Centre Widgets")
|
|
||||||
|
logging.info("- Parsing Notification Centre Widgets")
|
||||||
file_path = "~/Library/Containers/com.apple.notificationcenterui/Data/Library/Preferences/com.apple.notificationcenterui.plist"
|
file_path = "~/Library/Containers/com.apple.notificationcenterui/Data/Library/Preferences/com.apple.notificationcenterui.plist"
|
||||||
file_path = Path(file_path).expanduser()
|
file_path = Path(file_path).expanduser()
|
||||||
|
|
||||||
if not file_path.exists():
|
if not file_path.exists():
|
||||||
print(" - Defaults file not found, skipping")
|
logging.info(" - Defaults file not found, skipping")
|
||||||
return
|
return
|
||||||
|
|
||||||
did_find = False
|
did_find = False
|
||||||
with open(file_path, "rb") as f:
|
with open(file_path, "rb") as f:
|
||||||
data = plistlib.load(f)
|
data = plistlib.load(f)
|
||||||
if "widgets" in data:
|
if "widgets" not in data:
|
||||||
if "instances" in data["widgets"]:
|
return
|
||||||
for widget in list(data["widgets"]["instances"]):
|
|
||||||
widget_data = bplist.BPListReader(widget).parse()
|
if "instances" not in data["widgets"]:
|
||||||
for entry in widget_data:
|
return
|
||||||
if not 'widget' in entry:
|
|
||||||
continue
|
for widget in list(data["widgets"]["instances"]):
|
||||||
sub_data = bplist.BPListReader(widget_data[entry]).parse()
|
widget_data = bplist.BPListReader(widget).parse()
|
||||||
for sub_entry in sub_data:
|
for entry in widget_data:
|
||||||
if not '$object' in sub_entry:
|
if 'widget' not in entry:
|
||||||
continue
|
continue
|
||||||
if not b'com.apple.news' in sub_data[sub_entry][2]:
|
sub_data = bplist.BPListReader(widget_data[entry]).parse()
|
||||||
continue
|
for sub_entry in sub_data:
|
||||||
print(f" - Found News Widget to remove: {sub_data[sub_entry][2].decode('ascii')}")
|
if not '$object' in sub_entry:
|
||||||
data["widgets"]["instances"].remove(widget)
|
continue
|
||||||
did_find = True
|
if not b'com.apple.news' in sub_data[sub_entry][2]:
|
||||||
|
continue
|
||||||
|
logging.info(f" - Found News Widget to remove: {sub_data[sub_entry][2].decode('ascii')}")
|
||||||
|
data["widgets"]["instances"].remove(widget)
|
||||||
|
did_find = True
|
||||||
|
|
||||||
if did_find:
|
if did_find:
|
||||||
with open(file_path, "wb") as f:
|
with open(file_path, "wb") as f:
|
||||||
plistlib.dump(data, f, sort_keys=False)
|
plistlib.dump(data, f, sort_keys=False)
|
||||||
@@ -204,21 +186,28 @@ class sys_patch_helpers:
|
|||||||
|
|
||||||
|
|
||||||
def install_rsr_repair_binary(self):
|
def install_rsr_repair_binary(self):
|
||||||
# With macOS 13.2, Apple implemented the Rapid Security Response System
|
"""
|
||||||
# However Apple added a half baked snapshot reversion system if seal was broken,
|
Installs RSRRepair
|
||||||
# which forgets to handle Preboot BootKC syncing
|
|
||||||
|
|
||||||
# Thus this application will try to re-sync the BootKC with SysKC in the event of a panic
|
RSRRepair is a utility that will sync the SysKC and BootKC in the event of a panic
|
||||||
# Reference: https://github.com/dortania/OpenCore-Legacy-Patcher/issues/1019
|
|
||||||
|
|
||||||
# This is a (hopefully) temporary work-around, however likely to stay.
|
With macOS 13.2, Apple implemented the Rapid Security Response System
|
||||||
# RSRRepair has the added bonus of fixing desynced KCs from 'bless', so useful in Big Sur+
|
However Apple added a half baked snapshot reversion system if seal was broken,
|
||||||
# https://github.com/flagersgit/RSRRepair
|
which forgets to handle Preboot BootKC syncing.
|
||||||
|
|
||||||
|
Thus this application will try to re-sync the BootKC with SysKC in the event of a panic
|
||||||
|
Reference: https://github.com/dortania/OpenCore-Legacy-Patcher/issues/1019
|
||||||
|
|
||||||
|
This is a (hopefully) temporary work-around, however likely to stay.
|
||||||
|
RSRRepair has the added bonus of fixing desynced KCs from 'bless', so useful in Big Sur+
|
||||||
|
Source: https://github.com/flagersgit/RSRRepair
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
if self.constants.detected_os < os_data.os_data.big_sur:
|
if self.constants.detected_os < os_data.os_data.big_sur:
|
||||||
return
|
return
|
||||||
|
|
||||||
print("- Installing Kernel Collection syncing utility")
|
logging.info("- Installing Kernel Collection syncing utility")
|
||||||
result = utilities.elevated([self.constants.rsrrepair_userspace_path, "--install"], stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
|
result = utilities.elevated([self.constants.rsrrepair_userspace_path, "--install"], stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
|
||||||
if result.returncode != 0:
|
if result.returncode != 0:
|
||||||
print(f" - Failed to install RSRRepair: {result.stdout.decode()}")
|
logging.info(f" - Failed to install RSRRepair: {result.stdout.decode()}")
|
||||||
@@ -1,78 +0,0 @@
|
|||||||
# Copyright (C) 2020-2022, Dhinak G, Mykola Grymalyuk
|
|
||||||
from resources import utilities
|
|
||||||
|
|
||||||
class TUIMenu:
|
|
||||||
def __init__(self, title, prompt, options=None, return_number_instead_of_direct_call=False, add_quit=True, auto_number=False, in_between=None, top_level=False, loop=False):
|
|
||||||
self.title = title
|
|
||||||
self.prompt = prompt
|
|
||||||
self.in_between = in_between or []
|
|
||||||
self.options = options or []
|
|
||||||
self.return_number_instead_of_direct_call = return_number_instead_of_direct_call
|
|
||||||
self.auto_number = auto_number
|
|
||||||
self.add_quit = add_quit
|
|
||||||
self.top_level = top_level
|
|
||||||
self.loop = loop
|
|
||||||
self.added_quit = False
|
|
||||||
|
|
||||||
def add_menu_option(self, name, description="", function=None, key=""):
|
|
||||||
self.options.append([key, name, description, function])
|
|
||||||
|
|
||||||
def start(self):
|
|
||||||
return_option = ["Q", "Quit"] if self.top_level else ["B", "Back"]
|
|
||||||
if self.add_quit and not self.added_quit:
|
|
||||||
self.add_menu_option(return_option[1], function=None, key=return_option[0])
|
|
||||||
self.added_quit = True
|
|
||||||
|
|
||||||
while True:
|
|
||||||
utilities.cls()
|
|
||||||
utilities.header(self.title)
|
|
||||||
print()
|
|
||||||
|
|
||||||
for i in self.in_between:
|
|
||||||
print(i)
|
|
||||||
if self.in_between:
|
|
||||||
print()
|
|
||||||
|
|
||||||
for index, option in enumerate(self.options):
|
|
||||||
if self.auto_number and not (index == (len(self.options) - 1) and self.add_quit):
|
|
||||||
option[0] = str((index + 1))
|
|
||||||
print(option[0] + ". " + option[1])
|
|
||||||
for i in option[2]:
|
|
||||||
print("\t" + i)
|
|
||||||
|
|
||||||
print()
|
|
||||||
selected = input(self.prompt)
|
|
||||||
|
|
||||||
keys = [option[0].upper() for option in self.options]
|
|
||||||
if not selected or selected.upper() not in keys:
|
|
||||||
if self.loop:
|
|
||||||
continue
|
|
||||||
else:
|
|
||||||
return
|
|
||||||
if self.add_quit and selected.upper() == return_option[0]:
|
|
||||||
return -1
|
|
||||||
elif self.return_number_instead_of_direct_call:
|
|
||||||
return self.options[keys.index(selected.upper())][0]
|
|
||||||
else:
|
|
||||||
self.options[keys.index(selected.upper())][3]() if self.options[keys.index(selected.upper())][3] else None
|
|
||||||
if not self.loop:
|
|
||||||
return
|
|
||||||
|
|
||||||
|
|
||||||
class TUIOnlyPrint:
|
|
||||||
def __init__(self, title, prompt, in_between=None):
|
|
||||||
self.title = title
|
|
||||||
self.prompt = prompt
|
|
||||||
self.in_between = in_between or []
|
|
||||||
|
|
||||||
def start(self):
|
|
||||||
utilities.cls()
|
|
||||||
utilities.header(self.title)
|
|
||||||
print()
|
|
||||||
|
|
||||||
for i in self.in_between:
|
|
||||||
print(i)
|
|
||||||
if self.in_between:
|
|
||||||
print()
|
|
||||||
|
|
||||||
return input(self.prompt)
|
|
||||||
@@ -3,31 +3,33 @@
|
|||||||
# Call check_binary_updates() to determine if any updates are available
|
# Call check_binary_updates() to determine if any updates are available
|
||||||
# Returns dict with Link and Version of the latest binary update if available
|
# Returns dict with Link and Version of the latest binary update if available
|
||||||
import requests
|
import requests
|
||||||
|
import logging
|
||||||
|
|
||||||
|
from resources import network_handler, constants
|
||||||
|
|
||||||
|
REPO_LATEST_RELEASE_URL: str = "https://api.github.com/repos/dortania/OpenCore-Legacy-Patcher/releases/latest"
|
||||||
|
|
||||||
|
|
||||||
class check_binary_updates:
|
class CheckBinaryUpdates:
|
||||||
def __init__(self, constants):
|
def __init__(self, global_constants: constants.Constants) -> None:
|
||||||
self.constants = constants
|
self.constants: constants.Constants = global_constants
|
||||||
self.binary_version = self.constants.patcher_version
|
|
||||||
self.binary_version_array = self.binary_version.split(".")
|
|
||||||
self.binary_version_array = [int(x) for x in self.binary_version_array]
|
|
||||||
self.binary_url = "https://api.github.com/repos/dortania/OpenCore-Legacy-Patcher/releases/latest"
|
|
||||||
|
|
||||||
self.available_binaries = {}
|
self.binary_version = self.constants.patcher_version
|
||||||
|
self.binary_version_array = [int(x) for x in self.binary_version.split(".")]
|
||||||
|
|
||||||
def verify_network_connection(self, url):
|
|
||||||
try:
|
|
||||||
response = requests.head(url, timeout=5)
|
|
||||||
if response:
|
|
||||||
return True
|
|
||||||
except (requests.exceptions.Timeout,
|
|
||||||
requests.exceptions.TooManyRedirects,
|
|
||||||
requests.exceptions.ConnectionError,
|
|
||||||
requests.exceptions.HTTPError):
|
|
||||||
return False
|
|
||||||
return False
|
|
||||||
|
|
||||||
def check_if_build_newer(self, remote_version=None, local_version=None):
|
def _check_if_build_newer(self, remote_version: list = None, local_version: list = None) -> bool:
|
||||||
|
"""
|
||||||
|
Check if the remote version is newer than the local version
|
||||||
|
|
||||||
|
Parameters:
|
||||||
|
remote_version (list): Remote version to compare against
|
||||||
|
local_version (list): Local version to compare against
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
bool: True if remote version is newer, False if not
|
||||||
|
"""
|
||||||
|
|
||||||
if remote_version is None:
|
if remote_version is None:
|
||||||
remote_version = self.remote_version_array
|
remote_version = self.remote_version_array
|
||||||
if local_version is None:
|
if local_version is None:
|
||||||
@@ -47,13 +49,32 @@ class check_binary_updates:
|
|||||||
|
|
||||||
return False
|
return False
|
||||||
|
|
||||||
def determine_local_build_type(self):
|
|
||||||
|
def _determine_local_build_type(self) -> str:
|
||||||
|
"""
|
||||||
|
Check if the local build is a GUI or TUI build
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
str: "GUI" or "TUI"
|
||||||
|
"""
|
||||||
|
|
||||||
if self.constants.wxpython_variant is True:
|
if self.constants.wxpython_variant is True:
|
||||||
return "GUI"
|
return "GUI"
|
||||||
else:
|
else:
|
||||||
return "TUI"
|
return "TUI"
|
||||||
|
|
||||||
def determine_remote_type(self, remote_name):
|
|
||||||
|
def _determine_remote_type(self, remote_name: str) -> str:
|
||||||
|
"""
|
||||||
|
Check if the remote build is a GUI or TUI build
|
||||||
|
|
||||||
|
Parameters:
|
||||||
|
remote_name (str): Name of the remote build
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
str: "GUI" or "TUI"
|
||||||
|
"""
|
||||||
|
|
||||||
if "TUI" in remote_name:
|
if "TUI" in remote_name:
|
||||||
return "TUI"
|
return "TUI"
|
||||||
elif "GUI" in remote_name:
|
elif "GUI" in remote_name:
|
||||||
@@ -61,45 +82,43 @@ class check_binary_updates:
|
|||||||
else:
|
else:
|
||||||
return "Unknown"
|
return "Unknown"
|
||||||
|
|
||||||
def check_binary_updates(self):
|
|
||||||
# print("- Checking for updates...")
|
def check_binary_updates(self) -> dict:
|
||||||
if self.verify_network_connection(self.binary_url):
|
"""
|
||||||
# print("- Network connection functional")
|
Check if any updates are available for the OpenCore Legacy Patcher binary
|
||||||
response = requests.get(self.binary_url)
|
|
||||||
data_set = response.json()
|
Returns:
|
||||||
# print("- Retrieved latest version data")
|
dict: Dictionary with Link and Version of the latest binary update if available
|
||||||
self.remote_version = data_set["tag_name"]
|
"""
|
||||||
# print(f"- Latest version: {self.remote_version}")
|
|
||||||
self.remote_version_array = self.remote_version.split(".")
|
available_binaries: list = {}
|
||||||
self.remote_version_array = [
|
|
||||||
int(x) for x in self.remote_version_array
|
if not network_handler.NetworkUtilities(REPO_LATEST_RELEASE_URL).verify_network_connection():
|
||||||
]
|
return None
|
||||||
if self.check_if_build_newer() is True:
|
|
||||||
# print("- Remote version is newer")
|
response = requests.get(REPO_LATEST_RELEASE_URL)
|
||||||
for asset in data_set["assets"]:
|
data_set = response.json()
|
||||||
print(f"- Found asset: {asset['name']}")
|
|
||||||
if self.determine_remote_type(asset["name"]) == self.determine_local_build_type():
|
self.remote_version = data_set["tag_name"]
|
||||||
# print(f"- Found matching asset: {asset['name']}")
|
|
||||||
self.available_binaries.update({
|
self.remote_version_array = self.remote_version.split(".")
|
||||||
asset['name']: {
|
self.remote_version_array = [int(x) for x in self.remote_version_array]
|
||||||
"Name":
|
|
||||||
asset["name"],
|
if self._check_if_build_newer() is False:
|
||||||
"Version":
|
return None
|
||||||
self.remote_version,
|
|
||||||
"Link":
|
for asset in data_set["assets"]:
|
||||||
asset["browser_download_url"],
|
logging.info(f"- Found asset: {asset['name']}")
|
||||||
"Type":
|
if self._determine_remote_type(asset["name"]) == self._determine_local_build_type():
|
||||||
self.determine_remote_type(asset["name"]),
|
available_binaries.update({
|
||||||
"Github Link":
|
asset['name']: {
|
||||||
f"https://github.com/dortania/OpenCore-Legacy-Patcher/releases/{self.remote_version}"
|
"Name": asset["name"],
|
||||||
}
|
"Version": self.remote_version,
|
||||||
})
|
"Link": asset["browser_download_url"],
|
||||||
break
|
"Type": self._determine_remote_type(asset["name"]),
|
||||||
if self.available_binaries:
|
"Github Link": f"https://github.com/dortania/OpenCore-Legacy-Patcher/releases/{self.remote_version}"
|
||||||
return self.available_binaries
|
}
|
||||||
else:
|
})
|
||||||
# print("- No matching binaries available")
|
return available_binaries
|
||||||
return None
|
|
||||||
# else:
|
|
||||||
# print("- Failed to connect to GitHub API")
|
|
||||||
return None
|
return None
|
||||||
@@ -1,6 +1,5 @@
|
|||||||
# Copyright (C) 2020-2022, Dhinak G, Mykola Grymalyuk
|
# Copyright (C) 2020-2023, Dhinak G, Mykola Grymalyuk
|
||||||
|
|
||||||
import hashlib
|
|
||||||
import math
|
import math
|
||||||
import os
|
import os
|
||||||
import plistlib
|
import plistlib
|
||||||
@@ -9,18 +8,15 @@ from pathlib import Path
|
|||||||
import os
|
import os
|
||||||
import binascii
|
import binascii
|
||||||
import argparse
|
import argparse
|
||||||
import time
|
|
||||||
import atexit
|
import atexit
|
||||||
import requests
|
|
||||||
import shutil
|
import shutil
|
||||||
import urllib.parse
|
|
||||||
import py_sip_xnu
|
import py_sip_xnu
|
||||||
|
|
||||||
|
import logging
|
||||||
|
|
||||||
from resources import constants, ioreg
|
from resources import constants, ioreg
|
||||||
from data import sip_data, os_data
|
from data import sip_data, os_data
|
||||||
|
|
||||||
SESSION = requests.Session()
|
|
||||||
|
|
||||||
|
|
||||||
def hexswap(input_hex: str):
|
def hexswap(input_hex: str):
|
||||||
hex_pairs = [input_hex[i : i + 2] for i in range(0, len(input_hex), 2)]
|
hex_pairs = [input_hex[i : i + 2] for i in range(0, len(input_hex), 2)]
|
||||||
@@ -39,8 +35,8 @@ def string_to_hex(input_string):
|
|||||||
|
|
||||||
def process_status(process_result):
|
def process_status(process_result):
|
||||||
if process_result.returncode != 0:
|
if process_result.returncode != 0:
|
||||||
print(f"Process failed with exit code {process_result.returncode}")
|
logging.info(f"Process failed with exit code {process_result.returncode}")
|
||||||
print(f"Please report the issue on the Discord server")
|
logging.info(f"Please report the issue on the Discord server")
|
||||||
raise Exception(f"Process result: \n{process_result.stdout.decode()}")
|
raise Exception(f"Process result: \n{process_result.stdout.decode()}")
|
||||||
|
|
||||||
|
|
||||||
@@ -55,11 +51,11 @@ def human_fmt(num):
|
|||||||
def header(lines):
|
def header(lines):
|
||||||
lines = [i for i in lines if i is not None]
|
lines = [i for i in lines if i is not None]
|
||||||
total_length = len(max(lines, key=len)) + 4
|
total_length = len(max(lines, key=len)) + 4
|
||||||
print("#" * (total_length))
|
logging.info("#" * (total_length))
|
||||||
for line in lines:
|
for line in lines:
|
||||||
left_side = math.floor(((total_length - 2 - len(line.strip())) / 2))
|
left_side = math.floor(((total_length - 2 - len(line.strip())) / 2))
|
||||||
print("#" + " " * left_side + line.strip() + " " * (total_length - len("#" + " " * left_side + line.strip()) - 1) + "#")
|
logging.info("#" + " " * left_side + line.strip() + " " * (total_length - len("#" + " " * left_side + line.strip()) - 1) + "#")
|
||||||
print("#" * total_length)
|
logging.info("#" * total_length)
|
||||||
|
|
||||||
|
|
||||||
RECOVERY_STATUS = None
|
RECOVERY_STATUS = None
|
||||||
@@ -124,7 +120,7 @@ sleep_process = None
|
|||||||
|
|
||||||
def disable_sleep_while_running():
|
def disable_sleep_while_running():
|
||||||
global sleep_process
|
global sleep_process
|
||||||
print("- Disabling Idle Sleep")
|
logging.info("- Disabling Idle Sleep")
|
||||||
if sleep_process is None:
|
if sleep_process is None:
|
||||||
# If sleep_process is active, we'll just keep it running
|
# If sleep_process is active, we'll just keep it running
|
||||||
sleep_process = subprocess.Popen(["caffeinate", "-d", "-i", "-s"], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
|
sleep_process = subprocess.Popen(["caffeinate", "-d", "-i", "-s"], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
|
||||||
@@ -134,7 +130,7 @@ def disable_sleep_while_running():
|
|||||||
def enable_sleep_after_running():
|
def enable_sleep_after_running():
|
||||||
global sleep_process
|
global sleep_process
|
||||||
if sleep_process:
|
if sleep_process:
|
||||||
print("- Re-enabling Idle Sleep")
|
logging.info("- Re-enabling Idle Sleep")
|
||||||
sleep_process.kill()
|
sleep_process.kill()
|
||||||
sleep_process = None
|
sleep_process = None
|
||||||
|
|
||||||
@@ -283,7 +279,7 @@ def cls():
|
|||||||
if not check_recovery():
|
if not check_recovery():
|
||||||
os.system("cls" if os.name == "nt" else "clear")
|
os.system("cls" if os.name == "nt" else "clear")
|
||||||
else:
|
else:
|
||||||
print("\u001Bc")
|
logging.info("\u001Bc")
|
||||||
|
|
||||||
def check_command_line_tools():
|
def check_command_line_tools():
|
||||||
# Determine whether Command Line Tools exist
|
# Determine whether Command Line Tools exist
|
||||||
@@ -360,124 +356,6 @@ def get_firmware_vendor(*, decode: bool = False):
|
|||||||
value = value.strip("\0")
|
value = value.strip("\0")
|
||||||
return value
|
return value
|
||||||
|
|
||||||
def verify_network_connection(url=None):
|
|
||||||
if url is None:
|
|
||||||
url = "https://www.google.com"
|
|
||||||
try:
|
|
||||||
response = SESSION.head(url, timeout=5, allow_redirects=True)
|
|
||||||
return True
|
|
||||||
except (requests.exceptions.Timeout, requests.exceptions.TooManyRedirects, requests.exceptions.ConnectionError, requests.exceptions.HTTPError):
|
|
||||||
return False
|
|
||||||
|
|
||||||
def download_file(link, location, is_gui=None, verify_checksum=False):
|
|
||||||
if verify_network_connection(link):
|
|
||||||
disable_sleep_while_running()
|
|
||||||
base_name = Path(link).name
|
|
||||||
|
|
||||||
if Path(location).exists():
|
|
||||||
Path(location).unlink()
|
|
||||||
|
|
||||||
head_response = SESSION.head(link, allow_redirects=True)
|
|
||||||
try:
|
|
||||||
# Handle cases where Content-Length has garbage or is missing
|
|
||||||
total_file_size = int(head_response.headers['Content-Length'])
|
|
||||||
except KeyError:
|
|
||||||
total_file_size = 0
|
|
||||||
|
|
||||||
if total_file_size > 1024:
|
|
||||||
file_size_rounded = round(total_file_size / 1024 / 1024, 2)
|
|
||||||
file_size_string = f" of {file_size_rounded}MB"
|
|
||||||
|
|
||||||
# Check if we have enough space
|
|
||||||
if total_file_size > get_free_space():
|
|
||||||
print(f"Not enough space to download {base_name} ({file_size_rounded}MB)")
|
|
||||||
return False
|
|
||||||
else:
|
|
||||||
file_size_string = ""
|
|
||||||
|
|
||||||
response = SESSION.get(link, stream=True)
|
|
||||||
|
|
||||||
# SU Catalog's link is quite long, strip to make it bearable
|
|
||||||
if "sucatalog.gz" in base_name:
|
|
||||||
base_name = "sucatalog.gz"
|
|
||||||
|
|
||||||
header = f"# Downloading: {base_name} #"
|
|
||||||
box_length = len(header)
|
|
||||||
box_string = "#" * box_length
|
|
||||||
dl = 0
|
|
||||||
total_downloaded_string = ""
|
|
||||||
global clear
|
|
||||||
with location.open("wb") as file:
|
|
||||||
count = 0
|
|
||||||
start = time.perf_counter()
|
|
||||||
for chunk in response.iter_content(1024 * 1024 * 4):
|
|
||||||
dl += len(chunk)
|
|
||||||
file.write(chunk)
|
|
||||||
count += len(chunk)
|
|
||||||
if is_gui is None:
|
|
||||||
if clear:
|
|
||||||
cls()
|
|
||||||
print(box_string)
|
|
||||||
print(header)
|
|
||||||
print(box_string)
|
|
||||||
print("")
|
|
||||||
if total_file_size > 1024:
|
|
||||||
total_downloaded_string = f" ({round(float(dl / total_file_size * 100), 2)}%)"
|
|
||||||
print(f"{round(count / 1024 / 1024, 2)}MB Downloaded{file_size_string}{total_downloaded_string}\nAverage Download Speed: {round(dl//(time.perf_counter() - start) / 100000 / 8, 2)} MB/s")
|
|
||||||
|
|
||||||
if verify_checksum is True:
|
|
||||||
# Verify checksum
|
|
||||||
# Note that this can be quite taxing on slower Macs
|
|
||||||
checksum = hashlib.sha256()
|
|
||||||
with location.open("rb") as file:
|
|
||||||
chunk = file.read(1024 * 1024 * 16)
|
|
||||||
while chunk:
|
|
||||||
checksum.update(chunk)
|
|
||||||
chunk = file.read(1024 * 1024 * 16)
|
|
||||||
enable_sleep_after_running()
|
|
||||||
return checksum
|
|
||||||
enable_sleep_after_running()
|
|
||||||
return True
|
|
||||||
else:
|
|
||||||
cls()
|
|
||||||
header = "# Could not establish Network Connection with provided link! #"
|
|
||||||
box_length = len(header)
|
|
||||||
box_string = "#" * box_length
|
|
||||||
print(box_string)
|
|
||||||
print(header)
|
|
||||||
print(box_string)
|
|
||||||
if constants.Constants().url_patcher_support_pkg in link:
|
|
||||||
# If we're downloading PatcherSupportPkg, present offline build
|
|
||||||
print("\nPlease grab the offline variant of OpenCore Legacy Patcher from Github:")
|
|
||||||
print(f"https://github.com/dortania/OpenCore-Legacy-Patcher/releases/download/{constants.Constants().patcher_version}/OpenCore-Patcher-TUI-Offline.app.zip")
|
|
||||||
else:
|
|
||||||
print(link)
|
|
||||||
return None
|
|
||||||
|
|
||||||
|
|
||||||
def download_apple_developer_portal(link, location, is_gui=None, verify_checksum=False):
|
|
||||||
TOKEN_URL_BASE = "https://developerservices2.apple.com/services/download?path="
|
|
||||||
remote_path = urllib.parse.urlparse(link).path
|
|
||||||
token_url = urllib.parse.urlunparse(urllib.parse.urlparse(TOKEN_URL_BASE)._replace(query=urllib.parse.urlencode({"path": remote_path})))
|
|
||||||
|
|
||||||
try:
|
|
||||||
response = SESSION.get(token_url, timeout=5)
|
|
||||||
except (requests.exceptions.Timeout, requests.exceptions.TooManyRedirects, requests.exceptions.ConnectionError):
|
|
||||||
print(" - Could not contact Apple download servers")
|
|
||||||
return None
|
|
||||||
|
|
||||||
try:
|
|
||||||
response.raise_for_status()
|
|
||||||
except requests.exceptions.HTTPError:
|
|
||||||
if response.status_code == 400 and "The path specified is invalid" in response.text:
|
|
||||||
print(" - File does not exist on Apple download servers")
|
|
||||||
else:
|
|
||||||
print(" - Could not request download authorization from Apple download servers")
|
|
||||||
return None
|
|
||||||
|
|
||||||
return download_file(link, location, is_gui, verify_checksum)
|
|
||||||
|
|
||||||
|
|
||||||
def dump_constants(constants):
|
def dump_constants(constants):
|
||||||
with open(os.path.join(os.path.expanduser('~'), 'Desktop', 'internal_data.txt'), 'w') as f:
|
with open(os.path.join(os.path.expanduser('~'), 'Desktop', 'internal_data.txt'), 'w') as f:
|
||||||
f.write(str(vars(constants)))
|
f.write(str(vars(constants)))
|
||||||
@@ -544,9 +422,19 @@ def find_disk_off_uuid(uuid):
|
|||||||
return None
|
return None
|
||||||
|
|
||||||
def get_free_space(disk=None):
|
def get_free_space(disk=None):
|
||||||
|
"""
|
||||||
|
Get free space on disk in bytes
|
||||||
|
|
||||||
|
Parameters:
|
||||||
|
disk (str): Path to mounted disk (or folder on disk)
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
int: Free space in bytes
|
||||||
|
"""
|
||||||
if disk is None:
|
if disk is None:
|
||||||
disk = "/"
|
disk = "/"
|
||||||
total, used, free = shutil.disk_usage("/")
|
|
||||||
|
total, used, free = shutil.disk_usage(disk)
|
||||||
return free
|
return free
|
||||||
|
|
||||||
def grab_mount_point_from_disk(disk):
|
def grab_mount_point_from_disk(disk):
|
||||||
@@ -562,16 +450,6 @@ def monitor_disk_output(disk):
|
|||||||
output = output[-2]
|
output = output[-2]
|
||||||
return output
|
return output
|
||||||
|
|
||||||
def validate_link(link):
|
|
||||||
# Check if link is 404
|
|
||||||
try:
|
|
||||||
response = SESSION.head(link, timeout=5, allow_redirects=True)
|
|
||||||
if response.status_code == 404:
|
|
||||||
return False
|
|
||||||
else:
|
|
||||||
return True
|
|
||||||
except (requests.exceptions.Timeout, requests.exceptions.TooManyRedirects, requests.exceptions.ConnectionError, requests.exceptions.HTTPError):
|
|
||||||
return False
|
|
||||||
|
|
||||||
def block_os_updaters():
|
def block_os_updaters():
|
||||||
# Disables any processes that would be likely to mess with
|
# Disables any processes that would be likely to mess with
|
||||||
@@ -591,7 +469,7 @@ def block_os_updaters():
|
|||||||
for bad_process in bad_processes:
|
for bad_process in bad_processes:
|
||||||
if bad_process in current_process:
|
if bad_process in current_process:
|
||||||
if pid != "":
|
if pid != "":
|
||||||
print(f"- Killing Process: {pid} - {current_process.split('/')[-1]}")
|
logging.info(f"- Killing Process: {pid} - {current_process.split('/')[-1]}")
|
||||||
subprocess.run(["kill", "-9", pid])
|
subprocess.run(["kill", "-9", pid])
|
||||||
break
|
break
|
||||||
|
|
||||||
|
|||||||
@@ -1,75 +1,110 @@
|
|||||||
|
import logging
|
||||||
import subprocess
|
import subprocess
|
||||||
from resources.sys_patch import sys_patch_helpers
|
|
||||||
from resources.build import build
|
|
||||||
from data import example_data, model_array, sys_patch_dict, os_data
|
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
|
|
||||||
|
from resources.sys_patch import sys_patch_helpers
|
||||||
|
from resources.build import build
|
||||||
|
from resources import constants
|
||||||
|
from data import example_data, model_array, sys_patch_dict, os_data
|
||||||
|
|
||||||
def validate(settings):
|
|
||||||
# Runs through ocvalidate to check for errors
|
|
||||||
|
|
||||||
valid_dumps = [
|
class PatcherValidation:
|
||||||
example_data.MacBookPro.MacBookPro92_Stock,
|
"""
|
||||||
example_data.MacBookPro.MacBookPro111_Stock,
|
Validation class for the patcher
|
||||||
example_data.MacBookPro.MacBookPro133_Stock,
|
|
||||||
# example_data.MacBookPro.MacBookPro171_Stock,
|
|
||||||
example_data.Macmini.Macmini52_Stock,
|
|
||||||
example_data.Macmini.Macmini61_Stock,
|
|
||||||
example_data.Macmini.Macmini71_Stock,
|
|
||||||
# example_data.Macmini.Macmini91_Stock,
|
|
||||||
example_data.iMac.iMac81_Stock,
|
|
||||||
example_data.iMac.iMac112_Stock,
|
|
||||||
example_data.iMac.iMac122_Upgraded,
|
|
||||||
example_data.iMac.iMac122_Upgraded_Nvidia,
|
|
||||||
example_data.iMac.iMac151_Stock,
|
|
||||||
example_data.MacPro.MacPro31_Stock,
|
|
||||||
example_data.MacPro.MacPro31_Upgrade,
|
|
||||||
example_data.MacPro.MacPro31_Modern_AMD,
|
|
||||||
example_data.MacPro.MacPro31_Modern_Kepler,
|
|
||||||
example_data.MacPro.MacPro41_Upgrade,
|
|
||||||
example_data.MacPro.MacPro41_Modern_AMD,
|
|
||||||
example_data.MacPro.MacPro41_51__Flashed_Modern_AMD,
|
|
||||||
example_data.MacPro.MacPro41_51_Flashed_NVIDIA_WEB_DRIVERS,
|
|
||||||
]
|
|
||||||
|
|
||||||
valid_dumps_native = [
|
Primarily for Continuous Integration
|
||||||
example_data.iMac.iMac201_Stock,
|
"""
|
||||||
example_data.MacBookPro.MacBookPro141_SSD_Upgrade,
|
|
||||||
]
|
|
||||||
|
|
||||||
settings.validate = True
|
def __init__(self, global_constants: constants.Constants) -> None:
|
||||||
|
self.constants: constants.Constants = global_constants
|
||||||
|
|
||||||
|
self.constants.validate = True
|
||||||
|
|
||||||
|
self.valid_dumps = [
|
||||||
|
example_data.MacBookPro.MacBookPro92_Stock,
|
||||||
|
example_data.MacBookPro.MacBookPro111_Stock,
|
||||||
|
example_data.MacBookPro.MacBookPro133_Stock,
|
||||||
|
|
||||||
|
example_data.Macmini.Macmini52_Stock,
|
||||||
|
example_data.Macmini.Macmini61_Stock,
|
||||||
|
example_data.Macmini.Macmini71_Stock,
|
||||||
|
|
||||||
|
example_data.iMac.iMac81_Stock,
|
||||||
|
example_data.iMac.iMac112_Stock,
|
||||||
|
example_data.iMac.iMac122_Upgraded,
|
||||||
|
example_data.iMac.iMac122_Upgraded_Nvidia,
|
||||||
|
example_data.iMac.iMac151_Stock,
|
||||||
|
|
||||||
|
example_data.MacPro.MacPro31_Stock,
|
||||||
|
example_data.MacPro.MacPro31_Upgrade,
|
||||||
|
example_data.MacPro.MacPro31_Modern_AMD,
|
||||||
|
example_data.MacPro.MacPro31_Modern_Kepler,
|
||||||
|
example_data.MacPro.MacPro41_Upgrade,
|
||||||
|
example_data.MacPro.MacPro41_Modern_AMD,
|
||||||
|
example_data.MacPro.MacPro41_51__Flashed_Modern_AMD,
|
||||||
|
example_data.MacPro.MacPro41_51_Flashed_NVIDIA_WEB_DRIVERS,
|
||||||
|
]
|
||||||
|
|
||||||
|
self.valid_dumps_native = [
|
||||||
|
example_data.iMac.iMac201_Stock,
|
||||||
|
example_data.MacBookPro.MacBookPro141_SSD_Upgrade,
|
||||||
|
]
|
||||||
|
|
||||||
|
self._validate_configs()
|
||||||
|
self._validate_sys_patch()
|
||||||
|
|
||||||
|
|
||||||
|
def _build_prebuilt(self) -> None:
|
||||||
|
"""
|
||||||
|
Generate a build for each predefined model
|
||||||
|
Then validate against ocvalidate
|
||||||
|
"""
|
||||||
|
|
||||||
def build_prebuilt():
|
|
||||||
for model in model_array.SupportedSMBIOS:
|
for model in model_array.SupportedSMBIOS:
|
||||||
print(f"Validating predefined model: {model}")
|
logging.info(f"Validating predefined model: {model}")
|
||||||
settings.custom_model = model
|
self.constants.custom_model = model
|
||||||
build.build_opencore(settings.custom_model, settings).build_opencore()
|
build.build_opencore(self.constants.custom_model, self.constants).build_opencore()
|
||||||
result = subprocess.run([settings.ocvalidate_path, f"{settings.opencore_release_folder}/EFI/OC/config.plist"], stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
|
result = subprocess.run([self.constants.ocvalidate_path, f"{self.constants.opencore_release_folder}/EFI/OC/config.plist"], stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
|
||||||
if result.returncode != 0:
|
if result.returncode != 0:
|
||||||
print("Error on build!")
|
logging.info("Error on build!")
|
||||||
print(result.stdout.decode())
|
logging.info(result.stdout.decode())
|
||||||
raise Exception(f"Validation failed for predefined model: {model}")
|
raise Exception(f"Validation failed for predefined model: {model}")
|
||||||
else:
|
else:
|
||||||
print(f"Validation succeeded for predefined model: {model}")
|
logging.info(f"Validation succeeded for predefined model: {model}")
|
||||||
|
|
||||||
def build_dumps():
|
|
||||||
for model in valid_dumps:
|
def _build_dumps(self) -> None:
|
||||||
settings.computer = model
|
"""
|
||||||
settings.custom_model = ""
|
Generate a build for each predefined model
|
||||||
print(f"Validating dumped model: {settings.computer.real_model}")
|
Then validate against ocvalidate
|
||||||
build.build_opencore(settings.computer.real_model, settings).build_opencore()
|
"""
|
||||||
result = subprocess.run([settings.ocvalidate_path, f"{settings.opencore_release_folder}/EFI/OC/config.plist"], stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
|
|
||||||
|
for model in self.valid_dumps:
|
||||||
|
self.constants.computer = model
|
||||||
|
self.constants.custom_model = ""
|
||||||
|
logging.info(f"Validating dumped model: {self.constants.computer.real_model}")
|
||||||
|
build.build_opencore(self.constants.computer.real_model, self.constants).build_opencore()
|
||||||
|
result = subprocess.run([self.constants.ocvalidate_path, f"{self.constants.opencore_release_folder}/EFI/OC/config.plist"], stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
|
||||||
if result.returncode != 0:
|
if result.returncode != 0:
|
||||||
print("Error on build!")
|
logging.info("Error on build!")
|
||||||
print(result.stdout.decode())
|
logging.info(result.stdout.decode())
|
||||||
raise Exception(f"Validation failed for predefined model: {settings.computer.real_model}")
|
raise Exception(f"Validation failed for predefined model: {self.constants.computer.real_model}")
|
||||||
else:
|
else:
|
||||||
print(f"Validation succeeded for predefined model: {settings.computer.real_model}")
|
logging.info(f"Validation succeeded for predefined model: {self.constants.computer.real_model}")
|
||||||
|
|
||||||
|
|
||||||
def validate_root_patch_files(major_kernel, minor_kernel):
|
def _validate_root_patch_files(self, major_kernel: int, minor_kernel: int) -> None:
|
||||||
patchset = sys_patch_dict.SystemPatchDictionary(major_kernel, minor_kernel, settings.legacy_accel_support)
|
"""
|
||||||
|
Validate that all files in the patchset are present in the payload
|
||||||
|
|
||||||
|
Parameters:
|
||||||
|
major_kernel (int): Major kernel version
|
||||||
|
minor_kernel (int): Minor kernel version
|
||||||
|
"""
|
||||||
|
|
||||||
|
patchset = sys_patch_dict.SystemPatchDictionary(major_kernel, minor_kernel, self.constants.legacy_accel_support).patchset_dict
|
||||||
host_os_float = float(f"{major_kernel}.{minor_kernel}")
|
host_os_float = float(f"{major_kernel}.{minor_kernel}")
|
||||||
|
|
||||||
for patch_subject in patchset:
|
for patch_subject in patchset:
|
||||||
for patch_core in patchset[patch_subject]:
|
for patch_core in patchset[patch_subject]:
|
||||||
patch_os_min_float = float(f'{patchset[patch_subject][patch_core]["OS Support"]["Minimum OS Support"]["OS Major"]}.{patchset[patch_subject][patch_core]["OS Support"]["Minimum OS Support"]["OS Minor"]}')
|
patch_os_min_float = float(f'{patchset[patch_subject][patch_core]["OS Support"]["Minimum OS Support"]["OS Major"]}.{patchset[patch_subject][patch_core]["OS Support"]["Minimum OS Support"]["OS Minor"]}')
|
||||||
@@ -80,58 +115,83 @@ def validate(settings):
|
|||||||
if install_type in patchset[patch_subject][patch_core]:
|
if install_type in patchset[patch_subject][patch_core]:
|
||||||
for install_directory in patchset[patch_subject][patch_core][install_type]:
|
for install_directory in patchset[patch_subject][patch_core][install_type]:
|
||||||
for install_file in patchset[patch_subject][patch_core][install_type][install_directory]:
|
for install_file in patchset[patch_subject][patch_core][install_type][install_directory]:
|
||||||
source_file = str(settings.payload_local_binaries_root_path) + "/" + patchset[patch_subject][patch_core][install_type][install_directory][install_file] + install_directory + "/" + install_file
|
source_file = str(self.constants.payload_local_binaries_root_path) + "/" + patchset[patch_subject][patch_core][install_type][install_directory][install_file] + install_directory + "/" + install_file
|
||||||
if not Path(source_file).exists():
|
if not Path(source_file).exists():
|
||||||
print(f"File not found: {source_file}")
|
logging.info(f"File not found: {source_file}")
|
||||||
raise Exception(f"Failed to find {source_file}")
|
raise Exception(f"Failed to find {source_file}")
|
||||||
|
|
||||||
print(f"- Validating against Darwin {major_kernel}.{minor_kernel}")
|
logging.info(f"- Validating against Darwin {major_kernel}.{minor_kernel}")
|
||||||
if not sys_patch_helpers.sys_patch_helpers(settings).generate_patchset_plist(patchset, f"OpenCore-Legacy-Patcher-{major_kernel}.{minor_kernel}.plist", None):
|
if not sys_patch_helpers.SysPatchHelpers(self.constants).generate_patchset_plist(patchset, f"OpenCore-Legacy-Patcher-{major_kernel}.{minor_kernel}.plist", None):
|
||||||
raise Exception("Failed to generate patchset plist")
|
raise Exception("Failed to generate patchset plist")
|
||||||
|
|
||||||
# Remove the plist file after validation
|
# Remove the plist file after validation
|
||||||
Path(settings.payload_path / f"OpenCore-Legacy-Patcher-{major_kernel}.{minor_kernel}.plist").unlink()
|
Path(self.constants.payload_path / f"OpenCore-Legacy-Patcher-{major_kernel}.{minor_kernel}.plist").unlink()
|
||||||
|
|
||||||
|
|
||||||
def validate_sys_patch():
|
def _validate_sys_patch(self) -> None:
|
||||||
if Path(settings.payload_local_binaries_root_path_zip).exists():
|
"""
|
||||||
print("Validating Root Patch File integrity")
|
Validates sys_patch modules
|
||||||
if not Path(settings.payload_local_binaries_root_path).exists():
|
"""
|
||||||
subprocess.run(["ditto", "-V", "-x", "-k", "--sequesterRsrc", "--rsrc", settings.payload_local_binaries_root_path_zip, settings.payload_path], stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
|
|
||||||
|
if Path(self.constants.payload_local_binaries_root_path_zip).exists():
|
||||||
|
logging.info("Validating Root Patch File integrity")
|
||||||
|
if not Path(self.constants.payload_local_binaries_root_path).exists():
|
||||||
|
subprocess.run(
|
||||||
|
[
|
||||||
|
"ditto", "-V", "-x", "-k", "--sequesterRsrc", "--rsrc",
|
||||||
|
self.constants.payload_local_binaries_root_path_zip,
|
||||||
|
self.constants.payload_path
|
||||||
|
],
|
||||||
|
stdout=subprocess.PIPE,
|
||||||
|
stderr=subprocess.STDOUT
|
||||||
|
)
|
||||||
for supported_os in [os_data.os_data.big_sur, os_data.os_data.monterey, os_data.os_data.ventura]:
|
for supported_os in [os_data.os_data.big_sur, os_data.os_data.monterey, os_data.os_data.ventura]:
|
||||||
for i in range(0, 10):
|
for i in range(0, 10):
|
||||||
validate_root_patch_files(supported_os, i)
|
self._validate_root_patch_files(supported_os, i)
|
||||||
print("Validating SNB Board ID patcher")
|
logging.info("Validating SNB Board ID patcher")
|
||||||
settings.computer.reported_board_id = "Mac-7BA5B2DFE22DDD8C"
|
self.constants.computer.reported_board_id = "Mac-7BA5B2DFE22DDD8C"
|
||||||
sys_patch_helpers.sys_patch_helpers(settings).snb_board_id_patch(settings.payload_local_binaries_root_path)
|
sys_patch_helpers.SysPatchHelpers(self.constants).snb_board_id_patch(self.constants.payload_local_binaries_root_path)
|
||||||
|
|
||||||
|
# Clean up
|
||||||
|
subprocess.run(
|
||||||
|
[
|
||||||
|
"rm", "-rf", self.constants.payload_local_binaries_root_path
|
||||||
|
],
|
||||||
|
stdout=subprocess.PIPE,
|
||||||
|
stderr=subprocess.STDOUT
|
||||||
|
)
|
||||||
else:
|
else:
|
||||||
print("- Skipping Root Patch File integrity validation")
|
logging.info("- Skipping Root Patch File integrity validation")
|
||||||
|
|
||||||
|
|
||||||
def validate_configs():
|
def _validate_configs(self) -> None:
|
||||||
|
"""
|
||||||
|
Validates build modules
|
||||||
|
"""
|
||||||
|
|
||||||
# First run is with default settings
|
# First run is with default settings
|
||||||
build_prebuilt()
|
self._build_prebuilt()
|
||||||
build_dumps()
|
self._build_dumps()
|
||||||
|
|
||||||
# Second run, flip all settings
|
# Second run, flip all settings
|
||||||
settings.verbose_debug = True
|
self.constants.verbose_debug = True
|
||||||
settings.opencore_debug = True
|
self.constants.opencore_debug = True
|
||||||
settings.opencore_build = "DEBUG"
|
self.constants.opencore_build = "DEBUG"
|
||||||
settings.kext_debug = True
|
self.constants.kext_debug = True
|
||||||
settings.kext_variant = "DEBUG"
|
self.constants.kext_variant = "DEBUG"
|
||||||
settings.kext_debug = True
|
self.constants.kext_debug = True
|
||||||
settings.showpicker = False
|
self.constants.showpicker = False
|
||||||
settings.sip_status = False
|
self.constants.sip_status = False
|
||||||
settings.secure_status = True
|
self.constants.secure_status = True
|
||||||
settings.firewire_boot = True
|
self.constants.firewire_boot = True
|
||||||
settings.nvme_boot = True
|
self.constants.nvme_boot = True
|
||||||
settings.enable_wake_on_wlan = True
|
self.constants.enable_wake_on_wlan = True
|
||||||
settings.disable_tb = True
|
self.constants.disable_tb = True
|
||||||
settings.force_surplus = True
|
self.constants.force_surplus = True
|
||||||
settings.software_demux = True
|
self.constants.software_demux = True
|
||||||
settings.serial_settings = "Minimal"
|
self.constants.serial_settings = "Minimal"
|
||||||
build_prebuilt()
|
|
||||||
build_dumps()
|
|
||||||
|
|
||||||
|
self._build_prebuilt()
|
||||||
|
self._build_dumps()
|
||||||
|
|
||||||
validate_configs()
|
subprocess.run(["rm", "-rf", self.constants.build_path], stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
|
||||||
validate_sys_patch()
|
|
||||||
Reference in New Issue
Block a user