Trying to fix black screen on wake...
This commit is contained in:
3
ACPI/SSDTTime-master/.gitattributes
vendored
Normal file
3
ACPI/SSDTTime-master/.gitattributes
vendored
Normal file
@@ -0,0 +1,3 @@
|
||||
# Ensure all .bat scripts use CRLF line endings
|
||||
# This can prevent a number of odd batch issues
|
||||
*.bat text eol=crlf
|
||||
110
ACPI/SSDTTime-master/.gitignore
vendored
Normal file
110
ACPI/SSDTTime-master/.gitignore
vendored
Normal file
@@ -0,0 +1,110 @@
|
||||
# Byte-compiled / optimized / DLL files
|
||||
__pycache__/
|
||||
*.py[cod]
|
||||
*$py.class
|
||||
|
||||
# C extensions
|
||||
*.so
|
||||
|
||||
# Distribution / packaging
|
||||
.Python
|
||||
build/
|
||||
develop-eggs/
|
||||
dist/
|
||||
downloads/
|
||||
eggs/
|
||||
.eggs/
|
||||
lib/
|
||||
lib64/
|
||||
parts/
|
||||
sdist/
|
||||
var/
|
||||
wheels/
|
||||
*.egg-info/
|
||||
.installed.cfg
|
||||
*.egg
|
||||
MANIFEST
|
||||
|
||||
# PyInstaller
|
||||
# Usually these files are written by a python script from a template
|
||||
# before PyInstaller builds the exe, so as to inject date/other infos into it.
|
||||
*.manifest
|
||||
*.spec
|
||||
|
||||
# Installer logs
|
||||
pip-log.txt
|
||||
pip-delete-this-directory.txt
|
||||
|
||||
# Unit test / coverage reports
|
||||
htmlcov/
|
||||
.tox/
|
||||
.coverage
|
||||
.coverage.*
|
||||
.cache
|
||||
nosetests.xml
|
||||
coverage.xml
|
||||
*.cover
|
||||
.hypothesis/
|
||||
.pytest_cache/
|
||||
|
||||
# Translations
|
||||
*.mo
|
||||
*.pot
|
||||
|
||||
# Django stuff:
|
||||
*.log
|
||||
local_settings.py
|
||||
db.sqlite3
|
||||
|
||||
# Flask stuff:
|
||||
instance/
|
||||
.webassets-cache
|
||||
|
||||
# Scrapy stuff:
|
||||
.scrapy
|
||||
|
||||
# Sphinx documentation
|
||||
docs/_build/
|
||||
|
||||
# PyBuilder
|
||||
target/
|
||||
|
||||
# Jupyter Notebook
|
||||
.ipynb_checkpoints
|
||||
|
||||
# pyenv
|
||||
.python-version
|
||||
|
||||
# celery beat schedule file
|
||||
celerybeat-schedule
|
||||
|
||||
# SageMath parsed files
|
||||
*.sage.py
|
||||
|
||||
# Environments
|
||||
.env
|
||||
.venv
|
||||
env/
|
||||
venv/
|
||||
ENV/
|
||||
env.bak/
|
||||
venv.bak/
|
||||
|
||||
# Spyder project settings
|
||||
.spyderproject
|
||||
.spyproject
|
||||
|
||||
# Rope project settings
|
||||
.ropeproject
|
||||
|
||||
# mkdocs documentation
|
||||
/site
|
||||
|
||||
# mypy
|
||||
.mypy_cache/
|
||||
|
||||
Results/*
|
||||
iasl*
|
||||
acpidump*
|
||||
|
||||
.vs
|
||||
21
ACPI/SSDTTime-master/LICENSE
Normal file
21
ACPI/SSDTTime-master/LICENSE
Normal file
@@ -0,0 +1,21 @@
|
||||
MIT License
|
||||
|
||||
Copyright (c) 2019 CorpNewt
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
426
ACPI/SSDTTime-master/PatchMerge.bat
Normal file
426
ACPI/SSDTTime-master/PatchMerge.bat
Normal file
@@ -0,0 +1,426 @@
|
||||
@echo off
|
||||
REM Get our local path and args before delayed expansion - allows % and !
|
||||
set "thisDir=%~dp0"
|
||||
set "args=%*"
|
||||
|
||||
setlocal enableDelayedExpansion
|
||||
REM Setup initial vars
|
||||
set "script_name="
|
||||
set /a tried=0
|
||||
set "toask=yes"
|
||||
set "pause_on_error=yes"
|
||||
set "py2v="
|
||||
set "py2path="
|
||||
set "py3v="
|
||||
set "py3path="
|
||||
set "pypath="
|
||||
set "targetpy=3"
|
||||
|
||||
REM use_py3:
|
||||
REM TRUE = Use if found, use py2 otherwise
|
||||
REM FALSE = Use py2
|
||||
REM FORCE = Use py3
|
||||
set "use_py3=TRUE"
|
||||
|
||||
REM We'll parse if the first argument passed is
|
||||
REM --install-python and if so, we'll just install
|
||||
REM Can optionally take a version number as the
|
||||
REM second arg - i.e. --install-python 3.13.1
|
||||
set "just_installing=FALSE"
|
||||
set "user_provided="
|
||||
|
||||
REM Get the system32 (or equivalent) path
|
||||
call :getsyspath "syspath"
|
||||
|
||||
REM Make sure the syspath exists
|
||||
if "!syspath!" == "" (
|
||||
if exist "%SYSTEMROOT%\system32\cmd.exe" (
|
||||
if exist "%SYSTEMROOT%\system32\reg.exe" (
|
||||
if exist "%SYSTEMROOT%\system32\where.exe" (
|
||||
REM Fall back on the default path if it exists
|
||||
set "ComSpec=%SYSTEMROOT%\system32\cmd.exe"
|
||||
set "syspath=%SYSTEMROOT%\system32\"
|
||||
)
|
||||
)
|
||||
)
|
||||
if "!syspath!" == "" (
|
||||
cls
|
||||
echo ### ###
|
||||
echo # Missing Required Files #
|
||||
echo ### ###
|
||||
echo.
|
||||
echo Could not locate cmd.exe, reg.exe, or where.exe
|
||||
echo.
|
||||
echo Please ensure your ComSpec environment variable is properly configured and
|
||||
echo points directly to cmd.exe, then try again.
|
||||
echo.
|
||||
echo Current CompSpec Value: "%ComSpec%"
|
||||
echo.
|
||||
echo Press [enter] to quit.
|
||||
pause > nul
|
||||
exit /b 1
|
||||
)
|
||||
)
|
||||
|
||||
if "%~1" == "--install-python" (
|
||||
set "just_installing=TRUE"
|
||||
set "user_provided=%~2"
|
||||
goto installpy
|
||||
)
|
||||
|
||||
goto checkscript
|
||||
|
||||
:checkscript
|
||||
REM Check for our script first
|
||||
set "looking_for=!script_name!"
|
||||
if "!script_name!" == "" (
|
||||
set "looking_for=%~n0.py or %~n0.command"
|
||||
set "script_name=%~n0.py"
|
||||
if not exist "!thisDir!\!script_name!" (
|
||||
set "script_name=%~n0.command"
|
||||
)
|
||||
)
|
||||
if not exist "!thisDir!\!script_name!" (
|
||||
cls
|
||||
echo ### ###
|
||||
echo # Target Not Found #
|
||||
echo ### ###
|
||||
echo.
|
||||
echo Could not find !looking_for!.
|
||||
echo Please make sure to run this script from the same directory
|
||||
echo as !looking_for!.
|
||||
echo.
|
||||
echo Press [enter] to quit.
|
||||
pause > nul
|
||||
exit /b 1
|
||||
)
|
||||
goto checkpy
|
||||
|
||||
:checkpy
|
||||
call :updatepath
|
||||
for /f "USEBACKQ tokens=*" %%x in (`!syspath!where.exe python 2^> nul`) do ( call :checkpyversion "%%x" "py2v" "py2path" "py3v" "py3path" )
|
||||
for /f "USEBACKQ tokens=*" %%x in (`!syspath!where.exe python3 2^> nul`) do ( call :checkpyversion "%%x" "py2v" "py2path" "py3v" "py3path" )
|
||||
for /f "USEBACKQ tokens=*" %%x in (`!syspath!where.exe py 2^> nul`) do ( call :checkpylauncher "%%x" "py2v" "py2path" "py3v" "py3path" )
|
||||
REM Walk our returns to see if we need to install
|
||||
if /i "!use_py3!" == "FALSE" (
|
||||
set "targetpy=2"
|
||||
set "pypath=!py2path!"
|
||||
) else if /i "!use_py3!" == "FORCE" (
|
||||
set "pypath=!py3path!"
|
||||
) else if /i "!use_py3!" == "TRUE" (
|
||||
set "pypath=!py3path!"
|
||||
if "!pypath!" == "" set "pypath=!py2path!"
|
||||
)
|
||||
if not "!pypath!" == "" (
|
||||
goto runscript
|
||||
)
|
||||
if !tried! lss 1 (
|
||||
if /i "!toask!"=="yes" (
|
||||
REM Better ask permission first
|
||||
goto askinstall
|
||||
) else (
|
||||
goto installpy
|
||||
)
|
||||
) else (
|
||||
cls
|
||||
echo ### ###
|
||||
echo # Python Not Found #
|
||||
echo ### ###
|
||||
echo.
|
||||
REM Couldn't install for whatever reason - give the error message
|
||||
echo Python is not installed or not found in your PATH var.
|
||||
echo Please go to https://www.python.org/downloads/windows/ to
|
||||
echo download and install the latest version, then try again.
|
||||
echo.
|
||||
echo Make sure you check the box labeled:
|
||||
echo.
|
||||
echo "Add Python X.X to PATH"
|
||||
echo.
|
||||
echo Where X.X is the py version you're installing.
|
||||
echo.
|
||||
echo Press [enter] to quit.
|
||||
pause > nul
|
||||
exit /b 1
|
||||
)
|
||||
goto runscript
|
||||
|
||||
:checkpylauncher <path> <py2v> <py2path> <py3v> <py3path>
|
||||
REM Attempt to check the latest python 2 and 3 versions via the py launcher
|
||||
for /f "USEBACKQ tokens=*" %%x in (`%~1 -2 -c "import sys; print(sys.executable)" 2^> nul`) do ( call :checkpyversion "%%x" "%~2" "%~3" "%~4" "%~5" )
|
||||
for /f "USEBACKQ tokens=*" %%x in (`%~1 -3 -c "import sys; print(sys.executable)" 2^> nul`) do ( call :checkpyversion "%%x" "%~2" "%~3" "%~4" "%~5" )
|
||||
goto :EOF
|
||||
|
||||
:checkpyversion <path> <py2v> <py2path> <py3v> <py3path>
|
||||
set "version="&for /f "tokens=2* USEBACKQ delims= " %%a in (`"%~1" -V 2^>^&1`) do (
|
||||
REM Ensure we have a version number
|
||||
call :isnumber "%%a"
|
||||
if not "!errorlevel!" == "0" goto :EOF
|
||||
set "version=%%a"
|
||||
)
|
||||
if not defined version goto :EOF
|
||||
if "!version:~0,1!" == "2" (
|
||||
REM Python 2
|
||||
call :comparepyversion "!version!" "!%~2!"
|
||||
if "!errorlevel!" == "1" (
|
||||
set "%~2=!version!"
|
||||
set "%~3=%~1"
|
||||
)
|
||||
) else (
|
||||
REM Python 3
|
||||
call :comparepyversion "!version!" "!%~4!"
|
||||
if "!errorlevel!" == "1" (
|
||||
set "%~4=!version!"
|
||||
set "%~5=%~1"
|
||||
)
|
||||
)
|
||||
goto :EOF
|
||||
|
||||
:isnumber <check_value>
|
||||
set "var="&for /f "delims=0123456789." %%i in ("%~1") do set var=%%i
|
||||
if defined var (exit /b 1)
|
||||
exit /b 0
|
||||
|
||||
:comparepyversion <version1> <version2> <return>
|
||||
REM Exits with status 0 if equal, 1 if v1 gtr v2, 2 if v1 lss v2
|
||||
for /f "tokens=1,2,3 delims=." %%a in ("%~1") do (
|
||||
set a1=%%a
|
||||
set a2=%%b
|
||||
set a3=%%c
|
||||
)
|
||||
for /f "tokens=1,2,3 delims=." %%a in ("%~2") do (
|
||||
set b1=%%a
|
||||
set b2=%%b
|
||||
set b3=%%c
|
||||
)
|
||||
if not defined a1 set a1=0
|
||||
if not defined a2 set a2=0
|
||||
if not defined a3 set a3=0
|
||||
if not defined b1 set b1=0
|
||||
if not defined b2 set b2=0
|
||||
if not defined b3 set b3=0
|
||||
if %a1% gtr %b1% exit /b 1
|
||||
if %a1% lss %b1% exit /b 2
|
||||
if %a2% gtr %b2% exit /b 1
|
||||
if %a2% lss %b2% exit /b 2
|
||||
if %a3% gtr %b3% exit /b 1
|
||||
if %a3% lss %b3% exit /b 2
|
||||
exit /b 0
|
||||
|
||||
:askinstall
|
||||
cls
|
||||
echo ### ###
|
||||
echo # Python Not Found #
|
||||
echo ### ###
|
||||
echo.
|
||||
echo Python !targetpy! was not found on the system or in the PATH var.
|
||||
echo.
|
||||
set /p "menu=Would you like to install it now? [y/n]: "
|
||||
if /i "!menu!"=="y" (
|
||||
REM We got the OK - install it
|
||||
goto installpy
|
||||
) else if "!menu!"=="n" (
|
||||
REM No OK here...
|
||||
set /a tried=!tried!+1
|
||||
goto checkpy
|
||||
)
|
||||
REM Incorrect answer - go back
|
||||
goto askinstall
|
||||
|
||||
:installpy
|
||||
REM This will attempt to download and install python
|
||||
set /a tried=!tried!+1
|
||||
cls
|
||||
echo ### ###
|
||||
echo # Downloading Python #
|
||||
echo ### ###
|
||||
echo.
|
||||
set "release=!user_provided!"
|
||||
if "!release!" == "" (
|
||||
REM No explicit release set - get the latest from python.org
|
||||
echo Gathering latest version...
|
||||
powershell -command "[Net.ServicePointManager]::SecurityProtocol=[Net.SecurityProtocolType]::Tls12;(new-object System.Net.WebClient).DownloadFile('https://www.python.org/downloads/windows/','%TEMP%\pyurl.txt')"
|
||||
REM Extract it if it's gzip compressed
|
||||
powershell -command "$infile='%TEMP%\pyurl.txt';$outfile='%TEMP%\pyurl.temp';try{$input=New-Object System.IO.FileStream $infile,([IO.FileMode]::Open),([IO.FileAccess]::Read),([IO.FileShare]::Read);$output=New-Object System.IO.FileStream $outfile,([IO.FileMode]::Create),([IO.FileAccess]::Write),([IO.FileShare]::None);$gzipStream=New-Object System.IO.Compression.GzipStream $input,([IO.Compression.CompressionMode]::Decompress);$buffer=New-Object byte[](1024);while($true){$read=$gzipstream.Read($buffer,0,1024);if($read -le 0){break};$output.Write($buffer,0,$read)};$gzipStream.Close();$output.Close();$input.Close();Move-Item -Path $outfile -Destination $infile -Force}catch{}"
|
||||
if not exist "%TEMP%\pyurl.txt" (
|
||||
if /i "!just_installing!" == "TRUE" (
|
||||
echo - Failed to get info
|
||||
exit /b 1
|
||||
) else (
|
||||
goto checkpy
|
||||
)
|
||||
)
|
||||
pushd "%TEMP%"
|
||||
:: Version detection code slimmed by LussacZheng (https://github.com/corpnewt/gibMacOS/issues/20)
|
||||
for /f "tokens=9 delims=< " %%x in ('findstr /i /c:"Latest Python !targetpy! Release" pyurl.txt') do ( set "release=%%x" )
|
||||
popd
|
||||
REM Let's delete our txt file now - we no longer need it
|
||||
del "%TEMP%\pyurl.txt"
|
||||
if "!release!" == "" (
|
||||
if /i "!just_installing!" == "TRUE" (
|
||||
echo - Failed to get python version
|
||||
exit /b 1
|
||||
) else (
|
||||
goto checkpy
|
||||
)
|
||||
)
|
||||
echo Located Version: !release!
|
||||
) else (
|
||||
echo User-Provided Version: !release!
|
||||
REM Update our targetpy to reflect the first number of
|
||||
REM our release
|
||||
for /f "tokens=1 delims=." %%a in ("!release!") do (
|
||||
call :isnumber "%%a"
|
||||
if "!errorlevel!" == "0" (
|
||||
set "targetpy=%%a"
|
||||
)
|
||||
)
|
||||
)
|
||||
echo Building download url...
|
||||
REM At this point - we should have the version number.
|
||||
REM We can build the url like so: "https://www.python.org/ftp/python/[version]/python-[version]-amd64.exe"
|
||||
set "url=https://www.python.org/ftp/python/!release!/python-!release!-amd64.exe"
|
||||
set "pytype=exe"
|
||||
if "!targetpy!" == "2" (
|
||||
set "url=https://www.python.org/ftp/python/!release!/python-!release!.amd64.msi"
|
||||
set "pytype=msi"
|
||||
)
|
||||
echo - !url!
|
||||
echo Downloading...
|
||||
REM Now we download it with our slick powershell command
|
||||
powershell -command "[Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12; (new-object System.Net.WebClient).DownloadFile('!url!','%TEMP%\pyinstall.!pytype!')"
|
||||
REM If it doesn't exist - we bail
|
||||
if not exist "%TEMP%\pyinstall.!pytype!" (
|
||||
if /i "!just_installing!" == "TRUE" (
|
||||
echo - Failed to download python installer
|
||||
exit /b 1
|
||||
) else (
|
||||
goto checkpy
|
||||
)
|
||||
)
|
||||
REM It should exist at this point - let's run it to install silently
|
||||
echo Running python !pytype! installer...
|
||||
pushd "%TEMP%"
|
||||
if /i "!pytype!" == "exe" (
|
||||
echo - pyinstall.exe /quiet PrependPath=1 Include_test=0 Shortcuts=0 Include_launcher=0
|
||||
pyinstall.exe /quiet PrependPath=1 Include_test=0 Shortcuts=0 Include_launcher=0
|
||||
) else (
|
||||
set "foldername=!release:.=!"
|
||||
echo - msiexec /i pyinstall.msi /qb ADDLOCAL=ALL TARGETDIR="%LocalAppData%\Programs\Python\Python!foldername:~0,2!"
|
||||
msiexec /i pyinstall.msi /qb ADDLOCAL=ALL TARGETDIR="%LocalAppData%\Programs\Python\Python!foldername:~0,2!"
|
||||
)
|
||||
popd
|
||||
set "py_error=!errorlevel!"
|
||||
echo Installer finished with status: !py_error!
|
||||
echo Cleaning up...
|
||||
REM Now we should be able to delete the installer and check for py again
|
||||
del "%TEMP%\pyinstall.!pytype!"
|
||||
REM If it worked, then we should have python in our PATH
|
||||
REM this does not get updated right away though - let's try
|
||||
REM manually updating the local PATH var
|
||||
call :updatepath
|
||||
if /i "!just_installing!" == "TRUE" (
|
||||
echo.
|
||||
echo Done.
|
||||
) else (
|
||||
goto checkpy
|
||||
)
|
||||
exit /b
|
||||
|
||||
:runscript
|
||||
REM Python found
|
||||
cls
|
||||
REM Checks the args gathered at the beginning of the script.
|
||||
REM Make sure we're not just forwarding empty quotes.
|
||||
set "arg_test=!args:"=!"
|
||||
if "!arg_test!"=="" (
|
||||
"!pypath!" "!thisDir!!script_name!"
|
||||
) else (
|
||||
"!pypath!" "!thisDir!!script_name!" !args!
|
||||
)
|
||||
if /i "!pause_on_error!" == "yes" (
|
||||
if not "%ERRORLEVEL%" == "0" (
|
||||
echo.
|
||||
echo Script exited with error code: %ERRORLEVEL%
|
||||
echo.
|
||||
echo Press [enter] to exit...
|
||||
pause > nul
|
||||
)
|
||||
)
|
||||
goto :EOF
|
||||
|
||||
:undouble <string_name> <string_value> <character>
|
||||
REM Helper function to strip doubles of a single character out of a string recursively
|
||||
set "string_value=%~2"
|
||||
:undouble_continue
|
||||
set "check=!string_value:%~3%~3=%~3!"
|
||||
if not "!check!" == "!string_value!" (
|
||||
set "string_value=!check!"
|
||||
goto :undouble_continue
|
||||
)
|
||||
set "%~1=!check!"
|
||||
goto :EOF
|
||||
|
||||
:updatepath
|
||||
set "spath="
|
||||
set "upath="
|
||||
for /f "USEBACKQ tokens=2* delims= " %%i in (`!syspath!reg.exe query "HKCU\Environment" /v "Path" 2^> nul`) do ( if not "%%j" == "" set "upath=%%j" )
|
||||
for /f "USEBACKQ tokens=2* delims= " %%i in (`!syspath!reg.exe query "HKLM\SYSTEM\CurrentControlSet\Control\Session Manager\Environment" /v "Path" 2^> nul`) do ( if not "%%j" == "" set "spath=%%j" )
|
||||
if not "%spath%" == "" (
|
||||
REM We got something in the system path
|
||||
set "PATH=%spath%"
|
||||
if not "%upath%" == "" (
|
||||
REM We also have something in the user path
|
||||
set "PATH=%PATH%;%upath%"
|
||||
)
|
||||
) else if not "%upath%" == "" (
|
||||
set "PATH=%upath%"
|
||||
)
|
||||
REM Remove double semicolons from the adjusted PATH
|
||||
call :undouble "PATH" "%PATH%" ";"
|
||||
goto :EOF
|
||||
|
||||
:getsyspath <variable_name>
|
||||
REM Helper method to return a valid path to cmd.exe, reg.exe, and where.exe by
|
||||
REM walking the ComSpec var - will also repair it in memory if need be
|
||||
REM Strip double semi-colons
|
||||
call :undouble "temppath" "%ComSpec%" ";"
|
||||
|
||||
REM Dirty hack to leverage the "line feed" approach - there are some odd side
|
||||
REM effects with this. Do not use this variable name in comments near this
|
||||
REM line - as it seems to behave erradically.
|
||||
(set LF=^
|
||||
%=this line is empty=%
|
||||
)
|
||||
REM Replace instances of semi-colons with a line feed and wrap
|
||||
REM in parenthesis to work around some strange batch behavior
|
||||
set "testpath=%temppath:;=!LF!%"
|
||||
|
||||
REM Let's walk each path and test if cmd.exe, reg.exe, and where.exe exist there
|
||||
set /a found=0
|
||||
for /f "tokens=* delims=" %%i in ("!testpath!") do (
|
||||
REM Only continue if we haven't found it yet
|
||||
if not "%%i" == "" (
|
||||
if !found! lss 1 (
|
||||
set "checkpath=%%i"
|
||||
REM Remove "cmd.exe" from the end if it exists
|
||||
if /i "!checkpath:~-7!" == "cmd.exe" (
|
||||
set "checkpath=!checkpath:~0,-7!"
|
||||
)
|
||||
REM Pad the end with a backslash if needed
|
||||
if not "!checkpath:~-1!" == "\" (
|
||||
set "checkpath=!checkpath!\"
|
||||
)
|
||||
REM Let's see if cmd, reg, and where exist there - and set it if so
|
||||
if EXIST "!checkpath!cmd.exe" (
|
||||
if EXIST "!checkpath!reg.exe" (
|
||||
if EXIST "!checkpath!where.exe" (
|
||||
set /a found=1
|
||||
set "ComSpec=!checkpath!cmd.exe"
|
||||
set "%~1=!checkpath!"
|
||||
)
|
||||
)
|
||||
)
|
||||
)
|
||||
)
|
||||
)
|
||||
goto :EOF
|
||||
339
ACPI/SSDTTime-master/PatchMerge.command
Normal file
339
ACPI/SSDTTime-master/PatchMerge.command
Normal file
@@ -0,0 +1,339 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
# Get the curent directory, the script name
|
||||
# and the script name with "py" substituted for the extension.
|
||||
args=( "$@" )
|
||||
dir="$(cd -- "$(dirname "$0")" >/dev/null 2>&1; pwd -P)"
|
||||
script="${0##*/}"
|
||||
target="${script%.*}.py"
|
||||
|
||||
# use_py3:
|
||||
# TRUE = Use if found, use py2 otherwise
|
||||
# FALSE = Use py2
|
||||
# FORCE = Use py3
|
||||
use_py3="TRUE"
|
||||
|
||||
# We'll parse if the first argument passed is
|
||||
# --install-python and if so, we'll just install
|
||||
# Can optionally take a version number as the
|
||||
# second arg - i.e. --install-python 3.13.1
|
||||
just_installing="FALSE"
|
||||
|
||||
tempdir=""
|
||||
|
||||
compare_to_version () {
|
||||
# Compares our OS version to the passed OS version, and
|
||||
# return a 1 if we match the passed compare type, or a 0 if we don't.
|
||||
# $1 = 0 (equal), 1 (greater), 2 (less), 3 (gequal), 4 (lequal)
|
||||
# $2 = OS version to compare ours to
|
||||
if [ -z "$1" ] || [ -z "$2" ]; then
|
||||
# Missing info - bail.
|
||||
return
|
||||
fi
|
||||
local current_os= comp=
|
||||
current_os="$(sw_vers -productVersion 2>/dev/null)"
|
||||
comp="$(vercomp "$current_os" "$2")"
|
||||
# Check gequal and lequal first
|
||||
if [[ "$1" == "3" && ("$comp" == "1" || "$comp" == "0") ]] || [[ "$1" == "4" && ("$comp" == "2" || "$comp" == "0") ]] || [[ "$comp" == "$1" ]]; then
|
||||
# Matched
|
||||
echo "1"
|
||||
else
|
||||
# No match
|
||||
echo "0"
|
||||
fi
|
||||
}
|
||||
|
||||
set_use_py3_if () {
|
||||
# Auto sets the "use_py3" variable based on
|
||||
# conditions passed
|
||||
# $1 = 0 (equal), 1 (greater), 2 (less), 3 (gequal), 4 (lequal)
|
||||
# $2 = OS version to compare
|
||||
# $3 = TRUE/FALSE/FORCE in case of match
|
||||
if [ -z "$1" ] || [ -z "$2" ] || [ -z "$3" ]; then
|
||||
# Missing vars - bail with no changes.
|
||||
return
|
||||
fi
|
||||
if [ "$(compare_to_version "$1" "$2")" == "1" ]; then
|
||||
use_py3="$3"
|
||||
fi
|
||||
}
|
||||
|
||||
get_remote_py_version () {
|
||||
local pyurl= py_html= py_vers= py_num="3"
|
||||
pyurl="https://www.python.org/downloads/macos/"
|
||||
py_html="$(curl -L $pyurl --compressed 2>&1)"
|
||||
if [ -z "$use_py3" ]; then
|
||||
use_py3="TRUE"
|
||||
fi
|
||||
if [ "$use_py3" == "FALSE" ]; then
|
||||
py_num="2"
|
||||
fi
|
||||
py_vers="$(echo "$py_html" | grep -i "Latest Python $py_num Release" | awk '{print $8}' | cut -d'<' -f1)"
|
||||
echo "$py_vers"
|
||||
}
|
||||
|
||||
download_py () {
|
||||
local vers="$1" url=
|
||||
clear
|
||||
echo " ### ###"
|
||||
echo " # Downloading Python #"
|
||||
echo "### ###"
|
||||
echo
|
||||
if [ -z "$vers" ]; then
|
||||
echo "Gathering latest version..."
|
||||
vers="$(get_remote_py_version)"
|
||||
if [ -z "$vers" ]; then
|
||||
if [ "$just_installing" == "TRUE" ]; then
|
||||
echo " - Failed to get info!"
|
||||
exit 1
|
||||
else
|
||||
# Didn't get it still - bail
|
||||
print_error
|
||||
fi
|
||||
fi
|
||||
echo "Located Version: $vers"
|
||||
else
|
||||
# Got a version passed
|
||||
echo "User-Provided Version: $vers"
|
||||
fi
|
||||
echo "Building download url..."
|
||||
url="$(curl -L https://www.python.org/downloads/release/python-${vers//./}/ --compressed 2>&1 | grep -iE "python-$vers-macos.*.pkg\"" | awk -F'"' '{ print $2 }' | head -n 1)"
|
||||
if [ -z "$url" ]; then
|
||||
if [ "$just_installing" == "TRUE" ]; then
|
||||
echo " - Failed to build download url!"
|
||||
exit 1
|
||||
else
|
||||
# Couldn't get the URL - bail
|
||||
print_error
|
||||
fi
|
||||
fi
|
||||
echo " - $url"
|
||||
echo "Downloading..."
|
||||
# Create a temp dir and download to it
|
||||
tempdir="$(mktemp -d 2>/dev/null || mktemp -d -t 'tempdir')"
|
||||
curl "$url" -o "$tempdir/python.pkg"
|
||||
if [ "$?" != "0" ]; then
|
||||
echo " - Failed to download python installer!"
|
||||
exit $?
|
||||
fi
|
||||
echo
|
||||
echo "Running python install package..."
|
||||
echo
|
||||
sudo installer -pkg "$tempdir/python.pkg" -target /
|
||||
echo
|
||||
if [ "$?" != "0" ]; then
|
||||
echo " - Failed to install python!"
|
||||
exit $?
|
||||
fi
|
||||
# Now we expand the package and look for a shell update script
|
||||
pkgutil --expand "$tempdir/python.pkg" "$tempdir/python"
|
||||
if [ -e "$tempdir/python/Python_Shell_Profile_Updater.pkg/Scripts/postinstall" ]; then
|
||||
# Run the script
|
||||
echo "Updating PATH..."
|
||||
echo
|
||||
"$tempdir/python/Python_Shell_Profile_Updater.pkg/Scripts/postinstall"
|
||||
echo
|
||||
fi
|
||||
vers_folder="Python $(echo "$vers" | cut -d'.' -f1 -f2)"
|
||||
if [ -f "/Applications/$vers_folder/Install Certificates.command" ]; then
|
||||
# Certs script exists - let's execute that to make sure our certificates are updated
|
||||
echo "Updating Certificates..."
|
||||
echo
|
||||
"/Applications/$vers_folder/Install Certificates.command"
|
||||
echo
|
||||
fi
|
||||
echo "Cleaning up..."
|
||||
cleanup
|
||||
if [ "$just_installing" == "TRUE" ]; then
|
||||
echo
|
||||
echo "Done."
|
||||
else
|
||||
# Now we check for py again
|
||||
downloaded="TRUE"
|
||||
clear
|
||||
main
|
||||
fi
|
||||
}
|
||||
|
||||
cleanup () {
|
||||
if [ -d "$tempdir" ]; then
|
||||
rm -Rf "$tempdir"
|
||||
fi
|
||||
}
|
||||
|
||||
print_error() {
|
||||
clear
|
||||
cleanup
|
||||
echo " ### ###"
|
||||
echo " # Python Not Found #"
|
||||
echo "### ###"
|
||||
echo
|
||||
echo "Python is not installed or not found in your PATH var."
|
||||
echo
|
||||
if [ "$kernel" == "Darwin" ]; then
|
||||
echo "Please go to https://www.python.org/downloads/macos/ to"
|
||||
echo "download and install the latest version, then try again."
|
||||
else
|
||||
echo "Please install python through your package manager and"
|
||||
echo "try again."
|
||||
fi
|
||||
echo
|
||||
exit 1
|
||||
}
|
||||
|
||||
print_target_missing() {
|
||||
clear
|
||||
cleanup
|
||||
echo " ### ###"
|
||||
echo " # Target Not Found #"
|
||||
echo "### ###"
|
||||
echo
|
||||
echo "Could not locate $target!"
|
||||
echo
|
||||
exit 1
|
||||
}
|
||||
|
||||
format_version () {
|
||||
local vers="$1"
|
||||
echo "$(echo "$1" | awk -F. '{ printf("%d%03d%03d%03d\n", $1,$2,$3,$4); }')"
|
||||
}
|
||||
|
||||
vercomp () {
|
||||
# Modified from: https://apple.stackexchange.com/a/123408/11374
|
||||
local ver1="$(format_version "$1")" ver2="$(format_version "$2")"
|
||||
if [ $ver1 -gt $ver2 ]; then
|
||||
echo "1"
|
||||
elif [ $ver1 -lt $ver2 ]; then
|
||||
echo "2"
|
||||
else
|
||||
echo "0"
|
||||
fi
|
||||
}
|
||||
|
||||
get_local_python_version() {
|
||||
# $1 = Python bin name (defaults to python3)
|
||||
# Echoes the path to the highest version of the passed python bin if any
|
||||
local py_name="$1" max_version= python= python_version= python_path=
|
||||
if [ -z "$py_name" ]; then
|
||||
py_name="python3"
|
||||
fi
|
||||
py_list="$(which -a "$py_name" 2>/dev/null)"
|
||||
# Walk that newline separated list
|
||||
while read python; do
|
||||
if [ -z "$python" ]; then
|
||||
# Got a blank line - skip
|
||||
continue
|
||||
fi
|
||||
if [ "$check_py3_stub" == "1" ] && [ "$python" == "/usr/bin/python3" ]; then
|
||||
# See if we have a valid developer path
|
||||
xcode-select -p > /dev/null 2>&1
|
||||
if [ "$?" != "0" ]; then
|
||||
# /usr/bin/python3 path - but no valid developer dir
|
||||
continue
|
||||
fi
|
||||
fi
|
||||
python_version="$(get_python_version $python)"
|
||||
if [ -z "$python_version" ]; then
|
||||
# Didn't find a py version - skip
|
||||
continue
|
||||
fi
|
||||
# Got the py version - compare to our max
|
||||
if [ -z "$max_version" ] || [ "$(vercomp "$python_version" "$max_version")" == "1" ]; then
|
||||
# Max not set, or less than the current - update it
|
||||
max_version="$python_version"
|
||||
python_path="$python"
|
||||
fi
|
||||
done <<< "$py_list"
|
||||
echo "$python_path"
|
||||
}
|
||||
|
||||
get_python_version() {
|
||||
local py_path="$1" py_version=
|
||||
# Get the python version by piping stderr into stdout (for py2), then grepping the output for
|
||||
# the word "python", getting the second element, and grepping for an alphanumeric version number
|
||||
py_version="$($py_path -V 2>&1 | grep -i python | cut -d' ' -f2 | grep -E "[A-Za-z\d\.]+")"
|
||||
if [ ! -z "$py_version" ]; then
|
||||
echo "$py_version"
|
||||
fi
|
||||
}
|
||||
|
||||
prompt_and_download() {
|
||||
if [ "$downloaded" != "FALSE" ] || [ "$kernel" != "Darwin" ]; then
|
||||
# We already tried to download, or we're not on macOS - just bail
|
||||
print_error
|
||||
fi
|
||||
clear
|
||||
echo " ### ###"
|
||||
echo " # Python Not Found #"
|
||||
echo "### ###"
|
||||
echo
|
||||
target_py="Python 3"
|
||||
printed_py="Python 2 or 3"
|
||||
if [ "$use_py3" == "FORCE" ]; then
|
||||
printed_py="Python 3"
|
||||
elif [ "$use_py3" == "FALSE" ]; then
|
||||
target_py="Python 2"
|
||||
printed_py="Python 2"
|
||||
fi
|
||||
echo "Could not locate $printed_py!"
|
||||
echo
|
||||
echo "This script requires $printed_py to run."
|
||||
echo
|
||||
while true; do
|
||||
read -p "Would you like to install the latest $target_py now? (y/n): " yn
|
||||
case $yn in
|
||||
[Yy]* ) download_py;break;;
|
||||
[Nn]* ) print_error;;
|
||||
esac
|
||||
done
|
||||
}
|
||||
|
||||
main() {
|
||||
local python= version=
|
||||
# Verify our target exists
|
||||
if [ ! -f "$dir/$target" ]; then
|
||||
# Doesn't exist
|
||||
print_target_missing
|
||||
fi
|
||||
if [ -z "$use_py3" ]; then
|
||||
use_py3="TRUE"
|
||||
fi
|
||||
if [ "$use_py3" != "FALSE" ]; then
|
||||
# Check for py3 first
|
||||
python="$(get_local_python_version python3)"
|
||||
fi
|
||||
if [ "$use_py3" != "FORCE" ] && [ -z "$python" ]; then
|
||||
# We aren't using py3 explicitly, and we don't already have a path
|
||||
python="$(get_local_python_version python2)"
|
||||
if [ -z "$python" ]; then
|
||||
# Try just looking for "python"
|
||||
python="$(get_local_python_version python)"
|
||||
fi
|
||||
fi
|
||||
if [ -z "$python" ]; then
|
||||
# Didn't ever find it - prompt
|
||||
prompt_and_download
|
||||
return 1
|
||||
fi
|
||||
# Found it - start our script and pass all args
|
||||
"$python" "$dir/$target" "${args[@]}"
|
||||
}
|
||||
|
||||
# Keep track of whether or not we're on macOS to determine if
|
||||
# we can download and install python for the user as needed.
|
||||
kernel="$(uname -s)"
|
||||
# Check to see if we need to force based on
|
||||
# macOS version. 10.15 has a dummy python3 version
|
||||
# that can trip up some py3 detection in other scripts.
|
||||
# set_use_py3_if "3" "10.15" "FORCE"
|
||||
downloaded="FALSE"
|
||||
# Check for the aforementioned /usr/bin/python3 stub if
|
||||
# our OS version is 10.15 or greater.
|
||||
check_py3_stub="$(compare_to_version "3" "10.15")"
|
||||
trap cleanup EXIT
|
||||
if [ "$1" == "--install-python" ] && [ "$kernel" == "Darwin" ]; then
|
||||
just_installing="TRUE"
|
||||
download_py "$2"
|
||||
else
|
||||
main
|
||||
fi
|
||||
586
ACPI/SSDTTime-master/PatchMerge.py
Normal file
586
ACPI/SSDTTime-master/PatchMerge.py
Normal file
@@ -0,0 +1,586 @@
|
||||
from Scripts import utils, plist
|
||||
import argparse, os
|
||||
|
||||
class PatchMerge:
|
||||
def __init__(self, config=None, results=None, overwrite=False, interactive=True):
|
||||
self.u = utils.Utils("Patch Merge")
|
||||
self.w = 80
|
||||
self.h = 24
|
||||
self.red = "\u001b[41;1m"
|
||||
self.yel = "\u001b[43;1m"
|
||||
self.grn = "\u001b[42;1m"
|
||||
self.blu = "\u001b[46;1m"
|
||||
self.rst = "\u001b[0m"
|
||||
self.copy_as_path = self.u.check_admin() if os.name=="nt" else False
|
||||
if 2/3==0:
|
||||
# ANSI escapes don't seem to work properly with python 2.x
|
||||
self.red = self.yel = self.grn = self.blu = self.rst = ""
|
||||
if os.name == "nt":
|
||||
if 2/3!=0:
|
||||
os.system("color") # Allow ANSI color escapes.
|
||||
self.w = 120
|
||||
self.h = 30
|
||||
self.interactive = interactive
|
||||
self.overwrite = overwrite
|
||||
self.target_patches = (
|
||||
("OpenCore","patches_OC.plist"),
|
||||
("Clover","patches_Clover.plist")
|
||||
)
|
||||
self.config_path = config
|
||||
self.config_type = None
|
||||
self.output = results or self.get_default_results_folder()
|
||||
# Expand paths as needed
|
||||
if self.config_path:
|
||||
self.config_path = os.path.realpath(self.config_path)
|
||||
self.config_type,_,_ = self.get_plist_info(self.config_path)
|
||||
if self.output:
|
||||
self.output = os.path.realpath(self.output)
|
||||
|
||||
def _get_patches_plists(self, path):
|
||||
# Append patches_OC/Clover.plist to the path, and return a list
|
||||
# with the format:
|
||||
# ((oc_path,exists,plist_name),(clover_path,exists,plist_name))
|
||||
path_checks = []
|
||||
for p_type,name in self.target_patches:
|
||||
if path:
|
||||
p = os.path.join(path,name)
|
||||
isfile = os.path.isfile(p)
|
||||
else:
|
||||
p = None
|
||||
isfile = False
|
||||
path_checks.append((
|
||||
p,
|
||||
isfile,
|
||||
name
|
||||
))
|
||||
return path_checks
|
||||
|
||||
def get_default_results_folder(self, prompt=False):
    """Auto-locate a Results folder containing at least one patches plist.

    Checks a "Results" folder next to this script, then one in the parent
    directory. Returns the first match, None when nothing is found and
    prompt is False, or falls through to an interactive prompt otherwise.
    """
    # Let's attempt to locate a Results folder either in the same
    # directory as this script, or in the parent directory.
    # If none is found - we'll have to prompt the user as needed.
    #
    # Try our directory first
    local_path = os.path.dirname(os.path.realpath(__file__))
    local_results = os.path.join(local_path,"Results")
    parent_results = os.path.realpath(os.path.join(local_path,"..","Results"))
    potentials = []
    for path in (local_results,parent_results):
        if os.path.isdir(path):
            # Check if we have the files we need
            o,c = self._get_patches_plists(path)
            if o[1] or c[1]:
                potentials.append(path)
    if potentials:
        # Prefer the first (local) hit
        return potentials[0]
    # If we got here - we didn't find anything - check if we need
    # to prompt
    if not prompt:
        # Nope - bail
        return None
    # We're prompting
    return self.select_results_folder()
|
||||
|
||||
def select_results_folder(self):
    """Interactively prompt for the Results folder.

    Loops until the user supplies a folder containing patches_OC.plist or
    patches_Clover.plist, picks "M" (keep current self.output), or quits.
    Sets and returns self.output on success.
    """
    while True:
        self.u.head("Select Results Folder")
        print("")
        if self.copy_as_path:
            # Elevated consoles on Windows break drag-and-drop
            print("NOTE: Currently running as admin on Windows - drag and drop may not work.")
            print(" Shift + right-click in Explorer and select 'Copy as path' then paste here instead.")
            print("")
        print("M. Main Menu")
        print("Q. Quit")
        print("")
        print("NOTE: This is the folder containing the patches_OC.plist and")
        print(" patches_Clover.plist you are trying to merge. It will also be where")
        print(" the patched config.plist is saved.")
        print("")
        path = self.u.grab("Please drag and drop the Results folder here: ")
        if not path:
            continue
        if path.lower() == "m":
            # Back out without changing the current selection
            return self.output
        elif path.lower() == "q":
            self.u.custom_quit()
        test_path = self.u.check_path(path)
        if os.path.isfile(test_path):
            # Got a file - get the containing folder
            test_path = os.path.dirname(test_path)
        if not test_path:
            self.u.head("Invalid Path")
            print("")
            print("That path either does not exist, or is not a folder.")
            print("")
            self.u.grab("Press [enter] to return...")
            continue
        # Got a folder - check for patches_OC/Clover.plist
        o,c = self._get_patches_plists(test_path)
        if not (o[1] or c[1]):
            # No patches plists in there
            self.u.head("Missing Files")
            print("")
            print("Neither patches_OC.plist nor patches_Clover.plist were found at that path.")
            print("")
            self.u.grab("Press [enter] to return...")
            continue
        # We got what we need - set and return the path
        self.output = test_path
        return self.output
|
||||
|
||||
def get_ascii_print(self, data):
    """Sanitize *data* (bytes or str) to a printable ASCII string.

    Unprintable characters are replaced with "?".

    Returns (flag, ascii_string) where flag is True only when at least one
    non-zero unprintable byte was found - an all-zero (wildcard) value
    reports False so it isn't flagged as a problem.

    Bug fix vs the original: the printable range test used `b < ord("~")`,
    which wrongly treated the printable character "~" (0x7E) as
    unprintable; the upper bound is now inclusive.
    """
    unprintables = False
    all_zeroes = True
    ascii_string = ""
    for b in data:
        if not isinstance(b,int):
            # Python 2 bytes / str iteration yields characters - get the ordinal
            try: b = ord(b)
            except: pass
        if b != 0:
            # Not wildcard matching
            all_zeroes = False
        if ord(" ") <= b <= ord("~"):
            ascii_string += chr(b)
        else:
            ascii_string += "?"
            unprintables = True
    return (False if all_zeroes else unprintables,ascii_string)
|
||||
|
||||
def check_normalize(self, patch_or_drop, normalize_headers, check_type="Patch"):
    """Warn about NormalizeHeaders / table-id mismatches for an OC entry.

    patch_or_drop:     an ACPI Patch or Delete entry (dict)
    normalize_headers: current value of ACPI -> Quirks -> NormalizeHeaders
    check_type:        label used in the warning ("Patch"/"Dropped table")

    Returns True when a warning was printed (caller flags errors_found).
    """
    # The two keys whose binary values identify the target table
    sig = ("OemTableId","TableSignature")
    if normalize_headers:
        # OpenCore - and NormalizeHeaders is enabled. Check if we have
        # any unprintable ASCII chars in our OemTableId or TableSignature
        # and warn.
        if any(self.get_ascii_print(plist.extract_data(patch_or_drop.get(x,b"\x00")))[0] for x in sig):
            print("\n{}!! WARNING !!{} NormalizeHeaders is {}ENABLED{}, and table ids contain unprintable".format(
                self.yel,
                self.rst,
                self.grn,
                self.rst
            ))
            print(" characters! {} may not match or apply!\n".format(check_type))
            return True
    else:
        # Not enabled - check for question marks as that may imply characters
        # were sanitized when creating the patches/dropping tables.
        if any(b"\x3F" in plist.extract_data(patch_or_drop.get(x,b"\x00")) for x in sig):
            # 0x3F is "?"
            print("\n{}!! WARNING !!{} NormalizeHeaders is {}DISABLED{}, and table ids contain '?'!".format(
                self.yel,
                self.rst,
                self.red,
                self.rst
            ))
            print(" {} may not match or apply!\n".format(check_type))
            return True
    return False
|
||||
|
||||
def ensure_path(self, plist_data, path_list, final_type = list):
    """Ensure a nested dict path exists in *plist_data*.

    Walks *path_list*, creating intermediate dicts as needed; the final
    key is created as (or coerced to) an instance of *final_type*.
    Returns the (possibly replaced) root container.
    """
    if not path_list:
        return plist_data
    if not isinstance(plist_data,dict):
        # Root must be a dict - replace whatever was there
        plist_data = {}
    node = plist_data
    total = len(path_list)
    for index, key in enumerate(path_list, start=1):
        at_end = index >= total
        if key not in node:
            # Intermediate levels are dicts; the leaf gets final_type
            node[key] = final_type() if at_end else {}
        if at_end and not isinstance(node[key], final_type):
            # Leaf existed with the wrong type - override it
            node[key] = final_type()
        node = node[key]
    return plist_data
|
||||
|
||||
def get_unique_name(self,name,target_folder,name_append=""):
    """Return a file name that doesn't collide inside *target_folder*.

    Optionally appends *name_append* to the stem first, then adds -1, -2,
    ... before the extension until the name is free. Only the basename of
    *name* is used; the original file is never overridden.
    """
    base = os.path.basename(name)
    ext = base.split(".")[-1] if "." in base else ""
    stem = base[:-len(ext)-1] if ext else base
    if name_append:
        stem = stem + str(name_append)
    candidate = "{}.{}".format(stem,ext) if ext else stem
    if not os.path.exists(os.path.join(target_folder,candidate)):
        return candidate
    # Collision - append an increasing counter until free
    counter = 1
    while True:
        candidate = "{}-{}".format(stem,counter)
        if ext:
            candidate = "{}.{}".format(candidate,ext)
        if not os.path.exists(os.path.join(target_folder,candidate)):
            return candidate
        counter += 1
|
||||
|
||||
def pause_interactive(self, return_value=None):
|
||||
if self.interactive:
|
||||
print("")
|
||||
self.u.grab("Press [enter] to return...")
|
||||
return return_value
|
||||
|
||||
def _warn_malformed(self, broken, config_name):
    # Helper: print the standard warning for malformed (non-dict) entries
    # found in the user's config.plist.
    print("\n{}!! WARNING !!{} {:,} Malformed entr{} found - please fix your {}!".format(
        self.yel,
        self.rst,
        len(broken),
        "y" if len(broken)==1 else "ies",
        config_name
    ))

def patch_plist(self):
    """Merge the Results patches plist into the target config.plist.

    Loads self.config_path, determines OpenCore vs Clover, merges the
    SSDT/patch/drop entries from the matching patches_*.plist (replacing
    duplicates), and saves either over the original (self.overwrite) or
    to a unique name in self.output. All status is printed; returns None
    via pause_interactive().

    Bug fixes vs the original:
      - the SSDT and patch "malformed entries" warnings referenced
        d_broken (undefined at that point - NameError) instead of
        s_broken / p_broken
      - the output file is now closed deterministically via `with`
    """
    # Retain the config name
    if self.interactive:
        self.u.head("Patching Plist")
        print("")
    # Make sure we have a config_path
    if not self.config_path:
        print("No target plist path specified!")
        return self.pause_interactive()
    # Make sure that config_path exists
    if not os.path.isfile(self.config_path):
        print("Could not locate target plist at:")
        print(" - {}".format(self.config_path))
        return self.pause_interactive()
    # Make sure our output var has a value
    if not self.output:
        print("No Results folder path specified!")
        return self.pause_interactive()
    config_name = os.path.basename(self.config_path)
    print("Loading {}...".format(config_name))
    self.config_type,config_data,e = self.get_plist_info(self.config_path)
    if e:
        print(" - Failed to load! {}".format(e))
        return self.pause_interactive()
    # Recheck the config.plist type
    if not self.config_type:
        print("Could not determine plist type!")
        return self.pause_interactive()
    # Ensure our patches plists exist, and break out info
    # into the target_path and target_name as needed
    target_path,_,target_name = self.get_patch_plist_for_type(
        self.output,
        self.config_type
    )
    # This should only show up if output is None/False/empty
    if not target_path:
        print("Could not locate {} in:".format(target_name or "the required patches plist"))
        print(" - {}".format(self.output))
        return self.pause_interactive()
    # Make sure the path actually exists - and is a file
    if not os.path.isfile(target_path):
        print("Could not locate required patches at:")
        print(" - {}".format(target_path))
        return self.pause_interactive()
    # Set up some preliminary variables for reporting later
    errors_found = normalize_headers = False # Default to off
    target_name = os.path.basename(target_path)
    print("Loading {}...".format(target_name))
    # Load the target plist
    _,target_data,e = self.get_plist_info(target_path)
    if e:
        print(" - Failed to load! {}".format(e))
        return self.pause_interactive()
    print("Ensuring paths in {} and {}...".format(config_name,target_name))
    # Make sure all the needed values are there
    if self.config_type == "OpenCore":
        for p in (("ACPI","Add"),("ACPI","Delete"),("ACPI","Patch")):
            print(" - {}...".format(" -> ".join(p)))
            config_data = self.ensure_path(config_data,p)
            target_data = self.ensure_path(target_data,p)
        print(" - ACPI -> Quirks...")
        config_data = self.ensure_path(config_data,("ACPI","Quirks"),final_type=dict)
        normalize_headers = config_data["ACPI"]["Quirks"].get("NormalizeHeaders",False)
        if not isinstance(normalize_headers,bool):
            errors_found = True
            print("\n{}!! WARNING !!{} ACPI -> Quirks -> NormalizeHeaders is malformed - assuming False".format(
                self.yel,
                self.rst
            ))
            normalize_headers = False
        # Set up our patch sources
        ssdts = target_data["ACPI"]["Add"]
        patch = target_data["ACPI"]["Patch"]
        drops = target_data["ACPI"]["Delete"]
        # Set up our original values
        s_orig = config_data["ACPI"]["Add"]
        p_orig = config_data["ACPI"]["Patch"]
        d_orig = config_data["ACPI"]["Delete"]
    else:
        for p in (("ACPI","DropTables"),("ACPI","SortedOrder"),("ACPI","DSDT","Patches")):
            print(" - {}...".format(" -> ".join(p)))
            config_data = self.ensure_path(config_data,p)
            target_data = self.ensure_path(target_data,p)
        # Set up our patch sources
        ssdts = target_data["ACPI"]["SortedOrder"]
        patch = target_data["ACPI"]["DSDT"]["Patches"]
        drops = target_data["ACPI"]["DropTables"]
        # Set up our original values
        s_orig = config_data["ACPI"]["SortedOrder"]
        p_orig = config_data["ACPI"]["DSDT"]["Patches"]
        d_orig = config_data["ACPI"]["DropTables"]
    print("")
    # --- SSDTs to add ---
    if not ssdts:
        print("--- No SSDTs to add - skipping...")
    else:
        print("--- Walking target SSDTs ({:,} total)...".format(len(ssdts)))
        s_rem = []
        # Gather any entries broken from user error (OC entries are dicts;
        # Clover SortedOrder entries are plain strings)
        s_broken = [x for x in s_orig if not isinstance(x,dict)] if self.config_type == "OpenCore" else []
        for s in ssdts:
            if self.config_type == "OpenCore":
                print(" - Checking {}...".format(s["Path"]))
                existing = [x for x in s_orig if isinstance(x,dict) and x["Path"] == s["Path"]]
            else:
                print(" - Checking {}...".format(s))
                existing = [x for x in s_orig if x == s]
            if existing:
                print(" --> Located {:,} existing to replace...".format(len(existing)))
                s_rem.extend(existing)
        if s_rem:
            print(" - Removing {:,} existing duplicate{}...".format(len(s_rem),"" if len(s_rem)==1 else "s"))
            for r in s_rem:
                if r in s_orig: s_orig.remove(r)
        else:
            print(" - No duplicates to remove...")
        print(" - Adding {:,} SSDT{}...".format(len(ssdts),"" if len(ssdts)==1 else "s"))
        s_orig.extend(ssdts)
        if s_broken:
            errors_found = True
            # Fixed: the original referenced d_broken here (NameError)
            self._warn_malformed(s_broken,config_name)
    print("")
    # --- Binary patches ---
    if not patch:
        print("--- No patches to add - skipping...")
    else:
        print("--- Walking target patches ({:,} total)...".format(len(patch)))
        p_rem = []
        # Gather any entries broken from user error
        p_broken = [x for x in p_orig if not isinstance(x,dict)]
        for p in patch:
            print(" - Checking {}...".format(p["Comment"]))
            if self.config_type == "OpenCore" and self.check_normalize(p,normalize_headers):
                errors_found = True
            existing = [x for x in p_orig if isinstance(x,dict) and x["Find"] == p["Find"] and x["Replace"] == p["Replace"]]
            if existing:
                print(" --> Located {:,} existing to replace...".format(len(existing)))
                p_rem.extend(existing)
        # Remove any dupes
        if p_rem:
            print(" - Removing {:,} existing duplicate{}...".format(len(p_rem),"" if len(p_rem)==1 else "s"))
            for r in p_rem:
                if r in p_orig: p_orig.remove(r)
        else:
            print(" - No duplicates to remove...")
        print(" - Adding {:,} patch{}...".format(len(patch),"" if len(patch)==1 else "es"))
        p_orig.extend(patch)
        if p_broken:
            errors_found = True
            # Fixed: the original referenced d_broken (wrong list) here
            self._warn_malformed(p_broken,config_name)
    print("")
    # --- Tables to drop ---
    if not drops:
        print("--- No tables to drop - skipping...")
    else:
        print("--- Walking target tables to drop ({:,} total)...".format(len(drops)))
        d_rem = []
        # Gather any entries broken from user error
        d_broken = [x for x in d_orig if not isinstance(x,dict)]
        for d in drops:
            if self.config_type == "OpenCore":
                print(" - Checking {}...".format(d["Comment"]))
                if self.check_normalize(d,normalize_headers,check_type="Dropped table"):
                    errors_found = True
                existing = [x for x in d_orig if isinstance(x,dict) and x["TableSignature"] == d["TableSignature"] and x["OemTableId"] == d["OemTableId"]]
            else:
                name = " - ".join([x for x in (d.get("Signature",""),d.get("TableId","")) if x]) or "Unknown Dropped Table"
                print(" - Checking {}...".format(name))
                existing = [x for x in d_orig if isinstance(x,dict) and x.get("Signature") == d.get("Signature") and x.get("TableId") == d.get("TableId")]
            if existing:
                print(" --> Located {:,} existing to replace...".format(len(existing)))
                d_rem.extend(existing)
        if d_rem:
            print(" - Removing {:,} existing duplicate{}...".format(len(d_rem),"" if len(d_rem)==1 else "s"))
            for r in d_rem:
                if r in d_orig: d_orig.remove(r)
        else:
            print(" - No duplicates to remove...")
        print(" - Dropping {:,} table{}...".format(len(drops),"" if len(drops)==1 else "s"))
        d_orig.extend(drops)
        if d_broken:
            errors_found = True
            self._warn_malformed(d_broken,config_name)
    print("")
    # Work out where to save the merged config
    if self.overwrite:
        output_path = self.config_path
    else:
        config_name = self.get_unique_name(config_name,self.output)
        output_path = os.path.join(self.output,config_name)
    print("Saving to {}...".format(output_path))
    try:
        with open(output_path,"wb") as f:
            plist.dump(config_data,f)
    except Exception as e:
        print(" - Failed to save! {}".format(e))
        return self.pause_interactive()
    print(" - Saved.")
    print("")
    if errors_found:
        print("{}!! WARNING !!{} Potential errors were found when merging - please address them!".format(
            self.yel,
            self.rst
        ))
        print("")
    if not self.overwrite:
        print("{}!! WARNING !!{} Make sure you review the saved {} before replacing!".format(
            self.red,
            self.rst,
            config_name
        ))
        print("")
    print("Done.")
    return self.pause_interactive()
|
||||
|
||||
def get_plist_info(self, config_path):
    """Load *config_path* and classify it.

    Returns (type_string, config_data, error) where type_string is
    "OpenCore" (has PlatformInfo), "Clover" (has SMBIOS), or None.

    Bug fix vs the original: when the plist's root node wasn't a dict,
    the "Invalid root node type" error string was computed but then
    discarded (the function always returned None for the error). It is
    now returned so callers' `if e:` checks actually report it. The file
    handle is also closed deterministically via `with`.
    """
    type_string = config_data = e = None
    try:
        with open(config_path,"rb") as f:
            config_data = plist.load(f)
    except Exception as e:
        return (None,None,e)
    if not isinstance(config_data,dict):
        e = "Invalid root node type: {}".format(type(config_data))
    else:
        type_string = "OpenCore" if "PlatformInfo" in config_data else "Clover" if "SMBIOS" in config_data else None
    return (type_string,config_data,e)
|
||||
|
||||
def get_patch_plist_for_type(self, path, config_type):
|
||||
o,c = self._get_patches_plists(path)
|
||||
return {
|
||||
"OpenCore":o,
|
||||
"Clover":c
|
||||
}.get(config_type,(None,False,None))
|
||||
|
||||
def select_plist(self):
    """Interactively prompt for the target config.plist.

    Loops until a loadable plist is supplied, the user picks "M" (return
    with no change), or quits. On success sets self.config_path and
    self.config_type.
    """
    while True:
        self.u.head("Select Plist")
        print("")
        if self.copy_as_path:
            # Elevated consoles on Windows break drag-and-drop
            print("NOTE: Currently running as admin on Windows - drag and drop may not work.")
            print(" Shift + right-click in Explorer and select 'Copy as path' then paste here instead.")
            print("")
        print("M. Main Menu")
        print("Q. Quit")
        print("")
        path = self.u.grab("Please drag and drop the config.plist here: ")
        if not path: continue
        if path.lower() == "m": return
        elif path.lower() == "q": self.u.custom_quit()
        test_path = self.u.check_path(path)
        if not test_path or not os.path.isfile(test_path):
            self.u.head("Invalid Path")
            print("")
            print("That path either does not exist, or is not a file.")
            print("")
            self.u.grab("Press [enter] to return...")
            continue
        # Got a file - try to load it
        t,_,e = self.get_plist_info(test_path)
        if e:
            self.u.head("Invalid File")
            print("")
            print("That file failed to load:\n\n{}".format(e))
            print("")
            self.u.grab("Press [enter] to return...")
            continue
        # Got a valid file
        self.config_path = test_path
        self.config_type = t
        return
|
||||
|
||||
def main(self):
    """Draw the main menu once and dispatch the user's selection.

    Intended to be called in a loop by the __main__ driver; each call
    redraws current state (config path/type, Results folder, overwrite
    flag) and handles one menu action.
    """
    # Gather some preliminary info for display
    target_path,target_exists,target_name = self.get_patch_plist_for_type(
        self.output,
        self.config_type
    )
    self.u.resize(self.w,self.h)
    self.u.head()
    print("")
    print("Current config.plist: {}".format(self.config_path))
    print("Type of config.plist: {}".format(self.config_type or "Unknown"))
    print("Results Folder: {}".format(self.output))
    print("Patches Plist: {}{}".format(
        target_name or "Unknown",
        "" if (not target_name or target_exists) else " - {}!! MISSING !!{}".format(self.red,self.rst)
    ))
    print("Overwrite Original: {}{}{}{}".format(
        self.red if self.overwrite else self.grn,
        "!! True !!" if self.overwrite else "False",
        self.rst,
        " - Make Sure You Have A Backup!" if self.overwrite else ""
    ))
    print("")
    print("C. Select config.plist")
    print("O. Toggle Overwrite Original")
    print("R. Select Results Folder")
    # Only offer the patch option when both pieces are present
    if self.config_path and target_exists:
        print("P. Patch with {}".format(target_name))
    print("")
    print("Q. Quit")
    print("")
    menu = self.u.grab("Please make a selection: ")
    if not len(menu):
        return
    if menu.lower() == "q":
        self.u.custom_quit()
    elif menu.lower() == "c":
        self.select_plist()
    elif menu.lower() == "o":
        # XOR toggle of the overwrite flag
        self.overwrite ^= True
    elif menu.lower() == "r":
        self.select_results_folder()
    elif menu.lower() == "p" and self.config_path and target_exists:
        self.patch_plist()
|
||||
|
||||
if __name__ == '__main__':
    # Setup the cli args
    parser = argparse.ArgumentParser(prog="PatchMerge.py", description="PatchMerge - py script to merge patches_[OC/Clover].plist with a config.plist.")
    parser.add_argument("-c", "--config", help="path to target config.plist - required if running in non-interactive mode")
    parser.add_argument("-r", "--results", help="path to Results folder containing patches_[OC/Clover].plist - required if running in non-interactive mode")
    parser.add_argument("-o", "--overwrite", help="overwrite the original config.plist", action="store_true")
    parser.add_argument("-i", "--no-interaction", help="run in non-interactive mode - requires -c and -r", action="store_true")

    args = parser.parse_args()

    p = PatchMerge(
        config=args.config,
        results=args.results,
        overwrite=args.overwrite,
        interactive=not args.no_interaction
    )

    if args.no_interaction:
        # We're in non-interactive mode here - merge once and exit
        p.patch_plist()
    else:
        # Interactive mode
        if 2/3 == 0:
            # Python 2 (integer division) - use raw_input so the pause
            # below reads a string instead of eval'ing input
            input = raw_input
        # Keep redrawing the menu; surface any unexpected errors and
        # let the user continue rather than crashing out
        while True:
            try:
                p.main()
            except Exception as e:
                print("An error occurred: {}".format(e))
                print("")
                input("Press [enter] to continue...")
|
||||
47
ACPI/SSDTTime-master/README.md
Normal file
47
ACPI/SSDTTime-master/README.md
Normal file
@@ -0,0 +1,47 @@
|
||||
SSDTTime
|
||||
==========
|
||||
A simple tool designed to make creating SSDTs simple.
|
||||
Supports macOS, Linux and Windows
|
||||
|
||||
## Supported SSDTs:
|
||||
- SSDT-HPET
|
||||
- Patches out IRQ conflicts
|
||||
- SSDT-EC
|
||||
- OS-aware fake EC (laptop and desktop variants)
|
||||
- SSDT-USBX
|
||||
- Provides generic USB power properties
|
||||
- SSDT-PLUG
|
||||
- Sets plugin-type = 1 on CPU0/PR00
|
||||
- SSDT-PMC
|
||||
- Adds missing PMCR device for native 300-series NVRAM
|
||||
- SSDT-AWAC
|
||||
- Disables AWAC clock, and enables (or fakes) RTC as needed
|
||||
- SSDT-USB-Reset
|
||||
- Returns a zero status for detected root hubs to allow hardware querying
|
||||
- SSDT-Bridge
|
||||
- Create missing PCI bridges for passed device path
|
||||
- SSDT-PNLF
|
||||
- Sets up a PNLF device for laptop backlight control
|
||||
- SSDT-XOSI
|
||||
- _OSI rename and patch to return true for a range of Windows versions - also checks for OSID
|
||||
- DMAR
|
||||
- Remove Reserved Memory Regions from the DMAR table
|
||||
- SSDT-SBUS-MCHC
|
||||
- Defines an MCHC and BUS0 device for SMBus compatibility
|
||||
- IMEI Bridge
|
||||
- Defines IMEI - only needed on SNB + 7-series or IVB + 6-series
|
||||
|
||||
Additionally on Linux and Windows the tool can be used to dump the system DSDT.
|
||||
|
||||
## Instructions:
|
||||
### Linux:
|
||||
* Launch SSDTTime.py with any somewhat recent version of Python from either a terminal window or by running the file normally.
|
||||
### macOS:
|
||||
* Launch SSDTTime.command from either a terminal window or by double clicking the file.
|
||||
### Windows:
|
||||
* Launch SSDTTime.bat from either a terminal window or by double clicking the file.
|
||||
|
||||
## Credits:
|
||||
- [CorpNewt](https://github.com/CorpNewt) - Writing the script and libraries used
|
||||
- [NoOne](https://github.com/IOIIIO) - Some small improvements to the script
|
||||
- Rehabman/Intel - iasl
|
||||
426
ACPI/SSDTTime-master/SSDTTime.bat
Normal file
426
ACPI/SSDTTime-master/SSDTTime.bat
Normal file
@@ -0,0 +1,426 @@
|
||||
@echo off
REM Bootstrap wrapper: locate (or install) Python, then hand off to the
REM matching .py/.command script in this directory.
REM Get our local path and args before delayed expansion - allows % and !
set "thisDir=%~dp0"
set "args=%*"

setlocal enableDelayedExpansion
REM Setup initial vars
set "script_name="
set /a tried=0
set "toask=yes"
set "pause_on_error=yes"
set "py2v="
set "py2path="
set "py3v="
set "py3path="
set "pypath="
set "targetpy=3"

REM use_py3:
REM TRUE = Use if found, use py2 otherwise
REM FALSE = Use py2
REM FORCE = Use py3
set "use_py3=TRUE"

REM We'll parse if the first argument passed is
REM --install-python and if so, we'll just install
REM Can optionally take a version number as the
REM second arg - i.e. --install-python 3.13.1
set "just_installing=FALSE"
set "user_provided="

REM Get the system32 (or equivalent) path
REM (:getsyspath is defined later in this file)
call :getsyspath "syspath"

REM Make sure the syspath exists
if "!syspath!" == "" (
    if exist "%SYSTEMROOT%\system32\cmd.exe" (
        if exist "%SYSTEMROOT%\system32\reg.exe" (
            if exist "%SYSTEMROOT%\system32\where.exe" (
                REM Fall back on the default path if it exists
                set "ComSpec=%SYSTEMROOT%\system32\cmd.exe"
                set "syspath=%SYSTEMROOT%\system32\"
            )
        )
    )
    if "!syspath!" == "" (
        cls
        echo ### ###
        echo # Missing Required Files #
        echo ### ###
        echo.
        echo Could not locate cmd.exe, reg.exe, or where.exe
        echo.
        echo Please ensure your ComSpec environment variable is properly configured and
        echo points directly to cmd.exe, then try again.
        echo.
        echo Current CompSpec Value: "%ComSpec%"
        echo.
        echo Press [enter] to quit.
        pause > nul
        exit /b 1
    )
)

REM --install-python short-circuits straight to the installer
if "%~1" == "--install-python" (
    set "just_installing=TRUE"
    set "user_provided=%~2"
    goto installpy
)

goto checkscript
|
||||
|
||||
:checkscript
REM Check for our script first - prefer <name>.py, fall back to <name>.command
set "looking_for=!script_name!"
if "!script_name!" == "" (
    set "looking_for=%~n0.py or %~n0.command"
    set "script_name=%~n0.py"
    if not exist "!thisDir!\!script_name!" (
        set "script_name=%~n0.command"
    )
)
if not exist "!thisDir!\!script_name!" (
    REM Neither candidate exists next to this .bat - bail with a message
    cls
    echo ### ###
    echo # Target Not Found #
    echo ### ###
    echo.
    echo Could not find !looking_for!.
    echo Please make sure to run this script from the same directory
    echo as !looking_for!.
    echo.
    echo Press [enter] to quit.
    pause > nul
    exit /b 1
)
goto checkpy
|
||||
|
||||
:checkpy
REM Probe PATH (python, python3) and the py launcher for the newest
REM Python 2.x / 3.x, then pick one per use_py3 - or install if missing.
REM (:updatepath and :runscript are defined elsewhere in this file)
call :updatepath
for /f "USEBACKQ tokens=*" %%x in (`!syspath!where.exe python 2^> nul`) do ( call :checkpyversion "%%x" "py2v" "py2path" "py3v" "py3path" )
for /f "USEBACKQ tokens=*" %%x in (`!syspath!where.exe python3 2^> nul`) do ( call :checkpyversion "%%x" "py2v" "py2path" "py3v" "py3path" )
for /f "USEBACKQ tokens=*" %%x in (`!syspath!where.exe py 2^> nul`) do ( call :checkpylauncher "%%x" "py2v" "py2path" "py3v" "py3path" )
REM Walk our returns to see if we need to install
if /i "!use_py3!" == "FALSE" (
    set "targetpy=2"
    set "pypath=!py2path!"
) else if /i "!use_py3!" == "FORCE" (
    set "pypath=!py3path!"
) else if /i "!use_py3!" == "TRUE" (
    REM Prefer py3, fall back on py2 when py3 wasn't found
    set "pypath=!py3path!"
    if "!pypath!" == "" set "pypath=!py2path!"
)
if not "!pypath!" == "" (
    goto runscript
)
if !tried! lss 1 (
    if /i "!toask!"=="yes" (
        REM Better ask permission first
        goto askinstall
    ) else (
        goto installpy
    )
) else (
    cls
    echo ### ###
    echo # Python Not Found #
    echo ### ###
    echo.
    REM Couldn't install for whatever reason - give the error message
    echo Python is not installed or not found in your PATH var.
    echo Please go to https://www.python.org/downloads/windows/ to
    echo download and install the latest version, then try again.
    echo.
    echo Make sure you check the box labeled:
    echo.
    echo "Add Python X.X to PATH"
    echo.
    echo Where X.X is the py version you're installing.
    echo.
    echo Press [enter] to quit.
    pause > nul
    exit /b 1
)
goto runscript
|
||||
|
||||
:checkpylauncher <path> <py2v> <py2path> <py3v> <py3path>
REM Attempt to check the latest python 2 and 3 versions via the py launcher
REM Resolves each to its sys.executable and feeds it to :checkpyversion
for /f "USEBACKQ tokens=*" %%x in (`%~1 -2 -c "import sys; print(sys.executable)" 2^> nul`) do ( call :checkpyversion "%%x" "%~2" "%~3" "%~4" "%~5" )
for /f "USEBACKQ tokens=*" %%x in (`%~1 -3 -c "import sys; print(sys.executable)" 2^> nul`) do ( call :checkpyversion "%%x" "%~2" "%~3" "%~4" "%~5" )
goto :EOF
|
||||
|
||||
:checkpyversion <path> <py2v> <py2path> <py3v> <py3path>
REM Runs "<path> -V", and records the version+path in the py2 or py3
REM variables (by name) when it beats the best version seen so far.
set "version="&for /f "tokens=2* USEBACKQ delims= " %%a in (`"%~1" -V 2^>^&1`) do (
    REM Ensure we have a version number
    call :isnumber "%%a"
    if not "!errorlevel!" == "0" goto :EOF
    set "version=%%a"
)
if not defined version goto :EOF
if "!version:~0,1!" == "2" (
    REM Python 2 - keep only if newer than the recorded py2 version
    call :comparepyversion "!version!" "!%~2!"
    if "!errorlevel!" == "1" (
        set "%~2=!version!"
        set "%~3=%~1"
    )
) else (
    REM Python 3 - keep only if newer than the recorded py3 version
    call :comparepyversion "!version!" "!%~4!"
    if "!errorlevel!" == "1" (
        set "%~4=!version!"
        set "%~5=%~1"
    )
)
goto :EOF
|
||||
|
||||
:isnumber <check_value>
REM Exits 0 when the argument contains only digits and dots, 1 otherwise
REM (the for /f strips those chars; anything left means non-numeric)
set "var="&for /f "delims=0123456789." %%i in ("%~1") do set var=%%i
if defined var (exit /b 1)
exit /b 0
|
||||
|
||||
:comparepyversion <version1> <version2> <return>
REM Exits with status 0 if equal, 1 if v1 gtr v2, 2 if v1 lss v2
REM Splits each dotted version into up to three numeric components
for /f "tokens=1,2,3 delims=." %%a in ("%~1") do (
    set a1=%%a
    set a2=%%b
    set a3=%%c
)
for /f "tokens=1,2,3 delims=." %%a in ("%~2") do (
    set b1=%%a
    set b2=%%b
    set b3=%%c
)
REM Missing components compare as 0 (e.g. "3.9" == "3.9.0")
if not defined a1 set a1=0
if not defined a2 set a2=0
if not defined a3 set a3=0
if not defined b1 set b1=0
if not defined b2 set b2=0
if not defined b3 set b3=0
if %a1% gtr %b1% exit /b 1
if %a1% lss %b1% exit /b 2
if %a2% gtr %b2% exit /b 1
if %a2% lss %b2% exit /b 2
if %a3% gtr %b3% exit /b 1
if %a3% lss %b3% exit /b 2
exit /b 0
|
||||
|
||||
:askinstall
REM Prompt the user before downloading/installing Python; loops on any
REM answer other than y/n.
cls
echo ### ###
echo # Python Not Found #
echo ### ###
echo.
echo Python !targetpy! was not found on the system or in the PATH var.
echo.
set /p "menu=Would you like to install it now? [y/n]: "
if /i "!menu!"=="y" (
    REM We got the OK - install it
    goto installpy
) else if "!menu!"=="n" (
    REM No OK here... count the attempt and re-check
    set /a tried=!tried!+1
    goto checkpy
)
REM Incorrect answer - go back
goto askinstall
|
||||
|
||||
:installpy
REM Download and silently install Python.  Uses !user_provided! when set,
REM otherwise scrapes python.org for the latest release of !targetpy!.
REM Honors !just_installing! (TRUE = exit with status instead of re-checking).
set /a tried=!tried!+1
cls
echo ### ###
echo # Downloading Python #
echo ### ###
echo.
set "release=!user_provided!"
if "!release!" == "" (
    REM No explicit release set - get the latest from python.org
    echo Gathering latest version...
    powershell -command "[Net.ServicePointManager]::SecurityProtocol=[Net.SecurityProtocolType]::Tls12;(new-object System.Net.WebClient).DownloadFile('https://www.python.org/downloads/windows/','%TEMP%\pyurl.txt')"
    REM Extract it if it's gzip compressed
    powershell -command "$infile='%TEMP%\pyurl.txt';$outfile='%TEMP%\pyurl.temp';try{$input=New-Object System.IO.FileStream $infile,([IO.FileMode]::Open),([IO.FileAccess]::Read),([IO.FileShare]::Read);$output=New-Object System.IO.FileStream $outfile,([IO.FileMode]::Create),([IO.FileAccess]::Write),([IO.FileShare]::None);$gzipStream=New-Object System.IO.Compression.GzipStream $input,([IO.Compression.CompressionMode]::Decompress);$buffer=New-Object byte[](1024);while($true){$read=$gzipstream.Read($buffer,0,1024);if($read -le 0){break};$output.Write($buffer,0,$read)};$gzipStream.Close();$output.Close();$input.Close();Move-Item -Path $outfile -Destination $infile -Force}catch{}"
    if not exist "%TEMP%\pyurl.txt" (
        if /i "!just_installing!" == "TRUE" (
            echo - Failed to get info
            exit /b 1
        ) else (
            goto checkpy
        )
    )
    pushd "%TEMP%"
    REM Version detection code slimmed by LussacZheng (https://github.com/corpnewt/gibMacOS/issues/20)
    REM Fix: the comment above used "::" which is a label, not a comment -
    REM "::" inside a parenthesized block can abort the whole block in cmd.
    for /f "tokens=9 delims=< " %%x in ('findstr /i /c:"Latest Python !targetpy! Release" pyurl.txt') do ( set "release=%%x" )
    popd
    REM Let's delete our txt file now - we no longer need it
    del "%TEMP%\pyurl.txt"
    if "!release!" == "" (
        if /i "!just_installing!" == "TRUE" (
            echo - Failed to get python version
            exit /b 1
        ) else (
            goto checkpy
        )
    )
    echo Located Version: !release!
) else (
    echo User-Provided Version: !release!
    REM Update our targetpy to reflect the first number of
    REM our release
    for /f "tokens=1 delims=." %%a in ("!release!") do (
        call :isnumber "%%a"
        if "!errorlevel!" == "0" (
            set "targetpy=%%a"
        )
    )
)
echo Building download url...
REM At this point - we should have the version number.
REM We can build the url like so: "https://www.python.org/ftp/python/[version]/python-[version]-amd64.exe"
set "url=https://www.python.org/ftp/python/!release!/python-!release!-amd64.exe"
set "pytype=exe"
if "!targetpy!" == "2" (
    set "url=https://www.python.org/ftp/python/!release!/python-!release!.amd64.msi"
    set "pytype=msi"
)
echo - !url!
echo Downloading...
REM Now we download it with our slick powershell command
powershell -command "[Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12; (new-object System.Net.WebClient).DownloadFile('!url!','%TEMP%\pyinstall.!pytype!')"
REM If it doesn't exist - we bail
if not exist "%TEMP%\pyinstall.!pytype!" (
    if /i "!just_installing!" == "TRUE" (
        echo - Failed to download python installer
        exit /b 1
    ) else (
        goto checkpy
    )
)
REM It should exist at this point - let's run it to install silently
echo Running python !pytype! installer...
pushd "%TEMP%"
if /i "!pytype!" == "exe" (
    echo - pyinstall.exe /quiet PrependPath=1 Include_test=0 Shortcuts=0 Include_launcher=0
    pyinstall.exe /quiet PrependPath=1 Include_test=0 Shortcuts=0 Include_launcher=0
) else (
    set "foldername=!release:.=!"
    echo - msiexec /i pyinstall.msi /qb ADDLOCAL=ALL TARGETDIR="%LocalAppData%\Programs\Python\Python!foldername:~0,2!"
    msiexec /i pyinstall.msi /qb ADDLOCAL=ALL TARGETDIR="%LocalAppData%\Programs\Python\Python!foldername:~0,2!"
)
REM Fix: capture the installer's exit status before popd runs - popd
REM succeeds and resets ERRORLEVEL to 0, so the original always reported 0.
set "py_error=!errorlevel!"
popd
echo Installer finished with status: !py_error!
echo Cleaning up...
REM Now we should be able to delete the installer and check for py again
del "%TEMP%\pyinstall.!pytype!"
REM If it worked, then we should have python in our PATH
REM this does not get updated right away though - let's try
REM manually updating the local PATH var
call :updatepath
if /i "!just_installing!" == "TRUE" (
    echo.
    echo Done.
) else (
    goto checkpy
)
exit /b
|
||||
|
||||
:runscript
REM Python was located - launch the companion .py script, forwarding any
REM arguments captured at startup, and optionally pause on a failure.
REM Reads: !pypath! (python binary), !thisDir!, !script_name!, !args!,
REM !pause_on_error!.
cls
REM Checks the args gathered at the beginning of the script.
REM Make sure we're not just forwarding empty quotes.
set "arg_test=!args:"=!"
if "!arg_test!"=="" (
    "!pypath!" "!thisDir!!script_name!"
) else (
    "!pypath!" "!thisDir!!script_name!" !args!
)
REM %ERRORLEVEL% below is expanded when this whole if-block is parsed,
REM which happens after the python call above has finished - so it holds
REM the script's exit code as intended.
if /i "!pause_on_error!" == "yes" (
    if not "%ERRORLEVEL%" == "0" (
        echo.
        echo Script exited with error code: %ERRORLEVEL%
        echo.
        echo Press [enter] to exit...
        pause > nul
    )
)
goto :EOF
|
||||
|
||||
:undouble <string_name> <string_value> <character>
REM Helper function to strip doubles of a single character out of a string recursively
REM %1 = name of the variable that receives the result
REM %2 = input string
REM %3 = the character to de-double (e.g. ";")
set "string_value=%~2"
:undouble_continue
REM Replace every doubled occurrence of the character with a single one
set "check=!string_value:%~3%~3=%~3!"
if not "!check!" == "!string_value!" (
    REM Something changed - loop until a pass makes no further replacement
    set "string_value=!check!"
    goto :undouble_continue
)
REM Store the fully de-doubled string into the caller's variable
set "%~1=!check!"
goto :EOF
|
||||
|
||||
:updatepath
REM Rebuild the local PATH from the registry (HKLM machine value plus HKCU
REM user value) so a freshly installed python can be found without
REM restarting the shell.  The change only affects this cmd session.
set "spath="
set "upath="
REM Read the user-level Path value from HKCU
for /f "USEBACKQ tokens=2* delims= " %%i in (`!syspath!reg.exe query "HKCU\Environment" /v "Path" 2^> nul`) do ( if not "%%j" == "" set "upath=%%j" )
REM Read the machine-level Path value from HKLM
for /f "USEBACKQ tokens=2* delims= " %%i in (`!syspath!reg.exe query "HKLM\SYSTEM\CurrentControlSet\Control\Session Manager\Environment" /v "Path" 2^> nul`) do ( if not "%%j" == "" set "spath=%%j" )
if not "%spath%" == "" (
    REM We got something in the system path
    set "PATH=%spath%"
    if not "%upath%" == "" (
        REM We also have something in the user path
        set "PATH=%PATH%;%upath%"
    )
) else if not "%upath%" == "" (
    set "PATH=%upath%"
)
REM Remove double semicolons from the adjusted PATH
call :undouble "PATH" "%PATH%" ";"
goto :EOF
|
||||
|
||||
:getsyspath <variable_name>
REM Helper method to return a valid path to cmd.exe, reg.exe, and where.exe by
REM walking the ComSpec var - will also repair it in memory if need be
REM %1 = name of the variable that receives the directory (with trailing "\")
REM Strip double semi-colons
call :undouble "temppath" "%ComSpec%" ";"

REM Dirty hack to leverage the "line feed" approach - there are some odd side
REM effects with this. Do not use this variable name in comments near this
REM line - as it seems to behave erradically.
(set LF=^
%=this line is empty=%
)
REM Replace instances of semi-colons with a line feed and wrap
REM in parenthesis to work around some strange batch behavior
set "testpath=%temppath:;=!LF!%"

REM Let's walk each path and test if cmd.exe, reg.exe, and where.exe exist there
set /a found=0
for /f "tokens=* delims=" %%i in ("!testpath!") do (
    REM Only continue if we haven't found it yet
    if not "%%i" == "" (
        if !found! lss 1 (
            set "checkpath=%%i"
            REM Remove "cmd.exe" from the end if it exists
            if /i "!checkpath:~-7!" == "cmd.exe" (
                set "checkpath=!checkpath:~0,-7!"
            )
            REM Pad the end with a backslash if needed
            if not "!checkpath:~-1!" == "\" (
                set "checkpath=!checkpath!\"
            )
            REM Let's see if cmd, reg, and where exist there - and set it if so
            if EXIST "!checkpath!cmd.exe" (
                if EXIST "!checkpath!reg.exe" (
                    if EXIST "!checkpath!where.exe" (
                        REM All three tools present - repair ComSpec and
                        REM hand the directory back to the caller
                        set /a found=1
                        set "ComSpec=!checkpath!cmd.exe"
                        set "%~1=!checkpath!"
                    )
                )
            )
        )
    )
)
goto :EOF
|
||||
339
ACPI/SSDTTime-master/SSDTTime.command
Normal file
339
ACPI/SSDTTime-master/SSDTTime.command
Normal file
@@ -0,0 +1,339 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
# Get the curent directory, the script name
|
||||
# and the script name with "py" substituted for the extension.
|
||||
args=( "$@" )
|
||||
dir="$(cd -- "$(dirname "$0")" >/dev/null 2>&1; pwd -P)"
|
||||
script="${0##*/}"
|
||||
target="${script%.*}.py"
|
||||
|
||||
# use_py3:
|
||||
# TRUE = Use if found, use py2 otherwise
|
||||
# FALSE = Use py2
|
||||
# FORCE = Use py3
|
||||
use_py3="TRUE"
|
||||
|
||||
# We'll parse if the first argument passed is
|
||||
# --install-python and if so, we'll just install
|
||||
# Can optionally take a version number as the
|
||||
# second arg - i.e. --install-python 3.13.1
|
||||
just_installing="FALSE"
|
||||
|
||||
tempdir=""
|
||||
|
||||
compare_to_version () {
    # Compare the running macOS version to $2 using comparison mode $1 and
    # echo "1" on a match, "0" otherwise (nothing if arguments are missing).
    # $1 = 0 (equal), 1 (greater), 2 (less), 3 (gequal), 4 (lequal)
    # $2 = OS version to compare ours to
    if [ -z "$1" ] || [ -z "$2" ]; then
        # Missing info - bail.
        return
    fi
    local os_version= result=
    os_version="$(sw_vers -productVersion 2>/dev/null)"
    result="$(vercomp "$os_version" "$2")"
    # Exact mode match first, then the two compound modes (>= and <=)
    if [ "$result" == "$1" ]; then
        echo "1"
    elif [ "$1" == "3" ] && { [ "$result" == "1" ] || [ "$result" == "0" ]; }; then
        echo "1"
    elif [ "$1" == "4" ] && { [ "$result" == "2" ] || [ "$result" == "0" ]; }; then
        echo "1"
    else
        # No match
        echo "0"
    fi
}
|
||||
|
||||
set_use_py3_if () {
    # Set the global use_py3 to $3 when the OS-version comparison matches.
    # $1 = 0 (equal), 1 (greater), 2 (less), 3 (gequal), 4 (lequal)
    # $2 = OS version to compare
    # $3 = TRUE/FALSE/FORCE to apply on match
    local mode="$1" os_vers="$2" new_value="$3"
    if [ -z "$mode" ] || [ -z "$os_vers" ] || [ -z "$new_value" ]; then
        # Missing arguments - leave use_py3 untouched.
        return
    fi
    case "$(compare_to_version "$mode" "$os_vers")" in
        1) use_py3="$new_value" ;;
    esac
}
|
||||
|
||||
get_remote_py_version () {
    # Scrape python.org's macOS download page for the latest release number
    # of the major version implied by use_py3 (3 unless use_py3 == FALSE).
    # Echoes a version string like "3.12.1", or nothing on failure.
    local pyurl= py_html= py_vers= py_num="3"
    pyurl="https://www.python.org/downloads/macos/"
    # --compressed lets curl transparently decompress a gzip'd response
    py_html="$(curl -L $pyurl --compressed 2>&1)"
    if [ -z "$use_py3" ]; then
        use_py3="TRUE"
    fi
    if [ "$use_py3" == "FALSE" ]; then
        py_num="2"
    fi
    # The page contains e.g. "Latest Python 3 Release - Python 3.12.1</a>";
    # awk field 8 is the "3.12.1</a>" token and cut strips the markup.
    py_vers="$(echo "$py_html" | grep -i "Latest Python $py_num Release" | awk '{print $8}' | cut -d'<' -f1)"
    echo "$py_vers"
}
|
||||
|
||||
download_py () {
    # Download and install python version $1 (or the latest from python.org
    # when $1 is empty), then update PATH/certificates and either finish
    # (just_installing == TRUE) or re-run main to re-detect python.
    local vers="$1" url= result=
    clear
    echo " ### ###"
    echo " # Downloading Python #"
    echo "### ###"
    echo
    if [ -z "$vers" ]; then
        echo "Gathering latest version..."
        vers="$(get_remote_py_version)"
        if [ -z "$vers" ]; then
            if [ "$just_installing" == "TRUE" ]; then
                echo " - Failed to get info!"
                exit 1
            else
                # Didn't get it still - bail
                print_error
            fi
        fi
        echo "Located Version: $vers"
    else
        # Got a version passed
        echo "User-Provided Version: $vers"
    fi
    echo "Building download url..."
    # Find the first macOS .pkg link on the release page for this version
    url="$(curl -L https://www.python.org/downloads/release/python-${vers//./}/ --compressed 2>&1 | grep -iE "python-$vers-macos.*.pkg\"" | awk -F'"' '{ print $2 }' | head -n 1)"
    if [ -z "$url" ]; then
        if [ "$just_installing" == "TRUE" ]; then
            echo " - Failed to build download url!"
            exit 1
        else
            # Couldn't get the URL - bail
            print_error
        fi
    fi
    echo " - $url"
    echo "Downloading..."
    # Create a temp dir and download to it
    tempdir="$(mktemp -d 2>/dev/null || mktemp -d -t 'tempdir')"
    curl "$url" -o "$tempdir/python.pkg"
    result="$?"
    if [ "$result" != "0" ]; then
        echo " - Failed to download python installer!"
        # Fix: exit with curl's status - the original "exit $?" exited with
        # the [ ] test's status (always 0), masking the failure.
        exit $result
    fi
    echo
    echo "Running python install package..."
    echo
    sudo installer -pkg "$tempdir/python.pkg" -target /
    # Fix: capture the installer's status before the following echo
    # clobbers $? - the original checked $? after echo, so the failure
    # branch could never fire.
    result="$?"
    echo
    if [ "$result" != "0" ]; then
        echo " - Failed to install python!"
        exit $result
    fi
    # Now we expand the package and look for a shell update script
    pkgutil --expand "$tempdir/python.pkg" "$tempdir/python"
    if [ -e "$tempdir/python/Python_Shell_Profile_Updater.pkg/Scripts/postinstall" ]; then
        # Run the script
        echo "Updating PATH..."
        echo
        "$tempdir/python/Python_Shell_Profile_Updater.pkg/Scripts/postinstall"
        echo
    fi
    # "3.12.1" -> "Python 3.12".  Fix: use "-f1,2" - with the original
    # "-f1 -f2" the last -f wins on common cut implementations, yielding
    # only the minor number and a wrong /Applications folder name.
    vers_folder="Python $(echo "$vers" | cut -d'.' -f1,2)"
    if [ -f "/Applications/$vers_folder/Install Certificates.command" ]; then
        # Certs script exists - let's execute that to make sure our certificates are updated
        echo "Updating Certificates..."
        echo
        "/Applications/$vers_folder/Install Certificates.command"
        echo
    fi
    echo "Cleaning up..."
    cleanup
    if [ "$just_installing" == "TRUE" ]; then
        echo
        echo "Done."
    else
        # Now we check for py again
        downloaded="TRUE"
        clear
        main
    fi
}
|
||||
|
||||
cleanup () {
    # Delete the temporary download directory, if one was created.
    if ! [ -d "$tempdir" ]; then
        return
    fi
    rm -Rf "$tempdir"
}
|
||||
|
||||
print_error() {
    # Report that python couldn't be found, remove any temp dir, and exit 1.
    # On macOS we point the user at python.org; elsewhere at their package
    # manager (reads the global $kernel set at startup).
    clear
    cleanup
    echo " ### ###"
    echo " # Python Not Found #"
    echo "### ###"
    echo
    echo "Python is not installed or not found in your PATH var."
    echo
    if [ "$kernel" == "Darwin" ]; then
        echo "Please go to https://www.python.org/downloads/macos/ to"
        echo "download and install the latest version, then try again."
    else
        echo "Please install python through your package manager and"
        echo "try again."
    fi
    echo
    exit 1
}
|
||||
|
||||
print_target_missing() {
    # Report that the companion .py script ($target) is missing next to this
    # launcher, remove any temp dir, and exit 1.
    clear
    cleanup
    echo " ### ###"
    echo " # Target Not Found #"
    echo "### ###"
    echo
    echo "Could not locate $target!"
    echo
    exit 1
}
|
||||
|
||||
format_version () {
    # Normalize a dotted version string into a fixed-width number usable in
    # numeric comparisons: "10.15.7" -> 10015007000 (major, then three
    # zero-padded 3-digit groups; missing fields count as 0).
    # Fixes: drop the unused "local vers" and the redundant echo-of-echo
    # subshell wrapper - the awk output is echoed directly.
    echo "$1" | awk -F. '{ printf("%d%03d%03d%03d\n", $1,$2,$3,$4); }'
}
|
||||
|
||||
vercomp () {
    # Compare two dotted version strings after normalizing each with
    # format_version.  Echoes "1" if $1 > $2, "2" if $1 < $2, "0" if equal.
    # Modified from: https://apple.stackexchange.com/a/123408/11374
    local left="$(format_version "$1")" right="$(format_version "$2")"
    if [ $left -eq $right ]; then
        echo "0"
    elif [ $left -gt $right ]; then
        echo "1"
    else
        echo "2"
    fi
}
|
||||
|
||||
get_local_python_version() {
    # $1 = Python bin name (defaults to python3)
    # Echoes the path to the highest version of the passed python bin if any
    # Uses the global check_py3_stub to skip the macOS 10.15+ Xcode CLT stub
    # at /usr/bin/python3 when no developer directory is active.
    local py_name="$1" max_version= python= python_version= python_path=
    if [ -z "$py_name" ]; then
        py_name="python3"
    fi
    # All candidate binaries of that name on the PATH, one per line
    py_list="$(which -a "$py_name" 2>/dev/null)"
    # Walk that newline separated list
    while read python; do
        if [ -z "$python" ]; then
            # Got a blank line - skip
            continue
        fi
        if [ "$check_py3_stub" == "1" ] && [ "$python" == "/usr/bin/python3" ]; then
            # See if we have a valid developer path
            xcode-select -p > /dev/null 2>&1
            if [ "$?" != "0" ]; then
                # /usr/bin/python3 path - but no valid developer dir
                continue
            fi
        fi
        python_version="$(get_python_version $python)"
        if [ -z "$python_version" ]; then
            # Didn't find a py version - skip
            continue
        fi
        # Got the py version - compare to our max
        if [ -z "$max_version" ] || [ "$(vercomp "$python_version" "$max_version")" == "1" ]; then
            # Max not set, or less than the current - update it
            max_version="$python_version"
            python_path="$python"
        fi
    done <<< "$py_list"
    # Empty when no usable binary was found
    echo "$python_path"
}
|
||||
|
||||
get_python_version() {
    # Echo the version reported by the python binary at $1 (e.g. "3.12.1"),
    # or nothing if no version could be parsed.
    local py_bin="$1" found_vers=
    # Merge stderr into stdout (python2 prints its version to stderr), grab
    # the token following "Python", and keep it only if it resembles a
    # version number.
    found_vers="$($py_bin -V 2>&1 | grep -i python | cut -d' ' -f2 | grep -E "[A-Za-z\d\.]+")"
    if [ -n "$found_vers" ]; then
        echo "$found_vers"
    fi
}
|
||||
|
||||
prompt_and_download() {
    # Interactively offer to download/install python.  Only makes sense on
    # macOS and only once per run; otherwise falls through to print_error
    # (which exits).
    if [ "$downloaded" != "FALSE" ] || [ "$kernel" != "Darwin" ]; then
        # We already tried to download, or we're not on macOS - just bail
        print_error
    fi
    clear
    echo " ### ###"
    echo " # Python Not Found #"
    echo "### ###"
    echo
    # target_py is what we'd install; printed_py is what we tell the user
    target_py="Python 3"
    printed_py="Python 2 or 3"
    if [ "$use_py3" == "FORCE" ]; then
        printed_py="Python 3"
    elif [ "$use_py3" == "FALSE" ]; then
        target_py="Python 2"
        printed_py="Python 2"
    fi
    echo "Could not locate $printed_py!"
    echo
    echo "This script requires $printed_py to run."
    echo
    # Re-prompt until we get a y/n answer; "n" exits via print_error
    while true; do
        read -p "Would you like to install the latest $target_py now? (y/n): " yn
        case $yn in
            [Yy]* ) download_py;break;;
            [Nn]* ) print_error;;
        esac
    done
}
|
||||
|
||||
main() {
    # Locate the preferred python interpreter (per use_py3) and run the
    # companion .py script with the original command-line arguments.
    # Prompts to install python (macOS) when none is found.
    local python= version=
    # Verify our target exists
    if [ ! -f "$dir/$target" ]; then
        # Doesn't exist
        print_target_missing
    fi
    if [ -z "$use_py3" ]; then
        use_py3="TRUE"
    fi
    if [ "$use_py3" != "FALSE" ]; then
        # Check for py3 first
        python="$(get_local_python_version python3)"
    fi
    if [ "$use_py3" != "FORCE" ] && [ -z "$python" ]; then
        # We aren't using py3 explicitly, and we don't already have a path
        python="$(get_local_python_version python2)"
        if [ -z "$python" ]; then
            # Try just looking for "python"
            python="$(get_local_python_version python)"
        fi
    fi
    if [ -z "$python" ]; then
        # Didn't ever find it - prompt
        prompt_and_download
        return 1
    fi
    # Found it - start our script and pass all args
    "$python" "$dir/$target" "${args[@]}"
}
|
||||
|
||||
# Keep track of whether or not we're on macOS to determine if
|
||||
# we can download and install python for the user as needed.
|
||||
kernel="$(uname -s)"
# Check to see if we need to force based on
# macOS version. 10.15 has a dummy python3 version
# that can trip up some py3 detection in other scripts.
# set_use_py3_if "3" "10.15" "FORCE"
downloaded="FALSE"
# Check for the aforementioned /usr/bin/python3 stub if
# our OS version is 10.15 or greater.
check_py3_stub="$(compare_to_version "3" "10.15")"
# Always remove the temp download dir (if any) when the script exits
trap cleanup EXIT
# "--install-python [version]" on macOS just installs python and exits;
# any other invocation locates python and runs the companion .py script.
if [ "$1" == "--install-python" ] && [ "$kernel" == "Darwin" ]; then
    just_installing="TRUE"
    download_py "$2"
else
    main
fi
|
||||
4138
ACPI/SSDTTime-master/SSDTTime.py
Normal file
4138
ACPI/SSDTTime-master/SSDTTime.py
Normal file
File diff suppressed because it is too large
Load Diff
4
ACPI/SSDTTime-master/Scripts/__init__.py
Normal file
4
ACPI/SSDTTime-master/Scripts/__init__.py
Normal file
@@ -0,0 +1,4 @@
|
||||
from os.path import dirname, basename, isfile
import glob

# Gather every sibling .py file in this package directory.
modules = glob.glob(dirname(__file__) + "/*.py")
# Export each module's name (file name minus ".py") via __all__ so that
# "from Scripts import *" pulls in all of them, skipping this initializer.
__all__ = [
    basename(module_file)[:-3]
    for module_file in modules
    if isfile(module_file) and not module_file.endswith("__init__.py")
]
|
||||
330
ACPI/SSDTTime-master/Scripts/downloader.py
Normal file
330
ACPI/SSDTTime-master/Scripts/downloader.py
Normal file
@@ -0,0 +1,330 @@
|
||||
import sys, os, time, ssl, gzip, multiprocessing
|
||||
from io import BytesIO
|
||||
# Python-aware urllib stuff
|
||||
try:
|
||||
from urllib.request import urlopen, Request
|
||||
import queue as q
|
||||
except ImportError:
|
||||
# Import urllib2 to catch errors
|
||||
import urllib2
|
||||
from urllib2 import urlopen, Request
|
||||
import Queue as q
|
||||
|
||||
TERMINAL_WIDTH = 120 if os.name=="nt" else 80
|
||||
|
||||
def get_size(size, suffix=None, use_1024=False, round_to=2, strip_zeroes=False):
    """Return a human-readable size string for *size* bytes.

    size         -> number of bytes; -1 returns "Unknown"
    suffix       -> force a specific unit (e.g. "KB") when it exists
    use_1024     -> binary units (KiB/MiB/...) instead of decimal (KB/MB/...)
    round_to     -> decimal places to round to, clamped to 0-15 (default 2)
    strip_zeroes -> strip trailing zeroes instead of zero-padding
    """
    # Failsafe in case our size is unknown
    if size == -1:
        return "Unknown"
    # Get our suffixes based on use_1024
    ext = ["B","KiB","MiB","GiB","TiB","PiB"] if use_1024 else ["B","KB","MB","GB","TB","PB"]
    div = 1024 if use_1024 else 1000
    s = float(size)
    s_dict = {} # Initialize our dict
    # Iterate the ext list, and divide by 1000 or 1024 each time to setup the dict {ext:val}
    for e in ext:
        s_dict[e] = s
        s /= div
    # Get our suffix if provided - will be set to None if not found, or if started as None
    suffix = next((x for x in ext if x.lower() == suffix.lower()),None) if suffix else suffix
    # Get the largest value that's still over 1
    biggest = suffix if suffix else next((x for x in ext[::-1] if s_dict[x] >= 1), "B")
    # Determine our rounding approach - first make sure it's an int; default to 2 on error
    try:round_to=int(round_to)
    except:round_to=2
    round_to = 0 if round_to < 0 else 15 if round_to > 15 else round_to # Ensure it's between 0 and 15
    bval = round(s_dict[biggest], round_to)
    # Split on the decimal point - partition() tolerates a repr with no "."
    # (the original split() raised ValueError unpacking in that case)
    a,_,b = str(bval).partition(".")
    # Check if we need to strip or pad zeroes
    b = b.rstrip("0") if strip_zeroes else b.ljust(round_to,"0") if round_to > 0 else ""
    return "{:,}{} {}".format(int(a),"" if not b else "."+b,biggest)
|
||||
|
||||
def _process_hook(queue, total_size, bytes_so_far=0, update_interval=1.0, max_packets=0):
    # Progress-display worker, run in a separate process by Downloader.
    #
    # queue           - multiprocessing.Queue of (timestamp, chunk_length)
    #                   tuples; the sentinel string "DONE" ends the loop
    # total_size      - expected byte count, or <= 0 when unknown
    # bytes_so_far    - starting byte offset (e.g. a resumed download)
    # update_interval - seconds between speed/ETA recalculations (also the
    #                   queue-get timeout, so the bar redraws at this rate)
    # max_packets     - cap on retained packets for the speed window
    #                   (0 = unlimited)
    packets = []
    speed = remaining = ""
    last_update = time.time()
    while True:
        # Write our info first so we have *some* status while
        # waiting for packets
        if total_size > 0:
            percent = float(bytes_so_far) / total_size
            percent = round(percent*100, 2)
            t_s = get_size(total_size)
            try:
                # Display the downloaded amount in the same unit as the total
                b_s = get_size(bytes_so_far, t_s.split(" ")[1])
            except:
                b_s = get_size(bytes_so_far)
            perc_str = " {:.2f}%".format(percent)
            bar_width = (TERMINAL_WIDTH // 3)-len(perc_str)
            progress = "=" * int(bar_width * (percent/100))
            # \r\033[K returns to column 0 and clears the line before redraw
            sys.stdout.write("\r\033[K{}/{} | {}{}{}{}{}".format(
                b_s,
                t_s,
                progress,
                " " * (bar_width-len(progress)),
                perc_str,
                speed,
                remaining
            ))
        else:
            # Unknown total - just show the running byte count and speed
            b_s = get_size(bytes_so_far)
            sys.stdout.write("\r\033[K{}{}".format(b_s, speed))
        sys.stdout.flush()
        # Now we gather the next packet
        try:
            packet = queue.get(timeout=update_interval)
            # Packets should be formatted as a tuple of
            # (timestamp, len(bytes_downloaded))
            # If "DONE" is passed, we assume the download
            # finished - and bail
            if packet == "DONE":
                print("") # Jump to the next line
                return
            # Append our packet to the list and ensure we're not
            # beyond our max.
            # Only check max if it's > 0
            packets.append(packet)
            if max_packets > 0:
                packets = packets[-max_packets:]
            # Increment our bytes so far as well
            bytes_so_far += packet[1]
        except q.Empty:
            # Didn't get anything - reset the speed
            # and packets
            packets = []
            speed = " | 0 B/s"
            remaining = " | ?? left" if total_size > 0 else ""
        except KeyboardInterrupt:
            print("") # Jump to the next line
            return
        # If we have packets and it's time for an update, process
        # the info.
        update_check = time.time()
        if packets and update_check - last_update >= update_interval:
            last_update = update_check # Refresh our update timestamp
            speed = " | ?? B/s"
            if len(packets) > 1:
                # Let's calculate the amount downloaded over how long
                try:
                    first,last = packets[0][0],packets[-1][0]
                    chunks = sum([float(x[1]) for x in packets])
                    t = last-first
                    assert t >= 0
                    bytes_speed = 1. / t * chunks
                    speed = " | {}/s".format(get_size(bytes_speed,round_to=1))
                    # Get our remaining time
                    if total_size > 0:
                        seconds_left = (total_size-bytes_so_far) / bytes_speed
                        days = seconds_left // 86400
                        hours = (seconds_left - (days*86400)) // 3600
                        mins = (seconds_left - (days*86400) - (hours*3600)) // 60
                        secs = seconds_left - (days*86400) - (hours*3600) - (mins*60)
                        if days > 99 or bytes_speed == 0:
                            remaining = " | ?? left"
                        else:
                            remaining = " | {}{:02d}:{:02d}:{:02d} left".format(
                                "{}:".format(int(days)) if days else "",
                                int(hours),
                                int(mins),
                                int(round(secs))
                            )
                except:
                    # Bad timestamps (t == 0 or clock skew) - keep placeholders
                    pass
            # Clear the packets so we don't reuse the same ones
            packets = []
|
||||
|
||||
class Downloader:
    """HTTP download helper with optional progress display.

    Wraps urllib with a configurable User-Agent, a best-effort SSL context,
    gzip expansion, and a separate progress-bar process (_process_hook).
    """

    def __init__(self,**kwargs):
        # useragent kwarg overrides the default header dict
        self.ua = kwargs.get("useragent",{"User-Agent":"Mozilla"})
        self.chunk = 1048576 # 1024 x 1024 i.e. 1MiB
        if os.name=="nt": os.system("color") # Initialize cmd for ANSI escapes
        # Provide reasonable default logic to workaround macOS CA file handling
        cafile = ssl.get_default_verify_paths().openssl_cafile
        try:
            # If default OpenSSL CA file does not exist, use that from certifi
            if not os.path.exists(cafile):
                import certifi
                cafile = certifi.where()
            self.ssl_context = ssl.create_default_context(cafile=cafile)
        except:
            # None of the above worked, disable certificate verification for now
            self.ssl_context = ssl._create_unverified_context()
        return

    def _decode(self, value, encoding="utf-8", errors="ignore"):
        # Helper method to only decode if bytes type
        if sys.version_info >= (3,0) and isinstance(value, bytes):
            return value.decode(encoding,errors)
        return value

    def _update_main_name(self):
        # Windows running python 2 seems to have issues with multiprocessing
        # if the case of the main script's name is incorrect:
        # e.g. Downloader.py vs downloader.py
        #
        # To work around this, we try to scrape for the correct case if
        # possible.
        try:
            path = os.path.abspath(sys.modules["__main__"].__file__)
        except AttributeError as e:
            # This likely means we're running from the interpreter
            # directly
            return None
        if not os.path.isfile(path):
            return None
        # Get the file name and folder path
        name = os.path.basename(path).lower()
        fldr = os.path.dirname(path)
        # Walk the files in the folder until we find our
        # name - then steal its case and update that path
        for f in os.listdir(fldr):
            if f.lower() == name:
                # Got it
                new_path = os.path.join(fldr,f)
                sys.modules["__main__"].__file__ = new_path
                return new_path
        # If we got here, it wasn't found
        return None

    def _get_headers(self, headers = None):
        # Fall back on the default ua if none provided
        target = headers if isinstance(headers,dict) else self.ua
        new_headers = {}
        # Shallow copy to prevent changes to the headers
        # overriding the original
        for k in target:
            new_headers[k] = target[k]
        return new_headers

    def open_url(self, url, headers = None):
        # Open the url and return the response object - or None on any error.
        headers = self._get_headers(headers)
        # Wrap up the try/except block so we don't have to do this for each function
        try:
            response = urlopen(Request(url, headers=headers), context=self.ssl_context)
        except Exception as e:
            # No fixing this - bail
            return None
        return response

    def get_size(self, *args, **kwargs):
        # Convenience pass-through to the module-level get_size() helper
        return get_size(*args,**kwargs)

    def get_string(self, url, progress = True, headers = None, expand_gzip = True):
        # Download url and return the body decoded as utf-8 (None on failure).
        response = self.get_bytes(url,progress,headers,expand_gzip)
        if response is None: return None
        return self._decode(response)

    def get_bytes(self, url, progress = True, headers = None, expand_gzip = True):
        # Download url fully into memory and return the raw bytes
        # (gunzipped when expand_gzip and the server sent gzip).
        # Returns None when the request could not be opened.
        response = self.open_url(url, headers)
        if response is None: return None
        try: total_size = int(response.headers['Content-Length'])
        except: total_size = -1
        chunk_so_far = b""
        packets = queue = process = None
        if progress:
            # Make sure our vars are initialized
            packets = [] if progress else None
            queue = multiprocessing.Queue()
            # Create the multiprocess and start it
            process = multiprocessing.Process(
                target=_process_hook,
                args=(queue,total_size)
            )
            process.daemon = True
            # Filthy hack for earlier python versions on Windows
            if os.name == "nt" and hasattr(multiprocessing,"forking"):
                self._update_main_name()
            process.start()
        try:
            while True:
                chunk = response.read(self.chunk)
                if progress:
                    # Add our items to the queue
                    queue.put((time.time(),len(chunk)))
                if not chunk: break
                chunk_so_far += chunk
        finally:
            # Close the response whenever we're done
            response.close()
        if expand_gzip and response.headers.get("Content-Encoding","unknown").lower() == "gzip":
            fileobj = BytesIO(chunk_so_far)
            gfile = gzip.GzipFile(fileobj=fileobj)
            return gfile.read()
        if progress:
            # Finalize the queue and wait
            queue.put("DONE")
            process.join()
        return chunk_so_far

    def stream_to_file(self, url, file_path, progress = True, headers = None, ensure_size_if_present = True, allow_resume = False):
        # Stream url into file_path, optionally resuming a partial download
        # via a Range header.  Returns file_path on success, None on failure
        # (including a size mismatch when ensure_size_if_present is set).
        response = self.open_url(url, headers)
        if response is None: return None
        bytes_so_far = 0
        try: total_size = int(response.headers['Content-Length'])
        except: total_size = -1
        packets = queue = process = None
        mode = "wb"
        if allow_resume and os.path.isfile(file_path) and total_size != -1:
            # File exists, we're resuming and have a target size. Check the
            # local file size.
            current_size = os.stat(file_path).st_size
            if current_size == total_size:
                # File is already complete - return the path
                return file_path
            elif current_size < total_size:
                response.close()
                # File is not complete - seek to our current size
                bytes_so_far = current_size
                mode = "ab" # Append
                # We also need to try creating a new request
                # in order to pass our range header
                new_headers = self._get_headers(headers)
                # Get the start byte, 0-indexed
                byte_string = "bytes={}-".format(current_size)
                new_headers["Range"] = byte_string
                response = self.open_url(url, new_headers)
                if response is None: return None
        if progress:
            # Make sure our vars are initialized
            packets = [] if progress else None
            queue = multiprocessing.Queue()
            # Create the multiprocess and start it
            process = multiprocessing.Process(
                target=_process_hook,
                args=(queue,total_size,bytes_so_far)
            )
            process.daemon = True
            # Filthy hack for earlier python versions on Windows
            if os.name == "nt" and hasattr(multiprocessing,"forking"):
                self._update_main_name()
            process.start()
        with open(file_path,mode) as f:
            try:
                while True:
                    chunk = response.read(self.chunk)
                    bytes_so_far += len(chunk)
                    if progress:
                        # Add our items to the queue
                        queue.put((time.time(),len(chunk)))
                    if not chunk: break
                    f.write(chunk)
            finally:
                # Close the response whenever we're done
                response.close()
        if progress:
            # Finalize the queue and wait
            queue.put("DONE")
            process.join()
        if ensure_size_if_present and total_size != -1:
            # We're verifying size - make sure we got what we asked for
            if bytes_so_far != total_size:
                return None # We didn't - imply it failed
        return file_path if os.path.exists(file_path) else None
|
||||
907
ACPI/SSDTTime-master/Scripts/dsdt.py
Normal file
907
ACPI/SSDTTime-master/Scripts/dsdt.py
Normal file
@@ -0,0 +1,907 @@
|
||||
import os, errno, tempfile, shutil, plistlib, sys, binascii, zipfile, getpass, re
|
||||
from . import run, downloader, utils
|
||||
|
||||
try:
|
||||
FileNotFoundError
|
||||
except NameError:
|
||||
FileNotFoundError = IOError
|
||||
|
||||
class DSDT:
|
||||
def __init__(self, **kwargs):
    """Set up helper objects, locate (or download) the iasl binaries, and
    prime the per-instance ACPI table state.

    Raises:
        Exception: if iasl cannot be located nor downloaded; the message
            includes manual-download instructions for the current platform.
    """
    self.dl = downloader.Downloader()
    self.r = run.Run()
    self.u = utils.Utils("SSDT Time")
    # Per-platform download locations for the iasl compiler (and the
    # legacy builds used on older OS versions).
    self.iasl_url_macOS = "https://raw.githubusercontent.com/acidanthera/MaciASL/master/Dist/iasl-stable"
    self.iasl_url_macOS_legacy = "https://raw.githubusercontent.com/acidanthera/MaciASL/master/Dist/iasl-legacy"
    self.iasl_url_linux = "https://raw.githubusercontent.com/corpnewt/linux_iasl/main/iasl.zip"
    self.iasl_url_linux_legacy = "https://raw.githubusercontent.com/corpnewt/iasl-legacy/main/iasl-legacy-linux.zip"
    # Windows binaries are scraped from the acpica GitHub releases page,
    # falling back to Intel's ACPICA downloads page.
    self.acpi_github_windows = "https://github.com/acpica/acpica/releases/latest"
    self.acpi_binary_tools = "https://www.intel.com/content/www/us/en/developer/topic-technology/open/acpica/download.html"
    self.iasl_url_windows_legacy = "https://raw.githubusercontent.com/corpnewt/iasl-legacy/main/iasl-legacy-windows.zip"
    # Extra HTTP headers for downloads (left empty by default)
    self.h = {} # {"User-Agent":"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36"}
    # check_iasl() may download the binaries as a side effect
    self.iasl = self.check_iasl()
    self.iasl_legacy = self.check_iasl(legacy=True)
    if not self.iasl:
        # Build a platform-appropriate "download it yourself" message
        url = (self.acpi_github_windows,self.acpi_binary_tools) if os.name=="nt" else \
        self.iasl_url_macOS if sys.platform=="darwin" else \
        self.iasl_url_linux if sys.platform.startswith("linux") else None
        exception = "Could not locate or download iasl!"
        if url:
            exception += "\n\nPlease manually download {} from:\n - {}\n\nAnd place in:\n - {}\n".format(
                "and extract iasl.exe and acpidump.exe" if os.name=="nt" else "iasl",
                "\n - ".join(url) if isinstance(url,(list,tuple)) else url,
                os.path.dirname(os.path.realpath(__file__))
            )
        raise Exception(exception)
    # Table signatures we consider loadable, and the subset whose
    # disassembly produces a mixed (source + hex) listing.
    self.allowed_signatures = (b"APIC",b"DMAR",b"DSDT",b"SSDT")
    self.mixed_listing = (b"DSDT",b"SSDT")
    # Loaded tables keyed by file name - populated by load()
    self.acpi_tables = {}
    # Setup regex matches
    # hex_match: a mixed-listing hex dump line, e.g. "  0000: AA BB // cmt"
    self.hex_match = re.compile(r"^\s*[0-9A-F]{4,}:(\s[0-9A-F]{2})+(\s+\/\/.*)?$")
    # type_match: ASL declaration lines (Processor/Scope/Device/Method/Name)
    self.type_match = re.compile(r".*(?P<type>Processor|Scope|Device|Method|Name) \((?P<name>[^,\)]+).*")
|
||||
|
||||
def _table_signature(self, table_path, table_name = None, data = None):
|
||||
path = os.path.join(table_path,table_name) if table_name else table_path
|
||||
if not os.path.isfile(path):
|
||||
return None
|
||||
if data:
|
||||
# Got data - make sure there's enough for a signature
|
||||
if len(data) >= 4:
|
||||
return data[:4]
|
||||
else:
|
||||
return None
|
||||
# Try to load it and read the first 4 bytes to verify the
|
||||
# signature
|
||||
with open(path,"rb") as f:
|
||||
try:
|
||||
return f.read(4)
|
||||
except:
|
||||
pass
|
||||
return None
|
||||
|
||||
def non_ascii_count(self, data):
    """Count elements of `data` outside the ASCII range (>= 0x80).

    Emulates ACPICA's ACPI_IS_ASCII check over the whole buffer;
    accepts bytes or str (non-int elements are converted via ord()).
    """
    count = 0
    for ch in data:
        if not isinstance(ch, int):
            try:
                ch = ord(ch)
            except:
                ch = -1  # un-convertible - treat as ASCII (not counted)
        if ch >= 0x80:
            count += 1
    return count
|
||||
|
||||
def table_is_valid(self, table_path, table_name = None, ensure_binary = True, check_signature = True):
|
||||
# Ensure we have a valid file
|
||||
path = os.path.join(table_path,table_name) if table_name else table_path
|
||||
if not os.path.isfile(path):
|
||||
return False
|
||||
# Set up a data placeholder
|
||||
data = None
|
||||
if ensure_binary is not None:
|
||||
# Make sure the table is the right type - load it
|
||||
# and read the data
|
||||
with open(path,"rb") as f:
|
||||
data = f.read()
|
||||
# Make sure we actually got some data
|
||||
if not data:
|
||||
return False
|
||||
# Gather the non-ASCII char count
|
||||
non_ascii_count = self.non_ascii_count(data)
|
||||
if ensure_binary and not non_ascii_count:
|
||||
# We want a binary, but it's all ascii
|
||||
return False
|
||||
elif not ensure_binary and non_ascii_count:
|
||||
# We want ascii, and got a binary
|
||||
return False
|
||||
if check_signature:
|
||||
if not self._table_signature(path,data=data) in self.allowed_signatures:
|
||||
# Check with the function - we didn't load the table
|
||||
# already
|
||||
return False
|
||||
# If we got here - the table passed our checks
|
||||
return True
|
||||
|
||||
def get_ascii_print(self, data):
    """Return (had_unprintables, ascii_string) for the passed bytes/str.

    Every element outside the printable ASCII range - 0x20 (space)
    through 0x7E ("~"), inclusive - is replaced with "?" and flagged
    via the first tuple element.
    """
    unprintables = False
    ascii_string = ""
    for b in data:
        if not isinstance(b,int):
            try: b = ord(b)
            except: b = -1
        # Inclusive upper bound: 0x7E ("~") IS printable ASCII.  The
        # previous exclusive comparison (b < ord("~")) wrongly turned
        # a literal "~" into "?".
        if ord(" ") <= b <= ord("~"):
            ascii_string += chr(b)
        else:
            ascii_string += "?"
            unprintables = True
    return (unprintables,ascii_string)
|
||||
|
||||
def load(self, table_path):
    """Disassemble and parse the ACPI table(s) at `table_path`.

    Accepts either a single .aml/.dat file or a directory of them.  The
    tables are copied to a temp dir, disassembled with iasl, parsed for
    scopes/paths and header fields, then merged into self.acpi_tables.

    Returns:
        (loaded, failed): dict of successfully parsed tables keyed by
        file name, and a list of file names that failed to disassemble.
        On any unexpected error, returns ({}, failed).
    """
    # Attempt to load the passed file - or if a directory
    # was passed, load all .aml and .dat files within
    cwd = os.getcwd()
    temp = None
    target_files = {}
    failed = []
    try:
        if os.path.isdir(table_path):
            # Got a directory - gather all valid
            # files in the directory
            valid_files = [
                x for x in os.listdir(table_path) if self.table_is_valid(table_path,x)
            ]
        elif os.path.isfile(table_path):
            # Just loading the one table - don't check
            # the signature - but make sure it's binary
            if self.table_is_valid(table_path,check_signature=False):
                valid_files = [table_path]
            else:
                # Not valid - raise an error
                raise FileNotFoundError(
                    errno.ENOENT,
                    os.strerror(errno.ENOENT),
                    "{} is not a valid .aml/.dat file.".format(table_path)
                )
        else:
            # Not a valid path
            raise FileNotFoundError(
                errno.ENOENT,
                os.strerror(errno.ENOENT),
                table_path
            )
        if not valid_files:
            # No valid files were found
            raise FileNotFoundError(
                errno.ENOENT,
                os.strerror(errno.ENOENT),
                "No valid .aml/.dat files found at {}".format(table_path)
            )
        # Create a temp dir and copy all files there
        # NOTE(review): in the single-file case valid_files holds the full
        # path, so os.path.join(table_path,file) resolves to that path.
        temp = tempfile.mkdtemp()
        for file in valid_files:
            shutil.copy(
                os.path.join(table_path,file),
                temp
            )
        # Build a list of all target files in the temp folder - and save
        # the disassembled_name for each to verify after
        list_dir = os.listdir(temp)
        for x in list_dir:
            if len(list_dir) > 1 and not self.table_is_valid(temp,x):
                continue # Skip invalid files when multiple are passed
            name_ext = [y for y in os.path.basename(x).split(".") if y]
            if name_ext and name_ext[-1].lower() in ("asl","dsl"):
                continue # Skip any already disassembled files
            target_files[x] = {
                "assembled_name": os.path.basename(x),
                "disassembled_name": ".".join(x.split(".")[:-1]) + ".dsl",
            }
        if not target_files:
            # Somehow we ended up with none?
            raise FileNotFoundError(
                errno.ENOENT,
                os.strerror(errno.ENOENT),
                "No valid .aml/.dat files found at {}".format(table_path)
            )
        # iasl writes its output next to the inputs - work inside temp
        os.chdir(temp)
        # Generate and run a command
        # DSDT/SSDT get a mixed listing (-dl -l); other signatures are
        # disassembled plainly
        dsdt_or_ssdt = [x for x in list(target_files) if self._table_signature(temp,x) in self.mixed_listing]
        other_tables = [x for x in list(target_files) if not x in dsdt_or_ssdt]
        out_d = ("","",0)
        out_t = ("","",0)

        def exists(folder_path,file_name):
            # Helper to make sure the file exists and has a non-Zero size
            check_path = os.path.join(folder_path,file_name)
            if os.path.isfile(check_path) and os.stat(check_path).st_size > 0:
                return True
            return False

        # Check our DSDT and SSDTs first
        if dsdt_or_ssdt:
            args = [self.iasl,"-da","-dl","-l"]+list(dsdt_or_ssdt)
            out_d = self.r.run({"args":args})
            if out_d[2] != 0:
                # Attempt to run without `-da` if the above failed
                args = [self.iasl,"-dl","-l"]+list(dsdt_or_ssdt)
                out_d = self.r.run({"args":args})
            # Get a list of disassembled names that failed
            fail_temp = []
            for x in dsdt_or_ssdt:
                if not exists(temp,target_files[x]["disassembled_name"]):
                    fail_temp.append(x)
            # Let's try to disassemble any that failed individually
            for x in fail_temp:
                args = [self.iasl,"-dl","-l",x]
                self.r.run({"args":args})
                if not exists(temp,target_files[x]["disassembled_name"]):
                    failed.append(x)
        # Check for other tables (DMAR, APIC, etc)
        if other_tables:
            args = [self.iasl]+list(other_tables)
            out_t = self.r.run({"args":args})
            # Get a list of disassembled names that failed
            for x in other_tables:
                if not exists(temp,target_files[x]["disassembled_name"]):
                    failed.append(x)
        if len(failed) == len(target_files):
            raise Exception("Failed to disassemble - {}".format(", ".join(failed)))
        # Actually process the tables now
        to_remove = []
        for file in target_files:
            # We need to load the .aml and .dsl into memory
            # and get the paths and scopes
            if not exists(temp,target_files[file]["disassembled_name"]):
                to_remove.append(file)
                continue
            with open(os.path.join(temp,target_files[file]["disassembled_name"]),"r") as f:
                target_files[file]["table"] = f.read()
            # Remove the compiler info at the start
            if target_files[file]["table"].startswith("/*"):
                target_files[file]["table"] = "*/".join(target_files[file]["table"].split("*/")[1:]).strip()
            # Check for "Table Header:" or "Raw Table Data: Length" and strip everything
            # after the last occurrence
            for h in ("\nTable Header:","\nRaw Table Data: Length"):
                if h in target_files[file]["table"]:
                    target_files[file]["table"] = h.join(target_files[file]["table"].split(h)[:-1]).rstrip()
                    break # Bail on the first match
            target_files[file]["lines"] = target_files[file]["table"].split("\n")
            target_files[file]["scopes"] = self.get_scopes(table=target_files[file])
            target_files[file]["paths"] = self.get_paths(table=target_files[file])
            with open(os.path.join(temp,file),"rb") as f:
                table_bytes = f.read()
            target_files[file]["raw"] = table_bytes
            # Let's read the table header and get the info we need
            #
            # [0:4] = Table Signature
            # [4:8] = Length (little endian)
            # [8] = Compliance Revision
            # [9] = Checksum
            # [10:16] = OEM ID (6 chars, padded to the right with \x00)
            # [16:24] = Table ID (8 chars, padded to the right with \x00)
            # [24:28] = OEM Revision (little endian)
            #
            target_files[file]["signature"] = table_bytes[0:4]
            target_files[file]["revision"] = table_bytes[8]
            target_files[file]["oem"] = table_bytes[10:16]
            target_files[file]["id"] = table_bytes[16:24]
            target_files[file]["oem_revision"] = int(binascii.hexlify(table_bytes[24:28][::-1]),16)
            target_files[file]["length"] = len(table_bytes)
            # Get the printable versions of the sig, oem, and id as needed
            for key in ("signature","oem","id"):
                unprintable,ascii_string = self.get_ascii_print(target_files[file][key])
                if unprintable:
                    target_files[file][key+"_ascii"] = ascii_string
            # Cast as int on py2, and try to decode bytes to strings on py3
            # (2/3==0 only under py2 integer division)
            if 2/3==0:
                target_files[file]["revision"] = int(binascii.hexlify(target_files[file]["revision"]),16)
            if target_files[file]["signature"] in self.mixed_listing:
                # The disassembler omits the last line of hex data in a mixed listing
                # file... convenient. However - we should be able to reconstruct this
                # manually.
                last_hex = next((l for l in target_files[file]["lines"][::-1] if self.is_hex(l)),None)
                if last_hex:
                    # Get the address left of the colon
                    addr = int(last_hex.split(":")[0].strip(),16)
                    # Get the hex bytes right of the colon
                    hexs = last_hex.split(":")[1].split("//")[0].strip()
                    # Increment the address by the number of hex bytes
                    next_addr = addr+len(hexs.split())
                    # Now we need to get the bytes at the end
                    hexb = self.get_hex_bytes(hexs.replace(" ",""))
                    # Get the last occurrence after the split
                    remaining = target_files[file]["raw"].split(hexb)[-1]
                else:
                    # If we didn't get a last hex val - then we likely don't have any
                    # This can happen if the file passed is small enough, or has all
                    # the data in a single block.
                    next_addr = 0
                    remaining = target_files[file]["raw"]
                # Iterate in chunks of 16
                for chunk in [remaining[i:i+16] for i in range(0,len(remaining),16)]:
                    # Build a new byte string
                    hex_string = binascii.hexlify(chunk)
                    # Decode the bytes if we're on python 3
                    if 2/3!=0: hex_string = hex_string.decode()
                    # Ensure the bytes are all upper case
                    hex_string = hex_string.upper()
                    l = " {}: {}".format(
                        hex(next_addr)[2:].upper().rjust(4,"0"),
                        " ".join([hex_string[i:i+2] for i in range(0,len(hex_string),2)])
                    )
                    # Increment our address
                    next_addr += len(chunk)
                    # Append our line
                    target_files[file]["lines"].append(l)
                    target_files[file]["table"] += "\n"+l
        # Remove any that didn't disassemble
        for file in to_remove:
            target_files.pop(file,None)
    except Exception as e:
        print(e)
        return ({},failed)
    finally:
        # Always restore the working dir and clean up the temp copy
        os.chdir(cwd)
        if temp: shutil.rmtree(temp,ignore_errors=True)
    # Add/update any tables we loaded
    for table in target_files:
        self.acpi_tables[table] = target_files[table]
    # Only return the newly loaded results
    return (target_files, failed,)
|
||||
|
||||
def get_latest_iasl(self):
    """Scrape the latest Windows iasl download URL(s).

    Tries the acpica GitHub releases page first (returning either a
    single .zip attachment URL, or an (iasl_url, acpidump_url) tuple
    from the expanded assets), then falls back to scraping Intel's
    ACPICA downloads page.  Returns None if nothing is found.
    """
    # First try getting from github - if that fails, fall back to intel.com
    try:
        source = self.dl.get_string(self.acpi_github_windows, progress=False, headers=self.h)
        assets_url = None
        # Check for attachments first
        for line in source.split("\n"):
            if '<a href="https://github.com/user-attachments/files/' in line \
            and "/iasl-win-" in line and '.zip"' in line:
                # We found it - return the URL
                return line.split('<a href="')[1].split('"')[0]
            if 'src="' in line and "expanded_assets" in line:
                # Save the URL for later in case we need it
                assets_url = line.split('src="')[1].split('"')[0]
        # If we got here - we didn't find the link in the attachments,
        # check in the expanded assets
        if assets_url:
            source = self.dl.get_string(assets_url, progress=False, headers=self.h)
            iasl = acpidump = None # Placeholders
            for line in source.split("\n"):
                # Check for any required assets
                if '<a href="/acpica/acpica/releases/download/' in line:
                    # Check if we got iasl.exe or acpidump.exe
                    # NOTE(review): the second .split('"')[0] is a no-op
                    # after .split('"')[1] - left as-is
                    if '/iasl.exe"' in line:
                        iasl = "https://github.com{}".format(line.split('"')[1].split('"')[0])
                    if '/acpidump.exe"' in line:
                        acpidump = "https://github.com{}".format(line.split('"')[1].split('"')[0])
                    if iasl and acpidump:
                        # Got the needed files, return them
                        return (iasl,acpidump)
        # If we got here - move on to intel.com
    except: pass
    # Helper to scrape https://www.intel.com/content/www/us/en/developer/topic-technology/open/acpica/download.html for the latest
    # download binaries link - then scrape the contents of that page for the actual download as needed
    try:
        source = self.dl.get_string(self.acpi_binary_tools, progress=False, headers=self.h)
        for line in source.split("\n"):
            if '<a href="' in line and ">iasl compiler and windows acpi tools" in line.lower():
                # Check if we have a direct download link - i.e. ends with .zip - or if we're
                # redirected to a different download page - i.e. ends with .html
                dl_link = line.split('<a href="')[1].split('"')[0]
                if dl_link.lower().endswith(".zip"):
                    # Direct download - return as-is
                    return dl_link
                elif dl_link.lower().endswith((".html",".htm")):
                    # Redirect - try to scrape for a download link
                    try:
                        if dl_link.lower().startswith(("http:","https:")):
                            # The existing link is likely complete - use it as-is
                            dl_page_url = dl_link
                        else:
                            # <a href="/content/www/us/en/download/774881/acpi-component-architecture-downloads-windows-binary-tools.html">iASL Compiler and Windows ACPI Tools
                            # Only a suffix - prepend to it
                            dl_page_url = "https://www.intel.com" + line.split('<a href="')[1].split('"')[0]
                        dl_page_source = self.dl.get_string(dl_page_url, progress=False, headers=self.h)
                        for line in dl_page_source.split("\n"):
                            if 'data-href="' in line and '"download-button"' in line:
                                # Should have the right line
                                return line.split('data-href="')[1].split('"')[0]
                    except: pass
    except: pass
    # Nothing found on either site
    return None
|
||||
|
||||
def check_iasl(self, legacy=False, try_downloading=True):
    """Locate the iasl binary next to this script, downloading if needed.

    Args:
        legacy: look for/download the legacy build instead.
        try_downloading: when False, only check the local paths (used to
            terminate the single retry after a download attempt).

    Returns:
        The path to the first iasl binary found, or None.
    """
    if sys.platform == "win32":
        targets = (os.path.join(os.path.dirname(os.path.realpath(__file__)), "iasl-legacy.exe" if legacy else "iasl.exe"),)
    else:
        if legacy:
            targets = (os.path.join(os.path.dirname(os.path.realpath(__file__)), "iasl-legacy"),)
        else:
            # Prefer dev, then stable, then an unsuffixed binary
            targets = (
                os.path.join(os.path.dirname(os.path.realpath(__file__)), "iasl-dev"),
                os.path.join(os.path.dirname(os.path.realpath(__file__)), "iasl-stable"),
                os.path.join(os.path.dirname(os.path.realpath(__file__)), "iasl")
            )
    target = next((t for t in targets if os.path.exists(t)),None)
    if target or not try_downloading:
        # Either found it - or we didn't, and have already tried downloading
        return target
    # Need to download
    temp = tempfile.mkdtemp()
    try:
        if sys.platform == "darwin":
            self._download_and_extract(temp,self.iasl_url_macOS_legacy if legacy else self.iasl_url_macOS)
        elif sys.platform.startswith("linux"):
            self._download_and_extract(temp,self.iasl_url_linux_legacy if legacy else self.iasl_url_linux)
        elif sys.platform == "win32":
            # Non-legacy Windows builds are scraped from the release pages
            iasl_url_windows = self.iasl_url_windows_legacy if legacy else self.get_latest_iasl()
            if not iasl_url_windows: raise Exception("Could not get latest iasl for Windows")
            self._download_and_extract(temp,iasl_url_windows)
        else:
            raise Exception("Unknown OS")
    except Exception as e:
        # Best-effort: report and fall through to the final re-check
        print("An error occurred :(\n - {}".format(e))
    shutil.rmtree(temp, ignore_errors=True)
    # Check again after downloading
    return self.check_iasl(legacy=legacy,try_downloading=False)
|
||||
|
||||
def _download_and_extract(self, temp, url):
    """Download one or more URLs into `temp`, unzip if needed, and copy
    any iasl*/acpidump* binaries found into this script's directory
    (chmod +x on non-Windows).
    """
    script_dir = os.path.join(os.path.dirname(os.path.realpath(__file__)))
    if not isinstance(url,(tuple,list)):
        url = (url,) # Wrap in a tuple
    for u in url:
        # Each download gets its own sub-temp dir to avoid name clashes
        ztemp = tempfile.mkdtemp(dir=temp)
        zfile = os.path.basename(u)
        print("Downloading {}".format(zfile))
        self.dl.stream_to_file(u, os.path.join(ztemp,zfile), progress=False, headers=self.h)
        search_dir = ztemp
        if zfile.lower().endswith(".zip"):
            print(" - Extracting")
            search_dir = tempfile.mkdtemp(dir=temp)
            # Extract with built-in tools \o/
            with zipfile.ZipFile(os.path.join(ztemp,zfile)) as z:
                z.extractall(search_dir)
        for x in os.listdir(search_dir):
            if x.lower().startswith(("iasl","acpidump")):
                # Found one
                print(" - Found {}".format(x))
                if sys.platform != "win32":
                    print(" - Chmod +x")
                    self.r.run({"args":["chmod","+x",os.path.join(search_dir,x)]})
                print(" - Copying to {} directory".format(os.path.basename(script_dir)))
                shutil.copy(os.path.join(search_dir,x), os.path.join(script_dir,x))
|
||||
|
||||
def dump_tables(self, output, disassemble=False):
    """Dump the machine's ACPI tables into the `output` folder.

    On Windows uses a bundled acpidump.exe; on Linux copies from
    /sys/firmware/acpi/tables via sudo.  Returns the output folder path
    (or the result of self.load() when disassemble is True); returns
    None on failure.
    NOTE(review): there is no macOS branch - on darwin this implicitly
    returns None.
    """
    # Helper to dump all ACPI tables to the specified
    # output path
    def check_command_output(out):
        # Returns True (and prints stderr) when the command FAILED
        if out[2] == 0: return False
        print(" - {}".format(out[1]))
        return True
    self.u.head("Dumping ACPI Tables")
    print("")
    # Ensure the output folder exists
    res = self.check_output(output)
    if os.name == "nt":
        target = os.path.join(os.path.dirname(os.path.realpath(__file__)),"acpidump.exe")
        if os.path.exists(target):
            # Dump to the target folder
            print("Dumping tables to {}...".format(res))
            # acpidump -b writes into the current dir - chdir around it
            cwd = os.getcwd()
            os.chdir(res)
            out = self.r.run({"args":[target,"-b"]})
            os.chdir(cwd)
            if check_command_output(out):
                return
            # Make sure we have a DSDT
            if not next((x for x in os.listdir(res) if x.lower().startswith("dsdt.")),None):
                # We need to try and dump the DSDT individually - this sometimes
                # happens on older Windows installs or odd OEM machines
                print(" - DSDT not found - dumping by signature...")
                os.chdir(res)
                out = self.r.run({"args":[target,"-b","-n","DSDT"]})
                os.chdir(cwd)
                if check_command_output(out):
                    return
            # Iterate the dumped files and ensure the names are uppercase, and the
            # extension used is .aml, not the default .dat
            print("Updating names...")
            for f in os.listdir(res):
                new_name = f.upper()
                if new_name.endswith(".DAT"):
                    new_name = new_name[:-4]+".aml"
                if new_name != f:
                    # Something changed - print it and rename it
                    try:
                        os.rename(os.path.join(res,f),os.path.join(res,new_name))
                    except Exception as e:
                        print(" - {} -> {} failed: {}".format(f,new_name,e))
            print("Dump successful!")
            if disassemble:
                return self.load(res)
            return res
        else:
            print("Failed to locate acpidump.exe")
            return
    elif sys.platform.startswith("linux"):
        table_dir = "/sys/firmware/acpi/tables"
        if not os.path.isdir(table_dir):
            print("Could not locate {}!".format(table_dir))
            return
        print("Copying tables to {}...".format(res))
        copied_files = []  # NOTE(review): unused - retained as-is
        for table in os.listdir(table_dir):
            if not os.path.isfile(os.path.join(table_dir,table)):
                continue # We only want files
            target_path = os.path.join(res,table.upper()+".aml")
            comms = (
                # Copy the file
                ["sudo","cp",os.path.join(table_dir,table),target_path],
                # Ensure it's owned by the user account
                ["sudo","chown",getpass.getuser(),target_path],
                # Enable read and write permissions
                ["sudo","chmod","a+rw",target_path]
            )
            # Iterate our commands and bail if any error
            for comm in comms:
                out = self.r.run({"args":comm})
                if check_command_output(out):
                    return
        print("Dump successful!")
        if disassemble:
            return self.load(res)
        return res
|
||||
|
||||
def check_output(self, output):
    """Ensure the `output` folder exists one level above this script's
    directory, creating it if needed, and return its absolute path."""
    parent = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
    out_dir = os.path.join(parent, output)
    if not os.path.isdir(out_dir):
        os.makedirs(out_dir)
    return out_dir
|
||||
|
||||
def get_hex_from_int(self, total, pad_to = 4):
    """Render an int as a little-endian hex string, zero-padded on the
    left to `pad_to` digits before the byte order is reversed."""
    padded = hex(total)[2:].upper().rjust(pad_to, "0")
    byte_pairs = [padded[i:i + 2] for i in range(0, len(padded), 2)]
    return "".join(reversed(byte_pairs))
|
||||
|
||||
def get_hex(self, line):
    """Return only the hex digits from a mixed-listing dump line -
    address prefix, trailing // comment, and spaces all removed."""
    after_address = line.split(":")[1]
    without_comment = after_address.split("//")[0]
    return without_comment.replace(" ", "")
|
||||
|
||||
def get_line(self, line):
    """Strip any trailing // comment; when a colon remains, also drop
    the address prefix (keeping spacing intact)."""
    without_comment = line.split("//")[0]
    if ":" in without_comment:
        return without_comment.split(":")[1]
    return without_comment
|
||||
|
||||
def get_hex_bytes(self, line):
    """Decode a string of hex digits into the corresponding bytes."""
    # a2b_hex is the documented alias of unhexlify
    return binascii.a2b_hex(line)
|
||||
|
||||
def get_str_bytes(self, value):
    """Encode str values to bytes on Python 3; everything else is
    returned unchanged (py2 str is already bytes)."""
    running_py3 = 2 / 3 != 0  # true division only happens on py3
    if running_py3 and isinstance(value, str):
        return value.encode()
    return value
|
||||
|
||||
def get_table_with_id(self, table_id):
|
||||
table_id = self.get_str_bytes(table_id)
|
||||
return next((v for k,v in self.acpi_tables.items() if table_id == v.get("id")),None)
|
||||
|
||||
def get_table_with_signature(self, table_sig):
|
||||
table_sig = self.get_str_bytes(table_sig)
|
||||
return next((v for k,v in self.acpi_tables.items() if table_sig == v.get("signature")),None)
|
||||
|
||||
def get_table(self, table_id_or_sig):
|
||||
table_id_or_sig = self.get_str_bytes(table_id_or_sig)
|
||||
return next((v for k,v in self.acpi_tables.items() if table_id_or_sig in (v.get("signature"),v.get("id"))),None)
|
||||
|
||||
def get_dsdt(self):
|
||||
return self.get_table_with_signature("DSDT")
|
||||
|
||||
def get_dsdt_or_only(self):
|
||||
dsdt = self.get_dsdt()
|
||||
if dsdt: return dsdt
|
||||
# Make sure we have only one table
|
||||
if len(self.acpi_tables) != 1:
|
||||
return None
|
||||
return list(self.acpi_tables.values())[0]
|
||||
|
||||
def find_previous_hex(self, index=0, table=None):
|
||||
if not table: table = self.get_dsdt_or_only()
|
||||
if not table: return ("",-1,-1)
|
||||
# Returns the index of the previous set of hex digits before the passed index
|
||||
start_index = -1
|
||||
end_index = -1
|
||||
old_hex = True
|
||||
for i,line in enumerate(table.get("lines","")[index::-1]):
|
||||
if old_hex:
|
||||
if not self.is_hex(line):
|
||||
# Broke out of the old hex
|
||||
old_hex = False
|
||||
continue
|
||||
# Not old_hex territory - check if we got new hex
|
||||
if self.is_hex(line): # Checks for a :, but not in comments
|
||||
end_index = index-i
|
||||
hex_text,start_index = self.get_hex_ending_at(end_index,table=table)
|
||||
return (hex_text, start_index, end_index)
|
||||
return ("",start_index,end_index)
|
||||
|
||||
def find_next_hex(self, index=0, table=None):
|
||||
if not table: table = self.get_dsdt_or_only()
|
||||
if not table: return ("",-1,-1)
|
||||
# Returns the index of the next set of hex digits after the passed index
|
||||
start_index = -1
|
||||
end_index = -1
|
||||
old_hex = True
|
||||
for i,line in enumerate(table.get("lines","")[index:]):
|
||||
if old_hex:
|
||||
if not self.is_hex(line):
|
||||
# Broke out of the old hex
|
||||
old_hex = False
|
||||
continue
|
||||
# Not old_hex territory - check if we got new hex
|
||||
if self.is_hex(line): # Checks for a :, but not in comments
|
||||
start_index = i+index
|
||||
hex_text,end_index = self.get_hex_starting_at(start_index,table=table)
|
||||
return (hex_text, start_index, end_index)
|
||||
return ("",start_index,end_index)
|
||||
|
||||
def is_hex(self, line):
|
||||
return self.hex_match.match(line) is not None
|
||||
|
||||
def get_hex_starting_at(self, start_index, table=None):
|
||||
if not table: table = self.get_dsdt_or_only()
|
||||
if not table: return ("",-1)
|
||||
# Returns a tuple of the hex, and the ending index
|
||||
hex_text = ""
|
||||
index = -1
|
||||
for i,x in enumerate(table.get("lines","")[start_index:]):
|
||||
if not self.is_hex(x):
|
||||
break
|
||||
hex_text += self.get_hex(x)
|
||||
index = i+start_index
|
||||
return (hex_text, index)
|
||||
|
||||
def get_hex_ending_at(self, start_index, table=None):
|
||||
if not table: table = self.get_dsdt_or_only()
|
||||
if not table: return ("",-1)
|
||||
# Returns a tuple of the hex, and the ending index
|
||||
hex_text = ""
|
||||
index = -1
|
||||
for i,x in enumerate(table.get("lines","")[start_index::-1]):
|
||||
if not self.is_hex(x):
|
||||
break
|
||||
hex_text = self.get_hex(x)+hex_text
|
||||
index = start_index-i
|
||||
return (hex_text, index)
|
||||
|
||||
def get_shortest_unique_pad(self, current_hex, index, instance=0, table=None):
|
||||
if not table: table = self.get_dsdt_or_only()
|
||||
if not table: return None
|
||||
try: left_pad = self.get_unique_pad(current_hex, index, False, instance, table=table)
|
||||
except: left_pad = None
|
||||
try: right_pad = self.get_unique_pad(current_hex, index, True, instance, table=table)
|
||||
except: right_pad = None
|
||||
try: mid_pad = self.get_unique_pad(current_hex, index, None, instance, table=table)
|
||||
except: mid_pad = None
|
||||
if left_pad == right_pad == mid_pad is None: raise Exception("No unique pad found!")
|
||||
# We got at least one unique pad
|
||||
min_pad = None
|
||||
for x in (left_pad,right_pad,mid_pad):
|
||||
if x is None: continue # Skip
|
||||
if min_pad is None or len(x[0]+x[1]) < len(min_pad[0]+min_pad[1]):
|
||||
min_pad = x
|
||||
return min_pad
|
||||
|
||||
def get_unique_pad(self, current_hex, index, direction=None, instance=0, table=None):
    """Compute (padl, padr) hex strings that make `current_hex` unique
    within the table's raw bytes.

    Args:
        current_hex: uppercase hex string to anchor on.
        index: line index where the hex search begins.
        direction: True = extend forward only, False = backward only,
            None = alternate both ways (shorter side first).
        instance: which occurrence of current_hex (0-based) to target.

    Raises:
        Exception: no table, hex not found at/after index, instance out
            of range, or the end of the file is hit before uniqueness.
    """
    if not table: table = self.get_dsdt_or_only()
    if not table: raise Exception("No valid table passed!")
    # Returns any pad needed to make the passed patch unique
    # direction can be True = forward, False = backward, None = both
    start_index = index
    line,last_index = self.get_hex_starting_at(index,table=table)
    if last_index == -1:
        raise Exception("Could not find hex starting at index {}!".format(index))
    first_line = line
    # Assume at least 1 byte of our current_hex exists at index, so we need to at
    # least load in len(current_hex)-2 worth of data if we haven't found it.
    while True:
        if current_hex in line or len(line) >= len(first_line)+len(current_hex):
            break # Assume we've hit our cap
        new_line,_index,last_index = self.find_next_hex(last_index, table=table)
        if last_index == -1:
            raise Exception("Hit end of file before passed hex was located!")
        # Append the new info
        line += new_line
    if not current_hex in line:
        raise Exception("{} not found in table at index {}-{}!".format(current_hex,start_index,last_index))
    padl = padr = ""
    # Split around the target occurrence so we know which hex lies to
    # the left (linel) and right (liner) of it
    parts = line.split(current_hex)
    if instance >= len(parts)-1:
        raise Exception("Instance out of range!")
    linel = current_hex.join(parts[0:instance+1])
    liner = current_hex.join(parts[instance+1:])
    last_check = True # Default to forward
    while True:
        # Check if our hex string is unique
        check_bytes = self.get_hex_bytes(padl+current_hex+padr)
        if table["raw"].count(check_bytes) == 1: # Got it!
            break
        # Grow the pad one byte (two hex chars) at a time, pulling in
        # more hex lines from the table when a side runs dry
        if direction == True or (direction is None and len(padr)<=len(padl)):
            # Let's check a forward byte
            if not len(liner):
                # Need to grab more
                liner, _index, last_index = self.find_next_hex(last_index, table=table)
                if last_index == -1: raise Exception("Hit end of file before unique hex was found!")
            padr = padr+liner[0:2]
            liner = liner[2:]
            continue
        if direction == False or (direction is None and len(padl)<=len(padr)):
            # Let's check a backward byte
            if not len(linel):
                # Need to grab more
                linel, start_index, _index = self.find_previous_hex(start_index, table=table)
                if _index == -1: raise Exception("Hit end of file before unique hex was found!")
            padl = linel[-2:]+padl
            linel = linel[:-2]
            continue
        break
    return (padl,padr)
|
||||
|
||||
def get_devices(self,search=None,types=("Device (","Scope ("),strip_comments=False,table=None):
|
||||
if not table: table = self.get_dsdt_or_only()
|
||||
if not table: return []
|
||||
# Returns a list of tuples organized as (Device/Scope,d_s_index,matched_index)
|
||||
if search is None:
|
||||
return []
|
||||
last_device = None
|
||||
device_index = 0
|
||||
devices = []
|
||||
for index,line in enumerate(table.get("lines","")):
|
||||
if self.is_hex(line):
|
||||
continue
|
||||
line = self.get_line(line) if strip_comments else line
|
||||
if any ((x for x in types if x in line)):
|
||||
# Got a last_device match
|
||||
last_device = line
|
||||
device_index = index
|
||||
if search in line:
|
||||
# Got a search hit - add it
|
||||
devices.append((last_device,device_index,index))
|
||||
return devices
|
||||
|
||||
def get_scope(self,starting_index=0,add_hex=False,strip_comments=False,table=None):
    """Walk the table's lines from starting_index and collect them until the
    scope opened there closes (brace depth returns to zero).

    Hex dump lines are skipped unless add_hex is set; comments are stripped
    when strip_comments is set. Returns the collected lines.
    """
    table = table or self.get_dsdt_or_only()
    if not table:
        return []
    depth = None  # Unknown until the first opening brace is seen
    collected = []
    for raw in table.get("lines","")[starting_index:]:
        if self.is_hex(raw):
            if add_hex:
                collected.append(raw)
            continue
        text = self.get_line(raw) if strip_comments else raw
        collected.append(text)
        if depth is None:
            # Still looking for the scope's opening brace
            if text.count("{"):
                depth = text.count("{")
            continue
        depth += text.count("{") - text.count("}")
        if depth <= 0:
            # Brace depth exhausted - we've exited the scope
            return collected
    return collected
def get_scopes(self, table=None):
    """Return (line, index) tuples for every line in the table that opens a
    Processor/Scope/Device/Method/Name construct, skipping hex lines."""
    table = table or self.get_dsdt_or_only()
    if not table:
        return []
    keywords = ("Processor (","Scope (","Device (","Method (","Name (")
    return [
        (line,index)
        for index,line in enumerate(table.get("lines",""))
        if not self.is_hex(line) and any(k in line for k in keywords)
    ]
def get_paths(self, table=None):
    """Build a sorted list of fully qualified ACPI paths from the table.

    Returns (path_string, line_index, type) tuples, where type is the
    matched construct name from self.type_match (e.g. Device/Method/Name).
    Scope openers contribute to the path stack but are not emitted
    themselves. NOTE(review): relies on self.type_match exposing "name"
    and "type" groups - defined elsewhere in this class.
    """
    if not table: table = self.get_dsdt_or_only()
    if not table: return []
    # Set up lists for complete paths, as well
    # as our current path reference
    path_list = []
    _path = []      # Stack of (name, brace depth at declaration)
    brackets = 0    # Running brace depth
    for i,line in enumerate(table.get("lines",[])):
        if self.is_hex(line):
            # Skip hex
            continue
        line = self.get_line(line)
        brackets += line.count("{")-line.count("}")
        while len(_path):
            # Remove any path entries that are nested
            # equal to or further than our current set
            if _path[-1][-1] >= brackets:
                del _path[-1]
            else:
                break
        type_match = self.type_match.match(line)
        if type_match:
            # Add our path entry and save the full path
            # to the path list as needed
            _path.append((type_match.group("name"),brackets))
            if type_match.group("type") == "Scope":
                continue
            # Ensure that we only consider non-Scope paths that aren't
            # already fully qualified with a \ prefix
            path = []
            # Walk the stack innermost-first until we hit a root anchor
            for p in _path[::-1]:
                path.append(p[0])
                p_check = p[0].split(".")[0].rstrip("_")
                if p_check.startswith("\\") or p_check in ("_SB","_PR"):
                    # Fully qualified - bail here
                    break
            path = ".".join(path[::-1]).split(".")
            # Properly qualify the path
            if len(path) and path[0] == "\\": path.pop(0)
            if any("^" in x for x in path): # Accommodate caret notation
                new_path = []
                for x in path:
                    if x.count("^"):
                        # Remove the last Y paths to account for going up a level
                        del new_path[-1*x.count("^"):]
                    new_path.append(x.replace("^","")) # Add the original, removing any ^ chars
                path = new_path
            if not path:
                continue
            # Ensure we strip trailing underscores for consistency
            padded_path = [("\\" if j==0 else"")+x.lstrip("\\").rstrip("_") for j,x in enumerate(path)]
            path_str = ".".join(padded_path)
            path_list.append((path_str,i,type_match.group("type")))
    return sorted(path_list)
def get_path_of_type(self, obj_type="Device", obj="HPET", table=None):
    """Return the table's path tuples whose type equals obj_type
    (case-insensitive; None matches any type) and whose path ends with
    obj, compared case-insensitively with trailing underscores ignored.
    """
    table = table or self.get_dsdt_or_only()
    if not table:
        return []
    def normalize(p):
        # Case-fold each path element and drop trailing underscore padding
        return ".".join(seg.rstrip("_").upper() for seg in p.split("."))
    target = normalize(obj)
    wanted_type = obj_type.lower() if obj_type else obj_type
    matches = []
    for entry in table.get("paths",[]):
        if wanted_type and wanted_type != entry[2].lower():
            # Type mismatch - skip
            continue
        if normalize(entry[0]).endswith(target):
            matches.append(entry)
    return sorted(matches)
def get_device_paths(self, obj="HPET",table=None):
    """Shorthand for get_path_of_type() limited to Device entries."""
    return self.get_path_of_type("Device",obj,table)

def get_method_paths(self, obj="_STA",table=None):
    """Shorthand for get_path_of_type() limited to Method entries."""
    return self.get_path_of_type("Method",obj,table)

def get_name_paths(self, obj="CPU0",table=None):
    """Shorthand for get_path_of_type() limited to Name entries."""
    return self.get_path_of_type("Name",obj,table)

def get_processor_paths(self, obj_type="Processor",table=None):
    """Return every path of obj_type (default Processor) regardless of name."""
    return self.get_path_of_type(obj_type,"",table)
def get_device_paths_with_id(self,_id="PNP0A03",id_types=("_HID","_CID"),table=None):
    """Return Device path tuples whose _HID/_CID entry (per id_types)
    lives on a line containing _id (case-insensitive).

    Malformed path entries are reported via print() and skipped, matching
    the original best-effort behavior.
    """
    table = table or self.get_dsdt_or_only()
    if not table:
        return []
    if not isinstance(id_types,(list,tuple)):
        return []
    # Keep only string id types, normalized to upper case
    wanted = [t.upper() for t in id_types if isinstance(t,str)]
    if not wanted:
        return []
    target = _id.upper() # Ensure case
    parents = []
    for entry in table.get("paths",[]):
        try:
            suffix = next((t for t in wanted if entry[0].endswith(t)),None)
            if suffix and target in table.get("lines")[entry[1]]:
                # Strip the _HID/_CID suffix (and trailing periods) to get
                # the owning device's path
                parents.append(entry[0][:-len(suffix)].rstrip("."))
        except Exception as e:
            print(e)
            continue
    # Walk the paths again - keep only Device entries whose path we saved
    return [entry for entry in table.get("paths",[]) if entry[0] in parents and entry[-1] == "Device"]
def get_device_paths_with_cid(self,cid="PNP0A03",table=None):
    """Devices advertising cid via a _CID entry."""
    return self.get_device_paths_with_id(cid,("_CID",),table)

def get_device_paths_with_hid(self,hid="ACPI000E",table=None):
    """Devices advertising hid via a _HID entry."""
    return self.get_device_paths_with_id(hid,("_HID",),table)
688
ACPI/SSDTTime-master/Scripts/plist.py
Normal file
688
ACPI/SSDTTime-master/Scripts/plist.py
Normal file
@@ -0,0 +1,688 @@
|
||||
### ###
|
||||
# Imports #
|
||||
### ###
|
||||
|
||||
import datetime, os, plistlib, struct, sys, itertools, binascii
|
||||
from io import BytesIO
|
||||
|
||||
if sys.version_info < (3,0):
    # Force use of StringIO instead of cStringIO as the latter
    # has issues with Unicode strings
    from StringIO import StringIO
else:
    from io import StringIO

# Py2/Py3 string-type shim: referencing the Py2-only names raises
# NameError on Py3, where both fall back to str.
try:
    basestring # Python 2
    unicode
except NameError:
    basestring = str # Python 3
    unicode = str

# Older plistlib builds lack the FMT_* constants; substitute string
# sentinels so format comparisons elsewhere in this module still work.
try:
    FMT_XML = plistlib.FMT_XML
    FMT_BINARY = plistlib.FMT_BINARY
except AttributeError:
    FMT_XML = "FMT_XML"
    FMT_BINARY = "FMT_BINARY"

### ###
# Helper Methods #
### ###
def wrap_data(value):
    """Wrap bytes in plistlib.Data on Python 2; pass through on Python 3."""
    return value if _check_py3() else plistlib.Data(value)

def extract_data(value):
    """Unwrap a plistlib.Data payload on Python 2; pass through otherwise."""
    if _check_py3():
        return value
    return value.data if isinstance(value,plistlib.Data) else value

def _check_py3():
    """True when running under Python 3 or newer."""
    return sys.version_info[0] >= 3
def _is_binary(fp):
    """Return True when fp begins with the binary-plist magic b'bplist00'.

    fp may be an in-memory bytes/str blob or a seekable file object; file
    objects are rewound to offset 0 after peeking at the header.

    Bug fix: the original only special-cased Py2's basestring, so on
    Python 3 a str argument hit str.startswith(bytes) (TypeError) and a
    bytes argument fell through to the file-object branch
    (AttributeError). Both in-memory forms are handled explicitly now.
    """
    if isinstance(fp, (bytes, bytearray)):
        return fp[:8] == b"bplist00"
    if isinstance(fp, str):
        return fp.startswith("bplist00")
    # File-like object: peek at the header, then rewind for the caller
    header = fp.read(32)
    fp.seek(0)
    return header[:8] == b'bplist00'
def _seek_past_whitespace(fp):
    """Advance fp past any leading whitespace, leaving it positioned at the
    first non-whitespace byte, and return that offset.

    An empty or all-whitespace stream is rewound to offset 0 instead.
    """
    offset = 0
    while True:
        ch = fp.read(1)
        if not ch:
            # Hit EOF without finding content - treat as no leading whitespace
            offset = 0
            break
        if not ch.isspace():
            # First non-whitespace character found
            break
        offset += 1
    # Position the stream at the first non-whitespace char
    fp.seek(offset)
    return offset
### ###
|
||||
# Deprecated Functions - Remapped #
|
||||
### ###
|
||||
|
||||
def readPlist(pathOrFile):
    """Deprecated shim: accept a path or an open file and defer to load()."""
    if isinstance(pathOrFile, basestring):
        with open(pathOrFile, "rb") as f:
            return load(f)
    return load(pathOrFile)
def writePlist(value, pathOrFile):
    """Deprecated shim: XML-dump value to a path or an open file via dump()."""
    if isinstance(pathOrFile, basestring):
        with open(pathOrFile, "wb") as f:
            return dump(value, f, fmt=FMT_XML, sort_keys=True, skipkeys=False)
    return dump(value, pathOrFile, fmt=FMT_XML, sort_keys=True, skipkeys=False)
### ###
|
||||
# Remapped Functions #
|
||||
### ###
|
||||
|
||||
def load(fp, fmt=None, use_builtin_types=None, dict_type=dict):
    """Parse a plist from fp (file object; the Py2 XML path also accepts a
    string) and return the root object.

    Dispatches between the bundled binary parser and (monkey-patched)
    plistlib XML parsing; the patches add hex-integer support ("0x..."),
    custom dict_type support, and line numbers in data errors.
    """
    if _is_binary(fp):
        # Binary plists default to legacy plistlib.Data wrapping
        use_builtin_types = False if use_builtin_types is None else use_builtin_types
        try:
            p = _BinaryPlistParser(use_builtin_types=use_builtin_types, dict_type=dict_type)
        except:
            # Python 3.9 removed use_builtin_types
            p = _BinaryPlistParser(dict_type=dict_type)
        return p.parse(fp)
    elif _check_py3():
        offset = _seek_past_whitespace(fp)
        use_builtin_types = True if use_builtin_types is None else use_builtin_types
        # We need to monkey patch this to allow for hex integers - code taken/modified from
        # https://github.com/python/cpython/blob/3.8/Lib/plistlib.py
        if fmt is None:
            # Auto-detect the format from the header bytes
            header = fp.read(32)
            fp.seek(offset)
            for info in plistlib._FORMATS.values():
                if info['detect'](header):
                    P = info['parser']
                    break
            else:
                raise plistlib.InvalidFileException()
        else:
            P = plistlib._FORMATS[fmt]['parser']
        try:
            p = P(use_builtin_types=use_builtin_types, dict_type=dict_type)
        except:
            # Python 3.9 removed use_builtin_types
            p = P(dict_type=dict_type)
        if isinstance(p,plistlib._PlistParser):
            # Monkey patch!
            def end_integer():
                d = p.get_data()
                # Accept 0x-prefixed hex integers in addition to decimal
                value = int(d,16) if d.lower().startswith("0x") else int(d)
                if -1 << 63 <= value < 1 << 64:
                    p.add_object(value)
                else:
                    raise OverflowError("Integer overflow at line {}".format(p.parser.CurrentLineNumber))
            def end_data():
                try:
                    p.add_object(plistlib._decode_base64(p.get_data()))
                except Exception as e:
                    # Surface the XML line number in base64 decode failures
                    raise Exception("Data error at line {}: {}".format(p.parser.CurrentLineNumber,e))
            p.end_integer = end_integer
            p.end_data = end_data
        return p.parse(fp)
    else:
        # Python 2 XML path
        # NOTE(review): _seek_past_whitespace requires a file object; a plain
        # string fp would fail here before the string-wrapping below - confirm
        offset = _seek_past_whitespace(fp)
        # Is not binary - assume a string - and try to load
        # We avoid using readPlistFromString() as that uses
        # cStringIO and fails when Unicode strings are detected
        # Don't subclass - keep the parser local
        from xml.parsers.expat import ParserCreate
        # Create a new PlistParser object - then we need to set up
        # the values and parse.
        p = plistlib.PlistParser()
        parser = ParserCreate()
        parser.StartElementHandler = p.handleBeginElement
        parser.EndElementHandler = p.handleEndElement
        parser.CharacterDataHandler = p.handleData
        # We also need to monkey patch this to allow for other dict_types, hex int support
        # proper line output for data errors, and for unicode string decoding
        def begin_dict(attrs):
            d = dict_type()
            p.addObject(d)
            p.stack.append(d)
        def end_integer():
            d = p.getData()
            value = int(d,16) if d.lower().startswith("0x") else int(d)
            if -1 << 63 <= value < 1 << 64:
                p.addObject(value)
            else:
                raise OverflowError("Integer overflow at line {}".format(parser.CurrentLineNumber))
        def end_data():
            try:
                p.addObject(plistlib.Data.fromBase64(p.getData()))
            except Exception as e:
                raise Exception("Data error at line {}: {}".format(parser.CurrentLineNumber,e))
        def end_string():
            d = p.getData()
            if isinstance(d,unicode):
                d = d.encode("utf-8")
            p.addObject(d)
        p.begin_dict = begin_dict
        p.end_integer = end_integer
        p.end_data = end_data
        p.end_string = end_string
        if isinstance(fp, unicode):
            # Encode unicode -> string; use utf-8 for safety
            fp = fp.encode("utf-8")
        if isinstance(fp, basestring):
            # It's a string - let's wrap it up
            fp = StringIO(fp)
        # Parse it
        parser.ParseFile(fp)
        return p.root
def loads(value, fmt=None, use_builtin_types=None, dict_type=dict):
    """Parse a plist from a str/bytes value by wrapping it in a BytesIO
    and deferring to load()."""
    if _check_py3() and isinstance(value, basestring):
        # Py3 str -> bytes so BytesIO accepts it
        value = value.encode()
    common = {"fmt": fmt, "dict_type": dict_type}
    try:
        return load(BytesIO(value), use_builtin_types=use_builtin_types, **common)
    except:
        # Python 3.9 removed use_builtin_types
        return load(BytesIO(value), **common)
def dump(value, fp, fmt=FMT_XML, sort_keys=True, skipkeys=False):
    """Serialize value into fp as XML (FMT_XML) or binary (FMT_BINARY).

    On Py2 the XML path monkey-patches plistlib.PlistWriter so that dict
    keys are only sorted when sort_keys is set (stock Py2 plistlib always
    sorts). Raises ValueError for any other fmt.
    """
    if fmt == FMT_BINARY:
        # Assume binary at this point
        writer = _BinaryPlistWriter(fp, sort_keys=sort_keys, skipkeys=skipkeys)
        writer.write(value)
    elif fmt == FMT_XML:
        if _check_py3():
            plistlib.dump(value, fp, fmt=fmt, sort_keys=sort_keys, skipkeys=skipkeys)
        else:
            # We need to monkey patch a bunch here too in order to avoid auto-sorting
            # of keys
            writer = plistlib.PlistWriter(fp)
            def writeDict(d):
                if d:
                    writer.beginElement("dict")
                    # Honor sort_keys instead of always sorting
                    items = sorted(d.items()) if sort_keys else d.items()
                    for key, value in items:
                        if not isinstance(key, basestring):
                            if skipkeys:
                                continue
                            raise TypeError("keys must be strings")
                        writer.simpleElement("key", key)
                        writer.writeValue(value)
                    writer.endElement("dict")
                else:
                    writer.simpleElement("dict")
            writer.writeDict = writeDict
            # PlistWriter's constructor already emitted the XML prologue;
            # wrap the value in the plist root element ourselves
            writer.writeln("<plist version=\"1.0\">")
            writer.writeValue(value)
            writer.writeln("</plist>")
    else:
        # Not a proper format
        raise ValueError("Unsupported format: {}".format(fmt))
def dumps(value, fmt=FMT_XML, skipkeys=False, sort_keys=True):
    """Serialize value to a plist string (XML or binary per fmt)."""
    # A byte buffer on Py3, StringIO on Py2 - avoids cStringIO's failures
    # on Unicode strings
    buf = BytesIO() if _check_py3() else StringIO()
    dump(value, buf, fmt=fmt, skipkeys=skipkeys, sort_keys=sort_keys)
    out = buf.getvalue()
    # Py3 buffers hold bytes - hand back text
    return out.decode("utf-8") if _check_py3() else out
### ###
|
||||
# Binary Plist Stuff For Py2 #
|
||||
### ###
|
||||
|
||||
# From the python 3 plistlib.py source: https://github.com/python/cpython/blob/3.11/Lib/plistlib.py
|
||||
# Tweaked to function on both Python 2 and 3
|
||||
|
||||
class UID:
    """Wrapper for a CF$UID value found in keyed-archiver plists.

    Mirrors CoreFoundation's behavior of treating UIDs as 32-bit unsigned
    values even though the binary format could theoretically carry 64 bits.
    """

    def __init__(self, data):
        if not isinstance(data, int):
            raise TypeError("data must be an int")
        if data >= 1 << 32:
            raise ValueError("UIDs cannot be >= 2**32 (4294967296)")
        if data < 0:
            raise ValueError("UIDs must be positive")
        self.data = data

    def __index__(self):
        # Lets struct.pack() and friends treat a UID as its integer value
        return self.data

    def __repr__(self):
        return "%s(%s)" % (type(self).__name__, repr(self.data))

    def __reduce__(self):
        # Pickle support: rebuild from the wrapped integer
        return type(self), (self.data,)

    def __eq__(self, other):
        if isinstance(other, UID):
            return self.data == other.data
        return NotImplemented

    def __hash__(self):
        return hash(self.data)
class InvalidFileException(ValueError):
    """Raised when a plist blob is malformed, truncated, or unreadable."""

    def __init__(self, message="Invalid file"):
        super(InvalidFileException, self).__init__(message)
# struct format codes for big-endian unsigned ints of 1/2/4/8 bytes
_BINARY_FORMAT = {1: 'B', 2: 'H', 4: 'L', 8: 'Q'}

# Sentinel marking an object slot that has not been parsed yet
_undefined = object()
class _BinaryPlistParser:
    """
    Read or write a binary plist file, following the description of the binary
    format. Raise InvalidFileException in case of error, otherwise return the
    root object.
    see also: http://opensource.apple.com/source/CF/CF-744.18/CFBinaryPList.c
    """
    def __init__(self, use_builtin_types, dict_type):
        # use_builtin_types: return raw bytes for <data> rather than
        # wrapping in plistlib.Data; dict_type: mapping class used for dicts
        self._use_builtin_types = use_builtin_types
        self._dict_type = dict_type

    def parse(self, fp):
        """Parse the binary plist in file object fp; return the root object."""
        try:
            # The basic file format:
            # HEADER
            # object...
            # refid->offset...
            # TRAILER
            self._fp = fp
            # The fixed-size trailer lives in the last 32 bytes
            self._fp.seek(-32, os.SEEK_END)
            trailer = self._fp.read(32)
            if len(trailer) != 32:
                raise InvalidFileException()
            (
                offset_size, self._ref_size, num_objects, top_object,
                offset_table_offset
            ) = struct.unpack('>6xBBQQQ', trailer)
            self._fp.seek(offset_table_offset)
            self._object_offsets = self._read_ints(num_objects, offset_size)
            # Parsed-object cache; _undefined marks "not parsed yet"
            self._objects = [_undefined] * num_objects
            return self._read_object(top_object)

        except (OSError, IndexError, struct.error, OverflowError,
                UnicodeDecodeError):
            raise InvalidFileException()

    def _get_size(self, tokenL):
        """ return the size of the next object."""
        if tokenL == 0xF:
            # Size too large for the nibble - stored as a following int
            # object whose own token's low bits give the byte width
            m = self._fp.read(1)[0]
            if not _check_py3():
                m = ord(m)  # Py2: read() yields str, convert to int
            m = m & 0x3
            s = 1 << m
            f = '>' + _BINARY_FORMAT[s]
            return struct.unpack(f, self._fp.read(s))[0]

        return tokenL

    def _read_ints(self, n, size):
        # Read n big-endian unsigned integers of `size` bytes each
        data = self._fp.read(size * n)
        if size in _BINARY_FORMAT:
            return struct.unpack('>' + _BINARY_FORMAT[size] * n, data)
        else:
            if not size or len(data) != size * n:
                raise InvalidFileException()
            # Irregular widths: decode via hex (works on both Py2 and Py3)
            return tuple(int(binascii.hexlify(data[i: i + size]),16)
                for i in range(0, size * n, size))
            '''return tuple(int.from_bytes(data[i: i + size], 'big')
                for i in range(0, size * n, size))'''

    def _read_refs(self, n):
        # Object references use the trailer-declared reference width
        return self._read_ints(n, self._ref_size)

    def _read_object(self, ref):
        """
        read the object by reference.
        May recursively read sub-objects (content of an array/dict/set)
        """
        result = self._objects[ref]
        if result is not _undefined:
            # Already parsed - reuse the cached object
            return result

        offset = self._object_offsets[ref]
        self._fp.seek(offset)
        token = self._fp.read(1)[0]
        if not _check_py3():
            token = ord(token)
        # High nibble selects the type; low nibble carries size/extra info
        tokenH, tokenL = token & 0xF0, token & 0x0F

        if token == 0x00: # \x00 or 0x00
            result = None

        elif token == 0x08: # \x08 or 0x08
            result = False

        elif token == 0x09: # \x09 or 0x09
            result = True

        # The referenced source code also mentions URL (0x0c, 0x0d) and
        # UUID (0x0e), but neither can be generated using the Cocoa libraries.

        elif token == 0x0f: # \x0f or 0x0f
            result = b''

        elif tokenH == 0x10: # int
            # Payload is 2**tokenL bytes, big-endian
            result = int(binascii.hexlify(self._fp.read(1 << tokenL)),16)
            if tokenL >= 3: # Signed - adjust
                result = result-((result & 0x8000000000000000) << 1)

        elif token == 0x22: # real
            result = struct.unpack('>f', self._fp.read(4))[0]

        elif token == 0x23: # real
            result = struct.unpack('>d', self._fp.read(8))[0]

        elif token == 0x33: # date
            f = struct.unpack('>d', self._fp.read(8))[0]
            # timestamp 0 of binary plists corresponds to 1/1/2001
            # (year of Mac OS X 10.0), instead of 1/1/1970.
            result = (datetime.datetime(2001, 1, 1) +
                      datetime.timedelta(seconds=f))

        elif tokenH == 0x40: # data
            s = self._get_size(tokenL)
            if self._use_builtin_types or not hasattr(plistlib, "Data"):
                result = self._fp.read(s)
            else:
                result = plistlib.Data(self._fp.read(s))

        elif tokenH == 0x50: # ascii string
            s = self._get_size(tokenL)
            result = self._fp.read(s).decode('ascii')
            result = result  # no-op kept from upstream source

        elif tokenH == 0x60: # unicode string
            # Size counts UTF-16 code units, hence the * 2 for bytes
            s = self._get_size(tokenL)
            result = self._fp.read(s * 2).decode('utf-16be')

        elif tokenH == 0x80: # UID
            # used by Key-Archiver plist files
            result = UID(int(binascii.hexlify(self._fp.read(1 + tokenL)),16))

        elif tokenH == 0xA0: # array
            s = self._get_size(tokenL)
            obj_refs = self._read_refs(s)
            result = []
            # Pre-register the (empty) list so self-referential arrays resolve
            self._objects[ref] = result
            result.extend(self._read_object(x) for x in obj_refs)

        # tokenH == 0xB0 is documented as 'ordset', but is not actually
        # implemented in the Apple reference code.

        # tokenH == 0xC0 is documented as 'set', but sets cannot be used in
        # plists.

        elif tokenH == 0xD0: # dict
            s = self._get_size(tokenL)
            key_refs = self._read_refs(s)
            obj_refs = self._read_refs(s)
            result = self._dict_type()
            # Pre-register for self-referential dicts, as with arrays
            self._objects[ref] = result
            for k, o in zip(key_refs, obj_refs):
                key = self._read_object(k)
                if hasattr(plistlib, "Data") and isinstance(key, plistlib.Data):
                    # Data keys are unwrapped so they stay hashable/comparable
                    key = key.data
                result[key] = self._read_object(o)

        else:
            raise InvalidFileException()

        self._objects[ref] = result
        return result
def _count_to_size(count):
    """Smallest power-of-two byte width (1, 2, 4, or 8) able to hold
    `count` as an unsigned integer."""
    for width in (1, 2, 4):
        if count < 1 << (8 * width):
            return width
    return 8
# Immutable/hashable types deduplicated via the (type, value) object table
_scalars = (str, int, float, datetime.datetime, bytes)
class _BinaryPlistWriter (object):
    """Serialize a Python object tree to the bplist00 binary format.

    Ported from CPython 3's plistlib with Py2 compatibility tweaks.
    """
    def __init__(self, fp, sort_keys, skipkeys):
        self._fp = fp
        self._sort_keys = sort_keys  # Sort dict keys when writing
        self._skipkeys = skipkeys    # Skip (vs raise on) non-string keys

    def write(self, value):
        """Write value to the output file as a complete binary plist."""

        # Flattened object list:
        self._objlist = []

        # Mappings from object->objectid
        # First dict has (type(object), object) as the key,
        # second dict is used when object is not hashable and
        # has id(object) as the key.
        self._objtable = {}
        self._objidtable = {}

        # Create list of all objects in the plist
        self._flatten(value)

        # Size of object references in serialized containers
        # depends on the number of objects in the plist.
        num_objects = len(self._objlist)
        self._object_offsets = [0]*num_objects
        self._ref_size = _count_to_size(num_objects)

        self._ref_format = _BINARY_FORMAT[self._ref_size]

        # Write file header
        self._fp.write(b'bplist00')

        # Write object list
        for obj in self._objlist:
            self._write_object(obj)

        # Write refnum->object offset table
        top_object = self._getrefnum(value)
        offset_table_offset = self._fp.tell()
        offset_size = _count_to_size(offset_table_offset)
        offset_format = '>' + _BINARY_FORMAT[offset_size] * num_objects
        self._fp.write(struct.pack(offset_format, *self._object_offsets))

        # Write trailer
        sort_version = 0
        trailer = (
            sort_version, offset_size, self._ref_size, num_objects,
            top_object, offset_table_offset
        )
        self._fp.write(struct.pack('>5xBBBQQQ', *trailer))

    def _flatten(self, value):
        """Register value (and, for containers, their children) in the
        object tables, assigning each a reference number."""
        # First check if the object is in the object table, not used for
        # containers to ensure that two subcontainers with the same contents
        # will be serialized as distinct values.
        if isinstance(value, _scalars):
            if (type(value), value) in self._objtable:
                return

        elif hasattr(plistlib, "Data") and isinstance(value, plistlib.Data):
            if (type(value.data), value.data) in self._objtable:
                return

        elif id(value) in self._objidtable:
            return

        # Add to objectreference map
        refnum = len(self._objlist)
        self._objlist.append(value)
        if isinstance(value, _scalars):
            self._objtable[(type(value), value)] = refnum
        elif hasattr(plistlib, "Data") and isinstance(value, plistlib.Data):
            self._objtable[(type(value.data), value.data)] = refnum
        else:
            self._objidtable[id(value)] = refnum

        # And finally recurse into containers
        if isinstance(value, dict):
            keys = []
            values = []
            items = value.items()
            if self._sort_keys:
                items = sorted(items)

            for k, v in items:
                if not isinstance(k, basestring):
                    if self._skipkeys:
                        continue
                    raise TypeError("keys must be strings")
                keys.append(k)
                values.append(v)

            # Keys first, then values - matching the on-disk dict layout
            for o in itertools.chain(keys, values):
                self._flatten(o)

        elif isinstance(value, (list, tuple)):
            for o in value:
                self._flatten(o)

    def _getrefnum(self, value):
        # Look up the reference number assigned by _flatten()
        if isinstance(value, _scalars):
            return self._objtable[(type(value), value)]
        elif hasattr(plistlib, "Data") and isinstance(value, plistlib.Data):
            return self._objtable[(type(value.data), value.data)]
        else:
            return self._objidtable[id(value)]

    def _write_size(self, token, size):
        # Emit the type token with the size inline (< 15) or followed by an
        # int object carrying it (0x10/0x11/0x12/0x13 = 1/2/4/8 byte sizes)
        if size < 15:
            self._fp.write(struct.pack('>B', token | size))

        elif size < 1 << 8:
            self._fp.write(struct.pack('>BBB', token | 0xF, 0x10, size))

        elif size < 1 << 16:
            self._fp.write(struct.pack('>BBH', token | 0xF, 0x11, size))

        elif size < 1 << 32:
            self._fp.write(struct.pack('>BBL', token | 0xF, 0x12, size))

        else:
            self._fp.write(struct.pack('>BBQ', token | 0xF, 0x13, size))

    def _write_object(self, value):
        """Serialize a single flattened object, recording its file offset
        in the offset table."""
        ref = self._getrefnum(value)
        self._object_offsets[ref] = self._fp.tell()
        if value is None:
            self._fp.write(b'\x00')

        elif value is False:
            self._fp.write(b'\x08')

        elif value is True:
            self._fp.write(b'\x09')

        elif isinstance(value, int):
            if value < 0:
                try:
                    self._fp.write(struct.pack('>Bq', 0x13, value))
                except struct.error:
                    raise OverflowError(value) # from None
            elif value < 1 << 8:
                self._fp.write(struct.pack('>BB', 0x10, value))
            elif value < 1 << 16:
                self._fp.write(struct.pack('>BH', 0x11, value))
            elif value < 1 << 32:
                self._fp.write(struct.pack('>BL', 0x12, value))
            elif value < 1 << 63:
                self._fp.write(struct.pack('>BQ', 0x13, value))
            elif value < 1 << 64:
                # Unsigned values above 2**63 need the 128-bit token
                self._fp.write(b'\x14' + value.to_bytes(16, 'big', signed=True))
            else:
                raise OverflowError(value)

        elif isinstance(value, float):
            self._fp.write(struct.pack('>Bd', 0x23, value))

        elif isinstance(value, datetime.datetime):
            # Dates are stored as seconds since the Apple epoch (1/1/2001)
            f = (value - datetime.datetime(2001, 1, 1)).total_seconds()
            self._fp.write(struct.pack('>Bd', 0x33, f))

        elif (_check_py3() and isinstance(value, (bytes, bytearray))) or (hasattr(plistlib, "Data") and isinstance(value, plistlib.Data)):
            if not isinstance(value, (bytes, bytearray)):
                value = value.data # Unpack it
            self._write_size(0x40, len(value))
            self._fp.write(value)

        elif isinstance(value, basestring):
            try:
                # ASCII-only strings get the compact 0x50 encoding
                t = value.encode('ascii')
                self._write_size(0x50, len(value))
            except UnicodeEncodeError:
                # Otherwise UTF-16BE; size counts code units, not bytes
                t = value.encode('utf-16be')
                self._write_size(0x60, len(t) // 2)
            self._fp.write(t)

        elif isinstance(value, UID) or (hasattr(plistlib,"UID") and isinstance(value, plistlib.UID)):
            # struct packs the UID itself through its __index__
            if value.data < 0:
                raise ValueError("UIDs must be positive")
            elif value.data < 1 << 8:
                self._fp.write(struct.pack('>BB', 0x80, value))
            elif value.data < 1 << 16:
                self._fp.write(struct.pack('>BH', 0x81, value))
            elif value.data < 1 << 32:
                self._fp.write(struct.pack('>BL', 0x83, value))
            # elif value.data < 1 << 64:
            #     self._fp.write(struct.pack('>BQ', 0x87, value))
            else:
                raise OverflowError(value)

        elif isinstance(value, (list, tuple)):
            refs = [self._getrefnum(o) for o in value]
            s = len(refs)
            self._write_size(0xA0, s)
            self._fp.write(struct.pack('>' + self._ref_format * s, *refs))

        elif isinstance(value, dict):
            keyRefs, valRefs = [], []

            if self._sort_keys:
                rootItems = sorted(value.items())
            else:
                rootItems = value.items()

            for k, v in rootItems:
                if not isinstance(k, basestring):
                    if self._skipkeys:
                        continue
                    raise TypeError("keys must be strings")
                keyRefs.append(self._getrefnum(k))
                valRefs.append(self._getrefnum(v))

            s = len(keyRefs)
            self._write_size(0xD0, s)
            # All key refs, then all value refs
            self._fp.write(struct.pack('>' + self._ref_format * s, *keyRefs))
            self._fp.write(struct.pack('>' + self._ref_format * s, *valRefs))

        else:
            raise TypeError(value)
69
ACPI/SSDTTime-master/Scripts/reveal.py
Normal file
69
ACPI/SSDTTime-master/Scripts/reveal.py
Normal file
@@ -0,0 +1,69 @@
|
||||
import sys, os
|
||||
from . import run
|
||||
|
||||
class Reveal:
    """Finder integration helpers for macOS: reveal paths and post
    notifications by shelling out to osascript via run.Run."""

    def __init__(self):
        # Shell runner used to execute osascript commands
        self.r = run.Run()
        return

    def get_parent(self, path):
        """Return the normalized parent directory of path."""
        return os.path.normpath(os.path.join(path, os.pardir))

    def reveal(self, path, new_window = False):
        # Reveals the passed path in Finder - only works on macOS
        # Returns the runner's result tuple; on early failure a
        # ("", error_message, 1) tuple mimicking that shape.
        if not sys.platform == "darwin":
            return ("", "macOS Only", 1)
        if not path:
            # No path sent - nothing to reveal
            return ("", "No path specified", 1)
        # Build our script - then convert it to a single line task
        if not os.path.exists(path):
            # Not real - bail
            return ("", "{} - doesn't exist".format(path), 1)
        # Get the absolute path
        path = os.path.abspath(path)
        command = ["osascript"]
        if new_window:
            # Reveal in a fresh Finder window
            command.extend([
                "-e", "set p to \"{}\"".format(path.replace("\"", "\\\"")),
                "-e", "tell application \"Finder\"",
                "-e", "reveal POSIX file p as text",
                "-e", "activate",
                "-e", "end tell"
            ])
        else:
            if path == self.get_parent(path):
                # Path is its own parent (a filesystem root) - retarget the
                # frontmost window instead of selecting an item within it
                command.extend([
                    "-e", "set p to \"{}\"".format(path.replace("\"", "\\\"")),
                    "-e", "tell application \"Finder\"",
                    "-e", "reopen",
                    "-e", "activate",
                    "-e", "set target of window 1 to (POSIX file p as text)",
                    "-e", "end tell"
                ])
            else:
                # Point the frontmost window at the parent, then select
                # the item itself
                command.extend([
                    "-e", "set o to \"{}\"".format(self.get_parent(path).replace("\"", "\\\"")),
                    "-e", "set p to \"{}\"".format(path.replace("\"", "\\\"")),
                    "-e", "tell application \"Finder\"",
                    "-e", "reopen",
                    "-e", "activate",
                    "-e", "set target of window 1 to (POSIX file o as text)",
                    "-e", "select (POSIX file p as text)",
                    "-e", "end tell"
                ])
        return self.r.run({"args" : command})

    def notify(self, title = None, subtitle = None, sound = None):
        # Sends a notification
        # title is required; subtitle and sound name are optional extras.
        if not title:
            return ("", "Malformed dict", 1)
        # Build our notification
        n_text = "display notification with title \"{}\"".format(title.replace("\"", "\\\""))
        if subtitle:
            n_text += " subtitle \"{}\"".format(subtitle.replace("\"", "\\\""))
        if sound:
            n_text += " sound name \"{}\"".format(sound.replace("\"", "\\\""))
        command = ["osascript", "-e", n_text]
        return self.r.run({"args" : command})
151
ACPI/SSDTTime-master/Scripts/run.py
Normal file
151
ACPI/SSDTTime-master/Scripts/run.py
Normal file
@@ -0,0 +1,151 @@
|
||||
import sys, subprocess, time, threading, shlex
|
||||
try:
|
||||
from Queue import Queue, Empty
|
||||
except:
|
||||
from queue import Queue, Empty
|
||||
|
||||
ON_POSIX = 'posix' in sys.builtin_module_names
|
||||
|
||||
class Run:
    """Subprocess helper.

    Commands are described as dicts (see run()) and results come back as
    (stdout, stderr, returncode) tuples.  Output can optionally be
    streamed live to the terminal while still being captured.
    """

    def __init__(self):
        return

    def _read_output(self, pipe, q):
        # Thread target: pump *pipe* into queue *q* one character at a
        # time so callers can stream output as it arrives.
        try:
            for line in iter(lambda: pipe.read(1), b''):
                q.put(line)
        except ValueError:
            # Pipe closed out from under us - treat as end of stream
            pass
        pipe.close()

    def _create_thread(self, output):
        # Creates a new queue and daemon thread object to watch based on
        # the output pipe sent.  Caller is responsible for .start().
        q = Queue()
        t = threading.Thread(target=self._read_output, args=(output, q))
        t.daemon = True
        return (q,t)

    def _stream_output(self, comm, shell = False):
        """Run *comm*, echoing stdout/stderr live while capturing both.

        Returns (stdout, stderr, returncode); on failure to launch,
        returns ("", "Command not found!", 1).
        """
        output = error = ""
        p = None
        try:
            # Normalize: shell mode wants a single string, exec mode a list
            if shell and type(comm) is list:
                comm = " ".join(shlex.quote(x) for x in comm)
            if not shell and type(comm) is str:
                comm = shlex.split(comm)
            p = subprocess.Popen(comm, shell=shell, stdout=subprocess.PIPE, stderr=subprocess.PIPE, bufsize=0, universal_newlines=True, close_fds=ON_POSIX)
            # Set up the stdout and stderr thread/queue pairs
            q,t = self._create_thread(p.stdout)
            qe,te = self._create_thread(p.stderr)
            # Start both threads
            t.start()
            te.start()

            while True:
                c = z = ""
                try: c = q.get_nowait()
                except Empty: pass
                else:
                    sys.stdout.write(c)
                    output += c
                    sys.stdout.flush()
                try: z = qe.get_nowait()
                except Empty: pass
                else:
                    sys.stderr.write(z)
                    error += z
                    sys.stderr.flush()
                if not c==z=="": continue # Keep going until empty
                # No output - see if still running
                p.poll()
                if p.returncode is not None:
                    # Subprocess ended
                    break
                # No output, but subprocess still running - stall for 20ms
                time.sleep(0.02)

            # Drain whatever the reader threads didn't pick up
            o, e = p.communicate()
            return (output+o, error+e, p.returncode)
        except Exception:
            # BUGFIX: was a bare except - that swallowed SystemExit and
            # KeyboardInterrupt too.  Only trap real errors.
            if p:
                try: o, e = p.communicate()
                except Exception: o = e = ""
                return (output+o, error+e, p.returncode)
            return ("", "Command not found!", 1)

    def _decode(self, value, encoding="utf-8", errors="ignore"):
        # Helper method to only decode if bytes type (py3); py2 strings
        # pass through untouched.
        if sys.version_info >= (3,0) and isinstance(value, bytes):
            return value.decode(encoding,errors)
        return value

    def _run_command(self, comm, shell = False):
        """Run *comm* to completion and gather its output.

        Returns (stdout, stderr, returncode); on failure to launch,
        returns ("", "Command not found!", 1).
        """
        c = None
        try:
            if shell and type(comm) is list:
                comm = " ".join(shlex.quote(x) for x in comm)
            if not shell and type(comm) is str:
                comm = shlex.split(comm)
            p = subprocess.Popen(comm, shell=shell, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
            c = p.communicate()
        except Exception:
            # BUGFIX: was a bare except - don't swallow SystemExit/
            # KeyboardInterrupt.
            if c is None:
                return ("", "Command not found!", 1)
        return (self._decode(c[0]), self._decode(c[1]), p.returncode)

    def run(self, command_list, leave_on_fail = False):
        """Run one command dict or a list of them.

        Recognized keys per dict: args (list or str), shell, stream,
        sudo, stdout, stderr, message, show.  Returns a single
        (stdout, stderr, returncode) tuple when exactly one command
        produced output, otherwise a list of tuples.
        """
        if type(command_list) is dict:
            # We only have one command
            command_list = [command_list]
        output_list = []
        for comm in command_list:
            args   = comm.get("args", [])
            shell  = comm.get("shell", False)
            stream = comm.get("stream", False)
            sudo   = comm.get("sudo", False)
            stdout = comm.get("stdout", False)
            stderr = comm.get("stderr", False)
            mess   = comm.get("message", None)
            show   = comm.get("show", False)

            if mess is not None:
                print(mess)

            if not len(args):
                # nothing to process
                continue
            if sudo:
                # Check if we have sudo available
                out = self._run_command(["which", "sudo"])
                if "sudo" in out[0]:
                    # Can sudo - prepend it to the command
                    if type(args) is list:
                        args.insert(0, out[0].replace("\n", "")) # add to start of list
                    elif type(args) is str:
                        args = out[0].replace("\n", "") + " " + args # add to start of string

            if show:
                # BUGFIX: " ".join() on a *string* args space-separated
                # every character - print string commands as-is.
                print(" ".join(args) if type(args) is list else args)

            if stream:
                # Stream it!
                out = self._stream_output(args, shell)
            else:
                # Just run and gather output
                out = self._run_command(args, shell)
            if stdout and len(out[0]):
                print(out[0])
            if stderr and len(out[1]):
                print(out[1])
            # Append output
            output_list.append(out)
            # Check for errors
            if leave_on_fail and out[2] != 0:
                # Got an error - leave
                break
        if len(output_list) == 1:
            # We only ran one command - just return that output
            return output_list[0]
        return output_list
|
||||
263
ACPI/SSDTTime-master/Scripts/utils.py
Normal file
263
ACPI/SSDTTime-master/Scripts/utils.py
Normal file
@@ -0,0 +1,263 @@
|
||||
import sys, os, time, re, json, datetime, ctypes, subprocess
|
||||
|
||||
if os.name == "nt":
|
||||
# Windows
|
||||
import msvcrt
|
||||
else:
|
||||
# Not Windows \o/
|
||||
import select
|
||||
|
||||
class Utils:
    """Grab-bag of console/script helpers used across SSDTTime.

    Covers admin checks and elevation, version comparison, path
    sanitizing, timed input prompts, and simple console drawing.
    """

    def __init__(self, name = "Python Script"):
        # name: title shown by head()
        self.name = name
        # Init our colors before we need to print anything.  Look for
        # colors.json next to this script, restoring the original cwd
        # even if loading fails (was not exception-safe before).
        cwd = os.getcwd()
        os.chdir(os.path.dirname(os.path.realpath(__file__)))
        try:
            if os.path.exists("colors.json"):
                # BUGFIX: open() was leaked - close the file deterministically
                with open("colors.json") as f:
                    self.colors_dict = json.load(f)
            else:
                self.colors_dict = {}
        finally:
            os.chdir(cwd)

    def check_admin(self):
        # Returns whether or not we're admin (root on POSIX, elevated on
        # Windows - os.getuid is missing there, hence the AttributeError).
        try:
            is_admin = os.getuid() == 0
        except AttributeError:
            is_admin = ctypes.windll.shell32.IsUserAnAdmin() != 0
        return is_admin

    def elevate(self, file):
        """Re-run the passed *file* with admin rights if we lack them."""
        if self.check_admin():
            return
        if os.name == "nt":
            # Windows: UAC prompt via ShellExecuteW "runas"
            ctypes.windll.shell32.ShellExecuteW(None, "runas", '"{}"'.format(sys.executable), '"{}"'.format(file), None, 1)
        else:
            # POSIX: find sudo and exec ourselves through it
            try:
                p = subprocess.Popen(["which", "sudo"], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
                c = p.communicate()[0].decode("utf-8", "ignore").replace("\n", "")
                os.execv(c, [ sys.executable, 'python'] + sys.argv)
            except Exception:
                # was bare except + builtin exit(); sys.exit is the
                # supported form in scripts
                sys.exit(1)

    def compare_versions(self, vers1, vers2, **kwargs):
        """Compare ##.## version strings.

        Returns True if vers1 < vers2, False if vers1 > vers2, and
        None when they are equal.  Keyword args: pad (fill char),
        separator (default "."), ignore_case (default True).
        """
        # Sanitize the pad/separator kwargs
        pad = str(kwargs.get("pad", ""))
        sep = str(kwargs.get("separator", "."))

        ignore_case = kwargs.get("ignore_case", True)

        # Cast as strings
        vers1 = str(vers1)
        vers2 = str(vers2)

        if ignore_case:
            vers1 = vers1.lower()
            vers2 = vers2.lower()

        # Split and pad the component lists to equal length.
        # BUGFIX: the pad kwarg was accepted but never forwarded.
        v1_parts, v2_parts = self.pad_length(vers1.split(sep), vers2.split(sep), pad=pad)

        # Iterate and compare component by component
        for i in range(len(v1_parts)):
            # Remove non-alphanumeric characters
            v1 = ''.join(c.lower() for c in v1_parts[i] if c.isalnum())
            v2 = ''.join(c.lower() for c in v2_parts[i] if c.isalnum())
            # Equalize the lengths so lexicographic compare behaves
            # numerically for digit runs
            v1, v2 = self.pad_length(v1, v2, pad=pad)
            # Compare
            if str(v1) < str(v2):
                return True
            elif str(v1) > str(v2):
                return False
        # Never differed - return None, must be equal
        return None

    def pad_length(self, var1, var2, pad = "0"):
        """Left-pad the shorter of two same-type strings/lists with *pad*
        until both are equal length; returns (var1, var2)."""
        # Coerce pad to a single character, defaulting to "0"
        pad = "0" if len(str(pad)) < 1 else str(pad)[0]
        if not type(var1) == type(var2):
            # Type mismatch!  Just return what we got
            return (var1, var2)
        if len(var1) < len(var2):
            if type(var1) is list:
                var1.extend([str(pad) for x in range(len(var2) - len(var1))])
            else:
                var1 = "{}{}".format((pad*(len(var2)-len(var1))), var1)
        elif len(var2) < len(var1):
            if type(var2) is list:
                var2.extend([str(pad) for x in range(len(var1) - len(var2))])
            else:
                var2 = "{}{}".format((pad*(len(var1)-len(var2))), var2)
        return (var1, var2)

    def check_path(self, path):
        """Sanitize *path* (quotes, ~, stray spaces, escapes) until it
        exists; return its absolute path, or None if it never resolves."""
        test_path = path
        last_path = None
        while True:
            # Bail if we've looped at least once and the path didn't change
            if last_path is not None and last_path == test_path: return None
            last_path = test_path
            # Check if we stripped everything out
            if not len(test_path): return None
            # Check if we have a valid path
            if os.path.exists(test_path):
                return os.path.abspath(test_path)
            # Check for matching surrounding quotes
            if test_path[0] == test_path[-1] and test_path[0] in ('"',"'"):
                test_path = test_path[1:-1]
                continue
            # Check for a tilde and expand if needed
            if test_path[0] == "~":
                tilde_expanded = os.path.expanduser(test_path)
                if tilde_expanded != test_path:
                    # Got a change
                    test_path = tilde_expanded
                    continue
            # Strip whitespace - left side first, then the right
            if test_path[0] in (" ","\t"):
                test_path = test_path[1:]
                continue
            if test_path[-1] in (" ","\t"):
                test_path = test_path[:-1]
                continue
            # Maybe we have shell-style escapes to collapse
            test_path = "\\".join([x.replace("\\", "") for x in test_path.split("\\\\")])

    def grab(self, prompt, **kwargs):
        """Show *prompt* and return the user's input.

        With timeout <= 0 this is a plain input() (raw_input on py2).
        Otherwise waits up to *timeout* seconds and returns *default*
        if nothing was typed.
        """
        timeout = kwargs.get("timeout",0)
        default = kwargs.get("default","")
        # If we don't have a timeout - then skip the timed sections
        if timeout <= 0:
            try:
                if sys.version_info >= (3, 0):
                    return input(prompt)
                else:
                    return str(raw_input(prompt))
            except EOFError:
                return default
        # Write our prompt
        sys.stdout.write(prompt)
        sys.stdout.flush()
        if os.name == "nt":
            # Windows has no select() on stdin - poll the keyboard
            start_time = time.time()
            i = ''
            while True:
                if msvcrt.kbhit():
                    c = msvcrt.getche()
                    if ord(c) == 13: # enter_key
                        break
                    elif ord(c) >= 32: # printable char
                        i += c.decode() if sys.version_info >= (3,0) and isinstance(c,bytes) else c
                else:
                    time.sleep(0.02) # Delay for 20ms to prevent CPU workload
                if len(i) == 0 and (time.time() - start_time) > timeout:
                    break
        else:
            i, o, e = select.select( [sys.stdin], [], [], timeout )
            if i:
                i = sys.stdin.readline().strip()
        print('') # needed to move to next line
        if len(i) > 0:
            return i
        else:
            return default

    def cls(self):
        # Clear the terminal - no-op when TERM is unset (e.g. IDLE)
        if os.name == "nt":
            os.system("cls")
        elif os.environ.get("TERM"):
            os.system("clear")

    def cprint(self, message, **kwargs):
        """Print *message*, translating color placeholders; when
        strip_colors is True (or on Windows), return the stripped string
        instead of printing."""
        strip_colors = kwargs.get("strip_colors", False)
        if os.name == "nt":
            strip_colors = True
        reset = u"\u001b[0m"
        # BUGFIX: this referenced self.colors, which __init__ never
        # defines (only self.colors_dict) - so cprint always raised
        # AttributeError.  Use the loaded table when it's the expected
        # list of {"find", "replace"} dicts, otherwise no placeholders.
        colors = self.colors_dict if isinstance(self.colors_dict, list) else getattr(self, "colors", [])
        for c in colors:
            if strip_colors:
                message = message.replace(c["find"], "")
            else:
                message = message.replace(c["find"], c["replace"])
        if strip_colors:
            return message
        sys.stdout.write(message)
        print(reset)

    # Header drawing method
    def head(self, text = None, width = 55):
        """Clear the screen and draw a #-bordered header containing
        *text* (defaults to self.name), truncated to *width*."""
        if text is None:
            text = self.name
        self.cls()
        print("  {}".format("#"*width))
        mid_len = int(round(width/2-len(text)/2)-2)
        middle = " #{}{}{}#".format(" "*mid_len, text, " "*((width - mid_len - len(text))-2))
        if len(middle) > width+1:
            # Get the difference
            di = len(middle) - width
            # Add the padding for the ...#
            di += 3
            # Trim the string
            middle = middle[:-di] + "...#"
        print(middle)
        print("#"*width)

    def resize(self, width, height):
        # Resize the terminal via the xterm escape sequence
        print('\033[8;{};{}t'.format(height, width))

    def custom_quit(self):
        """Draw the header, print sign-off info, and exit(0)."""
        self.head()
        print("by CorpNewt\n")
        print("Thanks for testing it out, for bugs/comments/complaints")
        print("send me a message on Reddit, or check out my GitHub:\n")
        print("www.reddit.com/u/corpnewt")
        print("www.github.com/corpnewt\n")
        # Get the time and wish them a good morning, afternoon, evening, and night
        hr = datetime.datetime.now().time().hour
        if hr > 3 and hr < 12:
            print("Have a nice morning!\n\n")
        elif hr >= 12 and hr < 17:
            print("Have a nice afternoon!\n\n")
        elif hr >= 17 and hr < 21:
            print("Have a nice evening!\n\n")
        else:
            print("Have a nice night!\n\n")
        # sys.exit instead of the interactive-only exit() builtin
        sys.exit(0)
|
||||
@@ -1773,7 +1773,7 @@
|
||||
<key>SystemAudioVolume</key>
|
||||
<data>Rg==</data>
|
||||
<key>boot-args</key>
|
||||
<string>-v keepsyms=1 debug=0x100</string>
|
||||
<string>-v keepsyms=1 debug=0x100 igfxonln=1</string>
|
||||
<key>csr-active-config</key>
|
||||
<data>AAAAAA==</data>
|
||||
<key>prev-lang:kbd</key>
|
||||
@@ -2127,7 +2127,7 @@
|
||||
<key>Comment</key>
|
||||
<string>Fix black screen on wake from hibernation for Lenovo Thinkpad T490</string>
|
||||
<key>Enabled</key>
|
||||
<true/>
|
||||
<false/>
|
||||
<key>Size</key>
|
||||
<integer>4096</integer>
|
||||
<key>Type</key>
|
||||
|
||||
Reference in New Issue
Block a user