Mirror of https://github.com/MustardChef/WSABuilds.git (synced 2024-11-25 20:56:53 +01:00)
Re-add scripts

parent 2deb4d2262
commit b52fea4122
MagiskOnWSA/Update Check/KernelSUUpdateCheck.py (new file, 34 lines)
@@ -0,0 +1,34 @@
import os
import json
import requests
import logging
import subprocess

logging.captureWarnings(True)
env_file = os.getenv('GITHUB_ENV')
new_version_found = False

currentver = requests.get(f"https://raw.githubusercontent.com/MustardChef/WSABuilds/update/kernelsu.appversion").text.replace('\n', '')
git = (
    "git checkout -f update || git switch --discard-changes --orphan update"
)

with open('kernelsu.appversion', 'w') as file:
    file.write(currentver)

if not new_version_found:
    latestver = ""
    kernelsumsg = ""
    latestver = json.loads(requests.get(f"https://api.github.com/repos/tiann/kernelsu/releases/latest").content)['tag_name'].replace('v', '').replace('\n', '')
    kernelsumsg = "Update KernelSU Version from `v" + currentver + "` to `v" + latestver + "`"
    if currentver != latestver:
        print("New version found: " + latestver)
        new_version_found = True
        subprocess.Popen(git, shell=True, stdout=None, stderr=None, executable='/bin/bash').wait()
        with open('kernelsu.appversion', 'w+') as file:
            file.seek(0)
            file.truncate()
            file.write(latestver)
        with open(env_file, "a") as wr:
            wr.write(f"KERNEL_SU_MSG={kernelsumsg}\n")
    else:
        kernelsumsg = "KernelSU Version: `" + latestver + "`"
        with open(env_file, "a") as wr:
            wr.write(f"KERNEL_SU_MSG={kernelsumsg}\n")
file.close()
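This script and the three variants that follow share one pattern: read the currently tracked version from the repo's update branch, compare it with the latest upstream release, and append a KEY=value line to the file named by GITHUB_ENV so later workflow steps see it as an environment variable. Below is a minimal sketch of how that append pattern could be exercised outside GitHub Actions; the temp file standing in for the runner-provided GITHUB_ENV and the sample versions are purely illustrative, not part of the committed scripts.

import os
import tempfile

# Stand-in for the env file that the GitHub Actions runner normally provides.
fd, env_path = tempfile.mkstemp(suffix=".env")
os.close(fd)
os.environ["GITHUB_ENV"] = env_path

env_file = os.getenv("GITHUB_ENV")

# Same append pattern the update-check scripts use.
with open(env_file, "a") as wr:
    wr.write("KERNEL_SU_MSG=Update KernelSU Version from `v0.9.4` to `v0.9.5`\n")

# In a real workflow the runner reads this file back; here we just print it.
with open(env_path) as f:
    print(f.read())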
MagiskOnWSA/Update Check/MTGUpdateCheck.py (new file, 34 lines)
@@ -0,0 +1,34 @@
import os
import json
import requests
import logging
import subprocess

logging.captureWarnings(True)
env_file = os.getenv('GITHUB_ENV')
new_version_found = False

currentver = requests.get(f"https://raw.githubusercontent.com/MustardChef/WSABuilds/update/gapps.appversion").text.replace('\n', '')
git = (
    "git checkout -f update || git switch --discard-changes --orphan update"
)

with open('gapps.appversion', 'w') as file:
    file.write(currentver)

if not new_version_found:
    latestver = ""
    mtgmsg = ""
    latestver = json.loads(requests.get(f"https://api.github.com/repos/MustardChef/MindTheGappsArchived/releases/latest").content)['name'].replace('\n', '')
    mtgmsg = "Update MindTheGapps Version from `v" + currentver + "` to `v" + latestver + "`"
    if currentver != latestver:
        print("New version found: " + latestver)
        new_version_found = True
        subprocess.Popen(git, shell=True, stdout=None, stderr=None, executable='/bin/bash').wait()
        with open('gapps.appversion', 'w+') as file:
            file.seek(0)
            file.truncate()
            file.write(latestver)
        with open(env_file, "a") as wr:
            wr.write(f"MTG_MSG={mtgmsg}\n")
    else:
        mtgmsg = "MindTheGapps Package Version: `" + latestver + "`"
        with open(env_file, "a") as wr:
            wr.write(f"MTG_MSG={mtgmsg}\n")
file.close()
MagiskOnWSA/Update Check/MagiskCanaryUpdateCheck.py (new file, 34 lines)
@@ -0,0 +1,34 @@
import os
import json
import requests
import logging
import subprocess

logging.captureWarnings(True)
env_file = os.getenv('GITHUB_ENV')
new_version_found = False

currentver = requests.get(f"https://raw.githubusercontent.com/MustardChef/WSABuilds/update/magiskcanary.appversion").text.replace('\n', '')
git = (
    "git checkout -f update || git switch --discard-changes --orphan update"
)

with open('magiskcanary.appversion', 'w') as file:
    file.write(currentver)

if not new_version_found:
    latestver = ""
    magiskcanarymsg = ""
    latestver = json.loads(requests.get(f"https://github.com/topjohnwu/magisk-files/raw/master/canary.json").content)['magisk']['version'].replace('\n', '')
    magiskcanarymsg = "Update Magisk Canary Version from `v" + currentver + "` to `v" + latestver + "`"
    if currentver != latestver:
        print("New version found: " + latestver)
        new_version_found = True
        subprocess.Popen(git, shell=True, stdout=None, stderr=None, executable='/bin/bash').wait()
        with open('magiskcanary.appversion', 'w+') as file:
            file.seek(0)
            file.truncate()
            file.write(latestver)
        with open(env_file, "a") as wr:
            wr.write(f"MAGISK_CANARY_MSG={magiskcanarymsg}\n")
    else:
        magiskcanarymsg = "Magisk Canary Version: `" + latestver + "`"
        with open(env_file, "a") as wr:
            wr.write(f"MAGISK_CANARY_MSG={magiskcanarymsg}\n")
file.close()
MagiskOnWSA/Update Check/MagiskStableUpdateCheck.py (new file, 34 lines)
@@ -0,0 +1,34 @@
import os
import json
import requests
import logging
import subprocess

logging.captureWarnings(True)
env_file = os.getenv('GITHUB_ENV')
new_version_found = False

git = (
    "git checkout -f update || git switch --discard-changes --orphan update"
)
currentver = requests.get(f"https://raw.githubusercontent.com/MustardChef/WSABuilds/update/magiskstable.appversion").text.replace('\n', '')

with open('magiskstable.appversion', 'w') as file:
    file.write(currentver)

if not new_version_found:
    latestver = ""
    magiskstablemsg = ""
    latestver = json.loads(requests.get(f"https://github.com/topjohnwu/magisk-files/raw/master/stable.json").content)['magisk']['version'].replace('\n', '')
    magiskstablemsg = "Update Magisk Stable Version from `v" + currentver + "` to `v" + latestver + "`"
    if currentver != latestver:
        print("New version found: " + latestver)
        new_version_found = True
        subprocess.Popen(git, shell=True, stdout=None, stderr=None, executable='/bin/bash').wait()
        with open('magiskstable.appversion', 'w+') as file:
            file.seek(0)
            file.truncate()
            file.write(latestver)
        with open(env_file, "a") as wr:
            wr.write(f"MAGISK_STABLE_MSG={magiskstablemsg}\n")
    else:
        magiskstablemsg = "Magisk Stable Version: `" + latestver + "`"
        with open(env_file, "a") as wr:
            wr.write(f"MAGISK_STABLE_MSG={magiskstablemsg}\n")
file.close()
MagiskOnWSA/Update Check/WSAInsiderUpdateCheck.py (new file, 162 lines)
@@ -0,0 +1,162 @@
import base64
import os
import html
import json
import re
import requests
import logging
import subprocess

from typing import Any, OrderedDict
from xml.dom import minidom

from requests import Session
from packaging import version


class Prop(OrderedDict):
    def __init__(self, props: str = ...) -> None:
        super().__init__()
        for i, line in enumerate(props.splitlines(False)):
            if '=' in line:
                k, v = line.split('=', 1)
                self[k] = v
            else:
                self[f".{i}"] = line

    def __setattr__(self, __name: str, __value: Any) -> None:
        self[__name] = __value

    def __repr__(self):
        return '\n'.join(f'{item}={self[item]}' for item in self)


logging.captureWarnings(True)
env_file = os.getenv('GITHUB_ENV')

# Category ID
cat_id = '858014f3-3934-4abe-8078-4aa193e74ca8'

release_type = "WIF"

new_version_found = False

session = Session()
session.verify = False

git = (
    "git checkout -f update || git switch --discard-changes --orphan update"
)

try:
    response = requests.get("https://api.github.com/repos/bubbles-wow/MS-Account-Token/contents/token.cfg")
    if response.status_code == 200:
        content = response.json()["content"]
        content = content.encode("utf-8")
        content = base64.b64decode(content)
        text = content.decode("utf-8")
        user_code = Prop(text).get("user_code")
        updatetime = Prop(text).get("update_time")
        print("Successfully get user token from server!")
        print(f"Last update time: {updatetime}\n")
    else:
        user_code = ""
        print(f"Failed to get user token from server! Error code: {response.status_code}\n")
except:
    user_code = ""

users = {"", user_code}

# The code inside the function WSAInsiderUpdateChecker starts here
currentver = requests.get(f"https://raw.githubusercontent.com/MustardChef/WSABuilds/update/WIF.appversion").text.replace('\n', '')

print("Current working directory:", os.getcwd())
print("Files in '/home/runner/work/WSABuilds/WSABuilds/MagiskOnWSALocal2/xml':", os.listdir('/home/runner/work/WSABuilds/WSABuilds/MagiskOnWSALocal2/xml'))

# Write for pushing later
try:
    # Write for pushing later
    file = open('WIF.appversion', 'w')
    file.write(currentver)
    file.close()
    print("WIF.appversion file created successfully.")
except Exception as e:
    print(f"Error writing to file: {e}")

if not new_version_found:
    # Get information
    with open("/home/runner/work/WSABuilds/WSABuilds/MagiskOnWSALocal2/xml/GetCookie.xml", "r") as f:
        cookie_content = f.read().format(user_code)
    try:
        out = session.post(
            'https://fe3.delivery.mp.microsoft.com/ClientWebService/client.asmx',
            data=cookie_content,
            headers={'Content-Type': 'application/soap+xml; charset=utf-8'}
        )
    except:
        print("Network Error!")
        exit(1)
    doc = minidom.parseString(out.text)
    cookie = doc.getElementsByTagName('EncryptedData')[0].firstChild.nodeValue
    with open("/home/runner/work/WSABuilds/WSABuilds/MagiskOnWSALocal2/xml/WUIDRequest.xml", "r") as f:
        cat_id_content = f.read().format(user_code, cookie, cat_id, release_type)
    try:
        out = session.post(
            'https://fe3.delivery.mp.microsoft.com/ClientWebService/client.asmx',
            data=cat_id_content,
            headers={'Content-Type': 'application/soap+xml; charset=utf-8'}
        )
    except:
        print("Network Error!")
        exit(1)
    doc = minidom.parseString(html.unescape(out.text))
    filenames = {}
    for node in doc.getElementsByTagName('ExtendedUpdateInfo')[0].getElementsByTagName('Updates')[0].getElementsByTagName('Update'):
        node_xml = node.getElementsByTagName('Xml')[0]
        node_files = node_xml.getElementsByTagName('Files')
        if not node_files:
            continue
        else:
            for node_file in node_files[0].getElementsByTagName('File'):
                if node_file.hasAttribute('InstallerSpecificIdentifier') and node_file.hasAttribute('FileName'):
                    filenames[node.getElementsByTagName('ID')[0].firstChild.nodeValue] = (f"{node_file.attributes['InstallerSpecificIdentifier'].value}_{node_file.attributes['FileName'].value}",
                                                                                          node_xml.getElementsByTagName('ExtendedProperties')[0].attributes['PackageIdentityName'].value)
    identities = {}
    for node in doc.getElementsByTagName('NewUpdates')[0].getElementsByTagName('UpdateInfo'):
        node_xml = node.getElementsByTagName('Xml')[0]
        if not node_xml.getElementsByTagName('SecuredFragment'):
            continue
        else:
            id = node.getElementsByTagName('ID')[0].firstChild.nodeValue
            update_identity = node_xml.getElementsByTagName('UpdateIdentity')[0]
            if id in filenames:
                fileinfo = filenames[id]
                if fileinfo[0] not in identities:
                    identities[fileinfo[0]] = ([update_identity.attributes['UpdateID'].value,
                                                update_identity.attributes['RevisionNumber'].value], fileinfo[1])
    wsa_build_ver = 0
    for filename, value in identities.items():
        if re.match(f"MicrosoftCorporationII.WindowsSubsystemForAndroid_.*.msixbundle", filename):
            tmp_wsa_build_ver = re.search(r"\d{4}.\d{5}.\d{1,}.\d{1,}", filename).group()
            if (wsa_build_ver == 0):
                wsa_build_ver = tmp_wsa_build_ver
            elif version.parse(wsa_build_ver) < version.parse(tmp_wsa_build_ver):
                wsa_build_ver = tmp_wsa_build_ver

    if version.parse(currentver) < version.parse(wsa_build_ver):
        print(f"New version found: {wsa_build_ver}")
        new_version_found = True
        subprocess.Popen(git, shell=True, stdout=None, stderr=None, executable='/bin/bash').wait()
        try:
            with open('WIF.appversion', 'w') as file:
                file.write(wsa_build_ver)
                file.close()
            print("WIF.appversion file created successfully.")
        except Exception as e:
            print(f"Error writing to file: {e}")
        msg = f'Update WSA Version from `v{currentver}` to `v{wsa_build_ver}`'
        with open(env_file, "a") as wr:
            wr.write(f"SHOULD_BUILD=yes\n")
            wr.write(f"RELEASE_TYPE={release_type}\n")
            wr.write(f"LATEST_WIF_VER={wsa_build_ver}\n")
            wr.write(f"MSG={msg}\n")
            wr.write(f"INSIDER_UPDATE=yes\n")
file.close()
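The Prop helper in this script is a thin OrderedDict wrapper around key=value text; the script uses it to pull user_code and update_time out of the decoded token.cfg. A small usage sketch follows, assuming the Prop class defined above is in scope; the sample text and values are made up purely for illustration.

sample = "user_code=abc123\nupdate_time=2024-01-01\n# a comment line"
props = Prop(sample)
print(props.get("user_code"))    # -> abc123
print(props.get("update_time"))  # -> 2024-01-01
print(repr(props))               # lines without '=' are kept under synthetic ".N" keys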
MagiskOnWSA/Update Check/WSARetailUpdateCheck.py (new file, 145 lines)
@@ -0,0 +1,145 @@
import base64
import os
import html
import json
import re
import requests
import logging
import subprocess

from typing import Any, OrderedDict
from xml.dom import minidom

from requests import Session
from packaging import version


class Prop(OrderedDict):
    def __init__(self, props: str = ...) -> None:
        super().__init__()
        for i, line in enumerate(props.splitlines(False)):
            if '=' in line:
                k, v = line.split('=', 1)
                self[k] = v
            else:
                self[f".{i}"] = line

    def __setattr__(self, __name: str, __value: Any) -> None:
        self[__name] = __value

    def __repr__(self):
        return '\n'.join(f'{item}={self[item]}' for item in self)


logging.captureWarnings(True)
env_file = os.getenv('GITHUB_ENV')

# Category ID
cat_id = '858014f3-3934-4abe-8078-4aa193e74ca8'

release_type = "retail"

new_version_found = False

session = Session()
session.verify = False

git = (
    "git checkout -f update || git switch --discard-changes --orphan update"
)

user_code = ""
users = {""}

print("Current working directory:", os.getcwd())
print("Files in '/home/runner/work/WSABuilds/WSABuilds/MagiskOnWSALocal1/xml':", os.listdir('/home/runner/work/WSABuilds/WSABuilds/MagiskOnWSALocal1/xml'))

currentver = requests.get(f"https://raw.githubusercontent.com/MustardChef/WSABuilds/update/retail.appversion").text.replace('\n', '')

# Write for pushing later
try:
    # Write for pushing later
    file = open('retail.appversion', 'w')
    file.write(currentver)
    file.close()
    print("retail.appversion file created successfully.")
except Exception as e:
    print(f"Error writing to file: {e}")

if not new_version_found:
    # Get information
    with open("/home/runner/work/WSABuilds/WSABuilds/MagiskOnWSALocal1/xml/GetCookie.xml", "r") as f:
        cookie_content = f.read().format(user_code)
    try:
        out = session.post(
            'https://fe3.delivery.mp.microsoft.com/ClientWebService/client.asmx',
            data=cookie_content,
            headers={'Content-Type': 'application/soap+xml; charset=utf-8'}
        )
    except:
        print("Network Error!")
        exit(1)
    doc = minidom.parseString(out.text)
    cookie = doc.getElementsByTagName('EncryptedData')[0].firstChild.nodeValue
    with open("/home/runner/work/WSABuilds/WSABuilds/MagiskOnWSALocal1/xml/WUIDRequest.xml", "r") as f:
        cat_id_content = f.read().format(user_code, cookie, cat_id, release_type)
    try:
        out = session.post(
            'https://fe3.delivery.mp.microsoft.com/ClientWebService/client.asmx',
            data=cat_id_content,
            headers={'Content-Type': 'application/soap+xml; charset=utf-8'}
        )
    except:
        print("Network Error!")
        exit(1)
    doc = minidom.parseString(html.unescape(out.text))
    filenames = {}
    for node in doc.getElementsByTagName('ExtendedUpdateInfo')[0].getElementsByTagName('Updates')[0].getElementsByTagName('Update'):
        node_xml = node.getElementsByTagName('Xml')[0]
        node_files = node_xml.getElementsByTagName('Files')
        if not node_files:
            continue
        else:
            for node_file in node_files[0].getElementsByTagName('File'):
                if node_file.hasAttribute('InstallerSpecificIdentifier') and node_file.hasAttribute('FileName'):
                    filenames[node.getElementsByTagName('ID')[0].firstChild.nodeValue] = (f"{node_file.attributes['InstallerSpecificIdentifier'].value}_{node_file.attributes['FileName'].value}",
                                                                                          node_xml.getElementsByTagName('ExtendedProperties')[0].attributes['PackageIdentityName'].value)
    identities = {}
    for node in doc.getElementsByTagName('NewUpdates')[0].getElementsByTagName('UpdateInfo'):
        node_xml = node.getElementsByTagName('Xml')[0]
        if not node_xml.getElementsByTagName('SecuredFragment'):
            continue
        else:
            id = node.getElementsByTagName('ID')[0].firstChild.nodeValue
            update_identity = node_xml.getElementsByTagName('UpdateIdentity')[0]
            if id in filenames:
                fileinfo = filenames[id]
                if fileinfo[0] not in identities:
                    identities[fileinfo[0]] = ([update_identity.attributes['UpdateID'].value,
                                                update_identity.attributes['RevisionNumber'].value], fileinfo[1])
    wsa_build_ver = 0
    for filename, value in identities.items():
        if re.match(f"MicrosoftCorporationII.WindowsSubsystemForAndroid_.*.msixbundle", filename):
            tmp_wsa_build_ver = re.search(r"\d{4}.\d{5}.\d{1,}.\d{1,}", filename).group()
            if (wsa_build_ver == 0):
                wsa_build_ver = tmp_wsa_build_ver
            elif version.parse(wsa_build_ver) < version.parse(tmp_wsa_build_ver):
                wsa_build_ver = tmp_wsa_build_ver

    if version.parse(currentver) < version.parse(wsa_build_ver):
        print(f"New version found: {wsa_build_ver}")
        new_version_found = True
        subprocess.Popen(git, shell=True, stdout=None, stderr=None, executable='/bin/bash').wait()
        try:
            with open('retail.appversion', 'w') as file:
                file.write(wsa_build_ver)
                file.close()
            print("retail.appversion file created successfully.")
        except Exception as e:
            print(f"Error writing to file: {e}")
        msg = f'Update WSA Version from `v{currentver}` to `v{wsa_build_ver}`'
        with open(env_file, "a") as wr:
            wr.write(f"SHOULD_BUILD=yes\n")
            wr.write(f"RELEASE_TYPE={release_type}\n")
            wr.write(f"LATEST_RETAIL_VER={wsa_build_ver}\n")
            wr.write(f"MSG={msg}\n")
            wr.write(f"RETAIL_UPDATE=yes\n")
file.close()
MagiskOnWSA/Update Check/update-downloadlinks.py (new file, 92 lines)
@@ -0,0 +1,92 @@
import os
from bs4 import BeautifulSoup, Tag

# Load the README.md file
with open('README.md', 'r') as file:
    readme_content = file.read()

# Parse the content with BeautifulSoup
soup = BeautifulSoup(readme_content, 'html.parser')

# Define the headers to locate the table
headers = ['Operating System', 'Download Page', 'Download Mirror']

# Initialize target_table
target_table = None

# Find the table with the specified headers
for table in soup.find_all('table'):
    ths = table.find_all('th')
    if len(ths) == 3:
        th_texts = [th.get_text(strip=True) if th.img is None else (th.img['alt'] if 'alt' in th.img.attrs else '') for th in ths]
        if all(header_text == header for header_text, header in zip(th_texts, headers)):
            target_table = table
            break

# Check if a matching table was found
if target_table is None:
    print("No table with the specified headers found in README.md")
    exit(1)

# Get the GitHub ENV variables
release_type = os.getenv('RELEASE_TYPE')

# Define the cell coordinates and corresponding ENV variables for each release type
release_types = {
    'WIF': [
        ((1, 1), 'LINK_FOR_W11X64'),
        ((2, 1), 'LINK_FOR_W11ARM64'),
        ((5, 1), 'LINK_FOR_W10X64')
    ],
    'retail': [
        ((3, 1), 'LINK_FOR_W11X64'),
        ((4, 1), 'LINK_FOR_W11ARM64'),
        ((6, 1), 'LINK_FOR_W10X64')
    ]
}

# Check if the release type is valid
if release_type not in release_types:
    print(f"Invalid RELEASE_TYPE: {release_type}")
    exit(1)

# Create a 2D list (matrix) to represent the table
table_matrix = []
for _ in range(100):  # Assuming the table will not have more than 100 rows
    table_matrix.append([None] * 100)  # Assuming the table will not have more than 100 columns

# Fill the table matrix with the cells from the table
for row_num, row in enumerate(target_table.find_all('tr')):
    col_num = 0
    for cell in row.find_all(['td', 'th']):
        while table_matrix[row_num][col_num] is not None:  # Skip columns that are already filled due to rowspan
            col_num += 1
        rowspan = int(cell.get('rowspan', 1))
        colspan = int(cell.get('colspan', 1))
        for i in range(row_num, row_num + rowspan):
            for j in range(col_num, col_num + colspan):
                table_matrix[i][j] = cell

# Replace the content of the specified cells
for (row_num, col_num), env_var in release_types[release_type]:
    # Check if the cell coordinates are within the range of the table matrix
    if row_num < len(table_matrix) and col_num < len(table_matrix[row_num]):
        github_env_var = os.getenv(env_var)
        if github_env_var is None:
            print(f"The {env_var} environment variable is not set")
            exit(1)

        # Parse the GitHub ENV variable with BeautifulSoup
        github_env_var_soup = BeautifulSoup(github_env_var, 'html.parser')

        # Replace the cell content with the GitHub ENV variable
        target_cell = table_matrix[row_num][col_num]
        target_cell.clear()
        target_cell.append(github_env_var_soup)
    else:
        print(f"Cell coordinates ({row_num}, {col_num}) are out of range")
        exit(1)

# Write the updated content back to the README.md file
with open('README.md', 'w') as file:
    file.write(str(soup))
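The non-obvious part of update-downloadlinks.py is the rowspan/colspan expansion: the README download table uses merged cells, so naive td indexing would point at the wrong cell, and the matrix fill makes (row, column) coordinates stable by writing each Tag into every grid slot it spans. Below is a small self-contained sketch of the same idea on a made-up two-row table; the HTML, grid width, and variable names are illustrative only, not taken from the repository.

from bs4 import BeautifulSoup

html_doc = """
<table>
  <tr><th rowspan="2">OS</th><td>link-a</td></tr>
  <tr><td>link-b</td></tr>
</table>
"""
table = BeautifulSoup(html_doc, "html.parser").find("table")

rows = table.find_all("tr")
matrix = [[None] * 4 for _ in rows]
for r, row in enumerate(rows):
    c = 0
    for cell in row.find_all(["td", "th"]):
        while matrix[r][c] is not None:  # slot already taken by a rowspan from a row above
            c += 1
        for i in range(r, r + int(cell.get("rowspan", 1))):
            for j in range(c, c + int(cell.get("colspan", 1))):
                matrix[i][j] = cell

# Both rows now expose the merged "OS" header cell at column 0.
print(matrix[0][0].get_text(), matrix[0][1].get_text())  # OS link-a
print(matrix[1][0].get_text(), matrix[1][1].get_text())  # OS link-b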
MagiskOnWSA/Update Check/update-downloadvar.py (new file, 41 lines)
@@ -0,0 +1,41 @@
import os
from bs4 import BeautifulSoup

# Load the README.md file
with open('README.md', 'r') as file:
    readme_content = file.read()

# Parse the content with BeautifulSoup
soup = BeautifulSoup(readme_content, 'html.parser')

# Define the headers to locate the table
headers = ['Download Variant', 'Image', 'Image']

# Initialize target_table
target_table = None

# Find the table with the specified headers
for table in soup.find_all('table'):
    ths = table.find_all('th')
    if len(ths) == 3:
        th_texts = [th.get_text(strip=True) if th.img is None else (th.img['alt'] if 'alt' in th.img.attrs else '') for th in ths]
        if all(header_text == header for header_text, header in zip(th_texts, headers)):
            target_table = table
            break

# Check if a matching table was found
if target_table is None:
    print("No table with the specified headers found in README.md")
    exit(1)

# Get the GitHub ENV variables
github_env_var = os.getenv('TEXT_TO_REPLACE_WITH')
row_num = int(os.getenv('ROW_NUM'))
col_num = int(os.getenv('COLUMN_NUM'))

# Replace the cell content with the GitHub ENV variable
target_table.find_all('tr')[row_num].find_all('td')[col_num].string = github_env_var

# Write the updated content back to the README.md file
with open('README.md', 'w') as file:
    file.write(str(soup))
MagiskOnWSA/Update Check/windows10patch.ps1 (new file, 18 lines)
@@ -0,0 +1,18 @@
Clear-Host
Write-Output "`r`nPatching Windows 10 AppxManifest file..."
$xml = [xml](Get-Content '.\AppxManifest.xml')
$nsm = New-Object Xml.XmlNamespaceManager($xml.NameTable)
$nsm.AddNamespace('rescap', "http://schemas.microsoft.com/appx/manifest/foundation/windows10/restrictedcapabilities")
$nsm.AddNamespace('desktop6', "http://schemas.microsoft.com/appx/manifest/desktop/windows10/6")
$node = $xml.Package.Capabilities.SelectSingleNode("rescap:Capability[@Name='customInstallActions']", $nsm)
$xml.Package.Capabilities.RemoveChild($node) | Out-Null
$node = $xml.Package.Extensions.SelectSingleNode("desktop6:Extension[@Category='windows.customInstall']", $nsm)
$xml.Package.Extensions.RemoveChild($node) | Out-Null
$xml.Package.Dependencies.TargetDeviceFamily.MinVersion = "10.0.19041.264"
$xml.Save(".\AppxManifest.xml")
Clear-Host
Write-Output "`r`nDownloading modified DLL files..."
$ProgressPreference = 'SilentlyContinue'
Invoke-WebRequest -Uri "https://github.com/MustardChef/WSAPatch/raw/main/DLLs%20for%20WSABuilds/winhttp.dll" -OutFile "$outputDir\WSAClient\winhttp.dll"
Invoke-WebRequest -Uri "https://github.com/MustardChef/WSAPatch/raw/main/DLLs%20for%20WSABuilds/WsaPatch.dll" -OutFile "$outputDir\WSAClient\WsaPatch.dll"
Invoke-WebRequest -Uri "https://github.com/MustardChef/WSAPatch/raw/main/DLLs%20for%20WSABuilds/icu.dll" -OutFile "$outputDir\WSAClient\icu.dll"