rework fetching because the kde profile name was substituted by plasma
This commit is contained in:
parent
5f2af60c53
commit
f484bc06ce
1 changed files with 82 additions and 62 deletions
140
get-iso
140
get-iso
|
@ -8,21 +8,20 @@ import os
|
|||
import requests
|
||||
import subprocess
|
||||
import sys
|
||||
import time
|
||||
import requests.exceptions
|
||||
from pathlib import Path
|
||||
|
||||
PROG_VERSION = "0.7"
|
||||
DEF_URL = \
|
||||
ISO_RELEASE_URL = \
|
||||
"https://gitlab.manjaro.org/webpage/iso-info/-/raw/master/file-info.json"
|
||||
FOLDER = Path.home()
|
||||
PROG_NAME = os.path.basename(__file__)
|
||||
GNU_URL = "https://www.gnu.org/licenses/gpl.html"
|
||||
REVIEW_URL = \
|
||||
REL_REVIEW_URL = \
|
||||
"https://api.github.com/repos/manjaro/release-review/releases/latest"
|
||||
REVIEW_DEV_URL = \
|
||||
DEV_PREVIEW_URL = \
|
||||
"https://api.github.com/repos/manjaro-edition/download/releases/latest"
|
||||
review_editions = ["gnome", "kde", "xfce"]
|
||||
review_editions = ["gnome", "plasma", "xfce"]
|
||||
|
||||
|
||||
def download_file(url: str, folder_name: str) -> bool:
|
||||
|
@ -51,21 +50,23 @@ def download_file(url: str, folder_name: str) -> bool:
|
|||
|
||||
|
||||
def get_definitions(url: str) -> dict:
|
||||
iso_def = {}
|
||||
try:
|
||||
resp = requests.get(url=url, timeout=10)
|
||||
resp.raise_for_status()
|
||||
iso_def = resp.json()
|
||||
return resp.json()
|
||||
except Exception as e:
|
||||
print(f"{e}")
|
||||
return iso_def
|
||||
print("Download error", e)
|
||||
print("Terminated")
|
||||
sys.exit(1)
|
||||
|
||||
|
||||
def init_iso_list(url: str, review: bool = False, developer: bool = False) -> list:
|
||||
def init_iso_list(url: str, review: bool = False, preview: bool = False) -> list:
|
||||
if preview:
|
||||
return init_developer_preview_list(url)
|
||||
|
||||
if review:
|
||||
return init_review_iso_list(url)
|
||||
if developer:
|
||||
return init_dev_preview_iso_list(url)
|
||||
return init_release_review_list(url)
|
||||
|
||||
return init_official_iso_list(url)
|
||||
|
||||
|
||||
|
@ -104,7 +105,7 @@ def init_official_iso_list(url: str) -> list:
|
|||
return init_iso_result
|
||||
|
||||
|
||||
def init_review_iso_list(url: str) -> list:
|
||||
def init_release_review_list(url: str) -> list:
|
||||
"""
|
||||
Get data from review endpoint
|
||||
:param url:
|
||||
|
@ -112,33 +113,44 @@ def init_review_iso_list(url: str) -> list:
|
|||
"""
|
||||
# from the assets list we want to extract
|
||||
# the browser_download_url property for each asset
|
||||
init_iso_result = []
|
||||
init_review_result = []
|
||||
data = get_definitions(url)
|
||||
data_assets: dict = data.get("assets")
|
||||
url_list = []
|
||||
review_list = []
|
||||
for asset in data_assets:
|
||||
url_list.append(asset["browser_download_url"])
|
||||
review_list.append(asset["browser_download_url"])
|
||||
|
||||
minimal = "minimal"
|
||||
sha256sum = ".iso.sha256"
|
||||
part = ".iso.z"
|
||||
for edition in review_editions:
|
||||
urls = [x for x in url_list if edition in x]
|
||||
if edition == "plasma":
|
||||
urls = [x for x in review_list if "kde" in x]
|
||||
else:
|
||||
urls = [x for x in review_list if edition in x]
|
||||
|
||||
full_iso = [x for x in urls if minimal not in x]
|
||||
minimal_iso = [x for x in urls if minimal in x]
|
||||
|
||||
f_part = [x for x in full_iso if part in x]
|
||||
f_256sum = [x for x in full_iso if sha256sum in x]
|
||||
|
||||
m_part = [x for x in minimal_iso if part in x]
|
||||
m_256sum = [x for x in minimal_iso if sha256sum in x]
|
||||
result = {"name": edition,
|
||||
"full": {"img": f_part, "shasum": f_256sum[0]},
|
||||
"minimal": {"img": m_part, "shasum": m_256sum[0]}}
|
||||
init_iso_result.append(result)
|
||||
|
||||
return init_iso_result
|
||||
init_review_result.append({"name": edition,
|
||||
"full": {
|
||||
"parts": f_part,
|
||||
"sha": f_256sum[0]},
|
||||
"minimal": {
|
||||
"parts": m_part,
|
||||
"sha": m_256sum[0]}
|
||||
})
|
||||
|
||||
return init_review_result
|
||||
|
||||
|
||||
def init_dev_preview_iso_list(url: str) -> list:
|
||||
def init_developer_preview_list(url: str) -> list:
|
||||
"""
|
||||
Get data from review endpoint
|
||||
:param url:
|
||||
|
@ -146,33 +158,39 @@ def init_dev_preview_iso_list(url: str) -> list:
|
|||
"""
|
||||
# from the assets list we want to extract
|
||||
# the browser_download_url property for each asset
|
||||
init_iso_result = []
|
||||
init_devel_result = []
|
||||
|
||||
for edition in review_editions:
|
||||
if edition == "kde":
|
||||
edition = "plasma"
|
||||
data = get_definitions(url.replace("edition", edition))
|
||||
# replace edition in generic url
|
||||
edition_url = url.replace("edition", edition)
|
||||
data = get_definitions(edition_url)
|
||||
data_assets: dict = data.get("assets")
|
||||
url_list = []
|
||||
dev_list = []
|
||||
for asset in data_assets:
|
||||
url_list.append(asset["browser_download_url"])
|
||||
dev_list.append(asset["browser_download_url"])
|
||||
minimal = "minimal"
|
||||
sha256sum = ".iso.sha256"
|
||||
part = ".iso.z"
|
||||
|
||||
full_iso = [x for x in url_list if minimal not in x]
|
||||
minimal_iso = [x for x in url_list if minimal in x]
|
||||
full_iso = [x for x in dev_list if minimal not in x]
|
||||
minimal_iso = [x for x in dev_list if minimal in x]
|
||||
|
||||
f_part = [x for x in full_iso if part in x]
|
||||
f_256sum = [x for x in full_iso if sha256sum in x]
|
||||
|
||||
m_part = [x for x in minimal_iso if part in x]
|
||||
m_256sum = [x for x in minimal_iso if sha256sum in x]
|
||||
result = {"name": edition,
|
||||
"full": {"img": f_part, "shasum": f_256sum[0]},
|
||||
"minimal": {"img": m_part, "shasum": m_256sum[0]}
|
||||
}
|
||||
|
||||
init_iso_result.append(result)
|
||||
init_devel_result.append({"name": edition,
|
||||
"full": {
|
||||
"parts": f_part,
|
||||
"sha": f_256sum[0]},
|
||||
"minimal": {
|
||||
"parts": m_part,
|
||||
"sha": m_256sum[0]}
|
||||
})
|
||||
|
||||
return init_iso_result
|
||||
return init_devel_result
|
||||
|
||||
|
||||
def download(url: str) -> bool:
|
||||
|
@ -182,7 +200,7 @@ def download(url: str) -> bool:
|
|||
|
||||
|
||||
def main():
|
||||
iso_files = init_iso_list(DEF_URL, review=False)
|
||||
iso_files = init_iso_list(ISO_RELEASE_URL, review=False)
|
||||
choices = []
|
||||
for c in iso_files:
|
||||
choices.append(c["name"])
|
||||
|
@ -210,51 +228,54 @@ def main():
|
|||
help="Get Latest Developer Preview ISO")
|
||||
|
||||
args = parser.parse_args()
|
||||
|
||||
if args.review is not None:
|
||||
if args.edition == "plasma":
|
||||
args.edition = "kde"
|
||||
if args.review:
|
||||
if args.edition in review_editions:
|
||||
iso_files = init_iso_list(REVIEW_URL, review=True)
|
||||
iso_files = init_iso_list(REL_REVIEW_URL, review=True)
|
||||
else:
|
||||
print("Invalid review edition. Valid editions: " +
|
||||
", ".join(review_editions))
|
||||
sys.exit(1)
|
||||
|
||||
if args.development is not None:
|
||||
if args.edition == "plasma":
|
||||
args.edition = "kde"
|
||||
if args.development:
|
||||
if args.edition in review_editions:
|
||||
iso_files = init_iso_list(REVIEW_DEV_URL, developer=True)
|
||||
iso_files = init_iso_list(DEV_PREVIEW_URL, preview=True)
|
||||
else:
|
||||
print("Invalid review edition. Valid editions: " +
|
||||
", ".join(review_editions))
|
||||
sys.exit(1)
|
||||
|
||||
edition = [x for x in iso_files if args.edition == x["name"]]
|
||||
if len(iso_files) == 0:
|
||||
print("Could not get iso file list")
|
||||
sys.exit(1)
|
||||
|
||||
try:
|
||||
result = [x for x in iso_files if args.edition == x["name"]]
|
||||
if args.full:
|
||||
iso = edition[0]["full"]
|
||||
iso = result[0]["full"]
|
||||
else:
|
||||
iso = edition[0]["minimal"]
|
||||
iso = result[0]["minimal"]
|
||||
except IndexError:
|
||||
print("Could not extract edition from data")
|
||||
sys.exit(1)
|
||||
|
||||
if args.review or args.development:
|
||||
sha_result = download(iso["shasum"])
|
||||
shaname = iso["shasum"].split("/")[-1]
|
||||
isozip = [x for x in iso["img"] if ".iso.zip" in x]
|
||||
zipname = isozip[0].split("/")[-1]
|
||||
sha_result = download(iso["sha"])
|
||||
shafile = iso["sha"].split("/")[-1]
|
||||
isozip = [x for x in iso["parts"] if ".iso.zip" in x]
|
||||
zipfile = isozip[0].split("/")[-1]
|
||||
zip_result = False
|
||||
for part in iso["img"]:
|
||||
for part in iso["parts"]:
|
||||
zip_result = download(part)
|
||||
if not zip_result:
|
||||
break
|
||||
|
||||
if zip_result and sha_result:
|
||||
subprocess.run(["7z", "-y", "t", f"{zipname}"],
|
||||
subprocess.run(["7z", "-y", "t", f"{zipfile}"],
|
||||
cwd=f"{FOLDER}")
|
||||
subprocess.run(["7z", "-y", "x", f"{zipname}"],
|
||||
subprocess.run(["7z", "-y", "x", f"{zipfile}"],
|
||||
cwd=f"{FOLDER}")
|
||||
print("\nWait for checksum to complete ...")
|
||||
subprocess.run(["sha256sum", "-c", f"{shaname}"],
|
||||
subprocess.run(["sha256sum", "-c", f"{shafile}"],
|
||||
cwd=f"{FOLDER}")
|
||||
else:
|
||||
print("Download failed")
|
||||
|
@ -266,7 +287,6 @@ def main():
|
|||
|
||||
if sig_result and iso_result:
|
||||
print("Wait for verification to complete ...")
|
||||
time.sleep(5)
|
||||
subprocess.run(["gpg", "--verify", f'{iso["sig"].split("/")[-1]}'],
|
||||
cwd=f"{FOLDER}")
|
||||
else:
|
||||
|
|
Loading…
Reference in a new issue