manjaro-get-iso/get-iso

#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# @linux-aarhus - root.nix.dk
# License: GNU GPL, version 3 or later; <https://www.gnu.org/licenses/gpl.html>
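#
# Typical invocations (edition names come from file-info.json, e.g. xfce,
# gnome, plasma):
#   get-iso xfce            # minimal ISO + .sig, verified with gpg
#   get-iso plasma --full   # full ISO instead of the minimal one
#   get-iso gnome -r        # latest release-review ISO (split zip + sha256)
#   get-iso xfce -d         # latest developer preview ISO
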
import argparse
import os
import requests
import subprocess
import sys
import time
import requests.exceptions
from pathlib import Path
from pprint import pprint
DEF_URL = \
"https://gitlab.manjaro.org/webpage/iso-info/-/raw/master/file-info.json"
FOLDER = Path.home()
PROG_NAME = os.path.basename(__file__)
PROG_VERSION = "0.6"
GNU_URL = "https://www.gnu.org/licenses/gpl.html"
REVIEW_URL = \
"https://api.github.com/repos/manjaro/release-review/releases/latest"
REVIEW_DEV_URL = \
"https://api.github.com/repos/manjaro-edition/download/releases/latest"
review_editions = ["gnome", "kde", "xfce"]
def download_file(url: str, folder_name: str) -> bool:
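    # Stream `url` into `folder_name` block by block, printing a rough MiB
    # progress counter; returns True on success, False on any error.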
filename: str = url.split("/")[-1]
    path = os.path.join(folder_name, filename)
try:
response = requests.get(url, stream=True)
total_size_in_bytes = int(response.headers.get("content-length", 0))
block_size = 1024
        if 0 < total_size_in_bytes < block_size:
            block_size = total_size_in_bytes
with open(path, "wb") as f:
progress = 0
for data in response.iter_content(block_size):
f.write(data)
if len(data) < block_size:
progress += len(data)
else:
progress += block_size
print(f"Downloading {round(progress/1024/1024)}MiB of "
f"{round(total_size_in_bytes/1024/1024)}MiB", end="\r")
except Exception as e:
print(f"{e}")
return False
return True
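
# Fetch the JSON document at `url` and return it as a dict; on failure the
# exception is printed and an empty dict is returned.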
def get_definitions(url: str) -> dict:
iso_def = {}
try:
resp = requests.get(url=url, timeout=10)
resp.raise_for_status()
iso_def = resp.json()
except Exception as e:
print(f"{e}")
return iso_def
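
# Pick which ISO list to build: release-review, developer preview, or the
# official/community editions from file-info.json.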
def init_iso_list(url: str, review: bool = False, developer: bool = False) -> list:
if review:
return init_review_iso_list(url)
if developer:
return init_dev_preview_iso_list(url)
return init_official_iso_list(url)
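
# Build {name, full, minimal} entries from the "official" and "community"
# sections of file-info.json; entries missing any expected image/signature
# key are skipped by the KeyError handler.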
def init_official_iso_list(url: str) -> list:
data = get_definitions(url)
    data_official: dict = data.get("official", {})
    data_community: dict = data.get("community", {})
init_iso_result = []
for ok, ov in data_official.items():
try:
init_iso_result.append({"name": ok,
"full": {
"img": ov["image"],
"sig": ov["signature"]
},
"minimal": {
"img": ov["minimal"]["image"],
"sig": ov["minimal"]["signature"]
}
})
except KeyError:
continue
for ck, cv in data_community.items():
try:
init_iso_result.append({"name": ck,
"full": {
"img": cv["image"],
"sig": cv["signature"]
},
"minimal": {
"img": cv["minimal"]["image"],
"sig": cv["minimal"]["signature"]
}})
except KeyError:
continue
return init_iso_result
def init_review_iso_list(url: str) -> list:
"""
Get data from review endpoint
:param url:
:return:
"""
# from the assets list we want to extract
# the browser_download_url propoerty for each asset
init_iso_result = []
data = get_definitions(url)
    data_assets: list = data.get("assets", [])
url_list = []
for asset in data_assets:
url_list.append(asset["browser_download_url"])
minimal = "minimal"
sha256sum = ".iso.sha256"
part = ".iso.z"
for edition in review_editions:
urls = [x for x in url_list if edition in x]
full_iso = [x for x in urls if minimal not in x]
minimal_iso = [x for x in urls if minimal in x]
f_part = [x for x in full_iso if part in x]
f_256sum = [x for x in full_iso if sha256sum in x]
m_part = [x for x in minimal_iso if part in x]
m_256sum = [x for x in minimal_iso if sha256sum in x]
result = {"name": edition,
"full": {"img": f_part, "shasum": f_256sum[0]},
"minimal": {"img": m_part, "shasum": m_256sum[0]}}
init_iso_result.append(result)
return init_iso_result
def init_dev_preview_iso_list(url: str) -> list:
"""
Get data from review endpoint
:param url:
:return:
"""
# from the assets list we want to extract
# the browser_download_url propoerty for each asset
init_iso_result = []
for edition in review_editions:
if edition == "kde":
edition = "plasma"
data = get_definitions(url.replace("edition", edition))
        data_assets: list = data.get("assets", [])
url_list = []
for asset in data_assets:
url_list.append(asset["browser_download_url"])
minimal = "minimal"
sha256sum = ".iso.sha256"
part = ".iso.z"
full_iso = [x for x in url_list if minimal not in x]
minimal_iso = [x for x in url_list if minimal in x]
f_part = [x for x in full_iso if part in x]
f_256sum = [x for x in full_iso if sha256sum in x]
m_part = [x for x in minimal_iso if part in x]
m_256sum = [x for x in minimal_iso if sha256sum in x]
result = {"name": edition,
"full": {"img": f_part, "shasum": f_256sum[0]},
"minimal": {"img": m_part, "shasum": m_256sum[0]}
}
init_iso_result.append(result)
return init_iso_result
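
# Announce the file name, then hand the actual transfer to download_file().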
def download(url: str) -> bool:
print(f'Download: {url.split("/")[-1]}')
success = download_file(url, f"{FOLDER}")
return success
def main():
iso_files = init_iso_list(DEF_URL, review=False)
choices = []
for c in iso_files:
choices.append(c["name"])
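
    # the positional "edition" argument is restricted to the names found in
    # the official/community definitions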
parser = argparse.ArgumentParser(
prog=f"{PROG_NAME}",
description="This tool will download a named Manjaro ISO",
epilog=f"{PROG_NAME} v. {PROG_VERSION} - GPL v3 or later <{GNU_URL}>")
parser.add_argument("edition",
type=str,
help="Edition e.g. plasma or xfce.",
choices=choices)
parser.add_argument("-f", "--full",
required=False,
action="store_true",
help="Download full ISO")
previews = parser.add_argument_group("Previews")
preview = previews.add_mutually_exclusive_group()
preview.add_argument("-r", "--review",
required=False,
action="store_true",
help="Get Latest Release Review ISO")
preview.add_argument("-d", "--development",
required=False,
action="store_true",
help="Get Latest Developer Preview ISO")
args = parser.parse_args()
    if args.review:
if args.edition == "plasma":
args.edition = "kde"
if args.edition in review_editions:
iso_files = init_iso_list(REVIEW_URL, review=True)
else:
print("Invalid review edition. Valid editions: " +
", ".join(review_editions))
sys.exit(1)
    if args.development:
        # developer preview releases are published per edition as
        # gnome / plasma / xfce, so map "kde" to "plasma" for the lookup
        if args.edition == "kde":
            args.edition = "plasma"
        if args.edition in ("gnome", "plasma", "xfce"):
            iso_files = init_iso_list(REVIEW_DEV_URL, developer=True)
        else:
            print("Invalid development edition. "
                  "Valid editions: gnome, plasma, xfce")
            sys.exit(1)
edition = [x for x in iso_files if args.edition == x["name"]]
if args.full:
iso = edition[0]["full"]
else:
iso = edition[0]["minimal"]
if args.review or args.development:
sha_result = download(iso["shasum"])
shaname = iso["shasum"].split("/")[-1]
isozip = [x for x in iso["img"] if ".iso.zip" in x]
zipname = isozip[0].split("/")[-1]
zip_result = False
for part in iso["img"]:
zip_result = download(part)
if not zip_result:
break
if zip_result and sha_result:
subprocess.run(["7z", "-y", "t", f"{zipname}"],
cwd=f"{FOLDER}")
subprocess.run(["7z", "-y", "x", f"{zipname}"],
cwd=f"{FOLDER}")
print("\nWait for checksum to complete ...")
subprocess.run(["sha256sum", "-c", f"{shaname}"],
cwd=f"{FOLDER}")
else:
print("Download failied")
sys.exit(1)
else:
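        # Official ISOs are a single image plus a detached signature; once
        # both downloads succeed, verify with gpg --verify.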
iso_result = download(iso["img"])
sig_result = download(iso["sig"])
if sig_result and iso_result:
print("Wait for verification to complete ...")
time.sleep(5)
subprocess.run(["gpg", "--verify", f'{iso["sig"].split("/")[-1]}'],
cwd=f"{FOLDER}")
else:
print("Download failed")
sys.exit(1)
sys.exit(0)
if __name__ == '__main__':
try:
main()
except KeyboardInterrupt:
print("\n" + "Exit: interrupted by the user.")
sys.exit(1)