2023-12-07 10:50:12 +01:00
|
|
|
#!/usr/bin/env python
|
2023-08-23 07:48:09 +02:00
|
|
|
# -*- coding: utf-8 -*-
|
|
|
|
#
|
|
|
|
# @linux-aarhus - root.nix.dk
|
2023-12-07 10:50:12 +01:00
|
|
|
# License: GNU GPL, version 3 or later; <https://www.gnu.org/licenses/gpl.html>
|
2023-08-23 07:48:09 +02:00
|
|
|
import argparse
|
|
|
|
import os
|
|
|
|
import requests
|
2023-12-07 10:50:12 +01:00
|
|
|
import subprocess
|
|
|
|
import sys
|
|
|
|
import requests.exceptions
|
2023-08-23 07:48:09 +02:00
|
|
|
from pathlib import Path
|
|
|
|
|
2023-12-07 13:11:13 +01:00
|
|
|
# Program metadata
PROG_VERSION = "0.7"
PROG_NAME = os.path.basename(__file__)
FOLDER = Path.home()
GNU_URL = "https://www.gnu.org/licenses/gpl.html"

# Data endpoints: official ISO definitions, release-review assets and
# per-edition developer-preview assets (GitHub "latest release" API).
ISO_RELEASE_URL = (
    "https://gitlab.manjaro.org/webpage/iso-info/-/raw/master/file-info.json"
)
REL_REVIEW_URL = (
    "https://api.github.com/repos/manjaro/release-review/releases/latest"
)
DEV_PREVIEW_URL = (
    "https://api.github.com/repos/manjaro-edition/download/releases/latest"
)

# Editions that exist as review / developer-preview builds
review_editions = ["gnome", "plasma", "xfce"]
|
2023-08-23 07:48:09 +02:00
|
|
|
|
|
|
|
|
|
|
|
def download_file(url: str, folder_name: str) -> bool:
    """Stream the file at *url* into *folder_name*, printing progress.

    :param url: download URL; the last path component is used as filename
    :param folder_name: destination folder (leading "/" is forced)
    :return: True on success, False on any error (message printed)
    """
    filename: str = url.split("/")[-1]
    # Build the path from components; the original single-argument
    # os.path.join("/{}/{}".format(...)) was a no-op around str.format.
    path = os.path.join("/", folder_name, filename)
    try:
        response = requests.get(url, stream=True, timeout=30)
        # Fail early on HTTP errors (404 etc.) instead of silently
        # saving the server's error page as the ISO and returning True.
        response.raise_for_status()
        total_size_in_bytes = int(response.headers.get("content-length", 0))
        block_size = 1024
        # Only shrink the chunk for genuinely tiny files; a missing
        # content-length header (0) must not produce iter_content(0).
        if 0 < total_size_in_bytes < block_size:
            block_size = total_size_in_bytes
        with open(path, "wb") as f:
            progress = 0
            for data in response.iter_content(block_size):
                f.write(data)
                # The final chunk may be shorter than block_size.
                progress += min(len(data), block_size)
                print(f"Downloading {round(progress/1024/1024)}MiB of "
                      f"{round(total_size_in_bytes/1024/1024)}MiB", end="\r")
    except Exception as e:
        print(f"{e}")
        return False
    return True
|
|
|
|
|
|
|
|
|
|
|
|
def get_definitions(url: str) -> dict:
    """Fetch and decode the JSON document at *url*.

    Any failure (network, HTTP status, invalid JSON) is fatal: an error
    message is printed and the process exits with status 1.
    """
    try:
        response = requests.get(url=url, timeout=10)
        response.raise_for_status()
        return response.json()
    except Exception as error:
        print("Download error", error)
        print("Terminated")
        sys.exit(1)
|
|
|
|
|
2023-08-23 07:48:09 +02:00
|
|
|
|
2023-12-07 15:28:05 +01:00
|
|
|
def init_iso_list(url: str, review: bool = False, preview: bool = False) -> list:
    """Select and run the ISO-list builder for the requested source.

    Preview takes precedence over review; with neither flag set the
    official ISO definitions are used.
    """
    builder = init_official_iso_list
    if preview:
        builder = init_developer_preview_list
    elif review:
        builder = init_release_review_list
    return builder(url)
|
|
|
|
|
|
|
|
|
|
|
|
def _extract_edition(name: str, info: dict) -> dict:
    """Map one edition's raw JSON entry to the internal ISO record.

    Raises KeyError when the entry lacks any expected field; the caller
    skips such entries.
    """
    return {"name": name,
            "full": {
                "img": info["image"],
                "sig": info["signature"]
            },
            "minimal": {
                "img": info["minimal"]["image"],
                "sig": info["minimal"]["signature"]
            }}


def init_official_iso_list(url: str) -> list:
    """Build ISO records for all official and community editions.

    :param url: URL of the file-info.json definitions document
    :return: list of dicts with full/minimal image and signature URLs
    """
    data = get_definitions(url)
    init_iso_result = []
    # "official" and "community" share one schema, so one loop handles
    # both; a missing section becomes an empty dict instead of crashing
    # on None.items().
    for section in ("official", "community"):
        for name, info in (data.get(section) or {}).items():
            try:
                init_iso_result.append(_extract_edition(name, info))
            except KeyError:
                # Entry without the full/minimal image+signature fields.
                continue
    return init_iso_result
|
|
|
|
|
|
|
|
|
2023-12-07 15:28:05 +01:00
|
|
|
def init_release_review_list(url: str) -> list:
    """Build ISO records for each review edition.

    Extracts the browser_download_url property of every release asset,
    then groups the URLs per edition into split-archive parts and a
    sha256 checksum file, for both full and minimal variants.

    :param url: GitHub "latest release" API endpoint of the review repo
    :return: list of dicts keyed name/full/minimal
    """
    data = get_definitions(url)
    asset_urls = [asset["browser_download_url"] for asset in data.get("assets")]

    init_review_result = []
    for edition in review_editions:
        # Review assets name the plasma edition "kde".
        keyword = "kde" if edition == "plasma" else edition
        edition_urls = [u for u in asset_urls if keyword in u]

        full_iso = [u for u in edition_urls if "minimal" not in u]
        minimal_iso = [u for u in edition_urls if "minimal" in u]

        init_review_result.append(
            {"name": edition,
             "full": {
                 "parts": [u for u in full_iso if ".iso.z" in u],
                 "sha": [u for u in full_iso if ".iso.sha256" in u][0]},
             "minimal": {
                 "parts": [u for u in minimal_iso if ".iso.z" in u],
                 "sha": [u for u in minimal_iso if ".iso.sha256" in u][0]}
             })

    return init_review_result
|
2023-12-07 10:50:12 +01:00
|
|
|
|
|
|
|
|
2023-12-07 15:28:05 +01:00
|
|
|
def init_developer_preview_list(url: str) -> list:
    """Build ISO records for each edition from the developer previews.

    Each edition lives in its own repo, so the generic URL's "edition"
    placeholder is substituted per edition, then the assets'
    browser_download_url properties are grouped into split-archive parts
    and a sha256 checksum file for the full and minimal variants.

    :param url: generic GitHub API URL containing the word "edition"
    :return: list of dicts keyed name/full/minimal
    """
    # Filename markers are loop-invariant — define them once instead of
    # re-assigning on every iteration.
    minimal = "minimal"
    sha256sum = ".iso.sha256"
    part = ".iso.z"

    init_devel_result = []
    for edition in review_editions:
        # replace edition placeholder in the generic url
        edition_url = url.replace("edition", edition)
        data = get_definitions(edition_url)
        dev_list = [asset["browser_download_url"]
                    for asset in data.get("assets")]

        full_iso = [x for x in dev_list if minimal not in x]
        minimal_iso = [x for x in dev_list if minimal in x]

        f_part = [x for x in full_iso if part in x]
        f_256sum = [x for x in full_iso if sha256sum in x]
        m_part = [x for x in minimal_iso if part in x]
        m_256sum = [x for x in minimal_iso if sha256sum in x]

        init_devel_result.append({"name": edition,
                                  "full": {
                                      "parts": f_part,
                                      "sha": f_256sum[0]},
                                  "minimal": {
                                      "parts": m_part,
                                      "sha": m_256sum[0]}
                                  })

    return init_devel_result
|
2023-08-23 07:48:09 +02:00
|
|
|
|
|
|
|
|
|
|
|
def download(url: str) -> bool:
    """Announce and download *url* into FOLDER; True on success."""
    filename = url.split("/")[-1]
    print(f'Download: {filename}')
    return download_file(url, f"{FOLDER}")
|
|
|
|
|
|
|
|
|
2023-08-24 08:41:56 +02:00
|
|
|
def main():
    """Parse arguments, download the chosen ISO and verify it.

    Official ISOs are verified with gpg; review/preview ISOs arrive as
    split 7z archives which are tested, extracted and sha256-checked.
    Exits 0 on success, 1 on any failure.
    """
    # Official definitions supply the valid edition names for argparse.
    iso_files = init_iso_list(ISO_RELEASE_URL, review=False)
    choices = []
    for c in iso_files:
        choices.append(c["name"])
    parser = argparse.ArgumentParser(
        prog=f"{PROG_NAME}",
        description="This tool will download a named Manjaro ISO",
        epilog=f"{PROG_NAME} v. {PROG_VERSION} - GPL v3 or later <{GNU_URL}>")
    parser.add_argument("edition",
                        type=str,
                        help="Edition e.g. plasma or xfce.",
                        choices=choices)
    parser.add_argument("-f", "--full",
                        required=False,
                        action="store_true",
                        help="Download full ISO")
    previews = parser.add_argument_group("Previews")
    preview = previews.add_mutually_exclusive_group()
    preview.add_argument("-r", "--review",
                         required=False,
                         action="store_true",
                         help="Get Latest Release Review ISO")
    preview.add_argument("-p", "--preview",
                         required=False,
                         action="store_true",
                         help="Get Latest Developer Preview ISO")

    args = parser.parse_args()
    if args.review:
        if args.edition in review_editions:
            iso_files = init_iso_list(REL_REVIEW_URL, review=True)
        else:
            print("Invalid review edition. Valid editions: " +
                  ", ".join(review_editions))
            sys.exit(1)

    if args.preview:
        if args.edition in review_editions:
            iso_files = init_iso_list(DEV_PREVIEW_URL, preview=True)
        else:
            # FIX: this branch previously reported "review" edition.
            print("Invalid preview edition. Valid editions: " +
                  ", ".join(review_editions))
            sys.exit(1)

    if len(iso_files) == 0:
        print("Could not get iso file list")
        sys.exit(1)

    try:
        result = [x for x in iso_files if args.edition == x["name"]]
        if args.full:
            iso = result[0]["full"]
        else:
            iso = result[0]["minimal"]
    except IndexError:
        print("Could not extract edition from data")
        sys.exit(1)

    if args.review or args.preview:
        # Review/preview ISOs are split 7z archives plus a sha256 file.
        sha_result = download(iso["sha"])
        shafile = iso["sha"].split("/")[-1]
        isozip = [x for x in iso["parts"] if ".iso.zip" in x]
        zipfile = isozip[0].split("/")[-1]
        zip_result = False
        for part in iso["parts"]:
            zip_result = download(part)
            if not zip_result:
                break

        if zip_result and sha_result:
            # Test the archive, extract it, then verify the checksum.
            subprocess.run(["7z", "-y", "t", f"{zipfile}"],
                           cwd=f"{FOLDER}")
            subprocess.run(["7z", "-y", "x", f"{zipfile}"],
                           cwd=f"{FOLDER}")
            print("\nWait for checksum to complete ...")
            subprocess.run(["sha256sum", "-c", f"{shafile}"],
                           cwd=f"{FOLDER}")
        else:
            # FIX: typo "failied" corrected to match the other branch.
            print("Download failed")
            sys.exit(1)

    else:
        # Official ISOs ship a detached gpg signature.
        iso_result = download(iso["img"])
        sig_result = download(iso["sig"])

        if sig_result and iso_result:
            print("Wait for verification to complete ...")
            subprocess.run(["gpg", "--verify", f'{iso["sig"].split("/")[-1]}'],
                           cwd=f"{FOLDER}")
        else:
            print("Download failed")
            sys.exit(1)

    sys.exit(0)
|
2023-08-24 08:41:56 +02:00
|
|
|
|
|
|
|
|
|
|
|
if __name__ == '__main__':
    # Run the CLI; Ctrl-C exits cleanly with status 1.
    try:
        main()
    except KeyboardInterrupt:
        print("\nExit: interrupted by the user.")
        sys.exit(1)
|