From 70829f5d67189f280f7e60d7852e617dab790a9e Mon Sep 17 00:00:00 2001
From: Frede Hundewadt
Date: Mon, 8 Jan 2024 10:12:10 +0100
Subject: [PATCH] merge development into wonky main

---
 PKGBUILD         |  17 +++
 get-iso          | 321 +++++++++++++++++++++++++++++++++++++++++++++++
 requirements.txt |   1 +
 3 files changed, 339 insertions(+)
 create mode 100644 PKGBUILD
 create mode 100755 get-iso
 create mode 100644 requirements.txt

diff --git a/PKGBUILD b/PKGBUILD
new file mode 100644
index 0000000..0d06ae7
--- /dev/null
+++ b/PKGBUILD
@@ -0,0 +1,17 @@
+# Maintainer: linux-aarhus
+
+pkgname='manjaro-get-iso'
+pkgver=0.9
+pkgrel=1
+pkgdesc='A tool to download Manjaro ISO'
+arch=('any')
+url='https://scm.nix.dk/root/manjaro-get-iso'
+license=('GPL')
+depends=('python-requests' 'p7zip')
+source=("${url}/archive/v${pkgver}.tar.gz")
+
+package() {
+    install -dm755 "$pkgdir/usr/bin"
+    install -m755 "$srcdir/$pkgname/get-iso" "$pkgdir/usr/bin"
+}
+sha256sums=('64703dadb058cdb9dc6bfa847259ad42a85a408755352e3837eb6d8187647451')
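
Build note (editorial, not part of the patch): with the PKGBUILD above in an
empty directory, the standard Arch tooling builds and installs the package; the
sha256sum is assumed to match the v0.9 tarball published at the url above.

    $ makepkg -si    # fetch the source, resolve depends, build and install
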
diff --git a/get-iso b/get-iso
new file mode 100755
index 0000000..f4ef3b5
--- /dev/null
+++ b/get-iso
@@ -0,0 +1,321 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+#
+# @linux-aarhus - root.nix.dk
+# License: GNU GPL, version 3 or later;
+import argparse
+import os
+import requests
+import subprocess
+import sys
+import requests.exceptions
+from pathlib import Path
+
+PROG_VERSION = "0.9"
+PROG_NAME = os.path.basename(__file__)
+ISO_RELEASE_URL = \
+    "https://gitlab.manjaro.org/webpage/iso-info/-/raw/master/file-info.json"
+GNU_URL = "https://www.gnu.org/licenses/gpl.html"
+REL_REVIEW_URL = \
+    "https://api.github.com/repos/manjaro/release-review/releases/latest"
+DEV_PREVIEW_URL = \
+    "https://api.github.com/repos/manjaro-edition/download/releases/latest"
+review_editions = ["gnome", "plasma", "xfce"]
+FOLDER = Path.home()
+
+
+def download_file(url: str, folder_name: str) -> bool:
+    filename: str = url.split("/")[-1]
+    path = os.path.join(folder_name, filename)
+    try:
+        response = requests.get(url, stream=True)
+        total_size_in_bytes = int(response.headers.get("content-length", 0))
+        block_size = 1024
+        if total_size_in_bytes < block_size:
+            block_size = total_size_in_bytes
+        with open(path, "wb") as f:
+            progress = 0
+            for data in response.iter_content(block_size):
+                f.write(data)
+                if len(data) < block_size:
+                    progress += len(data)
+                else:
+                    progress += block_size
+                print(f"Downloading {round(progress/1024/1024)}MiB of "
+                      f"{round(total_size_in_bytes/1024/1024)}MiB", end="\r")
+    except Exception as e:
+        print(f"{e}")
+        return False
+    return True
+
+
+def get_definitions(url: str) -> dict:
+    try:
+        resp = requests.get(url=url, timeout=10)
+        resp.raise_for_status()
+        return resp.json()
+    except Exception as e:
+        print("Download error", e)
+        print("Terminated")
+        sys.exit(1)
+
+
+def init_iso_list(url: str, review: bool = False, preview: bool = False) -> list:
+    if preview:
+        return init_developer_preview_list(url)
+
+    if review:
+        return init_release_review_list(url)
+
+    return init_official_iso_list(url)
+
+
+def init_official_iso_list(url: str) -> list:
+    data = get_definitions(url)
+    data_official: dict = data.get("official")
+    data_community: dict = data.get("community")
+    init_iso_result = []
+    for ok, ov in data_official.items():
+        try:
+            init_iso_result.append({"name": ok,
+                                    "full": {
+                                        "img": ov["image"],
+                                        "sig": ov["signature"]
+                                    },
+                                    "minimal": {
+                                        "img": ov["minimal"]["image"],
+                                        "sig": ov["minimal"]["signature"]
+                                    }
+                                    })
+        except KeyError:
+            continue
+    for ck, cv in data_community.items():
+        try:
+            init_iso_result.append({"name": ck,
+                                    "full": {
+                                        "img": cv["image"],
+                                        "sig": cv["signature"]
+                                    },
+                                    "minimal": {
+                                        "img": cv["minimal"]["image"],
+                                        "sig": cv["minimal"]["signature"]
+                                    }})
+        except KeyError:
+            continue
+    return init_iso_result
+
+
+def init_release_review_list(url: str) -> list:
+    """
+    Get data from review endpoint
+    :param url:
+    :return:
+    """
+    # from the assets list we want to extract
+    # the browser_download_url property for each asset
+    init_review_result = []
+    data = get_definitions(url)
+    data_assets: dict = data.get("assets")
+    review_list = []
+    for asset in data_assets:
+        review_list.append(asset["browser_download_url"])
+
+    minimal = "minimal"
+    sha256sum = ".iso.sha256"
+    part = ".iso.z"
+    for edition in review_editions:
+        if edition == "plasma":
+            urls = [x for x in review_list if "kde" in x]
+        else:
+            urls = [x for x in review_list if edition in x]
+
+        full_iso = [x for x in urls if minimal not in x]
+        minimal_iso = [x for x in urls if minimal in x]
+
+        f_part = [x for x in full_iso if part in x]
+        f_256sum = [x for x in full_iso if sha256sum in x]
+
+        m_part = [x for x in minimal_iso if part in x]
+        m_256sum = [x for x in minimal_iso if sha256sum in x]
+
+        init_review_result.append({"name": edition,
+                                   "full": {
+                                       "parts": f_part,
+                                       "sha": f_256sum[0]},
+                                   "minimal": {
+                                       "parts": m_part,
+                                       "sha": m_256sum[0]}
+                                   })
+
+    return init_review_result
+
+
+def init_developer_preview_list(url: str) -> list:
+    """
+    Get data from developer preview endpoint
+    :param url:
+    :return:
+    """
+    # from the assets list we want to extract
+    # the browser_download_url property for each asset
+    init_devel_result = []
+
+    for edition in review_editions:
+        # replace edition in generic url
+        edition_url = url.replace("edition", edition)
+        data = get_definitions(edition_url)
+        data_assets: dict = data.get("assets")
+        dev_list = []
+        for asset in data_assets:
+            dev_list.append(asset["browser_download_url"])
+        minimal = "minimal"
+        sha256sum = ".iso.sha256"
+        part = ".iso.z"
+
+        full_iso = [x for x in dev_list if minimal not in x]
+        minimal_iso = [x for x in dev_list if minimal in x]
+
+        f_part = [x for x in full_iso if part in x]
+        f_256sum = [x for x in full_iso if sha256sum in x]
+
+        m_part = [x for x in minimal_iso if part in x]
+        m_256sum = [x for x in minimal_iso if sha256sum in x]
+
+        init_devel_result.append({"name": edition,
+                                  "full": {
+                                      "parts": f_part,
+                                      "sha": f_256sum[0]},
+                                  "minimal": {
+                                      "parts": m_part,
+                                      "sha": m_256sum[0]}
+                                  })
+
+    return init_devel_result
+
+
+def download(url: str, out_dir: str) -> bool:
+    print(f'Download: {url.split("/")[-1]}')
+    success = download_file(url, f"{out_dir}")
+    return success
+
+
+def dir_path(path: str) -> str:
+    if os.path.isdir(path):
+        return path
+    else:
+        raise NotADirectoryError(path)
+
+
+def main():
+    out_dir = FOLDER
+    iso_files = init_iso_list(ISO_RELEASE_URL, review=False)
+    choices = []
+    for c in iso_files:
+        choices.append(c["name"])
+    parser = argparse.ArgumentParser(
+        prog=f"{PROG_NAME}",
+        description="This tool will download a named Manjaro ISO",
+        epilog=f"{PROG_NAME} v. {PROG_VERSION} - GPL v3 or later <{GNU_URL}>")
+    parser.add_argument("edition",
+                        type=str,
+                        help="Edition e.g. plasma or xfce.",
+                        choices=choices)
+    parser.add_argument("-f", "--full",
+                        required=False,
+                        action="store_true",
+                        help="Download full ISO")
+    parser.add_argument("-o", "--out-dir",
+                        nargs='?',
+                        type=dir_path,
+                        default=FOLDER,
+                        help="Folder to store downloaded ISO files.")
+    previews = parser.add_argument_group("Previews")
+    preview = previews.add_mutually_exclusive_group()
+    preview.add_argument("-r", "--review",
+                         required=False,
+                         action="store_true",
+                         help="Get Latest Release Review ISO")
+    preview.add_argument("-p", "--preview",
+                         required=False,
+                         action="store_true",
+                         help="Get Latest Developer Preview ISO")
+
+    args = parser.parse_args()
+
+    if args.out_dir is not None:
+        out_dir = args.out_dir
+
+    if args.review:
+        if args.edition in review_editions:
+            iso_files = init_iso_list(REL_REVIEW_URL, review=True)
+        else:
+            print("Invalid review edition. Valid editions: " +
+                  ", ".join(review_editions))
+            sys.exit(1)
+
+    if args.preview:
+        if args.edition in review_editions:
+            iso_files = init_iso_list(DEV_PREVIEW_URL, preview=True)
+        else:
+            print("Invalid preview edition. Valid editions: " +
+                  ", ".join(review_editions))
+            sys.exit(1)
+
+    if len(iso_files) == 0:
+        print("Could not get iso file list")
+        sys.exit(1)
+
+    try:
+        result = [x for x in iso_files if args.edition == x["name"]]
+        if args.full:
+            iso = result[0]["full"]
+        else:
+            iso = result[0]["minimal"]
+    except IndexError:
+        print("Could not extract edition from data")
+        sys.exit(1)
+
+    if args.review or args.preview:
+        sha_result = download(iso["sha"], out_dir=out_dir)
+        shafile = iso["sha"].split("/")[-1]
+        isozip = [x for x in iso["parts"] if ".iso.zip" in x]
+        zipfile = isozip[0].split("/")[-1]
+        zip_result = False
+        for part in iso["parts"]:
+            zip_result = download(part, out_dir=out_dir)
+            if not zip_result:
+                break
+
+        if zip_result and sha_result:
+            subprocess.run(["7z", "-y", "t", f"{zipfile}"],
+                           cwd=f"{out_dir}")
+            subprocess.run(["7z", "-y", "x", f"{zipfile}"],
+                           cwd=f"{out_dir}")
+            print("\nWait for checksum to complete ...")
+            subprocess.run(["sha256sum", "-c", f"{shafile}"],
+                           cwd=f"{out_dir}")
+        else:
+            print("Download failed")
+            sys.exit(1)
+
+    else:
+        iso_result = download(iso["img"], out_dir=out_dir)
+        sig_result = download(iso["sig"], out_dir=out_dir)
+
+        if sig_result and iso_result:
+            print("Wait for verification to complete ...")
+            subprocess.run(["gpg", "--verify", f'{iso["sig"].split("/")[-1]}'],
+                           cwd=f"{out_dir}")
+        else:
+            print("Download failed")
+            sys.exit(1)
+    print("Finished downloading ISO to '{}'".format(out_dir))
+    sys.exit(0)
+
+
+if __name__ == '__main__':
+    try:
+        main()
+    except KeyboardInterrupt:
+        print("\n" + "Exit: interrupted by the user.")
+        sys.exit(1)
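
Usage sketch (editorial, not part of the patch): the flags below are the ones
defined by the argparse setup above. Stable edition names come from the
downloaded file-info.json, while -r/-p accept only gnome, plasma and xfce;
7z, sha256sum and gpg must be on PATH for extraction and verification.

    $ get-iso xfce                     # minimal Xfce ISO into $HOME
    $ get-iso plasma --full            # full Plasma ISO plus .sig, gpg-verified
    $ get-iso gnome --review           # release-review .z parts; 7z test/extract, sha256 check
    $ get-iso plasma --preview -o ~/Downloads   # developer preview into an existing folder
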
diff --git a/requirements.txt b/requirements.txt
new file mode 100644
index 0000000..663bd1f
--- /dev/null
+++ b/requirements.txt
@@ -0,0 +1 @@
+requests
\ No newline at end of file
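
To run the script from a checkout rather than the package, requests is the
only third-party dependency (requirements.txt). A minimal sketch, assuming
Python 3 and a POSIX shell:

    $ python -m venv .venv && . .venv/bin/activate
    $ pip install -r requirements.txt
    $ ./get-iso xfce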