diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..c2a5246
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,8 @@
+/__pypackages__
+regolith_de.egg-info
+/*.deb
+/PKGBUILD.new
+/.pdm.toml
+/*.pickle
+/*.pkg.tar
+/pkg
diff --git a/README.md b/README.md
index 750812d..b7639e0 100644
--- a/README.md
+++ b/README.md
@@ -4,15 +4,15 @@
 
 Standalone Regolith desktop environment for Arch Linux
 
-This is a PKGBUILD for Regolith Linux's fork of the i3 (plus Gnome-flashback) window manager/desktop environment, to rather hackily rip the desktop environment for Regolith Linux and make it work on Arch Linux and it's derivatives.
+This is a PKGBUILD for Regolith Linux's fork of the i3 (plus Gnome-flashback) window manager/desktop environment, which rather hackily rips out Regolith Linux's desktop environment and makes it work on Arch Linux and its derivatives. I will check weekly for upstream updates to the Launchpad packages, which is honestly probably way more often than necessary - Regolith is based on Ubuntu, after all, and so it doesn't update its DE packages at the same rate as we're used to on Arch.
 
 Where possible (more accurately, where I've found possible), Arch/AUR packages will be used if compatible versions exist. Currently Arch's i3-gaps, rofi, and some other packages are being used.
 
-Because of the way AUR packages work, combined with Ubuntu-specific idiosyncracies in the original config file for Regolith's i3, I've had to bundle the regolith build of st - the simple terminal, and use that as the default terminal. Feel free to change it in the config file (`/etc/regolith/i3/config`) just like you would with regular i3, it's just the only other option was for there to be no default terminal, or else try and choose one that some people will have and others won't. I get around this by just including the terminal, and luckily it's st so it's really small. It's its own package, so you can also remove it (`sudo pacman -R regolith-st`), it's not a dependency of anything, it just gets installed with the meta-package.
+Because of the way AUR packages work, combined with Ubuntu-specific idiosyncrasies in the original config file for Regolith's i3, I've had to bundle the regolith build of st (the simple terminal) and use that as the default terminal. Feel free to change it in the config file (`/etc/regolith/i3/config`) just like you would with regular i3; the only other options were to ship no default terminal at all, or to pick one that some people would have and others wouldn't. I get around this by just including the terminal, and luckily it's st, so it's really small. It's its own package, so you can also remove it (`sudo pacman -R regolith-st`); it's not a dependency of anything, it just gets installed with the meta-package.
 
-As of the latest update to Regolith proper, it seems that my package no longer conflicts with gnome-shell or GDM. If you have any issues let me know. 
+As of the latest update to Regolith proper, it seems that my package no longer conflicts with gnome-shell or GDM. If you have any issues, let me know.
 
 ## Contributing
@@ -59,20 +59,23 @@ PRs are welcome.
 This will build and install regolith-i3, regolith-i3xrocks, regolith-desktop-config, regolith-st, and regolith-styles, the five packages that make up the entire desktop environment (along with their dependencies).
 
 ## Looks/Styles
-Regolith has a pretty cool (IMO) way of styles/theming, and I've kept all that intact. 
-
-- You can run `regolith-look` to get a list of commands, but basically, `regolith-look stage` will do the initial setup of copying the regolith and Xresouces files to your user directory (in their own, independent locations, so they will NOT overwrite ~/.Xresources or ~/.config/i3/config, they will go in ~/.Xresources-regolith and ~/.config/regolith/i3/config).
-
+Regolith has a pretty cool (IMO) way of styles/theming, and I've kept all that intact.
+
+- You can run `regolith-look` to get a list of commands, but basically, `regolith-look stage` will do the initial setup of copying the regolith and Xresources files to your user directory (in their own, independent locations, so they will NOT overwrite ~/.Xresources or ~/.config/i3/config; they will go in ~/.Xresources-regolith and ~/.config/regolith/i3/config).
+
 - To set your look, run `regolith-look set <stylename>`, choosing from the list of style directories in /etc/regolith/styles (also retrievable with `regolith-look list`), such as cahuella, lascaille (the default), ayu, ayu-dark, pop-os, ubuntu, etc.
-
-- `regolith-look refresh` will refresh it for your current session, changing the terminal theme, i3xrocks theme, and wallpaper (for the styles that have their own wallpaper). It's pretty simple. 
+
+- `regolith-look refresh` will refresh it for your current session, changing the terminal theme, i3xrocks theme, and wallpaper (for the styles that have their own wallpaper). It's pretty simple.
 
 Note: VMs generally don't play well with picom/compton compositing. If you are running Regolith in a VM and have any issues with performance, make sure to kill the compositor.
 
+## Refreshing the PKGBUILD
+
+The repo contains a Python script you can use to refresh package versions in the PKGBUILD. You will need [pdm](https://pdm.fming.dev/) - `pdm sync` will install its dependencies. Run `./pull-upstream.py --help` for all the details.
+
 ## Credits
 
-Credit to Kevin Gilmer @kgilmer for the creation of Regolith Linux as well as invaluable insight during the creation of this PKGBUILD. 
-Pull requests are welcome, the number of packages here is enormous (it is a full desktop environment, after all), and this is my first software/package management project of any kind. 
+Credit to Kevin Gilmer @kgilmer for the creation of Regolith Linux, as well as invaluable insight during the creation of this PKGBUILD.
+Pull requests are welcome; the number of packages here is enormous (it is a full desktop environment, after all), and this is my first software/package management project of any kind.
 
 Credit also to Avinash Duduskar, for valuable contributions.
diff --git a/pdm.lock b/pdm.lock
new file mode 100644
index 0000000..fb0895a
--- /dev/null
+++ b/pdm.lock
@@ -0,0 +1,198 @@
+[[package]]
+name = "beautifulsoup4"
+sections = ["default"]
+version = "4.9.3"
+summary = "Screen-scraping library"
+dependencies = [
+    "soupsieve>1.2; python_version >= \"3.0\"",
+]
+
+[[package]]
+name = "certifi"
+sections = ["default"]
+version = "2020.12.5"
+summary = "Python package for providing Mozilla's CA Bundle."
+
+[[package]]
+name = "chardet"
+sections = ["default"]
+version = "4.0.0"
+summary = "Universal encoding detector for Python 2 and 3"
+
+[[package]]
+name = "click"
+sections = ["default"]
+version = "7.1.2"
+summary = "Composable command line interface toolkit"
+
+[[package]]
+name = "colorama"
+sections = ["default"]
+version = "0.4.4"
+marker = "sys_platform == 'win32'"
+summary = "Cross-platform colored terminal text."
+ +[[package]] +name = "fancycompleter" +sections = ["dev"] +version = "0.9.1" +summary = "colorful TAB completion for Python prompt" +dependencies = [ + "pyrepl>=0.8.2", + "pyreadline; platform_system == \"Windows\"", +] + +[[package]] +name = "idna" +sections = ["default"] +version = "2.10" +summary = "Internationalized Domain Names in Applications (IDNA)" + +[[package]] +name = "loguru" +sections = ["default"] +version = "0.5.3" +summary = "Python logging made (stupidly) simple" +dependencies = [ + "colorama>=0.3.4; sys_platform == \"win32\"", + "win32-setctime>=1.0.0; sys_platform == \"win32\"", +] + +[[package]] +name = "pdbpp" +sections = ["dev"] +version = "0.10.2" +summary = "pdb++, a drop-in replacement for pdb" +dependencies = [ + "fancycompleter>=0.8", + "wmctrl", + "pygments", +] + +[[package]] +name = "pygments" +sections = ["dev"] +version = "2.7.4" +summary = "Pygments is a syntax highlighting package written in Python." + +[[package]] +name = "pyreadline" +sections = ["dev"] +version = "2.1" +marker = "platform_system == 'Windows'" +summary = "A python implmementation of GNU readline." + +[[package]] +name = "pyrepl" +sections = ["dev"] +version = "0.9.0" +summary = "A library for building flexible command line interfaces" + +[[package]] +name = "requests" +sections = ["default"] +version = "2.25.1" +summary = "Python HTTP for Humans." +dependencies = [ + "chardet<5,>=3.0.2", + "idna<3,>=2.5", + "urllib3<1.27,>=1.21.1", + "certifi>=2017.4.17", +] + +[[package]] +name = "soupsieve" +sections = ["default"] +version = "2.1" +marker = "python_version >= '3.0'" +summary = "A modern CSS selector implementation for Beautiful Soup." + +[[package]] +name = "urllib3" +sections = ["default"] +version = "1.26.3" +summary = "HTTP library with thread-safe connection pooling, file post, and more." 
+ +[[package]] +name = "win32-setctime" +sections = ["default"] +version = "1.0.3" +marker = "sys_platform == 'win32'" +summary = "A small Python utility to set file creation time on Windows" + +[[package]] +name = "wmctrl" +sections = ["dev"] +version = "0.3" +summary = "A tool to programmatically control windows inside X" + +[metadata] +lock_version = "2" +content_hash = "sha256:4ea3972b713ad5f3a616f303a57d3690c13cc2fa7b8b4e0e03c51f8374e12b05" + +[metadata.files] +"beautifulsoup4 4.9.3" = [ + {file = "beautifulsoup4-4.9.3-py2-none-any.whl", hash = "sha256:4c98143716ef1cb40bf7f39a8e3eec8f8b009509e74904ba3a7b315431577e35"}, + {file = "beautifulsoup4-4.9.3-py3-none-any.whl", hash = "sha256:fff47e031e34ec82bf17e00da8f592fe7de69aeea38be00523c04623c04fb666"}, + {file = "beautifulsoup4-4.9.3.tar.gz", hash = "sha256:84729e322ad1d5b4d25f805bfa05b902dd96450f43842c4e99067d5e1369eb25"}, +] +"certifi 2020.12.5" = [ + {file = "certifi-2020.12.5-py2.py3-none-any.whl", hash = "sha256:719a74fb9e33b9bd44cc7f3a8d94bc35e4049deebe19ba7d8e108280cfd59830"}, + {file = "certifi-2020.12.5.tar.gz", hash = "sha256:1a4995114262bffbc2413b159f2a1a480c969de6e6eb13ee966d470af86af59c"}, +] +"chardet 4.0.0" = [ + {file = "chardet-4.0.0-py2.py3-none-any.whl", hash = "sha256:f864054d66fd9118f2e67044ac8981a54775ec5b67aed0441892edb553d21da5"}, + {file = "chardet-4.0.0.tar.gz", hash = "sha256:0d6f53a15db4120f2b08c94f11e7d93d2c911ee118b6b30a04ec3ee8310179fa"}, +] +"click 7.1.2" = [ + {file = "click-7.1.2-py2.py3-none-any.whl", hash = "sha256:dacca89f4bfadd5de3d7489b7c8a566eee0d3676333fbb50030263894c38c0dc"}, + {file = "click-7.1.2.tar.gz", hash = "sha256:d2b5255c7c6349bc1bd1e59e08cd12acbbd63ce649f2588755783aa94dfb6b1a"}, +] +"colorama 0.4.4" = [ + {file = "colorama-0.4.4-py2.py3-none-any.whl", hash = "sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2"}, + {file = "colorama-0.4.4.tar.gz", hash = "sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b"}, +] +"fancycompleter 0.9.1" = [ + {file = "fancycompleter-0.9.1-py3-none-any.whl", hash = "sha256:dd076bca7d9d524cc7f25ec8f35ef95388ffef9ef46def4d3d25e9b044ad7080"}, + {file = "fancycompleter-0.9.1.tar.gz", hash = "sha256:09e0feb8ae242abdfd7ef2ba55069a46f011814a80fe5476be48f51b00247272"}, +] +"idna 2.10" = [ + {file = "idna-2.10-py2.py3-none-any.whl", hash = "sha256:b97d804b1e9b523befed77c48dacec60e6dcb0b5391d57af6a65a312a90648c0"}, + {file = "idna-2.10.tar.gz", hash = "sha256:b307872f855b18632ce0c21c5e45be78c0ea7ae4c15c828c20788b26921eb3f6"}, +] +"loguru 0.5.3" = [ + {file = "loguru-0.5.3-py3-none-any.whl", hash = "sha256:f8087ac396b5ee5f67c963b495d615ebbceac2796379599820e324419d53667c"}, + {file = "loguru-0.5.3.tar.gz", hash = "sha256:b28e72ac7a98be3d28ad28570299a393dfcd32e5e3f6a353dec94675767b6319"}, +] +"pdbpp 0.10.2" = [ + {file = "pdbpp-0.10.2.tar.gz", hash = "sha256:73ff220d5006e0ecdc3e2705d8328d8aa5ac27fef95cc06f6e42cd7d22d55eb8"}, +] +"pygments 2.7.4" = [ + {file = "Pygments-2.7.4-py3-none-any.whl", hash = "sha256:bc9591213a8f0e0ca1a5e68a479b4887fdc3e75d0774e5c71c31920c427de435"}, + {file = "Pygments-2.7.4.tar.gz", hash = "sha256:df49d09b498e83c1a73128295860250b0b7edd4c723a32e9bc0d295c7c2ec337"}, +] +"pyreadline 2.1" = [ + {file = "pyreadline-2.1.zip", hash = "sha256:4530592fc2e85b25b1a9f79664433da09237c1a270e4d78ea5aa3a2c7229e2d1"}, +] +"pyrepl 0.9.0" = [ + {file = "pyrepl-0.9.0.tar.gz", hash = "sha256:292570f34b5502e871bbb966d639474f2b57fbfcd3373c2d6a2f3d56e681a775"}, +] +"requests 2.25.1" = [ + {file = 
"requests-2.25.1-py2.py3-none-any.whl", hash = "sha256:c210084e36a42ae6b9219e00e48287def368a26d03a048ddad7bfee44f75871e"}, + {file = "requests-2.25.1.tar.gz", hash = "sha256:27973dd4a904a4f13b263a19c866c13b92a39ed1c964655f025f3f8d3d75b804"}, +] +"soupsieve 2.1" = [ + {file = "soupsieve-2.1-py3-none-any.whl", hash = "sha256:4bb21a6ee4707bf43b61230e80740e71bfe56e55d1f1f50924b087bb2975c851"}, + {file = "soupsieve-2.1.tar.gz", hash = "sha256:6dc52924dc0bc710a5d16794e6b3480b2c7c08b07729505feab2b2c16661ff6e"}, +] +"urllib3 1.26.3" = [ + {file = "urllib3-1.26.3-py2.py3-none-any.whl", hash = "sha256:1b465e494e3e0d8939b50680403e3aedaa2bc434b7d5af64dfd3c958d7f5ae80"}, + {file = "urllib3-1.26.3.tar.gz", hash = "sha256:de3eedaad74a2683334e282005cd8d7f22f4d55fa690a2a1020a416cb0a47e73"}, +] +"win32-setctime 1.0.3" = [ + {file = "win32_setctime-1.0.3-py3-none-any.whl", hash = "sha256:dc925662de0a6eb987f0b01f599c01a8236cb8c62831c22d9cada09ad958243e"}, + {file = "win32_setctime-1.0.3.tar.gz", hash = "sha256:4e88556c32fdf47f64165a2180ba4552f8bb32c1103a2fafd05723a0bd42bd4b"}, +] +"wmctrl 0.3" = [ + {file = "wmctrl-0.3.tar.gz", hash = "sha256:d806f65ac1554366b6e31d29d7be2e8893996c0acbb2824bbf2b1f49cf628a13"}, +] diff --git a/pull-upstream.py b/pull-upstream.py new file mode 100755 index 0000000..24a54fd --- /dev/null +++ b/pull-upstream.py @@ -0,0 +1,366 @@ +#!/usr/bin/env python3 + +from datetime import date, datetime +import os.path +import pdb +import pickle +import re +import subprocess + +from bs4 import BeautifulSoup +import click +from loguru import logger +import requests + +## +## CONFIGURABLES +## No need to edit this, just pass args. +## + +DEFAULT_CHAN="release" # also valid: unstable, stable +DEFAULT_ARCH="amd64" # in Ubuntu's terms ; not Arch's + +PKG_DATA_PICKLE_PATH="upstream_pkg_data.pickle" + +## +## END OF CONFIGURABLES +## + +def get_in(d, ks): + """Get a value from arbitrarily nested dicts""" + if not ks: + return d + elif len(ks) == 1: + return d.get(ks[0]) + else: + return get_in(d.get(keys[0], {}), keys[1:]) + + +def http_get_or_die(uri): + res = requests.get(uri) + if res.status_code != 200: + exit(f"error {res.status_code} fetching '{uri}'") + return BeautifulSoup(res.text, features="html.parser") + + +_ABSOLUTE_URI_RE=re.compile("^(https?:)?//") # yucki but good enough for launchpad in early 2021 +def crawl_upstream_page(uri): + logger.info(f"Fetching {uri}") + page = http_get_or_die(uri) + result = {} + + # Crawl descending directories + dir_imgs = page.select("IMG[alt='[DIR]']") + for dir_img in dir_imgs: + a_elements = dir_img.parent.parent.select("a") + href = a_elements[0]["href"] + assert(not _ABSOLUTE_URI_RE.match(href)) + child_uri = uri + href + result.update(crawl_upstream_page(child_uri)) + + # Look for archives + file_imgs = page.select("IMG[alt='[ ]']") + for file_img in file_imgs: + a_elements = file_img.parent.parent.select("a") + href = a_elements[0]["href"] + assert(not _ABSOLUTE_URI_RE.match(href)) + result[href] = uri + href + + if len(file_imgs) > 0: + logger.info(f"Added {len(file_imgs)} files") + + return result + + +def crawl_upstream(main_uri): + logger.info(f"Starting upstream crawl from {main_uri}") + pkg_data = crawl_upstream_page(main_uri) + + if os.path.exists(PKG_DATA_PICKLE_PATH): + # Make sure we create a new file so that ctime is properly set + os.remove(PKG_DATA_PICKLE_PATH) + + with open(PKG_DATA_PICKLE_PATH, 'wb') as f: + pickle.dump(pkg_data, f) + + logger.success(f"Wrote upstream pkg data to {PKG_DATA_PICKLE_PATH}") + return pkg_data + + +def 
+
+
+def get_package_list(main_uri, force_crawl):
+    if os.path.exists(PKG_DATA_PICKLE_PATH) and not force_crawl:
+        ctime_ts = os.stat(PKG_DATA_PICKLE_PATH).st_ctime
+        pickle_age = datetime.now() - datetime.fromtimestamp(ctime_ts)
+        if pickle_age.days > 7:  # older than a week
+            logger.warning(f"Loading {pickle_age.days} days old pickled pkg data " +
+                           f"from {PKG_DATA_PICKLE_PATH}. 'rm' it to fetch fresher data.")
+        else:
+            logger.info(f"Loading pickled pkg data from {PKG_DATA_PICKLE_PATH}")
+
+        with open(PKG_DATA_PICKLE_PATH, 'rb') as f:
+            pkgs = pickle.load(f)
+
+        logger.success("Done loading")
+    else:
+        pkgs = crawl_upstream(main_uri)
+
+    return pkgs
+
+
+_PKG_NAME_RE = re.compile(r"(?:.*/)?([^_]+)_([^_]+)_([^.]+)\.deb")
+def pkg_name_parser(pkgname, extra_data=None):
+    m = _PKG_NAME_RE.match(pkgname)
+    if m:
+        result = {"name": m[1], "version": m[2], "arch": m[3], "filename": pkgname}
+    else:
+        result = {"url": pkgname, "mismatch": True}
+    result.update(extra_data or {})
+    return result
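+
+
+# Quick illustration of the Debian <name>_<version>_<arch>.deb convention parsed above,
+# using a made-up filename:
+#
+#     >>> pkg_name_parser("regolith-i3_3.4-1_amd64.deb")
+#     {'name': 'regolith-i3', 'version': '3.4-1', 'arch': 'amd64', 'filename': 'regolith-i3_3.4-1_amd64.deb'}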
Not patching.") + return pkgbuild_lines, False + + new_lines = pkgbuild_lines.copy() + for line, match in matches: + if "srcdir" in match[0]: + # Keep variable expansion for local files + new_line = match[1] + match[2] + upstream_pkg["filename"] + else: + # Rebuild patched line, keeping prefix, discarding in-PKGBUILD variables expansions + # to keep it 'simple' + new_line = match[1] + upstream_pkg["url"] + + # Rebuild patched PKGBUILD lines + new_lines[line] = f"{new_line}\n" + + return new_lines, True + + +def pkgbuild_update(pkgbuild_lines, pkgbuild_pkgs, upstream_pkgs): + to_rehash_pkgs = [] + + for name, pkgbuild_pkg in pkgbuild_pkgs.items(): + if name not in upstream_pkgs: + # XXX the script should be able to remove these + logger.warning(f"Package {name} was not found while crawling upstream. Ignoring.") + continue + + upstream_pkg = upstream_pkgs[name] + if upstream_pkg["version"] != pkgbuild_pkg["version"]: + pkgbuild_lines, patched = pkgbuild_patch_pkg(pkgbuild_lines, pkgbuild_pkg, upstream_pkg) + if patched: + to_rehash_pkgs.append((pkgbuild_pkg, upstream_pkg)) + else: + logger.debug(f"Keeping {name} at version {pkgbuild_pkg['version']}") + + return pkgbuild_lines, to_rehash_pkgs + + +def rehash_pkgs(to_rehash_pkgs): + rehashes = [] + + for pkgbuild_pkg, upstream_pkg in to_rehash_pkgs: + url = upstream_pkg["url"] + filename = upstream_pkg["filename"] + + # XXX support force download + if os.path.exists(filename): + logger.info(f"Using local copy of {filename} - download skipped") + else: + logger.info(f"Downloading {url}") + response = requests.get(url) + if response.status_code != 200: + logger.error(f"Problem fetching {url} ; {response}") + continue + + with open(upstream_pkg["filename"], "wb") as f: + f.write(response.content) + + # Just run sha256sum as a subprocess, which is likely to be faster than any pure Python + # implem. Maybe going with a CPython extension would be ~faster, but I cba to select an + # additional dependency. + result = subprocess.run(["sha256sum", filename], capture_output=True, text=True, check=True) + sha256 = result.stdout.split()[0] + rehashes.append((pkgbuild_pkg['sha256'], sha256)) + + return rehashes + + +def pkgbuild_update_hashes(pkgbuild_lines, rehashes): + new_lines = [] + rehash_set = set(rehashes) + for i, line in enumerate(pkgbuild_lines): + to_remove = None + new_line = None + for rehash_tuple in rehash_set: + rehash_from, rehash_to = rehash_tuple + if rehash_from in line: + new_line = line.replace(rehash_from, rehash_to) + to_remove = rehash_tuple + # XXX is it safe to break here? (not if one SHA256 be present several times) + new_lines.append(new_line if new_line else line) + if to_remove: + rehash_set.remove(to_remove) # Gradually decrease the inner loop iteration count + return new_lines + + +# _UBUNTU_VERSION_RE=re.compile(r""" +# ^(?P[0-9]+)\. # Upstream Major +# (?P[0-9]+)\. # Upstream Minor +# (?P[0-9]+)- # Upstream Patch +# (?P[0-9]+) # Ubuntu Patchlevel +# (?P[a-z]+)? 
+
+
+# _UBUNTU_VERSION_RE = re.compile(r"""
+#     ^(?P<major>[0-9]+)\.          # Upstream Major
+#     (?P<minor>[0-9]+)\.           # Upstream Minor
+#     (?P<patch>[0-9]+)-            # Upstream Patch
+#     (?P<patchlevel>[0-9]+)        # Ubuntu Patchlevel
+#     (?P<release>[a-z]+)?          # Ubuntu Releasename
+#     (?P<rest>~.*)?$               # Ubuntu ~whatever at the end
+#     """, re.VERBOSE)
+#
+#
+# _UBUNTU_RELEASE_NAMES = dict(zesty=1704, artful=1710, bionic=1804, cosmic=1810, disco=1904,
+#                              eoan=1910, focal=2004, groovy=2010, hirsute=2104)
+#
+#
+# class UbuntuVersion:
+#     def __init__(self, version_string):
+#         self.s = version_string
+#         self.m = _UBUNTU_VERSION_RE.match(version_string)
+#         assert self.m
+#     def __lt__(self, other):
+#         pass
+#     def __gt__(self, other):
+#         pass
+#     def __eq__(self, other):
+#         return self.s == other.s
+#     def __le__(self, other):
+#         return self.__lt__(other) or self.__eq__(other)
+#     def __ge__(self, other):
+#         return self.__gt__(other) or self.__eq__(other)
+#     def __ne__(self, other):
+#         return not self.__eq__(other)
+
+
+def filter_crawled_pkgs(upstream_names_to_urls, arch):
+    # Parse all names into dicts
+    parsed_pkgs = [pkg_name_parser(name, {"url": url}) for name, url in upstream_names_to_urls.items()]
+
+    # Filter out mis-parsed and wrong-arch packages
+    filter_fn = lambda p: "mismatch" not in p and (p["arch"] == arch or p["arch"] == "all")
+    filtered_pkgs = [pkg for pkg in parsed_pkgs if filter_fn(pkg)]
+
+    # Index all packages back into a dict ; names to upstream versions (plural)
+    indexed_pkgs = {}
+    for pkg in filtered_pkgs:
+        name = pkg["name"]
+        if name not in indexed_pkgs:
+            indexed_pkgs[name] = [pkg]
+        else:
+            indexed_pkgs[name].append(pkg)
+
+    # Select the version we're interested in ; for cases with several upstream versions
+    result = {}
+    for name, pkgs in indexed_pkgs.items():
+        if len(pkgs) == 1:
+            result[name] = pkgs[0]
+        else:
+            # 'Highest' version in lexicographical order wins
+            vs = sorted(pkgs, key=lambda x: x["version"])
+            result[name] = vs[-1]
+
+    return result
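+
+
+# Typical invocations, assuming dependencies were installed with `pdm sync` (the sessions
+# below are illustrative, not exhaustive - see --help):
+#
+#     ./pull-upstream.py                  # defaults: release channel, amd64
+#     ./pull-upstream.py -c unstable -f   # force a fresh crawl of the unstable channel
+#
+# In all cases the result is written to PKGBUILD.new; PKGBUILD itself is never modified.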
+ """ + main_uri=f"http://ppa.launchpad.net/regolith-linux/{chan}/ubuntu/pool/main/" + + upstream_names_to_urls = get_package_list(main_uri, force) + upstream_pkgs = filter_crawled_pkgs(upstream_names_to_urls, arch) + + pkgbuild_lines = pkgbuild_load() + pkgbuild_pkgs = pkgbuild_parse(pkgbuild_lines) + + updated_pkgbuild_lines, to_rehash_pkgs = pkgbuild_update(pkgbuild_lines, pkgbuild_pkgs, upstream_pkgs) + pkg_rehashes = rehash_pkgs(to_rehash_pkgs) + updated_pkgbuild_lines = pkgbuild_update_hashes(updated_pkgbuild_lines, pkg_rehashes) + + with open("PKGBUILD.new", "w") as f: + f.writelines(updated_pkgbuild_lines) + logger.success("Wrote PKGBUILD.new - Please diff then copy over if satisfied.") + + +if __name__ == "__main__": + main() diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000..858bdf9 --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,30 @@ +[project] +name = "regolith-de" +version = "0.0.1" +description = "" +authors = [ + {name = "gardotd426"}, + {name = "mrzor"} +] +dependencies = [ + "beautifulsoup4~=4.9", + "requests~=2.25", + "loguru~=0.5", + "click~=7.1", +] +dev-dependencies = [ + "pdbpp~=0.10", +] +requires-python = ">=3.9" +dynamic = ["classifiers"] +license = {text = "MIT"} + +[project.urls] +homepage = "" + +[tool] +[tool.pdm] + +[build-system] +requires = ["pdm-pep517"] +build-backend = "pdm.pep517.api"