# abra/bin/abralib.py
"""Shared utilities for bin/*.py scripts."""
from logging import DEBUG, basicConfig, getLogger
from os import chdir, mkdir
from os.path import exists, expanduser
from pathlib import Path
from shlex import split
from subprocess import check_output
from sys import exit
from requests import get
HOME_PATH = expanduser("~/")
CLONES_PATH = Path(f"{HOME_PATH}/.abra/apps").absolute()
REPOS_TO_SKIP = (
    "abra",
    "abra-apps",
    "abra-gandi",
    "abra-hetzner",
    "auto-apps-json",
    "auto-mirror",
    "backup-bot",
    "coopcloud.tech",
    "coturn",
    "docker-cp-deploy",
    "docker-dind-bats-kcov",
    "docs.coopcloud.tech",
    "example",
    "gardening",
    "organising",
    "pyabra",
    "radicle-seed-node",
    "stack-ssh-deploy",
    "swarm-cronjob",
)
YQ_PATH = Path(f"{HOME_PATH}/.abra/vendor/yq")
JQ_PATH = Path(f"{HOME_PATH}/.abra/vendor/jq")

log = getLogger(__name__)
basicConfig()
log.setLevel(DEBUG)


def _run_cmd(cmd, shell=False, **kwargs):
    """Run a shell command."""
    args = [split(cmd)]

    if shell:
        # Pass the command string to the shell unsplit (needed for pipelines).
        args = [cmd]
        kwargs = {"shell": shell}

    try:
        return check_output(*args, **kwargs).decode("utf-8").strip()
    except Exception as exception:
        log.error(f"Failed to run {cmd}, saw {str(exception)}")
        exit(1)
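
# Illustrative examples (a sketch, not from the original script; the output
# value shown is an assumption):
#
#   _run_cmd("git --version")                          # -> "git version 2.x.y"
#   _run_cmd("git branch --list | wc -l", shell=True)  # pipelines need shell=True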


def get_repos_json():
    """Retrieve the repo list from Gitea."""
    url = "https://git.autonomic.zone/api/v1/orgs/coop-cloud/repos"
    log.info(f"Retrieving {url}")

    repos = []
    response = True
    page = 1

    try:
        # The endpoint is paginated; keep fetching until an empty page comes back.
        while response:
            log.info(f"Trying to fetch page {page}")
            response = get(url + f"?page={page}", timeout=10).json()
            repos.extend(response)
            page += 1

        return repos
    except Exception as exception:
        log.error(f"Failed to retrieve {url}, saw {str(exception)}")
        exit(1)
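
# Note (assumed shape, based on the Gitea repos API): each element of the list
# returned above is a repository object along the lines of
#   {"name": "...", "clone_url": "https://...", "ssh_url": "...", ...}
# clone_all_apps() below only reads the "name", "clone_url" and "ssh_url" keys.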


def clone_all_apps(repos_json, ssh=False):
    """Clone all Co-op Cloud apps to ~/.abra/apps."""
    if not exists(CLONES_PATH):
        mkdir(CLONES_PATH)

    if ssh:
        repos = [[p["name"], p["ssh_url"]] for p in repos_json]
    else:
        repos = [[p["name"], p["clone_url"]] for p in repos_json]

    for name, url in repos:
        if name in REPOS_TO_SKIP:
            continue

        if not exists(f"{CLONES_PATH}/{name}"):
            log.info(f"Retrieving {url}")
            _run_cmd(f"git clone {url} {CLONES_PATH}/{name}")

            chdir(f"{CLONES_PATH}/{name}")
            if not int(_run_cmd("git branch --list | wc -l", shell=True)):
                # No local branch after cloning: guess that "main" is the default.
                log.info(f"Guessing main branch is HEAD for {name}")
                _run_cmd("git checkout main")
        else:
            log.info(f"Updating {name}")
            chdir(f"{CLONES_PATH}/{name}")
            _run_cmd("git fetch -a")