2021-03-26 00:14:14 +00:00
|
|
|
#!/usr/bin/env python3
|
|
|
|
|
2021-04-02 14:05:31 +00:00
|
|
|
# Usage: ./app-json.py
|
|
|
|
#
|
|
|
|
# Gather metadata from Co-op Cloud apps in $ABRA_DIR/apps (default
|
|
|
|
# ~/.abra/apps), and format it as JSON so that it can be hosted here:
|
2021-04-18 01:43:21 +00:00
|
|
|
# https://apps.coopcloud.tech
|
2021-04-02 14:05:31 +00:00
|
|
|
|
2021-03-28 09:40:49 +00:00
|
|
|
from json import dump
|
2021-04-02 18:40:31 +00:00
|
|
|
from logging import DEBUG, basicConfig, getLogger
|
2021-04-02 14:05:31 +00:00
|
|
|
from os import chdir, listdir, mkdir
|
2021-04-03 18:42:02 +00:00
|
|
|
from os.path import basename, exists, expanduser
|
2021-03-26 00:14:14 +00:00
|
|
|
from pathlib import Path
|
2021-03-28 09:40:49 +00:00
|
|
|
from re import findall, search
|
2021-03-26 00:14:14 +00:00
|
|
|
from shlex import split
|
2021-04-08 12:53:50 +00:00
|
|
|
from subprocess import DEVNULL, check_output
|
2021-04-02 18:40:31 +00:00
|
|
|
from sys import exit
|
2021-03-26 00:14:14 +00:00
|
|
|
|
|
|
|
from requests import get
|
|
|
|
|
2021-03-26 19:48:08 +00:00
|
|
|
# Filesystem locations used throughout the script.
HOME_PATH = expanduser("~/")
# Where abra keeps its local clones of every app repository.
CLONES_PATH = (Path(HOME_PATH) / ".abra" / "apps").absolute()
# The yq binary vendored by abra, used to query compose files.
YQ_PATH = Path(HOME_PATH) / ".abra" / "vendor" / "yq"
# Directory containing this script, for locating the deploy target.
SCRIPT_PATH = Path(__file__).absolute().parent
|
2021-04-02 19:00:55 +00:00
|
|
|
# Repositories in the coop-cloud org that are not deployable apps
# (tooling, infrastructure, documentation, ...). These are excluded both
# from cloning and from the generated apps.json catalogue.
REPOS_TO_SKIP = (
    "abra",
    "abra-apps",
    "abra-gandi",
    "abra-hetzner",
    "backup-bot",
    "coopcloud.tech",
    "coturn",
    "docker-cp-deploy",
    "docker-dind-bats-kcov",
    "docs.coopcloud.tech",
    "example",
    "gardening",
    "organising",
    "pyabra",
    "radicle-seed-node",
    "stack-ssh-deploy",
    "swarm-cronjob",
)
|
2021-03-26 19:48:08 +00:00
|
|
|
|
2021-04-02 18:40:31 +00:00
|
|
|
# basicConfig() installs the default stderr handler on the root logger;
# this module's logger is then opened up to emit every level.
basicConfig()
log = getLogger(__name__)
log.setLevel(DEBUG)
|
2021-03-26 00:14:14 +00:00
|
|
|
|
2021-04-02 18:40:31 +00:00
|
|
|
|
2021-04-05 10:31:45 +00:00
|
|
|
def _run_cmd(cmd, shell=False, **kwargs):
|
2021-04-02 18:40:31 +00:00
|
|
|
"""Run a shell command."""
|
|
|
|
args = [split(cmd)]
|
|
|
|
|
|
|
|
if shell:
|
|
|
|
args = [cmd]
|
|
|
|
kwargs = {"shell": shell}
|
|
|
|
|
|
|
|
try:
|
|
|
|
return check_output(*args, **kwargs).decode("utf-8").strip()
|
|
|
|
except Exception as exception:
|
|
|
|
log.error(f"Failed to run {cmd}, saw {str(exception)}")
|
|
|
|
exit(1)
|
|
|
|
|
|
|
|
|
2021-04-18 01:43:21 +00:00
|
|
|
def get_repos_json():
    """Fetch the full coop-cloud repository listing from Gitea.

    Pages through the org repos endpoint until an empty page comes back.
    Exits the whole program with status 1 on any request failure.
    """
    url = "https://git.autonomic.zone/api/v1/orgs/coop-cloud/repos"

    log.info(f"Retrieving {url}")

    try:
        collected = []
        page = 1
        while True:
            log.info(f"Trying to fetch page {page}")
            batch = get(url + f"?page={page}", timeout=10).json()
            if not batch:
                break
            collected.extend(batch)
            page += 1
        return collected
    except Exception as exception:
        log.error(f"Failed to retrieve {url}, saw {str(exception)}")
        exit(1)
|
|
|
|
|
|
|
|
|
2021-04-03 18:42:28 +00:00
|
|
|
def get_published_apps_json():
    """Download the currently published apps catalogue.

    Returns the parsed JSON document, or an empty dict when the request
    fails — callers treat a missing cache as "nothing published yet".
    """
    url = "https://apps.coopcloud.tech"

    log.info(f"Retrieving {url}")

    try:
        response = get(url, timeout=5)
        return response.json()
    except Exception as exception:
        log.error(f"Failed to retrieve {url}, saw {str(exception)}")
        return {}
|
|
|
|
|
|
|
|
|
2021-04-18 01:43:21 +00:00
|
|
|
def clone_all_apps(repos_json):
    """Ensure every Co-op Cloud app repository is cloned to ~/.abra/apps.

    Existing clones are updated with a fetch; fresh repositories are
    cloned (falling back to checking out `main` when the clone produced
    no local branch). Entries in REPOS_TO_SKIP are ignored.
    """
    if not exists(CLONES_PATH):
        mkdir(CLONES_PATH)

    for repo in repos_json:
        name, url = repo["name"], repo["ssh_url"]

        if name in REPOS_TO_SKIP:
            continue

        clone_path = f"{CLONES_PATH}/{name}"

        if exists(clone_path):
            log.info(f"Updating {name}")
            chdir(clone_path)
            _run_cmd("git fetch -a")
        else:
            log.info(f"Retrieving {url}")
            _run_cmd(f"git clone {url} {clone_path}")

            chdir(clone_path)
            # A clone with zero local branches means the remote HEAD was
            # not a usable default; fall back to `main` explicitly.
            if not int(_run_cmd("git branch --list | wc -l", shell=True)):
                log.info(f"Guessing main branch is HEAD for {name}")
                _run_cmd("git checkout main")
|
2021-03-26 00:14:14 +00:00
|
|
|
|
|
|
|
|
2021-04-18 01:43:21 +00:00
|
|
|
def generate_apps_json(repos_json):
    """Build the apps.json payload for every cloned application.

    Combines repo details from the Gitea listing, metadata parsed out of
    each README, and per-tag version information (re-using versions that
    are already published where possible).
    """
    published = get_published_apps_json()
    catalogue = {}

    for app in listdir(CLONES_PATH):
        if app in REPOS_TO_SKIP:
            log.info(f"Skipping {app}")
            continue

        # Match this local clone back to its Gitea repo entry; an app
        # with no matching entry gets empty repo-derived fields.
        repo_details = next(
            (repo for repo in repos_json if repo["name"] == app), {}
        )

        app_path = f"{CLONES_PATH}/{app}"
        chdir(app_path)

        metadata = get_app_metadata(app_path)

        # "name" is promoted to a top-level field; everything else parsed
        # from the README stays under "features".
        name = metadata.pop("name", "")

        log.info(f"Processing {app}")
        catalogue[app] = {
            "name": name,
            "category": metadata.get("category", ""),
            "repository": repo_details.get("clone_url", ""),
            "default_branch": repo_details.get("default_branch", ""),
            "description": repo_details.get("description", ""),
            "website": repo_details.get("website", ""),
            "features": metadata,
            "versions": get_app_versions(app_path, published),
            "icon": repo_details.get("avatar_url", ""),
        }

    return catalogue
|
|
|
|
|
|
|
|
|
2021-04-25 10:05:49 +00:00
|
|
|
def get_app_metadata(app_path):
    """Parse app metadata out of a repo's README.md.

    Collects the bold "**Key**: value" lines (with special handling for
    the "image" and "status" entries) plus the top-level "# <name>"
    heading. Returns {} when the README is missing or unparseable.
    """
    metadata = {}

    chdir(app_path)

    try:
        with open(f"{app_path}/README.md", "r") as handle:
            log.info(f"{app_path}/README.md")
            contents = handle.read()
    except Exception:
        log.info(f"No {app_path}/README.md discovered, moving on")
        return {}

    try:
        for match in findall(r"\*\*.*", contents):
            title = search(r"(?<=\*\*).*(?=\*\*)", match).group().lower()

            if title == "image":
                # "**Image**: [`img`](url), rating, source"
                value = {
                    "image": search(r"(?<=`).*(?=`)", match).group(),
                    "url": search(r"(?<=\().*(?=\))", match).group(),
                    "rating": match.split(",")[1].strip(),
                    "source": match.split(",")[-1].replace("*", "").strip(),
                }
            elif title == "status":
                # Map the status emoji onto a 1 (best) .. 5 (unknown) scale.
                value = {"❶💚": 1, "❷💛": 2, "❸🍎": 3, "❹💣": 4, "?": 5, "": 5}[
                    match.split(":")[-1].replace("*", "").strip()
                ]
            else:
                value = match.split(":")[-1].replace("*", "").strip()

            metadata[title] = value

        metadata["name"] = findall(r"^# (.*)", contents)[0]
    # KeyError added: an unrecognised status marker previously escaped
    # this handler and crashed the whole run instead of being treated as
    # an unparseable README like any other malformed entry.
    except (IndexError, AttributeError, KeyError):
        log.info(f"Can't parse {app_path}/README.md")
        return {}
    finally:
        # NOTE(review): "git checkout HEAD" looks like a no-op; presumably
        # meant to restore the working tree — confirm intent.
        _run_cmd("git checkout HEAD")

    log.info(f"Parsed {metadata}")

    return metadata
|
2021-03-28 09:40:49 +00:00
|
|
|
|
|
|
|
|
2021-04-03 18:42:02 +00:00
|
|
|
def get_app_versions(app_path, cached_apps_json):
    """Collect per-git-tag service version info for one app repository.

    For every tag: check the tag out, list the services declared in the
    compose*.yml files with yq, and resolve each service image's registry
    digest with skopeo. Entries already present in the published catalogue
    (cached_apps_json) are re-used rather than re-inspected.

    Returns a dict: {tag: {service: {"image", "tag", "digest"}}}.
    """
    versions = {}

    chdir(app_path)

    tags = _run_cmd("git tag --list").split()

    if not tags:
        log.info("No tags discovered, moving on")
        return {}

    # Remember the branch we started on so it can be restored after the
    # per-tag checkouts below.
    initial_branch = _run_cmd("git rev-parse --abbrev-ref HEAD")

    app_name = basename(app_path)

    try:
        existing_tags = cached_apps_json[app_name]["versions"].keys()
    except KeyError:
        # App (or its versions key) not published yet — nothing cached.
        existing_tags = []

    for tag in tags:
        _run_cmd(f"git checkout {tag}", stderr=DEVNULL)

        # Every service name across all compose files at this tag.
        services_cmd = f"{YQ_PATH} e '.services | keys | .[]' compose*.yml"
        services = _run_cmd(services_cmd, shell=True).split()

        parsed_services = []
        service_versions = {}
        for service in services:
            # yq emits "null" / "---" placeholders for empty documents.
            if service in ("null", "---"):
                continue

            if (
                tag in existing_tags
                and service in cached_apps_json[app_name]["versions"][tag]
            ):
                # Already published: copy the cached version info instead
                # of hitting the registry again.
                log.info(f"Skipping {tag} because we've already processed it")
                existing_versions = cached_apps_json[app_name]["versions"][tag][service]
                service_versions[service] = existing_versions
                _run_cmd(f"git checkout {initial_branch}")
                continue

            if service in parsed_services:
                log.info(f"Skipped {service}, we've already parsed it locally")
                continue

            services_cmd = f"{YQ_PATH} e '.services.{service}.image' compose*.yml"
            images = _run_cmd(services_cmd, shell=True).split()

            for image in images:
                if image in ("null", "---"):
                    continue

                # Resolve the image's content digest from the registry.
                images_cmd = f"skopeo inspect docker://{image} | jq '.Digest'"
                output = _run_cmd(images_cmd, shell=True)

                service_version_info = {
                    "image": image.split(":")[0],
                    "tag": image.split(":")[-1],
                    # Short digest: first 8 hex chars after "sha256:".
                    "digest": output.split(":")[-1][:8],
                }

            log.info(f"Parsed {service_version_info}")
            service_versions[service] = service_version_info

            parsed_services.append(service)

        versions[tag] = service_versions

    # Restore the branch we started from.
    _run_cmd(f"git checkout {initial_branch}")

    return versions
|
2021-03-26 00:14:14 +00:00
|
|
|
|
|
|
|
|
2021-04-02 18:40:31 +00:00
|
|
|
def main():
    """Run the script: refresh clones, then regenerate apps.json."""
    repos_json = get_repos_json()
    clone_all_apps(repos_json)

    target = f"{SCRIPT_PATH}/../deploy/apps.coopcloud.tech/apps.json"
    with open(target, "w", encoding="utf-8") as handle:
        dump(
            generate_apps_json(repos_json),
            handle,
            ensure_ascii=False,
            indent=4,
            sort_keys=True,
        )

    log.info(f"Successfully generated {target}")


# Guard added: the module previously ran main() unconditionally on
# import, which made it impossible to import for testing or reuse.
if __name__ == "__main__":
    main()
|