starting work on hub mode and spoke mode -- implemented hub model
This commit is contained in:
parent d8d6124005
commit c59dc21ba6
@@ -12,7 +12,7 @@ from flask import render_template
 from flask import url_for
 from flask import current_app
 
-from capsulflask import operation_model, cli
+from capsulflask import hub_model, spoke_model, cli
 from capsulflask.btcpay import client as btcpay
 
 load_dotenv(find_dotenv())
@@ -20,11 +20,18 @@ load_dotenv(find_dotenv())
 app = Flask(__name__)
 
 app.config.from_mapping(
 
     BASE_URL=os.environ.get("BASE_URL", default="http://localhost:5000"),
     SECRET_KEY=os.environ.get("SECRET_KEY", default="dev"),
-    OPERATION_MODEL=os.environ.get("OPERATION_MODEL", default="mock"),
+    HUB_MODE_ENABLED=os.environ.get("HUB_MODE_ENABLED", default="False").lower() in ['true', '1', 't', 'y', 'yes'],
+    SPOKE_MODE_ENABLED=os.environ.get("SPOKE_MODE_ENABLED", default="True").lower() in ['true', '1', 't', 'y', 'yes'],
+    HUB_MODEL=os.environ.get("HUB_MODEL", default="mock"),
+    SPOKE_MODEL=os.environ.get("SPOKE_MODEL", default="mock"),
     LOG_LEVEL=os.environ.get("LOG_LEVEL", default="INFO"),
-    ADMIN_EMAIL_ADDRESSES=os.environ.get("ADMIN_EMAIL_ADDRESSES", default="ops@cyberia.club"),
+    SPOKE_HOST_ID=os.environ.get("SPOKE_HOST_ID", default="default"),
+    SPOKE_HOST_TOKEN=os.environ.get("SPOKE_HOST_TOKEN", default="default"),
+    HUB_TOKEN=os.environ.get("HUB_TOKEN", default="default"),
+    HUB_URL=os.environ.get("HUB_URL", default="https://capsul.org"),
 
     DATABASE_URL=os.environ.get("DATABASE_URL", default="sql://postgres:dev@localhost:5432/postgres"),
     DATABASE_SCHEMA=os.environ.get("DATABASE_SCHEMA", default="public"),
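The HUB_MODE_ENABLED and SPOKE_MODE_ENABLED values are parsed from environment strings, so any of 'true', '1', 't', 'y', 'yes' (case-insensitive) turns a mode on. A minimal sketch of that parsing, pulled out into a hypothetical helper (the helper name is illustrative, not part of the commit):

import os

def env_flag(name: str, default: str) -> bool:
    # same truthy-string check as the HUB_MODE_ENABLED / SPOKE_MODE_ENABLED entries above
    return os.environ.get(name, default=default).lower() in ['true', '1', 't', 'y', 'yes']

hub_mode_enabled = env_flag("HUB_MODE_ENABLED", "False")     # off by default
spoke_mode_enabled = env_flag("SPOKE_MODE_ENABLED", "True")  # on by default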
@@ -35,6 +42,7 @@ app.config.from_mapping(
     MAIL_USERNAME=os.environ.get("MAIL_USERNAME", default="forest@nullhex.com"),
     MAIL_PASSWORD=os.environ.get("MAIL_PASSWORD", default=""),
     MAIL_DEFAULT_SENDER=os.environ.get("MAIL_DEFAULT_SENDER", default="forest@nullhex.com"),
+    ADMIN_EMAIL_ADDRESSES=os.environ.get("ADMIN_EMAIL_ADDRESSES", default="ops@cyberia.club"),
 
     PROMETHEUS_URL=os.environ.get("PROMETHEUS_URL", default="https://prometheus.cyberia.club"),
 
@@ -74,27 +82,40 @@ stripe.api_version = app.config['STRIPE_API_VERSION']
 
 app.config['FLASK_MAIL_INSTANCE'] = Mail(app)
 
-if app.config['OPERATION_MODEL'] == "shell_scripts":
-    app.config['OPERATION_MODEL'] = operation_model.GoshtOperation()
-else:
-    app.config['OPERATION_MODEL'] = operation_model.MockOperation()
 
 app.config['BTCPAY_CLIENT'] = btcpay.Client(api_uri=app.config['BTCPAY_URL'], pem=app.config['BTCPAY_PRIVATE_KEY'])
 
-from capsulflask import db
+if app.config['HUB_MODE_ENABLED']:
 
-db.init_app(app)
+    if app.config['HUB_MODEL'] == "capsul-flask":
+        app.config['HUB_MODEL'] = hub_model.CapsulFlaskHub()
+    else:
+        app.config['HUB_MODEL'] = hub_model.MockHub()
+
+    from capsulflask import db
+    db.init_app(app)
 
-from capsulflask import auth, landing, console, payment, metrics, cli
+from capsulflask import auth, landing, console, payment, metrics, cli, hub_api
 
 app.register_blueprint(landing.bp)
 app.register_blueprint(auth.bp)
 app.register_blueprint(console.bp)
 app.register_blueprint(payment.bp)
 app.register_blueprint(metrics.bp)
 app.register_blueprint(cli.bp)
+app.register_blueprint(hub_api.bp)
 
 app.add_url_rule("/", endpoint="index")
 
+if app.config['SPOKE_MODE_ENABLED']:
+
+    if app.config['SPOKE_MODEL'] == "shell-scripts":
+        app.config['SPOKE_MODEL'] = spoke_model.ShellScriptSpoke()
+    else:
+        app.config['SPOKE_MODEL'] = spoke_model.MockSpoke()
+
+    from capsulflask import spoke_api
+
+    app.register_blueprint(spoke_api.bp)
 
 @app.after_request
 def security_headers(response):
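Taken together, the two flags let the same codebase run as a hub, as a spoke, or as both in one process. The values below are illustrative examples of how each shape would be selected, not configuration taken from the commit:

# hub-only deployment: fans VM operations out to real spokes over HTTP
hub_env = {"HUB_MODE_ENABLED": "yes", "SPOKE_MODE_ENABLED": "no", "HUB_MODEL": "capsul-flask"}

# spoke-only deployment: runs the shell-script backend on a VM host
spoke_env = {"HUB_MODE_ENABLED": "no", "SPOKE_MODE_ENABLED": "yes", "SPOKE_MODEL": "shell-scripts"}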
@@ -249,13 +249,13 @@ def notify_users_about_account_balance():
         if index_to_send == len(warnings)-1:
             for vm in vms:
                 current_app.logger.warning(f"cron_task: deleting {vm['id']} ( {account['email']} ) due to negative account balance.")
-                current_app.config["OPERATION_MODEL"].destroy(email=account["email"], id=vm['id'])
+                current_app.config["HUB_MODEL"].destroy(email=account["email"], id=vm['id'])
                 get_model().delete_vm(email=account["email"], id=vm['id'])
 
 
 def ensure_vms_and_db_are_synced():
     db_ids = get_model().all_non_deleted_vm_ids()
-    virt_ids = current_app.config["OPERATION_MODEL"].list_ids()
+    virt_ids = current_app.config["HUB_MODEL"].list_ids()
 
     db_ids_dict = dict()
     virt_ids_dict = dict()
@@ -27,7 +27,7 @@ def makeCapsulId():
 
 def double_check_capsul_address(id, ipv4):
     try:
-        result = current_app.config["OPERATION_MODEL"].get(id)
+        result = current_app.config["HUB_MODEL"].get(id)
         if result.ipv4 != ipv4:
             ipv4 = result.ipv4
             get_model().update_vm_ip(email=session["account"], id=id, ipv4=result.ipv4)
@ -98,7 +98,7 @@ def detail(id):
|
|||||||
)
|
)
|
||||||
else:
|
else:
|
||||||
current_app.logger.info(f"deleting {vm['id']} per user request ({session['account']})")
|
current_app.logger.info(f"deleting {vm['id']} per user request ({session['account']})")
|
||||||
current_app.config["OPERATION_MODEL"].destroy(email=session['account'], id=id)
|
current_app.config["HUB_MODEL"].destroy(email=session['account'], id=id)
|
||||||
get_model().delete_vm(email=session['account'], id=id)
|
get_model().delete_vm(email=session['account'], id=id)
|
||||||
|
|
||||||
return render_template("capsul-detail.html", vm=vm, delete=True, deleted=True)
|
return render_template("capsul-detail.html", vm=vm, delete=True, deleted=True)
|
||||||
@ -125,7 +125,7 @@ def create():
|
|||||||
operating_systems = get_model().operating_systems_dict()
|
operating_systems = get_model().operating_systems_dict()
|
||||||
ssh_public_keys = get_model().list_ssh_public_keys_for_account(session["account"])
|
ssh_public_keys = get_model().list_ssh_public_keys_for_account(session["account"])
|
||||||
account_balance = get_account_balance(get_vms(), get_payments(), datetime.utcnow())
|
account_balance = get_account_balance(get_vms(), get_payments(), datetime.utcnow())
|
||||||
capacity_avaliable = current_app.config["OPERATION_MODEL"].capacity_avaliable(512*1024*1024)
|
capacity_avaliable = current_app.config["HUB_MODEL"].capacity_avaliable(512*1024*1024)
|
||||||
errors = list()
|
errors = list()
|
||||||
|
|
||||||
if request.method == "POST":
|
if request.method == "POST":
|
||||||
@ -165,7 +165,7 @@ def create():
|
|||||||
if len(posted_keys) == 0:
|
if len(posted_keys) == 0:
|
||||||
errors.append("At least one SSH Public Key is required")
|
errors.append("At least one SSH Public Key is required")
|
||||||
|
|
||||||
capacity_avaliable = current_app.config["OPERATION_MODEL"].capacity_avaliable(vm_sizes[size]['memory_mb']*1024*1024)
|
capacity_avaliable = current_app.config["HUB_MODEL"].capacity_avaliable(vm_sizes[size]['memory_mb']*1024*1024)
|
||||||
|
|
||||||
if not capacity_avaliable:
|
if not capacity_avaliable:
|
||||||
errors.append("""
|
errors.append("""
|
||||||
@ -181,7 +181,7 @@ def create():
|
|||||||
os=os,
|
os=os,
|
||||||
ssh_public_keys=list(map(lambda x: x["name"], posted_keys))
|
ssh_public_keys=list(map(lambda x: x["name"], posted_keys))
|
||||||
)
|
)
|
||||||
current_app.config["OPERATION_MODEL"].create(
|
current_app.config["HUB_MODEL"].create(
|
||||||
email = session["account"],
|
email = session["account"],
|
||||||
id=id,
|
id=id,
|
||||||
template_image_file_name=operating_systems[os]['template_image_file_name'],
|
template_image_file_name=operating_systems[os]['template_image_file_name'],
|
||||||
@@ -2,6 +2,7 @@
 from nanoid import generate
 from flask import current_app
 from typing import List
+from capsulflask.hub_model import HTTPResult
 
 class OnlineHost:
     def __init__(self, id: str, url: str):
@ -284,7 +285,7 @@ class DBModel:
|
|||||||
self.cursor.execute("SELECT id, https_url FROM hosts WHERE last_health_check > NOW() - INTERVAL '10 seconds'")
|
self.cursor.execute("SELECT id, https_url FROM hosts WHERE last_health_check > NOW() - INTERVAL '10 seconds'")
|
||||||
return list(map(lambda x: OnlineHost(id=x[0], url=x[1]), self.cursor.fetchall()))
|
return list(map(lambda x: OnlineHost(id=x[0], url=x[1]), self.cursor.fetchall()))
|
||||||
|
|
||||||
def create_operation(self, online_hosts: List[OnlineHost], email: str, payload: str) -> None:
|
def create_operation(self, online_hosts: List[OnlineHost], email: str, payload: str) -> int:
|
||||||
|
|
||||||
self.cursor.execute( "INSERT INTO operations (email, payload) VALUES (%s, %s) RETURNING id", (email, payload) )
|
self.cursor.execute( "INSERT INTO operations (email, payload) VALUES (%s, %s) RETURNING id", (email, payload) )
|
||||||
operation_id = self.cursor.fetchone()[0]
|
operation_id = self.cursor.fetchone()[0]
|
||||||
@ -293,6 +294,22 @@ class DBModel:
|
|||||||
self.cursor.execute( "INSERT INTO host_operation (host, operation) VALUES (%s, %s)", (host.id, operation_id) )
|
self.cursor.execute( "INSERT INTO host_operation (host, operation) VALUES (%s, %s)", (host.id, operation_id) )
|
||||||
|
|
||||||
self.connection.commit()
|
self.connection.commit()
|
||||||
|
return operation_id
|
||||||
|
|
||||||
|
def update_host_operation(self, host_id: str, operation_id: int, assignment_status: str):
|
||||||
|
self.cursor.execute(
|
||||||
|
"UPDATE host_operation SET assignment_status = %s WHERE host = %s AND operation = %s",
|
||||||
|
(assignment_status, host_id, operation_id)
|
||||||
|
)
|
||||||
|
self.connection.commit()
|
||||||
|
|
||||||
|
def host_of_capsul(self, capsul_id: str):
|
||||||
|
self.cursor.execute("SELECT host from vms where id = %s", (capsul_id,))
|
||||||
|
row = self.cursor.fetchone()
|
||||||
|
if row:
|
||||||
|
return row[0]
|
||||||
|
else:
|
||||||
|
return None
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
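For reference, the assignment_status values written through update_host_operation come from a small fixed vocabulary used by the hub model added later in this commit; a summary sketch, with the names taken from hub_model.py below:

# statuses a spoke can legitimately report back to the hub
SPOKE_REPORTED = {"assigned", "not_applicable", "assigned_to_other_host"}

# statuses the hub records on its own when a request fails or the response cannot be parsed
HUB_RECORDED = {"pending", "no_response_from_host", "error_response_from_host", "invalid_response_from_host"}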
capsulflask/hub_api.py (new file, 22 lines)

from flask import Blueprint
from flask import current_app
from flask import request
from werkzeug.exceptions import abort

from capsulflask.db import get_model, my_exec_info_message

bp = Blueprint("hosts", __name__, url_prefix="/hosts")

def authorized_for_host(id):
    auth_header_value = request.headers.get('Authorization').replace("Bearer ", "")
    return get_model().authorized_for_host(id, auth_header_value)

@bp.route("/heartbeat/<string:id>", methods=("POST",))
def heartbeat(id):
    if authorized_for_host(id):
        get_model().host_heartbeat(id)
    else:
        current_app.logger.info(f"/hosts/heartbeat/{id} returned 401: invalid token")
        return abort(401, "invalid host id or token")
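As a usage illustration, a spoke would POST to this endpoint periodically to mark itself online; the host id, token, and hub URL below are placeholders, and authorized_for_host decides whether the call is accepted:

import requests

resp = requests.post(
    "https://capsul.org/hosts/heartbeat/example-host-1",      # hub URL + the blueprint's /hosts prefix (example values)
    headers={"Authorization": "Bearer example-host-token"},    # stripped of "Bearer " and passed to get_model().authorized_for_host
)
print(resp.status_code)   # an unrecognized id/token pair is rejected with 401 "invalid host id or token"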
capsulflask/hub_model.py (new file, 282 lines)

import subprocess
import re
import sys
import requests
import json
import asyncio
from typing import List, Tuple

import aiohttp
from flask import current_app
from time import sleep
from os.path import join
from subprocess import run

from capsulflask.db_model import OnlineHost
from capsulflask.spoke_model import VirtualMachine
from capsulflask.spoke_model import validate_capsul_id
from capsulflask.db import get_model, my_exec_info_message

class HTTPResult:
    def __init__(self, status_code, body=None):
        self.status_code = status_code
        self.body = body

class HubInterface:
    def capacity_avaliable(self, additional_ram_bytes: int) -> bool:
        pass

    def get(self, id: str) -> VirtualMachine:
        pass

    def list_ids(self) -> list:
        pass

    def create(self, email: str, id: str, template_image_file_name: str, vcpus: int, memory: int, ssh_public_keys: list):
        pass

    def destroy(self, email: str, id: str):
        pass

class MockHub(HubInterface):
    def capacity_avaliable(self, additional_ram_bytes):
        return True

    def get(self, id):
        validate_capsul_id(id)
        return VirtualMachine(id, current_app.config["SPOKE_HOST_ID"], ipv4="1.1.1.1")

    def list_ids(self) -> list:
        return get_model().all_non_deleted_vm_ids()

    def create(self, email: str, id: str, template_image_file_name: str, vcpus: int, memory_mb: int, ssh_public_keys: list):
        validate_capsul_id(id)
        current_app.logger.info(f"mock create: {id} for {email}")
        sleep(1)

    def destroy(self, email: str, id: str):
        current_app.logger.info(f"mock destroy: {id} for {email}")


class CapsulFlaskHub(HubInterface):

    async def post_json(self, method: str, url: str, body: str, session: aiohttp.ClientSession) -> HTTPResult:
        response = None
        try:
            response = await session.request(
                method=method,
                url=url,
                json=body,
                auth=aiohttp.BasicAuth("hub", current_app.config['HUB_TOKEN']),
                verify_ssl=True,
            )
        except:
            error_message = my_exec_info_message(sys.exc_info())
            response_body = json.dumps({"error_message": f"error contacting spoke: {error_message}"})
            current_app.logger.error(f"""
                error contacting spoke: post_json (HTTP {method} {url}) failed with: {error_message}"""
            )
            return HTTPResult(-1, response_body)

        response_body = None
        try:
            response_body = await response.text()
        except:
            error_message = my_exec_info_message(sys.exc_info())
            response_body = json.dumps({"error_message": f"error reading response from spoke: {error_message}"})
            current_app.logger.error(f"""
                error reading response from spoke: HTTP {method} {url} (status {response.status}) failed with: {error_message}"""
            )

        return HTTPResult(response.status, response_body)

    async def make_requests(self, online_hosts: List[OnlineHost], body: str) -> List[HTTPResult]:
        timeout_seconds = 5
        async with aiohttp.ClientSession(timeout=aiohttp.ClientTimeout(total=timeout_seconds)) as session:
            tasks = []
            # append to tasks in the same order as online_hosts
            for host in online_hosts:
                tasks.append(
                    self.post_json(method="POST", url=host.url, body=body, session=session)
                )
            # gather is like Promise.all from javascript, it returns a future which resolves to an array of results
            # in the same order as the tasks that we passed in -- which were in the same order as online_hosts
            results = await asyncio.gather(*tasks)

            return results

    async def generic_operation(self, hosts: List[OnlineHost], payload: str, immediate_mode: bool) -> Tuple[int, List[HTTPResult]]:
        operation_id = get_model().create_operation(hosts, payload)
        results = await self.make_requests(hosts, payload)
        for i in range(len(hosts)):
            host = hosts[i]
            result = results[i]
            task_result = None
            assignment_status = "pending"
            if result.status_code == -1:
                assignment_status = "no_response_from_host"
            if result.status_code != 200:
                assignment_status = "error_response_from_host"
            else:
                valid_statuses = {
                    "assigned": True,
                    "not_applicable": True,
                    "assigned_to_other_host": True,
                }
                result_is_json = False
                result_is_dict = False
                result_has_status = False
                result_has_valid_status = False
                assignment_status = "invalid_response_from_host"
                try:
                    if immediate_mode:
                        task_result = result.body
                    result_body = json.loads(result.body)
                    result_is_json = True
                    result_is_dict = isinstance(result_body, dict)
                    result_has_status = result_is_dict and 'assignment_status' in result_body
                    result_has_valid_status = result_has_status and result_body['assignment_status'] in valid_statuses
                    if result_has_valid_status:
                        assignment_status = result_body['assignment_status']
                except:
                    pass

                if not result_has_valid_status:
                    current_app.logger.error(f"""error reading assignment_status for operation {operation_id} from host {host.id}:
                        result_is_json: {result_is_json}
                        result_is_dict: {result_is_dict}
                        result_has_status: {result_has_status}
                        result_has_valid_status: {result_has_valid_status}
                    """
                    )

            get_model().update_host_operation(host.id, operation_id, assignment_status, task_result)

        return (operation_id, results)

    async def capacity_avaliable(self, additional_ram_bytes):
        online_hosts = get_model().get_online_hosts()
        payload = json.dumps(dict(type="capacity_avaliable", additional_ram_bytes=additional_ram_bytes))
        op = await self.generic_operation(online_hosts, payload, True)
        results = op[1]
        for result in results:
            try:
                result_body = json.loads(result.body)
                if isinstance(result_body, dict) and 'capacity_avaliable' in result_body and result_body['capacity_avaliable'] == True:
                    return True
            except:
                pass

        return False

    async def get(self, id) -> VirtualMachine:
        validate_capsul_id(id)
        host = get_model().host_of_capsul(id)
        if host is not None:
            payload = json.dumps(dict(type="get", id=id))
            op = await self.generic_operation([host], payload, True)
            results = op[1]
            for result in results:
                try:
                    result_body = json.loads(result.body)
                    if isinstance(result_body, dict) and ('ipv4' in result_body or 'ipv6' in result_body):
                        return VirtualMachine(id, host=host, ipv4=result_body['ipv4'], ipv6=result_body['ipv6'])
                except:
                    pass

        return None

    async def list_ids(self) -> list:
        online_hosts = get_model().get_online_hosts()
        payload = json.dumps(dict(type="list_ids"))
        op = await self.generic_operation(online_hosts, payload, False)
        operation_id = op[0]
        results = op[1]
        to_return = []
        for i in range(len(results)):
            host = online_hosts[i]
            result = results[i]
            try:
                result_body = json.loads(result.body)
                if isinstance(result_body, dict) and 'ids' in result_body and isinstance(result_body['ids'], list):
                    all_valid = True
                    for id in result_body['ids']:
                        try:
                            validate_capsul_id(id)
                            to_return.append(id)
                        except:
                            all_valid = False
                    if all_valid:
                        get_model().update_host_operation(host.id, operation_id, None, result.body)
                    else:
                        result_json_string = json.dumps({"error_message": "invalid capsul id returned"})
                        get_model().update_host_operation(host.id, operation_id, None, result_json_string)
                        current_app.logger.error(f"""error reading ids for list_ids operation {operation_id}, host {host.id}""")
                else:
                    result_json_string = json.dumps({"error_message": "invalid response, missing 'ids' list"})
                    get_model().update_host_operation(host.id, operation_id, "invalid_response_from_host", result_json_string)
                    current_app.logger.error(f"""missing 'ids' list for list_ids operation {operation_id}, host {host.id}""")
            except:
                # no need to do anything here since if it cant be parsed then generic_operation will handle it.
                pass

        return to_return

    async def create(self, email: str, id: str, template_image_file_name: str, vcpus: int, memory_mb: int, ssh_public_keys: list):
        validate_capsul_id(id)
        online_hosts = get_model().get_online_hosts()
        payload = json.dumps(dict(
            type="create",
            email=email,
            id=id,
            template_image_file_name=template_image_file_name,
            vcpus=vcpus,
            memory_mb=memory_mb,
            ssh_public_keys=ssh_public_keys,
        ))
        op = await self.generic_operation(online_hosts, payload, False)
        operation_id = op[0]
        results = op[1]
        number_of_assigned = 0
        assigned_hosts = []
        for i in range(len(results)):
            host = online_hosts[i]
            result = results[i]
            try:
                result_body = json.loads(result.body)
                if isinstance(result_body, dict) and 'assignment_status' in result_body and result_body['assignment_status'] == "assigned":
                    number_of_assigned += 1
                    assigned_hosts.append(host.id)
            except:
                # no need to do anything here since if it cant be parsed then generic_operation will handle it.
                pass

        if number_of_assigned != 1:
            assigned_hosts_string = ", ".join(assigned_hosts)
            raise ValueError(f"expected create capsul operation {operation_id} to be assigned to one host, it was assigned to {number_of_assigned} ({assigned_hosts_string})")


    async def destroy(self, email: str, id: str):
        validate_capsul_id(id)
        result_status = None
        host = get_model().host_of_capsul(id)
        if host is not None:
            payload = json.dumps(dict(type="destroy", id=id))
            op = await self.generic_operation([host], payload, True)
            results = op[1]
            result_json_string = "<no response from host>"
            for result in results:
                try:
                    result_json_string = result.body
                    result_body = json.loads(result_json_string)
                    if isinstance(result_body, dict) and 'status' in result_body:
                        result_status = result_body['status']
                except:
                    pass

        if not result_status == "success":
            raise ValueError(f"""failed to destroy vm "{id}" on host "{host}" for {email}: {result_json_string}""")
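For reference, these are the JSON payload shapes the hub posts to spokes, as constructed by the methods above; the capsul id, email, image name, and key are made-up example values:

import json

example_payloads = [
    dict(type="capacity_avaliable", additional_ram_bytes=512 * 1024 * 1024),
    dict(type="get", id="capsul-abc123def4"),
    dict(type="list_ids"),
    dict(type="create", email="user@example.com", id="capsul-abc123def4",
         template_image_file_name="example.qcow2", vcpus=1, memory_mb=512,
         ssh_public_keys=["ssh-ed25519 AAAA... user@example.com"]),
    dict(type="destroy", id="capsul-abc123def4"),
]

for payload in example_payloads:
    print(json.dumps(payload))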
capsulflask/spoke_api.py (new file, 22 lines)

from flask import Blueprint
from flask import current_app
from flask import request
from werkzeug.exceptions import abort

from capsulflask.db import get_model, my_exec_info_message

bp = Blueprint("hosts", __name__, url_prefix="/hosts")

def authorized_for_host(id):
    auth_header_value = request.headers.get('Authorization').replace("Bearer ", "")
    return get_model().authorized_for_host(id, auth_header_value)

@bp.route("/heartbeat/<string:id>", methods=("POST",))
def heartbeat(id):
    if authorized_for_host(id):
        get_model().host_heartbeat(id)
    else:
        current_app.logger.info(f"/hosts/heartbeat/{id} returned 401: invalid token")
        return abort(401, "invalid host id or token")
capsulflask/spoke_model.py (new file, 173 lines)

import subprocess
import re

from flask import current_app
from time import sleep
from os.path import join
from subprocess import run

from capsulflask.db import get_model

def validate_capsul_id(id):
    if not re.match(r"^(cvm|capsul)-[a-z0-9]{10}$", id):
        raise ValueError(f"vm id \"{id}\" must match \"^capsul-[a-z0-9]{{10}}$\"")

class VirtualMachine:
    def __init__(self, id, host, ipv4=None, ipv6=None):
        self.id = id
        self.host = host
        self.ipv4 = ipv4
        self.ipv6 = ipv6

class SpokeInterface:
    def capacity_avaliable(self, additional_ram_bytes: int) -> bool:
        pass

    def get(self, id: str) -> VirtualMachine:
        pass

    def list_ids(self) -> list:
        pass

    def create(self, email: str, id: str, template_image_file_name: str, vcpus: int, memory: int, ssh_public_keys: list):
        pass

    def destroy(self, email: str, id: str):
        pass

class MockSpoke(SpokeInterface):
    def capacity_avaliable(self, additional_ram_bytes):
        return True

    def get(self, id):
        validate_capsul_id(id)
        return VirtualMachine(id, current_app.config["SPOKE_HOST_ID"], ipv4="1.1.1.1")

    def list_ids(self) -> list:
        return get_model().all_non_deleted_vm_ids()

    def create(self, email: str, id: str, template_image_file_name: str, vcpus: int, memory_mb: int, ssh_public_keys: list):
        validate_capsul_id(id)
        current_app.logger.info(f"mock create: {id} for {email}")
        sleep(1)

    def destroy(self, email: str, id: str):
        current_app.logger.info(f"mock destroy: {id} for {email}")


class ShellScriptSpoke(SpokeInterface):

    def validate_completed_process(self, completedProcess, email=None):
        emailPart = ""
        if email != None:
            emailPart = f"for {email}"

        if completedProcess.returncode != 0:
            raise RuntimeError(f"""{" ".join(completedProcess.args)} failed {emailPart} with exit code {completedProcess.returncode}
                stdout:
                {completedProcess.stdout}
                stderr:
                {completedProcess.stderr}
            """)

    def capacity_avaliable(self, additional_ram_bytes):
        my_args=[join(current_app.root_path, 'shell_scripts/capacity-avaliable.sh'), str(additional_ram_bytes)]
        completedProcess = run(my_args, capture_output=True)

        if completedProcess.returncode != 0:
            current_app.logger.error(f"""
                capacity-avaliable.sh exited {completedProcess.returncode} with
                stdout:
                {completedProcess.stdout}
                stderr:
                {completedProcess.stderr}
            """)
            return False

        lines = completedProcess.stdout.splitlines()
        output = lines[len(lines)-1]
        if not output == b"yes":
            current_app.logger.error(f"capacity-avaliable.sh exited 0 and returned {output} but did not return \"yes\" ")
            return False

        return True

    def get(self, id):
        validate_capsul_id(id)
        completedProcess = run([join(current_app.root_path, 'shell_scripts/get.sh'), id], capture_output=True)
        self.validate_completed_process(completedProcess)
        lines = completedProcess.stdout.splitlines()
        ipaddr = lines[0].decode("utf-8")

        if not re.match(r"^([0-9]{1,3}\.){3}[0-9]{1,3}$", ipaddr):
            return None

        return VirtualMachine(id, current_app.config["SPOKE_HOST_ID"], ipv4=ipaddr)

    def list_ids(self) -> list:
        completedProcess = run([join(current_app.root_path, 'shell_scripts/list-ids.sh')], capture_output=True)
        self.validate_completed_process(completedProcess)
        return list(map(lambda x: x.decode("utf-8"), completedProcess.stdout.splitlines() ))

    def create(self, email: str, id: str, template_image_file_name: str, vcpus: int, memory_mb: int, ssh_public_keys: list):
        validate_capsul_id(id)

        if not re.match(r"^[a-zA-Z0-9/_.-]+$", template_image_file_name):
            raise ValueError(f"template_image_file_name \"{template_image_file_name}\" must match \"^[a-zA-Z0-9/_.-]+$\"")

        for ssh_public_key in ssh_public_keys:
            if not re.match(r"^(ssh|ecdsa)-[0-9A-Za-z+/_=@. -]+$", ssh_public_key):
                raise ValueError(f"ssh_public_key \"{ssh_public_key}\" must match \"^(ssh|ecdsa)-[0-9A-Za-z+/_=@. -]+$\"")

        if vcpus < 1 or vcpus > 8:
            raise ValueError(f"vcpus \"{vcpus}\" must match 1 <= vcpus <= 8")

        if memory_mb < 512 or memory_mb > 16384:
            raise ValueError(f"memory_mb \"{memory_mb}\" must match 512 <= memory_mb <= 16384")

        ssh_keys_string = "\n".join(ssh_public_keys)

        completedProcess = run([
            join(current_app.root_path, 'shell_scripts/create.sh'),
            id,
            template_image_file_name,
            str(vcpus),
            str(memory_mb),
            ssh_keys_string
        ], capture_output=True)

        self.validate_completed_process(completedProcess, email)
        lines = completedProcess.stdout.splitlines()
        status = lines[len(lines)-1].decode("utf-8")

        vmSettings = f"""
            id={id}
            template_image_file_name={template_image_file_name}
            vcpus={str(vcpus)}
            memory={str(memory_mb)}
            ssh_public_keys={ssh_keys_string}
        """

        if not status == "success":
            raise ValueError(f"""failed to create vm for {email} with:
                {vmSettings}
                stdout:
                {completedProcess.stdout}
                stderr:
                {completedProcess.stderr}
            """)

    def destroy(self, email: str, id: str):
        validate_capsul_id(id)
        completedProcess = run([join(current_app.root_path, 'shell_scripts/destroy.sh'), id], capture_output=True)
        self.validate_completed_process(completedProcess, email)
        lines = completedProcess.stdout.splitlines()
        status = lines[len(lines)-1].decode("utf-8")

        if not status == "success":
            raise ValueError(f"""failed to destroy vm "{id}" for {email}:
                stdout:
                {completedProcess.stdout}
                stderr:
                {completedProcess.stderr}
            """)
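As a quick illustration of the id format every spoke operation enforces (both ids below are made up):

from capsulflask.spoke_model import validate_capsul_id

validate_capsul_id("capsul-abc123def4")   # ok: matches ^(cvm|capsul)-[a-z0-9]{10}$
validate_capsul_id("capsul-ABC")          # raises ValueError: too short and uppercase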