Compare commits

..

14 Commits

Author SHA1 Message Date
forest 6d52f56d27 documenting the janky tests logs situation
continuous-integration/drone/push Build is passing
continuous-integration/drone/pr Build is failing
2021-07-27 14:49:10 -05:00
forest f5640a1d01 getting unit tests to log properly
continuous-integration/drone/push Build is passing
2021-07-27 14:44:10 -05:00
forest 45d7e2c62e breaking up after abusive relationship with logger 2021-07-27 14:28:42 -05:00
forest 56b00934be trying to do CaptureLogOutputDuringTestsFilter (but no worky yet)
continuous-integration/drone/pr Build is failing
continuous-integration/drone/push Build is passing
2021-07-27 13:56:58 -05:00
forest aa67a1e1b2 blah failed attempts at getting tests to log 2021-07-27 13:28:49 -05:00
forest 3fb8254c15 add debug test log
continuous-integration/drone/push Build is passing
2021-07-27 12:50:50 -05:00
forest 8a4794a344 trying to get tests to pass with hub_model=capsulflask
continuous-integration/drone/push Build is passing
2021-07-27 12:48:48 -05:00
forest 4cf11798aa remove redundant get_vms() and add testing documentation from pull request
continuous-integration/drone/pr Build is failing
continuous-integration/drone/push Build is passing
2021-07-27 12:02:48 -05:00
3wc e1867eb430 Fix capsul create tests, post-test cleanup, tidy merge
continuous-integration/drone/pr Build is failing
continuous-integration/drone/push Build is passing
2021-07-23 13:40:00 +02:00
forest 62c7355b4c ensure that app is defined in app.py to fix login link logging issues.
also shuffled some things around for cleanliness
continuous-integration/drone/pr Build is failing
continuous-integration/drone/push Build is passing
2021-07-23 03:02:17 +02:00
3wc 202d0aefff Disable VM creation check for the moment 2021-07-23 03:02:17 +02:00
3wc 01478dfd87 Add SSH key tests 2021-07-23 03:02:17 +02:00
3wc a4837aff87 Initial console tests
NB capsul create isn't working properly, see #83
2021-07-23 03:02:17 +02:00
3wc ecc8f885fa Basic testing using flask-testing
This commit makes it possible to override settings during tests, by
switching capsulflask/__init__.py to a "create_app" pattern, and using
`dotenv_values` instead of `load_dotenv`.

The create_app() method returns a Flask app instance, to give
more control over when to initialise the app. This allows setting
environment variables in test files.

Then, use dotenv_values to override loaded .env variables with ones from
the environment, so that tests can set `POSTGRES_CONNECTION_PARAMETERS`
and `SPOKE_MODEL` (possibly others in future..).

Initial tests for the "landing" pages, and login / activation, are
included.
2021-07-23 03:02:15 +02:00
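As context for the `create_app()` / `dotenv_values` change described in the last commit message above, here is a minimal sketch of that pattern, where values already present in the process environment (for example, set by a test) win over the `.env` file. Only the pattern and the two variable names mentioned in the commit message come from the source; the config keys and defaults below are illustrative, not capsulflask's actual code.

```python
# Minimal sketch of the create_app() + dotenv_values pattern described above.
import os
from flask import Flask
from dotenv import dotenv_values


def create_app():
    # dotenv_values() returns a plain dict instead of mutating os.environ,
    # so anything already set in the environment (e.g. by a test before it
    # calls create_app()) takes precedence over the .env file.
    config = {**dotenv_values(".env"), **os.environ}

    app = Flask(__name__)
    app.config['POSTGRES_CONNECTION_PARAMETERS'] = config.get(
        "POSTGRES_CONNECTION_PARAMETERS",
        "host=localhost port=5432 user=capsul password=capsul dbname=capsul",
    )
    app.config['SPOKE_MODEL'] = config.get("SPOKE_MODEL", "mock")
    return app
```

The `tests_base.py` hunk further down does exactly this: it sets `LOG_LEVEL`, `SPOKE_MODEL`, and `HUB_MODEL` in `os.environ` before calling `create_app()`.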
23 changed files with 62 additions and 185 deletions

View File

@@ -1,11 +0,0 @@
# Optional, default `mock`
#SPOKE_MODEL=shell-scripts
# Optional, default `0`
#FLASK_DEBUG=0
# Optional, default `http://localhost:5000`
#BASE_URL=http://localhost:5000
# Optional, default `qemu:///system` if you're root, otherwise `qemu:///session`
#VIRSH_DEFAULT_CONNECT_URI=qemu:///system
#ADMIN_PANEL_ALLOW_EMAIL_ADDRESSES=3wc.capsul@doesthisthing.work
# Optional, default no theme
#THEME=yolocolo

1
.gitignore vendored
View File

@@ -11,7 +11,6 @@ instance/
.pytest_cache/
.coverage
htmlcov/
/unittest-log-output.log
dist/
build/

3
app.py
View File

@@ -1,5 +1,4 @@
from capsulflask import create_app
from capsulflask.http_client import MyHTTPClient
app = create_app(lambda timeout_seconds: MyHTTPClient(timeout_seconds=timeout_seconds))
app = create_app()

View File

@@ -21,9 +21,11 @@ from apscheduler.schedulers.background import BackgroundScheduler
from capsulflask.shared import *
from capsulflask import hub_model, spoke_model, cli
from capsulflask.btcpay import client as btcpay
from capsulflask.http_client import MyHTTPClient
def create_app(http_client_factory):
def create_app():
for var_name in [
"SPOKE_HOST_TOKEN", "HUB_TOKEN", "STRIPE_SECRET_KEY",
"BTCPAY_PRIVATE_KEY", "MAIL_PASSWORD"
@@ -59,7 +61,6 @@ def create_app(http_client_factory):
LOG_LEVEL=config.get("LOG_LEVEL", "INFO"),
SPOKE_HOST_ID=config.get("SPOKE_HOST_ID", "baikal"),
SPOKE_HOST_TOKEN=config.get("SPOKE_HOST_TOKEN", "changeme"),
SSH_USERNAME=os.environ.get("SSH_USERNAME", default="cyberian"),
HUB_TOKEN=config.get("HUB_TOKEN", "changeme"),
# https://www.postgresql.org/docs/9.1/libpq-ssl.html#LIBPQ-SSL-SSLMODE-STATEMENTS
@@ -132,10 +133,7 @@ def create_app(http_client_factory):
mylog_warning(app, "No MAIL_SERVER configured. capsul will simply print emails to stdout.")
app.config['FLASK_MAIL_INSTANCE'] = StdoutMockFlaskMail()
# allow a mock http client to be injected by the test code.
app.config['HTTP_CLIENT'] = http_client_factory(int(app.config['INTERNAL_HTTP_TIMEOUT_SECONDS']))
app.config['HTTP_CLIENT'] = MyHTTPClient(timeout_seconds=int(app.config['INTERNAL_HTTP_TIMEOUT_SECONDS']))
app.config['BTCPAY_ENABLED'] = False
if app.config['BTCPAY_URL'] != "":
@@ -162,7 +160,7 @@ def create_app(http_client_factory):
# debug mode (flask reloader) runs two copies of the app. When running in debug mode,
# we only want to start the scheduler one time.
if is_running_server and not app.config['TESTING'] and (not app.debug or config.get('WERKZEUG_RUN_MAIN') == 'true'):
if is_running_server and (not app.debug or config.get('WERKZEUG_RUN_MAIN') == 'true'):
scheduler = BackgroundScheduler()
heartbeat_task_url = f"{app.config['HUB_URL']}/hub/heartbeat-task"
heartbeat_task_headers = {'Authorization': f"Bearer {app.config['HUB_TOKEN']}"}
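The two variants of `create_app()` in these hunks differ in whether the HTTP client is injected by the caller or constructed internally, and in whether the heartbeat scheduler is skipped under `TESTING`. A hedged sketch that combines both ideas, with simplified names (this is not the real `capsulflask/__init__.py`):

```python
# Sketch of dependency-injecting an HTTP client through the app factory and
# gating background jobs on TESTING. Illustrative only.
from flask import Flask


class MyHTTPClient:
    """Stand-in for capsulflask.http_client.MyHTTPClient."""
    def __init__(self, timeout_seconds: int = 300):
        self.timeout_seconds = timeout_seconds


def create_app(http_client_factory=None):
    app = Flask(__name__)
    app.config.setdefault('TESTING', False)
    app.config['INTERNAL_HTTP_TIMEOUT_SECONDS'] = 300

    # Allow a mock HTTP client to be injected by the test code; callers that
    # pass nothing get the real client.
    if http_client_factory is None:
        http_client_factory = lambda t: MyHTTPClient(timeout_seconds=t)
    app.config['HTTP_CLIENT'] = http_client_factory(
        int(app.config['INTERNAL_HTTP_TIMEOUT_SECONDS']))

    # The heartbeat scheduler should only run for a real server, not for tests
    # (and only once under the flask reloader).
    if not app.config['TESTING']:
        pass  # BackgroundScheduler() setup would go here

    return app
```

In the `app.py` hunk above, one variant passes a factory that builds the real `MyHTTPClient` while the other calls `create_app()` bare; the `TestHTTPClient` hunk further down is the test-side counterpart of the same factory.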

View File

@@ -62,18 +62,6 @@ def sql_script(f, c):
model.connection.commit()
@bp.cli.command('account-balance')
@click.option('-u', help='users email address')
@with_appcontext
def account_balance(u):
vms = get_model().list_vms_for_account(u)
payments = get_model().list_payments_for_account(u)
click.echo(".")
click.echo(".")
click.echo(get_account_balance(vms, payments, datetime.utcnow()))
click.echo(".")
@bp.cli.command('cron-task')
@with_appcontext
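For what it's worth, blueprint-level commands like the `account-balance` one above can also be exercised from the test suite via Flask's `test_cli_runner()`. The sketch below is self-contained and illustrative; it assumes the blueprint's CLI group is exposed under the name `cli`, which may not match capsulflask's actual registration.

```python
# Self-contained sketch of a blueprint CLI command and how a test can invoke it.
import click
from flask import Blueprint, Flask
from flask.cli import with_appcontext

bp = Blueprint("cli", __name__)


@bp.cli.command("account-balance")
@click.option("-u", help="users email address")
@with_appcontext
def account_balance(u):
    # The real command looks up VMs and payments for the account here.
    click.echo(f"balance for {u}: 0.00")


app = Flask(__name__)
app.register_blueprint(bp)

# Equivalent to running `flask cli account-balance -u test@example.com`.
result = app.test_cli_runner().invoke(args=["cli", "account-balance", "-u", "test@example.com"])
print(result.output)
```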

View File

@@ -108,8 +108,6 @@ def detail(id):
if vm is None:
return abort(404, f"{id} doesn't exist.")
vm['ssh_username'] = current_app.config['SSH_USERNAME']
if vm['deleted']:
return render_template("capsul-detail.html", vm=vm, delete=True, deleted=True)
@@ -195,6 +193,13 @@ def detail(id):
@account_required
def create():
#raise "console.create()!"
# file_object = open('unittest-output.log', 'a')
# file_object.write("console.create()!\n")
# file_object.close()
mylog_error(current_app, "console.create()!")
vm_sizes = get_model().vm_sizes_dict()
operating_systems = get_model().operating_systems_dict()
public_keys_for_account = get_model().list_ssh_public_keys_for_account(session["account"])

View File

@@ -43,7 +43,7 @@ def init_app(app, is_running_server):
hasSchemaVersionTable = False
actionWasTaken = False
schemaVersion = 0
desiredSchemaVersion = 19
desiredSchemaVersion = 18
cursor = connection.cursor()

View File

@@ -196,6 +196,7 @@ class CapsulFlaskHub(VirtualizationInterface):
def create(self, email: str, id: str, os: str, size: str, template_image_file_name: str, vcpus: int, memory_mb: int, ssh_authorized_keys: list):
validate_capsul_id(id)
online_hosts = get_model().get_online_hosts()
mylog_debug(current_app, f"hub_model.create(): ${len(online_hosts)} hosts")
payload = json.dumps(dict(
type="create",
email=email,

View File

@@ -23,7 +23,7 @@ def pricing():
@bp.route("/faq")
def faq():
return render_template("faq.html", ssh_username=current_app.config['SSH_USERNAME'])
return render_template("faq.html")
@bp.route("/about-ssh")
def about_ssh():

View File

@@ -1,8 +0,0 @@
DELETE FROM os_images WHERE id = 'guixsystem130';
DELETE FROM os_images WHERE id = 'archlinux';
UPDATE os_images SET deprecated = FALSE WHERE id = 'guixsystem120';
UPDATE os_images SET deprecated = FALSE WHERE id = 'centos7';
UPDATE os_images SET deprecated = FALSE WHERE id = 'centos8';
UPDATE os_images SET description = 'Ubuntu 20.04 LTS (Fossa)' WHERE id = 'ubuntu20';
UPDATE schemaversion SET version = 18;

View File

@@ -1,12 +0,0 @@
INSERT INTO os_images (id, template_image_file_name, description, deprecated)
VALUES ('guixsystem130', 'guixsystem/1.3.0/root.img.qcow2', 'Guix System 1.3.0', FALSE);
INSERT INTO os_images (id, template_image_file_name, description, deprecated)
VALUES ('archlinux', 'archlinux/root.img.qcow2', 'Arch Linux', FALSE);
UPDATE os_images SET deprecated = TRUE WHERE id = 'guixsystem120';
UPDATE os_images SET deprecated = TRUE WHERE id = 'centos7';
UPDATE os_images SET deprecated = TRUE WHERE id = 'centos8';
UPDATE os_images SET description = 'Ubuntu 20.04 (Focal)' WHERE id = 'ubuntu20';
UPDATE schemaversion SET version = 19;
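The two SQL files above are the down- and up-migration for schema version 19, and the `desiredSchemaVersion` constant in the `db.py` hunk selects how far `init_app()` migrates. A rough sketch of how such a version-number-driven loop typically looks (file naming and layout are assumptions, not the actual capsulflask code; note the SQL scripts above update the `schemaversion` row themselves):

```python
# Rough sketch of a schema-version-driven migration loop in the spirit of
# db.py's init_app(). Paths and file names are assumptions.
import os


def migrate(cursor, current_version: int, desired_version: int, migrations_dir: str) -> None:
    def run(name: str) -> None:
        with open(os.path.join(migrations_dir, name)) as f:
            cursor.execute(f.read())

    while current_version < desired_version:
        current_version += 1
        run(f"{current_version:02d}_up.sql")    # e.g. 19_up.sql adds the new images
    while current_version > desired_version:
        run(f"{current_version:02d}_down.sql")  # e.g. 19_down.sql reverts them
        current_version -= 1
```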

View File

@@ -2,7 +2,6 @@ import re
from flask import current_app, Flask
from typing import List
from threading import Lock
class OnlineHost:
def __init__(self, id: str, url: str):
@@ -59,10 +58,8 @@ def my_exec_info_message(exec_info):
mylog_current_test_id_container = {
'value': '',
'mutex': Lock()
}
def set_mylog_test_id(test_id):
mylog_current_test_id_container['value'] = ".".join(test_id.split(".")[-2:])
@@ -70,11 +67,10 @@ def set_mylog_test_id(test_id):
def log_output_for_tests(app: Flask, message: str):
if app.config['TESTING'] and mylog_current_test_id_container['value'] != "":
mylog_current_test_id_container['mutex'].acquire()
file_object = open('unittest-log-output.log', 'a')
file_object.write(f"{mylog_current_test_id_container['value']}: {message}\n")
file_object.close()
mylog_current_test_id_container['mutex'].release()
def mylog_debug(app: Flask, message: str):
log_output_for_tests(app, f"DEBUG: {message}")
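`log_output_for_tests()` above appends every `mylog_*` call to `unittest-log-output.log`, tagged with the current test id and serialized by a mutex. A test would typically register its id before exercising the app, roughly like this (a sketch; only `set_mylog_test_id()` and its keep-the-last-two-components behaviour come from the hunk, and the import path is an assumption):

```python
# Sketch of how a test might opt in to the per-test log tagging shown above.
import unittest
from capsulflask.shared import set_mylog_test_id  # assumed location


class ExampleTests(unittest.TestCase):
    def setUp(self):
        # self.id() is e.g. "capsulflask.tests.ConsoleTests.test_index";
        # set_mylog_test_id() keeps only the last two dotted components.
        set_mylog_test_id(self.id())

    def tearDown(self):
        # Clear the id so later log lines aren't mis-attributed to this test.
        set_mylog_test_id("")
```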

View File

@@ -30,6 +30,6 @@ if virsh domuuid "$vmname" | grep -vqE '^[\t\s\n]*$'; then
fi
# this gets the ipv4
ipv4="$(virsh domifaddr "$vmname" | awk '/ipv4/ {print $4}' | cut -d'/' -f1)"
ipv4="$(virsh domifaddr "$vmname" | awk '/vnet/ {print $4}' | cut -d'/' -f1)"
echo "$exists $state $ipv4"

View File

@@ -43,10 +43,9 @@ def operation_without_id():
def operation_impl(operation_id: int):
if authorized_as_hub(request.headers):
request_body = request.json
if not isinstance(request.json, dict) and not isinstance(request.json, list):
request_body = json.loads(request.json)
request_body_json = request.json
request_body = json.loads(request_body_json)
#mylog_info(current_app, f"request.json: {request_body}")
handlers = {
"capacity_avaliable": handle_capacity_avaliable,
"get": handle_get,

View File

@@ -97,7 +97,7 @@
</div>
<div class="row justify-start">
<label class="align" for="ssh_username">SSH Username</label>
<span id="ssh_username">{{ vm['ssh_username'] }}</span>
<span id="ssh_username">cyberian</span>
</div>
<div class="row justify-start">
<label class="align" for="ssh_authorized_keys">SSH Authorized Keys</label>

View File

@@ -21,13 +21,13 @@
</li>
<li>
How do I log in?
<p>ssh to the ip provided to you using the "{{ ssh_username }}" user.</p>
<pre class='code'>$ ssh {{ ssh_username }}@1.2.3.4</pre>
<p>ssh to the ip provided to you using the cyberian user.</p>
<pre class='code'>$ ssh cyberian@1.2.3.4</pre>
<p>For more information, see <a href="/about-ssh">Understanding the Secure Shell Protocol (SSH)</a>.</p>
</li>
<li>
How do I change to the root user?
<p>The "{{ ssh_username }}" user has passwordless sudo access by default. This should work:</p>
<p>The cyberian user has passwordless sudo access by default. This should work:</p>
<pre class='code'>
# Linux
$ sudo su -
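The template hunks in this section show a hard-coded `cyberian` username on one side and a value threaded through from configuration on the other: `SSH_USERNAME` in the config hunk near the top, `vm['ssh_username']` in `console.detail()`, and `ssh_username=` in the FAQ view. A compressed sketch of that flow (the route, config key, and default come from the hunks; the rest is illustrative):

```python
# Sketch of the config -> view -> template flow for the SSH username.
import os
from flask import Flask, current_app, render_template

app = Flask(__name__)
app.config['SSH_USERNAME'] = os.environ.get("SSH_USERNAME", default="cyberian")


@app.route("/faq")
def faq():
    # The template then renders {{ ssh_username }} instead of a hard-coded name.
    return render_template("faq.html", ssh_username=current_app.config['SSH_USERNAME'])
```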

View File

@@ -7,11 +7,18 @@
<h1>SUPPORT</h1>
</div>
<div class="row half-margin">
<a href="mailto:support@cyberia.club?subject=capsul%20support%20request">support@cyberia.club</a>
<a href="mailto:support@cyberia.club?subject=Please%20help!">support@cyberia.club</a>
</div>
{% endblock %}
{% block subcontent %}
<p>
Note: We maintain a searchable archive of all support emails at
<a href="https://lists.cyberia.club/~cyberia/support">https://lists.cyberia.club/~cyberia/support</a>
</p>
<p>
If you do not want your mail to appear in a public archive, email <a href="mailto:capsul@cyberia.club?subject=Please%20help!">capsul@cyberia.club</a> instead.
</p>
<p>
Please describe your problem or feature request, and we will do our best to get back to you promptly. Thank you very much.
</p>

View File

@@ -12,6 +12,8 @@ class LoginTests(BaseTestCase):
response = client.get(url_for("auth.login"))
self.assert_200(response)
# FIXME test generated login link
def test_login_magiclink(self):
token, ignoreCaseMatches = get_model().login('test@example.com')

View File

@@ -8,7 +8,6 @@ from flask import url_for
from capsulflask.db import get_model
from capsulflask.tests_base import BaseTestCase
from capsulflask.shared import *
from capsulflask.spoke_model import MockSpoke
@@ -26,27 +25,6 @@ class ConsoleTests(BaseTestCase):
"content": "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQDntq1t8Ddsa2q4p+PM7W4CLYYmxakokRRVLlf7AQlsTJFPsgBe9u0zuoOaKDMkBr0dlnuLm4Eub1Mj+BrdqAokto0YDiAnxUKRuYQKuHySKK8bLkisi2k47jGBDikx/jihgiuFTawo1mYsJJepC7PPwZGsoCImJEgq1L+ug0p3Zrj3QkUx4h25MpCSs2yvfgWjDyN8hEC76O42P+4ETezYrzrd1Kj26hdzHRnrxygvIUOtfau+5ydlaz8xQBEPrEY6/+pKDuwtXg1pBL7GmoUxBXVfHQSgq5s9jIJH+G0CR0ZoHMB25Ln4X/bsCQbLOu21+IGYKSDVM5TIMLtkKUkERQMVWvnpOp1LZKir4dC0m7SW74wpA8+2b1IsURIr9ARYGJpCEv1Q1Wz/X3yTf6Mfey7992MjUc9HcgjgU01/+kYomoXHprzolk+22Gjfgo3a4dRIoTY82GO8kkUKiaWHvDkkVURCY5dpteLA05sk3Z9aRMYsNXPLeOOPfzTlDA0="
}
def setUp(self):
super().setUp()
get_model().cursor.execute("DELETE FROM host_operation")
get_model().cursor.execute("DELETE FROM operations")
get_model().cursor.execute("DELETE FROM vm_ssh_host_key")
get_model().cursor.execute("DELETE FROM vm_ssh_authorized_key")
get_model().cursor.execute("DELETE FROM ssh_public_keys")
get_model().cursor.execute("DELETE FROM login_tokens")
get_model().cursor.execute("DELETE FROM vms")
get_model().cursor.execute("DELETE FROM payments")
get_model().cursor.connection.commit()
self._login('test@example.com')
get_model().create_ssh_public_key('test@example.com', 'key', 'foo')
# heartbeat all the spokes so that the hub <--> spoke communication can work as normal.
host_ids = get_model().list_hosts_with_networks(None).keys()
for host_id in host_ids:
get_model().host_heartbeat(host_id)
def test_index(self):
self._login('test@example.com')
with self.client as client:
@@ -77,6 +55,7 @@ class ConsoleTests(BaseTestCase):
0
)
def test_create_fails_capacity(self):
with self.client as client:
@@ -105,6 +84,10 @@ class ConsoleTests(BaseTestCase):
0
)
file_object = open('unittest-output.log', 'a')
file_object.write(f"{self.id()} captured output:\n{self.logs_from_test.getvalue()}\n")
file_object.close()
def test_create_fails_invalid(self):
with self.client as client:
client.get(url_for("console.create"))
@@ -138,8 +121,9 @@ class ConsoleTests(BaseTestCase):
response = client.post(url_for("console.create"), data=data)
# mylog_info(self.app, get_model().list_all_operations())
self.assertEqual(
len(get_model().list_all_operations()),
1
@@ -158,6 +142,7 @@ class ConsoleTests(BaseTestCase):
url_for("console.index") + f'?created={vm_id}'
)
def test_keys_loads(self):
self._login('test@example.com')
with self.client as client:
@@ -210,3 +195,17 @@ class ConsoleTests(BaseTestCase):
'A key with that name already exists',
category='message'
)
def setUp(self):
super().setUp()
self._login('test@example.com')
get_model().create_ssh_public_key('test@example.com', 'key', 'foo')
def tearDown(self):
super().tearDown()
get_model().cursor.execute("DELETE FROM ssh_public_keys")
get_model().cursor.execute("DELETE FROM login_tokens")
get_model().cursor.execute("DELETE FROM vms")
get_model().cursor.execute("DELETE FROM payments")
get_model().cursor.connection.commit()
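The setUp/tearDown hunks above clear the same group of tables around each test so runs don't leak state into each other. A condensed sketch of that cleanup, looping over the table names from the hunk instead of repeating `execute()` calls (the helper name is illustrative):

```python
# Condensed sketch of the per-test table cleanup shown in the hunk above.
TABLES_TO_CLEAR = [
    "host_operation", "operations", "vm_ssh_host_key", "vm_ssh_authorized_key",
    "ssh_public_keys", "login_tokens", "vms", "payments",
]


def clear_tables(model):
    # Child tables are listed first, presumably to satisfy foreign keys.
    for table in TABLES_TO_CLEAR:
        model.cursor.execute(f"DELETE FROM {table}")
    model.cursor.connection.commit()
```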

View File

@@ -1,26 +1,14 @@
from io import StringIO
import logging
import unittest
import os
import sys
import json
import itertools
import time
import threading
import asyncio
import traceback
from urllib.parse import urlparse
from typing import List
from nanoid import generate
from concurrent.futures import ThreadPoolExecutor
from flask_testing import TestCase
from flask import current_app
from capsulflask import create_app
from capsulflask.db import get_model
from capsulflask.http_client import *
from capsulflask.shared import *
class BaseTestCase(TestCase):
@@ -31,9 +19,7 @@ class BaseTestCase(TestCase):
os.environ['LOG_LEVEL'] = 'DEBUG'
os.environ['SPOKE_MODEL'] = 'mock'
os.environ['HUB_MODEL'] = 'capsul-flask'
self1 = self
get_app = lambda: self1.app
self.app = create_app(lambda timeout_seconds: TestHTTPClient(get_app, timeout_seconds))
self.app = create_app()
return self.app
def setUp(self):
@@ -47,73 +33,3 @@ class BaseTestCase(TestCase):
with self.client.session_transaction() as session:
session['account'] = user_email
session['csrf-token'] = generate()
class TestHTTPClient:
def __init__(self, get_app, timeout_seconds = 5):
self.timeout_seconds = timeout_seconds
self.get_app = get_app
self.executor = ThreadPoolExecutor()
def do_multi_http_sync(self, online_hosts: List[OnlineHost], url_suffix: str, body: str, authorization_header=None) -> List[HTTPResult]:
future = run_coroutine(self.do_multi_http(online_hosts=online_hosts, url_suffix=url_suffix, body=body, authorization_header=authorization_header))
fromOtherThread = future.result()
toReturn = []
for individualResult in fromOtherThread:
if individualResult.error != None and individualResult.error != "":
mylog_error(self.get_app(), individualResult.error)
toReturn.append(individualResult.http_result)
return toReturn
def do_http_sync(self, url: str, body: str, method="POST", authorization_header=None) -> HTTPResult:
future = run_coroutine(self.do_http(method=method, url=url, body=body, authorization_header=authorization_header))
fromOtherThread = future.result()
if fromOtherThread.error != None and fromOtherThread.error != "":
mylog_error(self.get_app(), fromOtherThread.error)
return fromOtherThread.http_result
async def do_http(self, url: str, body: str, method="POST", authorization_header=None) -> InterThreadResult:
path = urlparse(url).path
headers = {}
if authorization_header != None:
headers['Authorization'] = authorization_header
if body:
headers['Content-Type'] = "application/json"
#mylog_info(self.get_app(), f"path, data=body, headers=headers: {path}, {body}, {headers}")
do_request = None
if method == "POST":
do_request = lambda: self.get_app().test_client().post(path, data=body, headers=headers)
if method == "GET":
do_request = lambda: self.get_app().test_client().get(path, headers=headers)
response = None
try:
response = await get_event_loop().run_in_executor(self.executor, do_request)
except:
traceback.print_exc()
error_message = my_exec_info_message(sys.exc_info())
response_body = json.dumps({"error_message": f"do_http (HTTP {method} {url}) {error_message}"})
return InterThreadResult(
HTTPResult(-1, response_body),
f"""do_http (HTTP {method} {url}) failed with: {error_message}"""
)
return InterThreadResult(HTTPResult(response.status_code, response.get_data()), None)
async def do_multi_http(self, online_hosts: List[OnlineHost], url_suffix: str, body: str, authorization_header=None) -> List[InterThreadResult]:
tasks = []
# append to tasks in the same order as online_hosts
for host in online_hosts:
tasks.append(
self.do_http(url=f"{host.url}{url_suffix}", body=body, authorization_header=authorization_header)
)
# gather is like Promise.all from javascript, it returns a future which resolves to an array of results
# in the same order as the tasks that we passed in -- which were in the same order as online_hosts
results = await asyncio.gather(*tasks)
return results
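The `TestHTTPClient` shown in this hunk is the mock that the factory-injection change exists to support: it keeps `MyHTTPClient`'s interface but routes every hub-to-spoke request back into Flask's test client, running the blocking call on a thread pool so it can still be awaited and gathered. The core of that trick, stripped of the capsulflask specifics (everything below is illustrative):

```python
# Sketch of the run-blocking-calls-in-an-executor pattern used by TestHTTPClient.
import asyncio
from concurrent.futures import ThreadPoolExecutor

executor = ThreadPoolExecutor()


def fetch(path: str) -> str:
    # A blocking call -- in TestHTTPClient this is app.test_client().post(...).
    return f"response from {path}"


async def fetch_async(path: str) -> str:
    loop = asyncio.get_running_loop()
    # run_in_executor() lets the synchronous call participate in asyncio code.
    return await loop.run_in_executor(executor, lambda: fetch(path))


async def fetch_all(paths):
    # gather() resolves to results in the same order as the tasks passed in,
    # like Promise.all in JavaScript.
    return await asyncio.gather(*(fetch_async(p) for p in paths))


print(asyncio.run(fetch_all(["/spoke/operation", "/spoke/heartbeat"])))
```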

View File

@@ -15,8 +15,8 @@ services:
- "5000:5000"
environment:
- "POSTGRES_CONNECTION_PARAMETERS=host=db port=5432 user=capsul password=capsul dbname=capsul"
- SPOKE_MODEL
- FLASK_DEBUG
- SPOKE_MODEL=shell-scripts
#- FLASK_DEBUG=1
- BASE_URL=http://localhost:5000
- ADMIN_PANEL_ALLOW_EMAIL_ADDRESSES=3wc.capsul@doesthisthing.work
- VIRSH_DEFAULT_CONNECT_URI=qemu:///system

View File

@@ -3,7 +3,6 @@
Create a `.env` file to set up the application configuration:
```
cp .env.sample .env
nano .env
```

View File

@@ -6,7 +6,7 @@ To run tests:
1. create a Postgres database called `capsulflask_test`
- e.g.: `docker exec -it 98e1ddfbbffb createdb -U postgres -O postgres capsulflask_test`
- (`98e1ddfbbffb` is the docker container ID of the postgres container)
2. run `python3 -m unittest; cat unittest-log-output.log; rm unittest-log-output.log`
2. run `python3 -m unittest && cat unittest-log-output.log && rm unittest-log-output.log`
**NOTE** that right now we can't figure out how to get the tests to properly output the log messages that happened when they failed, (or passed), so for now we have hacked it to write to a file.
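One hedged alternative to the write-to-a-file hack described in that note, assuming the messages ultimately go through Python's standard `logging` module: attach a per-test in-memory handler in `setUp()` and dump it from `tearDown()`, in the same spirit as the `logs_from_test` buffer that appears in the console tests hunk above. This is a sketch of an approach, not what capsul-flask currently does:

```python
# Sketch: capture log output per test in memory instead of a shared file.
import io
import logging
import unittest


class LogCapturingTestCase(unittest.TestCase):
    def setUp(self):
        self.log_stream = io.StringIO()
        self.log_handler = logging.StreamHandler(self.log_stream)
        logging.getLogger().addHandler(self.log_handler)

    def tearDown(self):
        logging.getLogger().removeHandler(self.log_handler)
        # Printing here at least ties the captured log lines to the test id
        # that produced them; unittest's --buffer flag can hide it on success.
        print(f"{self.id()} captured log output:\n{self.log_stream.getvalue()}")
```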