trying to fix logging

forest 2020-05-15 23:19:01 -05:00
parent 3bd9d24a97
commit 26340f118f
9 changed files with 66 additions and 39 deletions
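
The thread running through all nine files: bare print() calls are replaced with Flask's logger (app.logger where the application object is in hand, current_app.logger inside blueprints, models, and CLI code), and the module that creates the Flask app gains a dictConfig block so messages honor the new LOG_LEVEL setting and carry a timestamp, level, and module name. A minimal sketch of the pattern, not taken from the diff (the command name and the messages are invented):

import logging
from flask import Flask, current_app

app = Flask(__name__)
app.logger.setLevel(logging.INFO)  # stand-in for the dictConfig added further down

@app.cli.command("example")
def example():
    # flask CLI commands and request handlers run inside an app context,
    # so current_app resolves to this app and shares its logger
    current_app.logger.info("routine progress message")
    current_app.logger.warning("something an operator should notice")
    current_app.logger.error("an exception or a failed subprocess")

Run via the flask CLI (FLASK_APP pointing at this module, then flask example), the three calls land on stderr, while a debug() call would be dropped at this level.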

app.py

@@ -1 +1,2 @@
from capsulflask import app
from capsulflask import app


@@ -1,3 +1,5 @@
import logging
from logging.config import dictConfig as logging_dict_config
import os
@@ -13,9 +15,12 @@ from capsulflask.btcpay import client as btcpay
load_dotenv(find_dotenv())
app = Flask(__name__)
app.config.from_mapping(
BASE_URL=os.environ.get("BASE_URL", default="http://localhost:5000"),
SECRET_KEY=os.environ.get("SECRET_KEY", default="dev"),
LOG_LEVEL=os.environ.get("LOG_LEVEL", default="INFO"),
DATABASE_URL=os.environ.get("DATABASE_URL", default="sql://postgres:dev@localhost:5432/postgres"),
DATABASE_SCHEMA=os.environ.get("DATABASE_SCHEMA", default="public"),
@@ -37,6 +42,28 @@ app.config.from_mapping(
BTCPAY_URL=os.environ.get("BTCPAY_URL", default="https://btcpay.cyberia.club")
)
logging_dict_config({
'version': 1,
'formatters': {'default': {
'format': '[%(asctime)s] %(levelname)s in %(module)s: %(message)s',
}},
'handlers': {'wsgi': {
'class': 'logging.StreamHandler',
'stream': 'ext://flask.logging.wsgi_errors_stream',
'formatter': 'default'
}},
'root': {
'level': app.config['LOG_LEVEL'],
'handlers': ['wsgi']
}
})
# app.logger.critical("critical")
# app.logger.error("error")
# app.logger.warning("warning")
# app.logger.info("info")
# app.logger.debug("debug")
stripe.api_key = app.config['STRIPE_SECRET_KEY']
stripe.api_version = app.config['STRIPE_API_VERSION']
@@ -59,5 +86,3 @@ app.register_blueprint(cli.bp)
app.add_url_rule("/", endpoint="index")
if __name__ == "__main__":
app.run(host='127.0.0.1')
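
With the dictConfig above, the root logger's level comes straight from LOG_LEVEL (default INFO), so of the commented-out probe calls only debug() would be filtered out; everything else reaches Flask's WSGI error stream in the [timestamp] LEVEL in module: message format. A stdlib-only sketch of the same filtering, with the ext://flask.logging.wsgi_errors_stream handler swapped for a plain stderr StreamHandler so it runs outside Flask:

import logging
from logging.config import dictConfig

dictConfig({
    'version': 1,
    'formatters': {'default': {
        'format': '[%(asctime)s] %(levelname)s in %(module)s: %(message)s',
    }},
    'handlers': {'console': {  # stderr stand-in for the 'wsgi' handler
        'class': 'logging.StreamHandler',
        'formatter': 'default',
    }},
    'root': {'level': 'INFO', 'handlers': ['console']},
})

logging.getLogger(__name__).debug("dropped: below the INFO threshold")
logging.getLogger(__name__).info("emitted with the timestamp/level/module prefix")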


@@ -66,19 +66,19 @@ def sql_script(f, c):
def cron_task():
# make sure btcpay payments get completed (in case we miss a webhook), otherwise invalidate the payment
print("cron_task: starting clean_up_unresolved_btcpay_invoices")
current_app.logger.info("cron_task: starting clean_up_unresolved_btcpay_invoices")
clean_up_unresolved_btcpay_invoices()
print("cron_task: finished clean_up_unresolved_btcpay_invoices")
current_app.logger.info("cron_task: finished clean_up_unresolved_btcpay_invoices")
# notify when funds run out
print("cron_task: starting notify_users_about_account_balance")
current_app.logger.info("cron_task: starting notify_users_about_account_balance")
notify_users_about_account_balance()
print("cron_task: finished notify_users_about_account_balance")
current_app.logger.info("cron_task: finished notify_users_about_account_balance")
# make sure vm system and DB are synced
print("cron_task: starting ensure_vms_and_db_are_synced")
current_app.logger.info("cron_task: starting ensure_vms_and_db_are_synced")
ensure_vms_and_db_are_synced()
print("cron_task: finished ensure_vms_and_db_are_synced")
current_app.logger.info("cron_task: finished ensure_vms_and_db_are_synced")
@@ -90,13 +90,13 @@ def clean_up_unresolved_btcpay_invoices():
days = float((datetime.now() - unresolved_invoice['created']).total_seconds())/float(60*60*24)
if btcpay_invoice['status'] == "complete":
print(
current_app.logger.info(
f"resolving btcpay invoice {invoice_id} "
f"({unresolved_invoice['email']}, ${unresolved_invoice['dollars']}) as completed "
)
get_model().btcpay_invoice_resolved(invoice_id, True)
elif days >= 1:
print(
current_app.logger.info(
f"resolving btcpay invoice {invoice_id} "
f"({unresolved_invoice['email']}, ${unresolved_invoice['dollars']}) as invalidated, "
f"btcpay server invoice status: {btcpay_invoice['status']}"
@@ -224,7 +224,7 @@ def notify_users_about_account_balance():
index_to_send = i
if index_to_send > -1:
print(f"cron_task: sending {warnings[index_to_send]['id']} warning email to {account['email']}.")
current_app.logger.info(f"cron_task: sending {warnings[index_to_send]['id']} warning email to {account['email']}.")
get_body = warnings[index_to_send]['get_body']
get_subject = warnings[index_to_send]['get_subject']
current_app.config["FLASK_MAIL_INSTANCE"].send(
@@ -237,7 +237,7 @@ def notify_users_about_account_balance():
get_model().set_account_balance_warning(account['email'], warnings[index_to_send]['id'])
if index_to_send == len(warnings)-1:
for vm in vms:
print(f"cron_task: deleting {vm['id']} ( {account['email']} ) due to negative account balance.")
current_app.logger.warning(f"cron_task: deleting {vm['id']} ( {account['email']} ) due to negative account balance.")
current_app.config["VIRTUALIZATION_MODEL"].destroy(email=account["email"], id=vm['id'])
get_model().delete_vm(email=account["email"], id=vm['id'])


@@ -30,7 +30,7 @@ def double_check_capsul_address(id, ipv4):
ipv4 = result.ipv4
get_model().update_vm_ip(email=session["account"], id=id, ipv4=result.ipv4)
except:
print(f"""
current_app.logger.error(f"""
the virtualization model threw an error in double_check_capsul_address of {id}:
{my_exec_info_message(sys.exc_info())}"""
)
@@ -82,7 +82,7 @@ def detail(id):
return render_template("capsul-detail.html", vm=vm, delete=True, deleted=False)
else:
print(f"deleting {vm['id']} per user request ({session['account']})")
current_app.logger.info(f"deleting {vm['id']} per user request ({session['account']})")
current_app.config["VIRTUALIZATION_MODEL"].destroy(email=session['account'], id=id)
get_model().delete_vm(email=session['account'], id=id)
@@ -182,7 +182,7 @@ def create():
flash(error)
if not capacity_avaliable:
print(f"when capsul capacity is restored, send an email to {session['account']}")
current_app.logger.warning(f"when capsul capacity is restored, send an email to {session['account']}")
return render_template(
"create-capsul.html",
@@ -299,8 +299,7 @@ def account_balance():
for vm in get_vms():
end_datetime = vm["deleted"] if vm["deleted"] else datetime.utcnow()
# print(end_datetime)
# print(vm["created"])
vm_months = (end_datetime - vm["created"]).days / average_number_of_days_in_a_month
vms_billed.append(dict(
id=vm["id"],


@@ -25,11 +25,11 @@ def init_app(app):
schemaMigrations = {}
schemaMigrationsPath = join(app.root_path, 'schema_migrations')
print("loading schema migration scripts from {}".format(schemaMigrationsPath))
app.logger.info("loading schema migration scripts from {}".format(schemaMigrationsPath))
for filename in listdir(schemaMigrationsPath):
result = re.search(r"^\d+_(up|down)", filename)
if not result:
print(f"schemaVersion {filename} must match ^\d+_(up|down). exiting.")
app.logger.error(f"schemaVersion {filename} must match ^\\d+_(up|down). exiting.")
exit(1)
key = result.group()
with open(join(schemaMigrationsPath, filename), 'rb') as file:
@@ -54,12 +54,12 @@ def init_app(app):
hasSchemaVersionTable = True
if hasSchemaVersionTable == False:
print("no table named schemaversion found in the {} schema. running migration 01_up".format(app.config['DATABASE_SCHEMA']))
app.logger.info("no table named schemaversion found in the {} schema. running migration 01_up".format(app.config['DATABASE_SCHEMA']))
try:
cursor.execute(schemaMigrations["01_up"])
connection.commit()
except:
print("unable to create the schemaversion table because: {}".format(my_exec_info_message(sys.exc_info())))
app.logger.error("unable to create the schemaversion table because: {}".format(my_exec_info_message(sys.exc_info())))
exit(1)
actionWasTaken = True
@@ -67,24 +67,24 @@ def init_app(app):
schemaVersion = cursor.fetchall()[0][0]
if schemaVersion > desiredSchemaVersion:
print("schemaVersion ({}) > desiredSchemaVersion ({}). schema downgrades are not supported yet. exiting.".format(
app.logger.critical("schemaVersion ({}) > desiredSchemaVersion ({}). schema downgrades are not supported yet. exiting.".format(
schemaVersion, desiredSchemaVersion
))
exit(1)
while schemaVersion < desiredSchemaVersion:
migrationKey = "%02d_up" % (schemaVersion+1)
print("schemaVersion ({}) < desiredSchemaVersion ({}). running migration {}".format(
app.logger.info("schemaVersion ({}) < desiredSchemaVersion ({}). running migration {}".format(
schemaVersion, desiredSchemaVersion, migrationKey
))
try:
cursor.execute(schemaMigrations[migrationKey])
connection.commit()
except KeyError:
print("missing schema migration script: {}_xyz.sql".format(migrationKey))
app.logger.critical("missing schema migration script: {}_xyz.sql".format(migrationKey))
exit(1)
except:
print("unable to execute the schema migration {} because: {}".format(migrationKey, my_exec_info_message(sys.exc_info())))
app.logger.critical("unable to execute the schema migration {} because: {}".format(migrationKey, my_exec_info_message(sys.exc_info())))
exit(1)
actionWasTaken = True
@@ -93,7 +93,7 @@ def init_app(app):
versionFromDatabase = cursor.fetchall()[0][0]
if schemaVersion != versionFromDatabase:
print("incorrect schema version value \"{}\" after running migration {}, expected \"{}\". exiting.".format(
app.logger.critical("incorrect schema version value \"{}\" after running migration {}, expected \"{}\". exiting.".format(
versionFromDatabase,
migrationKey,
schemaVersion
@@ -104,7 +104,7 @@ def init_app(app):
app.config['PSYCOPG2_CONNECTION_POOL'].putconn(connection)
print("{} current schemaVersion: \"{}\"".format(
app.logger.info("{} current schemaVersion: \"{}\"".format(
("schema migration completed." if actionWasTaken else "schema is already up to date. "), schemaVersion
))
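
Two spellings of the same logger appear in this commit: the migration code above logs through app.logger because init_app(app) is handed the application object directly, while the blueprint, cron, and model code uses current_app.logger, a context-local proxy that resolves to that same logger whenever an application context is active. A small illustrative sketch (the view function is hypothetical):

from flask import Flask, current_app

def init_app(app: Flask) -> None:
    # startup code holds the real app object, so it logs on it directly
    app.logger.info("running schema migrations")

def index():
    # request handlers and CLI commands only see the context-local proxy
    current_app.logger.info("handling a request")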


@@ -1,5 +1,6 @@
from nanoid import generate
from flask import current_app
class DBModel:
@@ -168,7 +169,7 @@ class DBModel:
row = self.cursor.fetchone()
if row:
if int(row[1]) != int(dollars):
print(f"""
current_app.logger.warning(f"""
{payment_type} gave us a completed payment session with a different dollar amount than what we had recorded!!
id: {id}
account: {row[0]}


@@ -1,5 +1,6 @@
from flask import Blueprint
from flask import render_template
from flask import current_app
bp = Blueprint("landing", __name__, url_prefix="/")


@@ -62,7 +62,7 @@ def btcpay_payment():
# print(invoice)
invoice_id = invoice["id"]
print(f"created btcpay invoice_id={invoice_id} ( {session['account']}, ${dollars} )")
current_app.logger.info(f"created btcpay invoice_id={invoice_id} ( {session['account']}, ${dollars} )")
get_model().create_payment_session("btcpay", invoice_id, session["account"], dollars)
@@ -94,7 +94,7 @@ def btcpay_webhook():
success_account = get_model().consume_payment_session("btcpay", invoice_id, dollars)
if success_account:
print(f"{success_account} paid ${dollars} successfully (btcpay_invoice_id={invoice_id})")
current_app.logger.info(f"{success_account} paid ${dollars} successfully (btcpay_invoice_id={invoice_id})")
if invoice['status'] == "complete":
get_model().btcpay_invoice_resolved(invoice_id, True)
@@ -119,7 +119,7 @@ def stripe_payment():
if len(errors) == 0:
print(f"creating stripe checkout session for {session['account']}, ${dollars}")
current_app.logger.info(f"creating stripe checkout session for {session['account']}, ${dollars}")
checkout_session = stripe.checkout.Session.create(
success_url=current_app.config['BASE_URL'] + "/payment/stripe/success?session_id={CHECKOUT_SESSION_ID}",
@@ -138,7 +138,7 @@ def stripe_payment():
)
stripe_checkout_session_id = checkout_session['id']
print(f"stripe_checkout_session_id={stripe_checkout_session_id} ( {session['account']}, ${dollars} )")
current_app.logger.info(f"stripe_checkout_session_id={stripe_checkout_session_id} ( {session['account']}, ${dollars} )")
get_model().create_payment_session("stripe", stripe_checkout_session_id, session["account"], dollars)
@@ -191,13 +191,13 @@ def validate_stripe_checkout_session(stripe_checkout_session_id):
def success():
stripe_checkout_session_id = request.args.get('session_id')
if not stripe_checkout_session_id:
print("/payment/stripe/success returned 400: missing required URL parameter session_id")
current_app.logger.info("/payment/stripe/success returned 400: missing required URL parameter session_id")
abort(400, "missing required URL parameter session_id")
else:
for _ in range(0, 5):
paid = validate_stripe_checkout_session(stripe_checkout_session_id)
if paid:
print(f"{paid['email']} paid ${paid['dollars']} successfully (stripe_checkout_session_id={stripe_checkout_session_id})")
current_app.logger.info(f"{paid['email']} paid ${paid['dollars']} successfully (stripe_checkout_session_id={stripe_checkout_session_id})")
return redirect(url_for("console.account_balance"))
else:
sleep(1)


@@ -47,11 +47,11 @@ class MockVirtualization(VirtualizationInterface):
def create(self, email: str, id: str, template_image_file_name: str, vcpus: int, memory_mb: int, ssh_public_keys: list):
validate_capsul_id(id)
print(f"mock create: {id} for {email}")
current_app.logger.info(f"mock create: {id} for {email}")
sleep(1)
def destroy(self, email: str, id: str):
print(f"mock destroy: {id} for {email}")
current_app.logger.info(f"mock destroy: {id} for {email}")
class ShellScriptVirtualization(VirtualizationInterface):
@@ -74,7 +74,7 @@ class ShellScriptVirtualization(VirtualizationInterface):
completedProcess = run(my_args, capture_output=True)
if completedProcess.returncode != 0:
print(f"""
current_app.logger.error(f"""
capacity-avaliable.sh exited {completedProcess.returncode} with
stdout:
{completedProcess.stdout}
@@ -85,7 +85,7 @@ class ShellScriptVirtualization(VirtualizationInterface):
lines = completedProcess.stdout.splitlines()
if not lines[len(lines)-1] == "yes":
print("capacity-avaliable.sh exited 0 but did not return \"yes\" ")
current_app.logger.error("capacity-avaliable.sh exited 0 but did not return \"yes\" ")
return False
return True
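
One caveat with reaching for current_app everywhere: the proxy only resolves while an application context is pushed. Request handlers and flask CLI commands get one automatically, but an ad-hoc script or interactive shell has to push a context first. A short sketch, assuming the configuration needed to import capsulflask (database and the rest) is in place:

from capsulflask import app
from flask import current_app

with app.app_context():
    current_app.logger.info("goes to the handler configured by dictConfig")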