2020-05-16 04:19:01 +00:00
import logging
from logging . config import dictConfig as logging_dict_config
2020-05-14 23:03:00 +00:00
2021-01-04 19:32:52 +00:00
import atexit
2021-07-21 23:15:39 +00:00
import jinja2
2020-05-11 06:47:14 +00:00
import os
2020-05-22 20:20:26 +00:00
import hashlib
2021-01-04 19:32:52 +00:00
import requests
2021-06-27 15:46:48 +00:00
import sys
2020-05-10 00:13:20 +00:00
2020-05-14 23:03:00 +00:00
import stripe
2021-07-16 15:51:06 +00:00
from dotenv import find_dotenv , dotenv_values
2020-05-10 00:13:20 +00:00
from flask import Flask
2021-07-07 18:47:21 +00:00
from flask_mail import Mail , Message
2020-05-10 03:59:22 +00:00
from flask import render_template
2020-05-22 20:20:26 +00:00
from flask import url_for
from flask import current_app
2021-01-04 19:32:52 +00:00
from apscheduler . schedulers . background import BackgroundScheduler
2020-05-11 06:47:14 +00:00
2021-06-27 15:46:48 +00:00
from capsulflask . shared import my_exec_info_message
2021-01-02 23:10:01 +00:00
from capsulflask import hub_model , spoke_model , cli
2020-05-15 23:18:19 +00:00
from capsulflask . btcpay import client as btcpay
2021-01-03 21:19:29 +00:00
from capsulflask . http_client import MyHTTPClient
2020-05-10 00:13:20 +00:00
2021-07-16 15:51:06 +00:00
2021-01-04 19:32:52 +00:00
2021-07-16 15:51:06 +00:00
def create_app():
    """
    Application factory for capsul-flask.

    Configuration is assembled from three layers, in increasing precedence:
    docker-style ``*_FILE`` secrets, a ``.env`` file, and process environment
    variables. The factory then configures logging, Stripe, mail, the BTCPay
    client, the hub/spoke models, the background heartbeat scheduler, and
    registers all blueprints.

    Returns the fully configured Flask application.
    """
    # Support docker/k8s-style secrets: if FOO_FILE is set and points at a
    # readable file, load its contents into FOO and remove the *_FILE var.
    for var_name in [
        "SPOKE_HOST_TOKEN", "HUB_TOKEN", "STRIPE_SECRET_KEY",
        "BTCPAY_PRIVATE_KEY", "MAIL_PASSWORD"
    ]:
        var = os.environ.get(f"{var_name}_FILE")
        if not var:
            continue
        if not os.path.isfile(var):
            continue
        with open(var) as secret_file:
            os.environ[var_name] = secret_file.read().rstrip('\n')
        del os.environ[f"{var_name}_FILE"]

    config = {
        **dotenv_values(find_dotenv()),
        **os.environ,  # override loaded values with environment variables
    }

    app = Flask(__name__)
    app.config.from_mapping(
        TESTING=config.get("TESTING", False),
        BASE_URL=config.get("BASE_URL", "http://localhost:5000"),
        SECRET_KEY=config.get("SECRET_KEY", "dev"),
        # boolean-ish env values arrive as strings; parse them permissively
        HUB_MODE_ENABLED=config.get("HUB_MODE_ENABLED", "True").lower() in ['true', '1', 't', 'y', 'yes'],
        SPOKE_MODE_ENABLED=config.get("SPOKE_MODE_ENABLED", "True").lower() in ['true', '1', 't', 'y', 'yes'],
        INTERNAL_HTTP_TIMEOUT_SECONDS=config.get("INTERNAL_HTTP_TIMEOUT_SECONDS", "300"),
        HUB_MODEL=config.get("HUB_MODEL", "capsul-flask"),
        SPOKE_MODEL=config.get("SPOKE_MODEL", "mock"),
        LOG_LEVEL=config.get("LOG_LEVEL", "INFO"),
        SPOKE_HOST_ID=config.get("SPOKE_HOST_ID", "baikal"),
        SPOKE_HOST_TOKEN=config.get("SPOKE_HOST_TOKEN", "changeme"),
        HUB_TOKEN=config.get("HUB_TOKEN", "changeme"),
        # https://www.postgresql.org/docs/9.1/libpq-ssl.html#LIBPQ-SSL-SSLMODE-STATEMENTS
        # https://stackoverflow.com/questions/56332906/where-to-put-ssl-certificates-when-trying-to-connect-to-a-remote-database-using
        # TLS example: sslmode=verify-full sslrootcert=letsencrypt-root-ca.crt host=db.example.com port=5432 user=postgres password=dev dbname=postgres
        POSTGRES_CONNECTION_PARAMETERS=config.get(
            "POSTGRES_CONNECTION_PARAMETERS",
            "host=localhost port=5432 user=postgres password=dev dbname=postgres"
        ),
        DATABASE_SCHEMA=config.get("DATABASE_SCHEMA", "public"),
        MAIL_SERVER=config.get("MAIL_SERVER", ""),
        MAIL_PORT=config.get("MAIL_PORT", "465"),
        MAIL_USE_TLS=config.get("MAIL_USE_TLS", "False").lower() in ['true', '1', 't', 'y', 'yes'],
        MAIL_USE_SSL=config.get("MAIL_USE_SSL", "True").lower() in ['true', '1', 't', 'y', 'yes'],
        MAIL_USERNAME=config.get("MAIL_USERNAME", ""),
        MAIL_PASSWORD=config.get("MAIL_PASSWORD", ""),
        MAIL_DEFAULT_SENDER=config.get("MAIL_DEFAULT_SENDER", "no-reply@capsul.org"),
        ADMIN_EMAIL_ADDRESSES=config.get("ADMIN_EMAIL_ADDRESSES", "ops@cyberia.club"),
        ADMIN_PANEL_ALLOW_EMAIL_ADDRESSES=config.get("ADMIN_PANEL_ALLOW_EMAIL_ADDRESSES", "forest.n.johnson@gmail.com,capsul@cyberia.club"),
        PROMETHEUS_URL=config.get("PROMETHEUS_URL", "https://prometheus.cyberia.club"),
        STRIPE_API_VERSION=config.get("STRIPE_API_VERSION", "2020-03-02"),
        STRIPE_SECRET_KEY=config.get("STRIPE_SECRET_KEY", ""),
        STRIPE_PUBLISHABLE_KEY=config.get("STRIPE_PUBLISHABLE_KEY", ""),
        #STRIPE_WEBHOOK_SECRET=config.get("STRIPE_WEBHOOK_SECRET", "")
        # PEM keys in env vars carry literal "\n" sequences; restore real newlines
        BTCPAY_PRIVATE_KEY=config.get("BTCPAY_PRIVATE_KEY", "").replace("\\n", "\n"),
        BTCPAY_URL=config.get("BTCPAY_URL", "https://btcpay.cyberia.club")
    )
    # the hub's public URL defaults to this instance's own BASE_URL
    app.config['HUB_URL'] = config.get("HUB_URL", app.config['BASE_URL'])

    log_filters = {
        'setLogLevelToDebugForHeartbeatRelatedMessages': {
            '()': SetLogLevelToDebugForHeartbeatRelatedMessagesFilter,
        }
    }
    # NOTE: `!= False` (not simple truthiness) is deliberate here -- TESTING may
    # arrive as a string from the environment, and any string enables it.
    if app.config['TESTING'] != False:
        log_filters['captureLogOutputDuringTests'] = {
            '()': CaptureLogOutputDuringTestsFilter,
        }

    logging_dict_config({
        'version': 1,
        'formatters': {'default': {
            'format': '[%(asctime)s] %(levelname)s in %(module)s: %(message)s',
        }},
        'filters': log_filters,
        'handlers': {'wsgi': {
            'class': 'logging.StreamHandler',
            'stream': 'ext://flask.logging.wsgi_errors_stream',
            'formatter': 'default',
            'filters': list(log_filters.keys())
        }},
        'root': {
            'level': app.config['LOG_LEVEL'],
            'handlers': ['wsgi']
        }
    })

    # app.logger.critical("critical")
    # app.logger.error("error")
    # app.logger.warning("warning")
    # app.logger.info("info")
    # app.logger.debug("debug")

    stripe.api_key = app.config['STRIPE_SECRET_KEY']
    stripe.api_version = app.config['STRIPE_API_VERSION']

    # Without a MAIL_SERVER, fall back to a mock that logs emails to stdout.
    if app.config['MAIL_SERVER'] != "":
        app.config['FLASK_MAIL_INSTANCE'] = Mail(app)
    else:
        app.logger.warning("No MAIL_SERVER configured. capsul will simply print emails to stdout.")
        app.config['FLASK_MAIL_INSTANCE'] = StdoutMockFlaskMail()

    app.config['HTTP_CLIENT'] = MyHTTPClient(timeout_seconds=int(app.config['INTERNAL_HTTP_TIMEOUT_SECONDS']))

    # BTCPay is best-effort: if the client can't be created, run without it.
    app.config['BTCPAY_ENABLED'] = False
    if app.config['BTCPAY_URL'] != "":
        try:
            app.config['BTCPAY_CLIENT'] = btcpay.Client(api_uri=app.config['BTCPAY_URL'], pem=app.config['BTCPAY_PRIVATE_KEY'])
            app.config['BTCPAY_ENABLED'] = True
        except Exception:
            # was a bare `except:` -- narrowed so SystemExit/KeyboardInterrupt propagate
            app.logger.warning("unable to create btcpay client. Capsul will work fine except cryptocurrency payments will not work. The error was: " + my_exec_info_message(sys.exc_info()))

    # only start the scheduler and attempt to migrate the database if we are running the app.
    # otherwise we are running a CLI command.
    command_line = ' '.join(sys.argv)
    is_running_server = (
        ('flask run' in command_line) or
        ('gunicorn' in command_line) or
        ('test' in command_line)
    )
    app.logger.info(f"is_running_server: {is_running_server}")

    if app.config['HUB_MODE_ENABLED']:
        if app.config['HUB_MODEL'] == "capsul-flask":
            app.config['HUB_MODEL'] = hub_model.CapsulFlaskHub()
            # debug mode (flask reloader) runs two copies of the app. When running in debug mode,
            # we only want to start the scheduler one time.
            if is_running_server and (not app.debug or config.get('WERKZEUG_RUN_MAIN') == 'true'):
                scheduler = BackgroundScheduler()
                heartbeat_task_url = f"{app.config['HUB_URL']}/hub/heartbeat-task"
                heartbeat_task_headers = {'Authorization': f"Bearer {app.config['HUB_TOKEN']}"}
                # NOTE(review): this POST has no timeout; presumably acceptable
                # since the hub enforces its own -- confirm before adding one.
                heartbeat_task = lambda: requests.post(heartbeat_task_url, headers=heartbeat_task_headers)
                scheduler.add_job(name="heartbeat-task", func=heartbeat_task, trigger="interval", seconds=5)
                scheduler.start()
                atexit.register(lambda: scheduler.shutdown())
        else:
            app.config['HUB_MODEL'] = hub_model.MockHub()

        from capsulflask import db
        db.init_app(app, is_running_server)

        from capsulflask import auth, landing, console, payment, metrics, cli, hub_api, admin
        app.register_blueprint(landing.bp)
        app.register_blueprint(auth.bp)
        app.register_blueprint(console.bp)
        app.register_blueprint(payment.bp)
        app.register_blueprint(metrics.bp)
        app.register_blueprint(cli.bp)
        app.register_blueprint(hub_api.bp)
        app.register_blueprint(admin.bp)
        app.add_url_rule("/", endpoint="index")

    if app.config['SPOKE_MODE_ENABLED']:
        if app.config['SPOKE_MODEL'] == "shell-scripts":
            app.config['SPOKE_MODEL'] = spoke_model.ShellScriptSpoke()
        else:
            app.config['SPOKE_MODEL'] = spoke_model.MockSpoke()

        from capsulflask import spoke_api
        app.register_blueprint(spoke_api.bp)

    @app.after_request
    def security_headers(response):
        # clickjacking / MIME-sniffing hardening on every response
        response.headers['X-Frame-Options'] = 'SAMEORIGIN'
        if 'Content-Security-Policy' not in response.headers:
            response.headers['Content-Security-Policy'] = "default-src 'self'"
        response.headers['X-Content-Type-Options'] = 'nosniff'
        return response

    @app.context_processor
    def override_url_for():
        """
        override the url_for function built into flask
        with our own custom implementation that busts the cache correctly when files change
        """
        return dict(url_for=url_for_with_cache_bust)

    return app
2021-07-19 17:07:02 +00:00
def url_for_with_cache_bust(endpoint, **values):
    """
    Add a query parameter based on the hash of the file, this acts as a cache bust
    """
    if endpoint == 'static':
        filename = values.get('filename', None)
        if filename:
            # lazily create a per-app cache of static-file content hashes
            if 'STATIC_FILE_HASH_CACHE' not in current_app.config:
                current_app.config['STATIC_FILE_HASH_CACHE'] = dict()
            hash_cache = current_app.config['STATIC_FILE_HASH_CACHE']
            if filename not in hash_cache:
                filepath = os.path.join(current_app.root_path, endpoint, filename)
                if os.path.isfile(filepath) and os.access(filepath, os.R_OK):
                    with open(filepath, 'rb') as file:
                        # md5 is fine here: the hash is only a cache-bust token,
                        # not a security measure
                        hasher = hashlib.md5()
                        hasher.update(file.read())
                        hash_cache[filename] = hasher.hexdigest()[-6:]

            # BUG FIX: previously this indexed the cache unconditionally and
            # raised KeyError when the file was missing or unreadable; now we
            # simply omit the cache-bust parameter in that case.
            if filename in hash_cache:
                values['q'] = hash_cache[filename]

    return url_for(endpoint, **values)
2020-05-22 20:20:26 +00:00
2021-07-19 17:07:02 +00:00
class StdoutMockFlaskMail:
    """Stand-in for flask_mail.Mail used when no MAIL_SERVER is configured.

    Instead of delivering the message, it writes what would have been sent
    to the application log.
    """

    def send(self, message: Message):
        # Render the would-be email into the app log rather than sending it.
        recipient_list = ', '.join(message.recipients)
        current_app.logger.info(
            f"Email would have been sent if configured:\n\nto: {recipient_list}\nsubject: {message.subject}\nbody:\n\n{message.body}\n\n"
        )
class SetLogLevelToDebugForHeartbeatRelatedMessagesFilter(logging.Filter):
    """Log filter that suppresses heartbeat-related log records.

    The heartbeat task fires every few seconds and would otherwise flood the
    log; records mentioning the heartbeat endpoints are dropped unless the
    app-wide LOG_LEVEL is DEBUG (or we are outside an app context).
    """

    def isHeartbeatRelatedString(self, thing):
        """Return True if thing's string form mentions a heartbeat task/endpoint."""
        is_in_string = False
        try:
            thing_string = "%s" % thing
            is_in_string = (
                'heartbeat-task' in thing_string
                or 'hub/heartbeat' in thing_string
                or 'spoke/heartbeat' in thing_string
            )
        except Exception:
            # was a bare `except:`; str() on an arbitrary object can raise --
            # treat such objects as not heartbeat-related
            pass
        return is_in_string

    def filter(self, record):
        # outside an application context, or when debugging, pass everything through
        if not current_app or current_app.config['LOG_LEVEL'] == "DEBUG":
            return True
        if self.isHeartbeatRelatedString(record.msg):
            return False
        # record.args can be None on hand-constructed records; guard the iteration
        for arg in (record.args or ()):
            if self.isHeartbeatRelatedString(arg):
                return False
        return True
2021-07-27 18:56:58 +00:00
class CaptureLogOutputDuringTestsFilter(logging.Filter):
    """Log "filter" that copies every record to unittest-output.log.

    Installed only when TESTING is enabled; always returns True so it never
    actually filters anything -- it exists purely for its side effect.
    """

    def filter(self, record):
        # `with` guarantees the file is closed even if a write raises
        # (the original open()/close() pair leaked the handle on error).
        # NOTE: opening per record is slow but acceptable for test runs.
        with open('unittest-output.log', 'a') as file_object:
            file_object.write("%s" % record.msg)
            # record.args can be None on hand-constructed records; guard it
            for arg in (record.args or ()):
                file_object.write("%s" % arg)
            file_object.write("\n")
        return True