import logging
from logging.config import dictConfig as logging_dict_config

import atexit
import jinja2
import os
import hashlib
import requests
import sys
import stripe

from dotenv import load_dotenv, find_dotenv

from flask import Flask
from flask_mail import Mail, Message
from flask import render_template
from flask import url_for
from flask import current_app

from apscheduler.schedulers.background import BackgroundScheduler

from capsulflask.shared import my_exec_info_message
from capsulflask import hub_model, spoke_model, cli
from capsulflask.btcpay import client as btcpay
from capsulflask.http_client import MyHTTPClient
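
# Stand-in for flask_mail.Mail: when no MAIL_SERVER is configured, outgoing
# messages are logged instead of sent (see the FLASK_MAIL_INSTANCE setup below).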
class StdoutMockFlaskMail:
    def send(self, message: Message):
        current_app.logger.info(f"Email would have been sent if configured:\n\nto: {','.join(message.recipients)}\nsubject: {message.subject}\nbody:\n\n{message.body}\n\n")

load_dotenv(find_dotenv())
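
# Support Docker Secrets: each of the variables below may also be provided as
# <VAR_NAME>_FILE pointing at a file whose contents become the value, e.g.
# (hypothetical path) HUB_TOKEN_FILE=/run/secrets/hub_token.
# Note that a missing file is silently skipped.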
for var_name in [
    "SPOKE_HOST_TOKEN", "HUB_TOKEN", "STRIPE_SECRET_KEY",
    "BTCPAY_PRIVATE_KEY", "MAIL_PASSWORD"
]:
    var = os.environ.get(f"{var_name}_FILE")
    if not var:
        continue

    if not os.path.isfile(var):
        continue

    with open(var) as secret_file:
        os.environ[var_name] = secret_file.read().rstrip('\n')
        del os.environ[f"{var_name}_FILE"]

app = Flask(__name__)
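
# All configuration comes from environment variables (loaded above via dotenv
# and the optional *_FILE secrets), with development-friendly defaults.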
app.config.from_mapping(
    BASE_URL=os.environ.get("BASE_URL", default="http://localhost:5000"),
    SECRET_KEY=os.environ.get("SECRET_KEY", default="dev"),
    HUB_MODE_ENABLED=os.environ.get("HUB_MODE_ENABLED", default="True").lower() in ['true', '1', 't', 'y', 'yes'],
    SPOKE_MODE_ENABLED=os.environ.get("SPOKE_MODE_ENABLED", default="True").lower() in ['true', '1', 't', 'y', 'yes'],
    INTERNAL_HTTP_TIMEOUT_SECONDS=os.environ.get("INTERNAL_HTTP_TIMEOUT_SECONDS", default="300"),
    HUB_MODEL=os.environ.get("HUB_MODEL", default="capsul-flask"),
    SPOKE_MODEL=os.environ.get("SPOKE_MODEL", default="mock"),
    LOG_LEVEL=os.environ.get("LOG_LEVEL", default="INFO"),
    SPOKE_HOST_ID=os.environ.get("SPOKE_HOST_ID", default="baikal"),
    SPOKE_HOST_TOKEN=os.environ.get("SPOKE_HOST_TOKEN", default="changeme"),
    HUB_TOKEN=os.environ.get("HUB_TOKEN", default="changeme"),

    # https://www.postgresql.org/docs/9.1/libpq-ssl.html#LIBPQ-SSL-SSLMODE-STATEMENTS
    # https://stackoverflow.com/questions/56332906/where-to-put-ssl-certificates-when-trying-to-connect-to-a-remote-database-using
    # TLS example: sslmode=verify-full sslrootcert=letsencrypt-root-ca.crt host=db.example.com port=5432 user=postgres password=dev dbname=postgres
    POSTGRES_CONNECTION_PARAMETERS=os.environ.get(
        "POSTGRES_CONNECTION_PARAMETERS",
        default="host=localhost port=5432 user=postgres password=dev dbname=postgres"
    ),

    DATABASE_SCHEMA=os.environ.get("DATABASE_SCHEMA", default="public"),

    MAIL_SERVER=os.environ.get("MAIL_SERVER", default=""),
    MAIL_PORT=os.environ.get("MAIL_PORT", default="465"),
    MAIL_USE_TLS=os.environ.get("MAIL_USE_TLS", default="False").lower() in ['true', '1', 't', 'y', 'yes'],
    MAIL_USE_SSL=os.environ.get("MAIL_USE_SSL", default="True").lower() in ['true', '1', 't', 'y', 'yes'],
    MAIL_USERNAME=os.environ.get("MAIL_USERNAME", default=""),
    MAIL_PASSWORD=os.environ.get("MAIL_PASSWORD", default=""),
    MAIL_DEFAULT_SENDER=os.environ.get("MAIL_DEFAULT_SENDER", default="no-reply@capsul.org"),

    ADMIN_EMAIL_ADDRESSES=os.environ.get("ADMIN_EMAIL_ADDRESSES", default="ops@cyberia.club"),
    ADMIN_PANEL_ALLOW_EMAIL_ADDRESSES=os.environ.get("ADMIN_PANEL_ALLOW_EMAIL_ADDRESSES", default="forest.n.johnson@gmail.com,capsul@cyberia.club"),

    PROMETHEUS_URL=os.environ.get("PROMETHEUS_URL", default="https://prometheus.cyberia.club"),

    STRIPE_API_VERSION=os.environ.get("STRIPE_API_VERSION", default="2020-03-02"),
    STRIPE_SECRET_KEY=os.environ.get("STRIPE_SECRET_KEY", default=""),
    STRIPE_PUBLISHABLE_KEY=os.environ.get("STRIPE_PUBLISHABLE_KEY", default=""),
    #STRIPE_WEBHOOK_SECRET=os.environ.get("STRIPE_WEBHOOK_SECRET", default="")

    BTCPAY_PRIVATE_KEY=os.environ.get("BTCPAY_PRIVATE_KEY", default="").replace("\\n", "\n"),
    BTCPAY_URL=os.environ.get("BTCPAY_URL", default=""),

    THEME=os.environ.get("THEME", default="")
)

app.config['HUB_URL'] = os.environ.get("HUB_URL", default=app.config['BASE_URL'])
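
# The hub/spoke heartbeat endpoints are hit constantly (every 5 seconds by the
# scheduler below), which would otherwise flood the logs at INFO level. This
# filter drops heartbeat-related records unless LOG_LEVEL is DEBUG.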
class SetLogLevelToDebugForHeartbeatRelatedMessagesFilter(logging.Filter):
    def isHeartbeatRelatedString(self, thing):
        # thing_string = "<error>"
        is_in_string = False
        try:
            thing_string = "%s" % thing
            is_in_string = 'heartbeat-task' in thing_string or 'hub/heartbeat' in thing_string or 'spoke/heartbeat' in thing_string
        except:
            pass
        # self.warning("isHeartbeatRelatedString(%s): %s", thing_string, is_in_string)
        return is_in_string

    def filter(self, record):
        if app.config['LOG_LEVEL'] == "DEBUG":
            return True

        if self.isHeartbeatRelatedString(record.msg):
            return False

        for arg in record.args:
            if self.isHeartbeatRelatedString(arg):
                return False

        return True

logging_dict_config({
    'version': 1,
    'formatters': {'default': {
        'format': '[%(asctime)s] %(levelname)s in %(module)s: %(message)s',
    }},
    'filters': {
        'setLogLevelToDebugForHeartbeatRelatedMessages': {
            '()': SetLogLevelToDebugForHeartbeatRelatedMessagesFilter,
        }
    },
    'handlers': {'wsgi': {
        'class': 'logging.StreamHandler',
        'stream': 'ext://flask.logging.wsgi_errors_stream',
        'formatter': 'default',
        'filters': ['setLogLevelToDebugForHeartbeatRelatedMessages']
    }},
    'root': {
        'level': app.config['LOG_LEVEL'],
        'handlers': ['wsgi']
    }
})

# app.logger.critical("critical")
# app.logger.error("error")
# app.logger.warning("warning")
# app.logger.info("info")
# app.logger.debug("debug")

stripe.api_key = app.config['STRIPE_SECRET_KEY']
stripe.api_version = app.config['STRIPE_API_VERSION']
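
# Use a real flask_mail.Mail instance only when MAIL_SERVER is configured;
# otherwise fall back to the stdout mock defined at the top of this module.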
if app.config['MAIL_SERVER'] != "":
    app.config['FLASK_MAIL_INSTANCE'] = Mail(app)
else:
    app.logger.warning("No MAIL_SERVER configured. capsul will simply print emails to stdout.")
    app.config['FLASK_MAIL_INSTANCE'] = StdoutMockFlaskMail()

app.config['HTTP_CLIENT'] = MyHTTPClient(timeout_seconds=int(app.config['INTERNAL_HTTP_TIMEOUT_SECONDS']))

app.config['BTCPAY_ENABLED'] = False
if app.config['BTCPAY_URL'] != "":
    try:
        app.config['BTCPAY_CLIENT'] = btcpay.Client(api_uri=app.config['BTCPAY_URL'], pem=app.config['BTCPAY_PRIVATE_KEY'])
        app.config['BTCPAY_ENABLED'] = True
    except:
        app.logger.warning("unable to create btcpay client. Capsul will work fine except cryptocurrency payments will not work. The error was: " + my_exec_info_message(sys.exc_info()))

# only start the scheduler and attempt to migrate the database if we are running the app.
# otherwise we are running a CLI command.
command_line = ' '.join(sys.argv)
is_running_server = ('flask run' in command_line) or ('gunicorn' in command_line)

app.logger.info(f"is_running_server: {is_running_server}")
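
# If a THEME is configured, templates are looked up in capsulflask/theme/<THEME>
# first, falling back to the stock templates in capsulflask/templates.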
if app.config['THEME'] != "":
    my_loader = jinja2.ChoiceLoader([
        jinja2.FileSystemLoader(
            [os.path.join('capsulflask', 'theme', app.config['THEME']),
             'capsulflask/templates']),
    ])
    app.jinja_loader = my_loader
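
# Hub mode: use the real CapsulFlaskHub when HUB_MODEL is "capsul-flask"
# (otherwise a MockHub), initialize the database, and register the web-facing
# blueprints. When actually serving requests, also schedule a POST to this
# instance's own /hub/heartbeat-task endpoint every 5 seconds.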
if app.config['HUB_MODE_ENABLED']:
    if app.config['HUB_MODEL'] == "capsul-flask":
        app.config['HUB_MODEL'] = hub_model.CapsulFlaskHub()

        # debug mode (flask reloader) runs two copies of the app. When running in debug mode,
        # we only want to start the scheduler one time.
        if is_running_server and (not app.debug or os.environ.get('WERKZEUG_RUN_MAIN') == 'true'):
            scheduler = BackgroundScheduler()
            heartbeat_task_url = f"{app.config['HUB_URL']}/hub/heartbeat-task"
            heartbeat_task_headers = {'Authorization': f"Bearer {app.config['HUB_TOKEN']}"}
            heartbeat_task = lambda: requests.post(heartbeat_task_url, headers=heartbeat_task_headers)
            scheduler.add_job(name="heartbeat-task", func=heartbeat_task, trigger="interval", seconds=5)
            scheduler.start()
            atexit.register(lambda: scheduler.shutdown())
    else:
        app.config['HUB_MODEL'] = hub_model.MockHub()

    from capsulflask import db
    db.init_app(app, is_running_server)

    from capsulflask import auth, landing, console, payment, metrics, cli, hub_api, admin

    app.register_blueprint(landing.bp)
    app.register_blueprint(auth.bp)
    app.register_blueprint(console.bp)
    app.register_blueprint(payment.bp)
    app.register_blueprint(metrics.bp)
    app.register_blueprint(cli.bp)
    app.register_blueprint(hub_api.bp)
    app.register_blueprint(admin.bp)

    app.add_url_rule("/", endpoint="index")
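
# Spoke mode: pick a SPOKE_MODEL implementation ("shell-scripts" for the real
# ShellScriptSpoke, anything else falls back to MockSpoke) and register the
# spoke API blueprint.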
if app.config['SPOKE_MODE_ENABLED']:
    if app.config['SPOKE_MODEL'] == "shell-scripts":
        app.config['SPOKE_MODEL'] = spoke_model.ShellScriptSpoke()
    else:
        app.config['SPOKE_MODEL'] = spoke_model.MockSpoke()

    from capsulflask import spoke_api

    app.register_blueprint(spoke_api.bp)
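
# Attach a minimal set of security headers to every response, without
# clobbering a Content-Security-Policy set elsewhere.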
@app.after_request
def security_headers(response):
    response.headers['X-Frame-Options'] = 'SAMEORIGIN'
    if 'Content-Security-Policy' not in response.headers:
        response.headers['Content-Security-Policy'] = "default-src 'self'"
    response.headers['X-Content-Type-Options'] = 'nosniff'
    return response

@app.context_processor
def override_url_for():
    """
    Override the url_for function built into Flask with our own implementation
    that busts the cache correctly when static files change.
    """
    return dict(url_for=url_for_with_cache_bust)

def url_for_with_cache_bust(endpoint, **values):
    """
    Add a query parameter based on the hash of the file; this acts as a cache bust.
    """
    if endpoint == 'static':
        filename = values.get('filename', None)
        if filename:
            if 'STATIC_FILE_HASH_CACHE' not in current_app.config:
                current_app.config['STATIC_FILE_HASH_CACHE'] = dict()

            if filename not in current_app.config['STATIC_FILE_HASH_CACHE']:
                filepath = os.path.join(current_app.root_path, endpoint, filename)
                #print(filepath)
                if os.path.isfile(filepath) and os.access(filepath, os.R_OK):
                    with open(filepath, 'rb') as file:
                        hasher = hashlib.md5()
                        hasher.update(file.read())
                        current_app.config['STATIC_FILE_HASH_CACHE'][filename] = hasher.hexdigest()[-6:]

            # guard against a KeyError when the static file is missing or unreadable
            if filename in current_app.config['STATIC_FILE_HASH_CACHE']:
                values['q'] = current_app.config['STATIC_FILE_HASH_CACHE'][filename]

    return url_for(endpoint, **values)