2020-05-16 04:19:01 +00:00
|
|
|
import logging
|
|
|
|
from logging.config import dictConfig as logging_dict_config
|
2020-05-14 23:03:00 +00:00
|
|
|
|
2021-01-04 19:32:52 +00:00
|
|
|
import atexit
|
2020-05-11 06:47:14 +00:00
|
|
|
import os
|
2020-05-22 20:20:26 +00:00
|
|
|
import hashlib
|
2021-01-04 19:32:52 +00:00
|
|
|
import requests
|
2020-05-10 00:13:20 +00:00
|
|
|
|
2020-05-14 23:03:00 +00:00
|
|
|
import stripe
|
2020-05-12 06:42:50 +00:00
|
|
|
from dotenv import load_dotenv, find_dotenv
|
2020-05-10 00:13:20 +00:00
|
|
|
from flask import Flask
|
2020-05-10 03:59:22 +00:00
|
|
|
from flask_mail import Mail
|
|
|
|
from flask import render_template
|
2020-05-22 20:20:26 +00:00
|
|
|
from flask import url_for
|
|
|
|
from flask import current_app
|
2021-01-04 19:32:52 +00:00
|
|
|
from apscheduler.schedulers.background import BackgroundScheduler
|
2020-05-11 06:47:14 +00:00
|
|
|
|
2021-01-02 23:10:01 +00:00
|
|
|
from capsulflask import hub_model, spoke_model, cli
|
2020-05-15 23:18:19 +00:00
|
|
|
from capsulflask.btcpay import client as btcpay
|
2021-01-03 21:19:29 +00:00
|
|
|
from capsulflask.http_client import MyHTTPClient
|
2020-05-10 00:13:20 +00:00
|
|
|
|
2021-01-04 19:32:52 +00:00
|
|
|
|
2020-05-12 06:42:50 +00:00
|
|
|
# Read KEY=value pairs from the nearest .env file (if any) into os.environ
# before the configuration below consults os.environ.get().
load_dotenv(find_dotenv())
|
|
|
|
|
2020-05-11 20:13:20 +00:00
|
|
|
# Create the Flask application and populate its configuration from environment
# variables. Every setting has a development-friendly default so the app can
# boot with no environment configured at all.
app = Flask(__name__)


def _env_bool(name, default):
    """Read env var `name` and interpret it as a boolean flag.

    Accepts "true"/"1"/"t"/"y"/"yes" (any case) as True; everything else is
    False. `default` is the string used when the variable is unset.
    """
    return os.environ.get(name, default=default).lower() in ['true', '1', 't', 'y', 'yes']


app.config.from_mapping(
    BASE_URL=os.environ.get("BASE_URL", default="http://localhost:5000"),
    SECRET_KEY=os.environ.get("SECRET_KEY", default="dev"),

    # Hub-and-spoke roles: a single process may act as hub, spoke, or both.
    HUB_MODE_ENABLED=_env_bool("HUB_MODE_ENABLED", "False"),
    SPOKE_MODE_ENABLED=_env_bool("SPOKE_MODE_ENABLED", "True"),
    INTERNAL_HTTP_TIMEOUT_SECONDS=os.environ.get("INTERNAL_HTTP_TIMEOUT_SECONDS", default="300"),
    # "mock" or "capsul-flask" (hub) / "shell-scripts" (spoke); swapped for a
    # model instance further down in this module.
    HUB_MODEL=os.environ.get("HUB_MODEL", default="mock"),
    SPOKE_MODEL=os.environ.get("SPOKE_MODEL", default="mock"),
    LOG_LEVEL=os.environ.get("LOG_LEVEL", default="INFO"),
    SPOKE_HOST_ID=os.environ.get("SPOKE_HOST_ID", default="default"),
    SPOKE_HOST_TOKEN=os.environ.get("SPOKE_HOST_TOKEN", default="default"),
    HUB_TOKEN=os.environ.get("HUB_TOKEN", default="default"),

    DATABASE_URL=os.environ.get("DATABASE_URL", default="sql://postgres:dev@localhost:5432/postgres"),
    # https://www.postgresql.org/docs/9.1/libpq-ssl.html#LIBPQ-SSL-SSLMODE-STATEMENTS
    DATABASE_SSLMODE=os.environ.get("DATABASE_SSLMODE", default="prefer"),
    DATABASE_SCHEMA=os.environ.get("DATABASE_SCHEMA", default="public"),

    MAIL_SERVER=os.environ.get("MAIL_SERVER", default="m1.nullhex.com"),
    MAIL_PORT=os.environ.get("MAIL_PORT", default="587"),
    MAIL_USE_TLS=_env_bool("MAIL_USE_TLS", "True"),
    MAIL_USERNAME=os.environ.get("MAIL_USERNAME", default="forest@nullhex.com"),
    MAIL_PASSWORD=os.environ.get("MAIL_PASSWORD", default=""),
    MAIL_DEFAULT_SENDER=os.environ.get("MAIL_DEFAULT_SENDER", default="forest@nullhex.com"),
    ADMIN_EMAIL_ADDRESSES=os.environ.get("ADMIN_EMAIL_ADDRESSES", default="ops@cyberia.club"),

    PROMETHEUS_URL=os.environ.get("PROMETHEUS_URL", default="https://prometheus.cyberia.club"),

    STRIPE_API_VERSION=os.environ.get("STRIPE_API_VERSION", default="2020-03-02"),
    STRIPE_SECRET_KEY=os.environ.get("STRIPE_SECRET_KEY", default=""),
    STRIPE_PUBLISHABLE_KEY=os.environ.get("STRIPE_PUBLISHABLE_KEY", default=""),
    #STRIPE_WEBHOOK_SECRET=os.environ.get("STRIPE_WEBHOOK_SECRET", default="")

    # The PEM key may arrive with escaped newlines (e.g. from a one-line .env
    # entry); restore real newlines before handing it to the btcpay client.
    BTCPAY_PRIVATE_KEY=os.environ.get("BTCPAY_PRIVATE_KEY", default="").replace("\\n", "\n"),
    BTCPAY_URL=os.environ.get("BTCPAY_URL", default="https://btcpay.cyberia.club")
)

# The hub this instance talks to; defaults to ourselves (hub+spoke in one process).
app.config['HUB_URL'] = os.environ.get("HUB_URL", default=app.config['BASE_URL'])
|
|
|
|
|
2021-01-04 23:20:03 +00:00
|
|
|
class SetLogLevelToDebugForHeartbeatRelatedMessagesFilter(logging.Filter):
    """Drop log records related to the periodic heartbeat task.

    The 5-second heartbeat (see the scheduler setup below) would otherwise
    flood the log. Records whose message or args mention the heartbeat
    endpoints are filtered out entirely, unless the app-wide LOG_LEVEL is
    DEBUG, in which case everything passes through.
    """

    def isHeartbeatRelatedString(self, thing):
        """Return True if the stringified `thing` mentions a heartbeat endpoint/task.

        Stringification of arbitrary log args can itself raise; any such
        failure is treated as "not heartbeat-related".
        """
        is_in_string = False
        try:
            thing_string = "%s" % thing
            is_in_string = (
                'heartbeat-task' in thing_string
                or 'hub/heartbeat' in thing_string
                or 'spoke/heartbeat' in thing_string
            )
        except Exception:
            # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
            # are not swallowed while logging.
            pass

        return is_in_string

    def filter(self, record):
        """Standard logging.Filter hook: return False to drop `record`."""
        # Operator explicitly asked for DEBUG: let heartbeat noise through too.
        if app.config['LOG_LEVEL'] == "DEBUG":
            return True

        # Check both the format string and every interpolation argument.
        if self.isHeartbeatRelatedString(record.msg):
            return False
        for arg in record.args:
            if self.isHeartbeatRelatedString(arg):
                return False

        return True
|
|
|
|
|
2020-05-16 04:19:01 +00:00
|
|
|
# Root logger configuration: timestamped records to the WSGI error stream,
# with heartbeat chatter suppressed by the filter defined in this module.
logging_dict_config({
    'version': 1,
    'formatters': {
        'default': {
            'format': '[%(asctime)s] %(levelname)s in %(module)s: %(message)s',
        },
    },
    'filters': {
        'setLogLevelToDebugForHeartbeatRelatedMessages': {
            # '()' is the dictConfig convention for "instantiate this callable".
            '()': SetLogLevelToDebugForHeartbeatRelatedMessagesFilter,
        },
    },
    'handlers': {
        'wsgi': {
            'class': 'logging.StreamHandler',
            'stream': 'ext://flask.logging.wsgi_errors_stream',
            'formatter': 'default',
            'filters': ['setLogLevelToDebugForHeartbeatRelatedMessages'],
        },
    },
    'root': {
        'level': app.config['LOG_LEVEL'],
        'handlers': ['wsgi'],
    },
})
|
|
|
|
|
|
|
|
# app.logger.critical("critical")
|
|
|
|
# app.logger.error("error")
|
|
|
|
# app.logger.warning("warning")
|
|
|
|
# app.logger.info("info")
|
|
|
|
# app.logger.debug("debug")
|
|
|
|
|
2020-05-12 17:38:36 +00:00
|
|
|
# Wire up third-party service clients from the configuration loaded above.

# Stripe payment API credentials (module-level configuration of the SDK).
stripe.api_key = app.config['STRIPE_SECRET_KEY']
stripe.api_version = app.config['STRIPE_API_VERSION']

# Outbound email, internal hub<->spoke HTTP, and BTCPay clients are stashed
# on app.config so request handlers can reach them via current_app.
app.config['FLASK_MAIL_INSTANCE'] = Mail(app)

_internal_http_timeout = int(app.config['INTERNAL_HTTP_TIMEOUT_SECONDS'])
app.config['HTTP_CLIENT'] = MyHTTPClient(timeout_seconds=_internal_http_timeout)

app.config['BTCPAY_CLIENT'] = btcpay.Client(
    api_uri=app.config['BTCPAY_URL'],
    pem=app.config['BTCPAY_PRIVATE_KEY'],
)
|
2020-05-10 03:59:22 +00:00
|
|
|
|
2021-01-02 23:10:01 +00:00
|
|
|
if app.config['HUB_MODE_ENABLED']:

    if app.config['HUB_MODEL'] == "capsul-flask":
        # Replace the config string with a live hub model instance.
        app.config['HUB_MODEL'] = hub_model.CapsulFlaskHub()

        # debug mode (flask reloader) runs two copies of the app. When running in debug mode,
        # we only want to start the scheduler one time.
        if not app.debug or os.environ.get('WERKZEUG_RUN_MAIN') == 'true':
            scheduler = BackgroundScheduler()
            heartbeat_task_url = f"{app.config['HUB_URL']}/hub/heartbeat-task"
            heartbeat_task_headers = {'Authorization': f"Bearer {app.config['HUB_TOKEN']}"}

            # PEP 8: a named `def` instead of a lambda bound to a name, so
            # tracebacks and the scheduler's job listing show a useful name.
            def heartbeat_task():
                # POST back to ourselves so the periodic work runs inside a
                # normal request context rather than the scheduler thread.
                requests.post(heartbeat_task_url, headers=heartbeat_task_headers)

            scheduler.add_job(name="heartbeat-task", func=heartbeat_task, trigger="interval", seconds=5)
            scheduler.start()

            # Stop the scheduler thread cleanly when the process exits
            # (bound method passed directly; no wrapper lambda needed).
            atexit.register(scheduler.shutdown)

    else:
        # Any other HUB_MODEL value falls back to the in-memory mock.
        app.config['HUB_MODEL'] = hub_model.MockHub()
|
|
|
|
|
|
|
|
# Database layer: registers per-app teardown/CLI hooks on this app instance.
from capsulflask import db

db.init_app(app)

# Web-facing blueprints. Imported here, after `app` exists, to avoid
# circular imports between this module and the blueprint modules.
from capsulflask import auth, landing, console, payment, metrics, cli, hub_api

# Registration order matches the original hand-written sequence.
for _blueprint_module in (landing, auth, console, payment, metrics, cli, hub_api):
    app.register_blueprint(_blueprint_module.bp)

# The landing blueprint's "/" view also serves as the named "index" endpoint.
app.add_url_rule("/", endpoint="index")
|
2020-05-10 00:13:20 +00:00
|
|
|
|
2021-01-04 19:32:52 +00:00
|
|
|
|
|
|
|
|
2021-01-02 23:10:01 +00:00
|
|
|
if app.config['SPOKE_MODE_ENABLED']:

    # Pick the spoke implementation, then replace the config string with it.
    # Any value other than "shell-scripts" gets the in-memory mock.
    if app.config['SPOKE_MODEL'] == "shell-scripts":
        _spoke_implementation = spoke_model.ShellScriptSpoke()
    else:
        _spoke_implementation = spoke_model.MockSpoke()
    app.config['SPOKE_MODEL'] = _spoke_implementation

    # Imported late to avoid a circular import with this module.
    from capsulflask import spoke_api

    app.register_blueprint(spoke_api.bp)
|
2020-05-10 03:59:22 +00:00
|
|
|
|
2020-05-22 20:20:26 +00:00
|
|
|
@app.after_request
def security_headers(response):
    """Attach standard browser-security headers to every outgoing response."""
    headers = response.headers
    headers['X-Frame-Options'] = 'SAMEORIGIN'
    # Individual views may set their own CSP; only supply the default when absent.
    if 'Content-Security-Policy' not in headers:
        headers['Content-Security-Policy'] = "default-src 'self'"
    headers['X-Content-Type-Options'] = 'nosniff'
    return response
|
|
|
|
|
|
|
|
|
|
|
|
@app.context_processor
def override_url_for():
    """Shadow flask's url_for in templates with the cache-busting variant.

    Context processors merge their returned mapping into every template's
    namespace, so templates transparently get url_for_with_cache_bust.
    """
    return {'url_for': url_for_with_cache_bust}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def url_for_with_cache_bust(endpoint, **values):
    """
    Drop-in replacement for flask's url_for.

    For static files, appends a ?q=<hash> query parameter derived from the
    file's content, so browser caches are busted whenever the file changes.
    Hashes are memoized in app.config['STATIC_FILE_HASH_CACHE'].
    """

    if endpoint == 'static':
        filename = values.get('filename', None)
        if filename:
            # Lazily create the per-process hash cache.
            cache = current_app.config.setdefault('STATIC_FILE_HASH_CACHE', dict())

            if filename not in cache:
                filepath = os.path.join(current_app.root_path, endpoint, filename)
                #print(filepath)
                if os.path.isfile(filepath) and os.access(filepath, os.R_OK):
                    with open(filepath, 'rb') as file:
                        # md5 is fine here: it is only a cache-busting
                        # fingerprint, not a security measure.
                        hasher = hashlib.md5()
                        hasher.update(file.read())
                        cache[filename] = hasher.hexdigest()[-6:]

            # Bug fix: previously this read the cache unconditionally, raising
            # KeyError for a missing/unreadable static file. Skip the cache-bust
            # parameter instead so url_for can still produce a URL.
            if filename in cache:
                values['q'] = cache[filename]

    return url_for(endpoint, **values)
|
|
|
|
|
|
|
|
|
2021-01-04 23:20:03 +00:00
|
|
|
|
2020-05-22 20:20:26 +00:00
|
|
|
|