diff --git a/.gitignore b/.gitignore index 8645a1c..0926ff6 100644 --- a/.gitignore +++ b/.gitignore @@ -4,3 +4,4 @@ !.elasticbeanstalk/*.cfg.yml !.elasticbeanstalk/*.global.yml *__pycache__* +.env \ No newline at end of file diff --git a/Dockerfile b/Dockerfile index 0241750..8127eda 100644 --- a/Dockerfile +++ b/Dockerfile @@ -6,4 +6,4 @@ COPY src /src/ WORKDIR /src/ EXPOSE 5050 -CMD ["conda", "run", "--no-capture-output", "-n", "app", "gunicorn", "-c", "gunicorn_config.py", "app:app"] +CMD ["conda", "run", "--no-capture-output", "-n", "app", "gunicorn", "-c", "gunicorn_config.py", "wsgi"] diff --git a/bin/start_dev.sh b/bin/start_dev.sh new file mode 100644 index 0000000..a399f6d --- /dev/null +++ b/bin/start_dev.sh @@ -0,0 +1,5 @@ +#!/usr/bin/env bash + +cd ./src + +conda run --no-capture-output -n app gunicorn -c gunicorn_config.py wsgi \ No newline at end of file diff --git a/conda_env.yaml b/conda_env.yaml index ff3ca5a..dd0d865 100755 --- a/conda_env.yaml +++ b/conda_env.yaml @@ -5,29 +5,56 @@ channels: dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=4.5=1_gnu + - aiohttp=3.7.4.post0=py39h3811e60_0 - alembic=1.6.5=pyhd8ed1ab_0 + - anyio=3.5.0=py39hf3d152e_0 + - async-timeout=3.0.1=py_1000 - attrs=21.2.0=pyhd8ed1ab_0 - backcall=0.2.0=pyhd3eb1b0_0 + - base58=2.1.1=pyhd8ed1ab_0 + - bitarray=1.2.2=py39hbd71b63_1 + - blinker=1.4=py_1 - brotlipy=0.7.0=py39h27cfd23_1003 - - ca-certificates=2021.7.5=h06a4308_1 + - ca-certificates=2021.10.8=ha878542_0 - cachetools=4.2.2=pyhd3eb1b0_0 - - certifi=2021.5.30=py39h06a4308_0 + - certifi=2021.10.8=py39hf3d152e_1 - cffi=1.14.6=py39h400218f_0 - chardet=4.0.0=py39h06a4308_1003 + - charset-normalizer=2.0.10=pyhd8ed1ab_0 - click=8.0.1=pyhd3eb1b0_0 - conda=4.10.3=py39hf3d152e_0 - conda-package-handling=1.7.3=py39h27cfd23_1 - cryptography=3.4.7=py39hd23ed53_0 + - cytoolz=0.11.0=py39h3811e60_3 - decorator=5.0.9=pyhd3eb1b0_0 + - eth-abi=2.1.1=py_2 + - eth-account=0.5.6=pyhd8ed1ab_0 + - eth-hash=0.3.2=pyhd8ed1ab_0 + - eth-keyfile=0.5.1=pyh9f0ad1d_1 + - eth-keys=0.3.3=pyh9f0ad1d_0 + - eth-rlp=0.2.1=py_0 + - eth-typing=2.2.2=py_0 + - eth-utils=1.10.0=py39hf3d152e_1 - flasgger=0.9.5=pyhd8ed1ab_1 - flask=1.1.2=pyhd3eb1b0_0 - flask-jwt-extended=4.3.0=pyhd8ed1ab_0 + - flask-mail=0.9.1=py_2 - flask-migrate=3.1.0=pyhd8ed1ab_0 - flask-sqlalchemy=2.5.1=pyhd3eb1b0_0 + - gmp=6.2.1=h58526e2_0 - greenlet=1.1.1=py39h295c915_0 - gunicorn=20.1.0=py39h06a4308_0 + - h11=0.12.0=pyhd8ed1ab_0 + - h2=4.1.0=py39hf3d152e_0 + - hexbytes=0.2.2=pyhd8ed1ab_0 + - hpack=4.0.0=pyh9f0ad1d_0 + - httpcore=0.14.5=pyhd8ed1ab_0 + - httpx=0.21.3=py39hf3d152e_0 + - hyperframe=6.0.1=pyhd8ed1ab_0 - idna=2.10=pyhd3eb1b0_0 - importlib-metadata=3.10.0=py39h06a4308_0 + - iniconfig=1.1.1=pyhd3eb1b0_0 + - ipfshttpclient=0.7.0=pyhd8ed1ab_0 - ipython=7.26.0=py39hb070fc8_0 - ipython_genutils=0.2.0=pyhd3eb1b0_1 - itsdangerous=2.0.1=pyhd3eb1b0_0 @@ -41,27 +68,41 @@ dependencies: - libgcc-ng=9.3.0=h5101ec6_17 - libgomp=9.3.0=h5101ec6_17 - libpq=12.2=h553bfba_1 + - libprotobuf=3.17.2=h780b84a_1 - libstdcxx-ng=9.3.0=hd4cf53a_17 + - lru-dict=1.1.7=py39h3811e60_0 - mako=1.1.5=pyhd8ed1ab_0 - markupsafe=2.0.1=py39h27cfd23_0 - matplotlib-inline=0.1.2=pyhd3eb1b0_2 - mistune=0.8.4=py39h3811e60_1004 + - more-itertools=8.8.0=pyhd3eb1b0_0 + - multiaddr=0.0.9=py_0 + - multidict=5.1.0=py39h3811e60_1 - ncurses=6.2=he6710b0_1 - - openssl=1.1.1l=h7f8727e_0 + - netaddr=0.7.19=py_0 + - openssl=1.1.1k=h7f98852_0 + - packaging=21.0=pyhd3eb1b0_0 + - parsimonious=0.8.1=py_0 - parso=0.8.2=pyhd3eb1b0_0 - 
pexpect=4.8.0=pyhd3eb1b0_3 - pickleshare=0.7.5=pyhd3eb1b0_1003 - pip=21.1.3=py39h06a4308_0 + - pluggy=0.13.1=py39h06a4308_0 - prompt-toolkit=3.0.17=pyh06a4308_0 + - protobuf=3.17.2=py39he80948d_0 - psycopg2=2.8.6=py39h3c74f83_1 - ptyprocess=0.7.0=pyhd3eb1b0_2 + - py=1.10.0=pyhd3eb1b0_0 - pycosat=0.6.3=py39h27cfd23_0 - pycparser=2.20=py_2 + - pycryptodome=3.10.1=py39h81fec48_0 - pygments=2.10.0=pyhd3eb1b0_0 - pyjwt=2.1.0=pyhd8ed1ab_0 - pyopenssl=20.0.1=pyhd3eb1b0_1 + - pyparsing=2.4.7=pyhd3eb1b0_0 - pyrsistent=0.17.3=py39h3811e60_2 - pysocks=1.7.1=py39h06a4308_0 + - pytest=6.2.4=py39h06a4308_2 - python=3.9.5=h12debd9_4 - python-dateutil=2.8.2=pyhd8ed1ab_0 - python-editor=1.0.4=py_0 @@ -69,21 +110,33 @@ dependencies: - pyyaml=5.4.1=py39h3811e60_0 - readline=8.1=h27cfd23_0 - requests=2.25.1=pyhd3eb1b0_0 + - requests-mock=1.9.3=pyhd8ed1ab_0 + - rfc3986=1.5.0=pyhd8ed1ab_0 + - rlp=1.2.0=pyh9f0ad1d_0 - ruamel_yaml=0.15.100=py39h27cfd23_0 - setuptools=52.0.0=py39h06a4308_0 - six=1.16.0=pyhd3eb1b0_0 + - sniffio=1.2.0=py39hf3d152e_2 - sqlalchemy=1.4.22=py39h7f8727e_0 - sqlite=3.36.0=hc218d9a_0 - tk=8.6.10=hbc83047_0 + - toml=0.10.2=pyhd3eb1b0_0 + - toolz=0.11.2=pyhd8ed1ab_0 - tqdm=4.61.2=pyhd3eb1b0_1 - traitlets=5.0.5=pyhd3eb1b0_0 + - typing-extensions=4.0.1=hd8ed1ab_0 + - typing_extensions=4.0.1=pyha770c72_0 - tzdata=2021a=h52ac0ba_0 - urllib3=1.26.6=pyhd3eb1b0_1 + - varint=1.0.2=pyh9f0ad1d_0 - wcwidth=0.2.5=py_0 + - web3=5.26.0=py39hf3d152e_0 + - websockets=9.1=py39h3811e60_0 - werkzeug=1.0.1=pyhd3eb1b0_0 - wheel=0.36.2=pyhd3eb1b0_0 - xz=5.2.5=h7b6447c_0 - yaml=0.2.5=h7b6447c_0 + - yarl=1.6.3=py39h3811e60_2 - zipp=3.5.0=pyhd3eb1b0_0 - zlib=1.2.11=h7b6447c_3 prefix: /opt/conda/envs/app diff --git a/deploy.yaml b/deploy-dev.yaml similarity index 83% rename from deploy.yaml rename to deploy-dev.yaml index dacbecf..acd7c2e 100644 --- a/deploy.yaml +++ b/deploy-dev.yaml @@ -9,6 +9,9 @@ name: chainedmetrics-api-dev region: nyc services: - cors: + allow_headers: + - content-type + - Access-Control-Allow-Origin allow_methods: - GET - POST @@ -18,9 +21,7 @@ services: - prefix: http://localhost:3000 - prefix: https://dev.chainedmetrics.com - prefix: https://chained-metrics-dev.web.app - allow_headers: - - content-type - - Access-Control-Allow-Origin + - prefix: https://chainedmetrics.com dockerfile_path: Dockerfile envs: - key: DB_PASS @@ -34,6 +35,10 @@ services: scope: RUN_AND_BUILD_TIME type: SECRET value: EV[1:IUYabgwKB3RE9WCzpJzCQUcUdT1cxjNf:4KhS56CiRkTWHrpzZhnUiYXeDBqa5HnHP0unAmeorJmsG11QGApFjOqQzvUWbum9Tr4Ujw==] + - key: MAILCHIMP_API_KEY + scope: RUN_AND_BUILD_TIME + type: SECRET + value: EV[1:QuiE786jSb9WfuqVCM4r624gQtWtlllY:h19Vx2GGQz/hDGokp6X8XFtBWej/yg+xrOx6MnWDR9rvR7+PjSDoYZZVwEgWXun/9TOksw==] github: branch: development deploy_on_push: true @@ -45,3 +50,4 @@ services: routes: - path: / source_dir: / + \ No newline at end of file diff --git a/deploy-prod.yaml b/deploy-prod.yaml new file mode 100644 index 0000000..b37c5b2 --- /dev/null +++ b/deploy-prod.yaml @@ -0,0 +1,51 @@ +alerts: +- rule: DEPLOYMENT_FAILED +- rule: DOMAIN_FAILED +domains: +- domain: api.chainedmetrics.com + type: PRIMARY + zone: chainedmetrics.com +name: chainedmetrics-api +region: nyc +services: +- cors: + allow_headers: + - content-type + - Access-Control-Allow-Origin + allow_methods: + - GET + - POST + - PUT + - DELETE + allow_origins: + - prefix: http://localhost:3000 + - prefix: https://chained-metrics-dev.web.app + - prefix: https://chainedmetrics.com + dockerfile_path: Dockerfile + envs: + - key: CHAINEDMETRICS_ENV + 
scope: RUN_AND_BUILD_TIME + value: Production + - key: DB_PASS + scope: RUN_AND_BUILD_TIME + type: SECRET + value: EV[1:oL9RznBP04XtV0A4v0QZIICLbD4hE0iU:+mnWEsYZA6TgLsT3AdP4/Va+Ag+SN+PxoqAgDUmbOGc=] + - key: MAILCHIMP_API_KEY + scope: RUN_AND_BUILD_TIME + type: SECRET + value: EV[1:Gz0OK0yc1CmrJ/Wo1Mz2jtRnNmqlfbJU:lRiGqqvH1/nz7eQtPOiNLde1PWZEerTKyvTloMnWtrWjWbOsNufjnJw6Z+oEryAA0Ti2IQ==] + - key: JWT_SECRET_KEY + scope: RUN_AND_BUILD_TIME + type: SECRET + value: EV[1:hNFG6i2Bq25K1knNEAmxmcImLQMOhbto:y4OKwwcCjj0bGjvU3AhXhOZjfyyq5BfbsGCTFoWBDLvfZeKq] + github: + branch: main + deploy_on_push: true + repo: detroitcoder/chainedmetrics_api + http_port: 5050 + instance_count: 1 + instance_size_slug: basic-xs + name: chainedmetrics-api + routes: + - path: / + source_dir: / diff --git a/scripts/local_bash.sh b/scripts/local_bash.sh deleted file mode 100755 index 4c8a52b..0000000 --- a/scripts/local_bash.sh +++ /dev/null @@ -1,7 +0,0 @@ -docker run \ --p 5050:5050 -it \ --e DB_PASS=$DEV_DB_PASS \ - -v ~/chained_metrics_api:/src/chainedmetrics_api \ - -w /src/chainedmetrics_api/src \ - registry.digitalocean.com/chainedmetrics/chainedmetrics_api:$1 \ - /bin/bash \ No newline at end of file diff --git a/scripts/local_server.sh b/scripts/local_server.sh index 99d35c5..6902513 100755 --- a/scripts/local_server.sh +++ b/scripts/local_server.sh @@ -1,7 +1,7 @@ docker run \ -a stdout -a stderr -p 5050:5050 -it \ --e DB_PASS=$DEV_DB_PASS -e JWT_SECRET_KEY=stay_safe_out_there\ - -v ~/chained_metrics_api:/src/chainedmetrics_api \ +-e DB_PASS=$DEV_DB_PASS -e JWT_SECRET_KEY=stay_safe_out_there -e MAILCHIMP_API_KEY=$MAILCHIMP_API_KEY\ + -e GMAIL_PASS=$MAILCHIMP_API_KEY -e POLYGONSCAN_TOKEN=$POLYGONSCAN_TOKEN -v ~/chained_metrics_api:/src/chainedmetrics_api \ -w /src/chainedmetrics_api/src \ --entrypoint /opt/conda/bin/conda \ registry.digitalocean.com/chainedmetrics/chainedmetrics_api:$1 \ diff --git a/scripts/run_container.sh b/scripts/run_container.sh new file mode 100755 index 0000000..683c42d --- /dev/null +++ b/scripts/run_container.sh @@ -0,0 +1,5 @@ +docker run \ +-a stdout -a stderr -p 5050:5050 -it \ +-e DB_PASS=$DEV_DB_PASS -e JWT_SECRET_KEY=stay_safe_out_there -e MAILCHIMP_API_KEY=$MAILCHIMP_API_KEY\ + -e GMAIL_PASS=$MAILCHIMP_API_KEY \ + registry.digitalocean.com/chainedmetrics/chainedmetrics_api:$1 diff --git a/scripts/run_local_bash.sh b/scripts/run_local_bash.sh new file mode 100755 index 0000000..3804209 --- /dev/null +++ b/scripts/run_local_bash.sh @@ -0,0 +1,9 @@ +docker run \ +-it \ +-e CHAINEDMETRICS_ENV=Development \ +-e DB_PASS=$DEV_DB_PASS -e JWT_SECRET_KEY=stay_safe_out_there -e MAILCHIMP_API_KEY=$MAILCHIMP_API_KEY\ + -e GMAIL_PASS=$MAILCHIMP_API_KEY -v ~/chained_metrics_api:/src/chainedmetrics_api \ + -v ~/chained_metrics_api:/src/chainedmetrics_api \ + -w /src/chainedmetrics_api/src \ + registry.digitalocean.com/chainedmetrics/chainedmetrics_api:$1 \ + /bin/bash diff --git a/scripts/run_tests.sh b/scripts/run_tests.sh index 3e9931e..7acb8a7 100755 --- a/scripts/run_tests.sh +++ b/scripts/run_tests.sh @@ -1,10 +1,14 @@ -docker build -t test:$1 . + + +tag=$RANDOM +echo $tag +docker build -t test:$tag . 
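+# Run pytest inside the image that was just built (-rx adds a summary of xfailed tests to the output)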
docker run \ -a stdout -a stderr -p 5050:5050 -it \ --e DB_PASS=invalid_pass -e JWT_SECRET_KEY=stay_safe_out_there\ - -v ~/chained_metrics_api:/src/chainedmetrics_api \ +-e DB_PASS=$DEV_DB_PASS -e JWT_SECRET_KEY=stay_safe_out_there -e MAILCHIMP_API_KEY=$MAILCHIMP_API_KEY\ + -e GMAIL_PASS=$MAILCHIMP_API_KEY -e TEST=true -v ~/chained_metrics_api:/src/chainedmetrics_api \ -w /src/chainedmetrics_api/src \ --entrypoint /opt/conda/bin/conda \ - test:$1 \ - run --no-capture-output -n app python -m unittest tests \ No newline at end of file + test:$tag \ + run --no-capture-output -n app pytest -rx diff --git a/src/app/__init__.py b/src/app/__init__.py index c3bf047..6ad0caf 100644 --- a/src/app/__init__.py +++ b/src/app/__init__.py @@ -7,43 +7,67 @@ from .markets import markets_bp from .auth import auth_bp, jwt +from .faucet import faucet_bp from .models import db -from .utilities import CustomJSONEncoder, SWAGGER_TEMPLATE +from .utilities import CustomJSONEncoder, SWAGGER_TEMPLATE, mail -app = Flask(__name__) -app.config.from_object('flask_config.{}'.format( - os.environ.get('CHAINEDMETRICS_ENV', 'Development') -)) -app.json_encoder = CustomJSONEncoder +def create_app(testing_config=None): + ''' + Top level function for creating the flask app -app.config['SWAGGER'] = dict(openapi='3.0.2') -swagger = Swagger(app, template=SWAGGER_TEMPLATE) + Args: + testing (bool): Indicates if the app is running for the testsuite -db.init_app(app) -jwt.init_app(app) -migrate = Migrate() -migrate.init_app(app, db) + ''' + app = Flask(__name__) -app.register_blueprint(markets_bp, url_prefix='/markets') -app.register_blueprint(auth_bp, url_prefix='/auth') + if testing_config: + app.config.update(testing_config) + else: + app.config.from_object('flask_config.{}'.format( + os.environ.get('CHAINEDMETRICS_ENV', 'Development') + )) + app.json_encoder = CustomJSONEncoder + app.config['SWAGGER'] = dict(openapi='3.0.2') + swagger = Swagger(app, template=SWAGGER_TEMPLATE) -@app.errorhandler(500) -def handle_500(err): - - if isinstance(err.original_exception, NoAuthorizationError): - return jsonify(dict( - message=( - 'Invalid or Unset JWT token. Ensure your session ' - 'has a valid token by logging in and the access_token_cookie is set.' - ) - )), 401 + db.init_app(app) + jwt.init_app(app) + mail.init_app(app) - else: - try: - err.with_traceback() - except Exception: - app.logger.exception('500 Unhandled Error') - return jsonify(dict(message='There was an unhandled server excption. See server logs for details')), 500 + migrate = Migrate() + migrate.init_app(app, db) + + + app.register_blueprint(markets_bp, url_prefix='/markets') + app.register_blueprint(auth_bp, url_prefix='/auth') + app.register_blueprint(faucet_bp, url_prefix='/faucet') + app.url_map.strict_slashes = False + + + @app.errorhandler(500) + def handle_500(err): + + if isinstance(err.original_exception, NoAuthorizationError): + return jsonify(dict( + message=( + 'Invalid or Unset JWT token. Ensure your session ' + 'has a valid token by logging in and the access_token_cookie is set.' + ) + )), 401 + + else: + try: + err.with_traceback() + except Exception: + app.logger.exception('500 Unhandled Error') + return jsonify(dict(message='There was an unhandled server excption. 
See server logs for details')), 500 + + return app + +if __name__ == '__main__': + + app = create_app() \ No newline at end of file diff --git a/src/app/analytics.py b/src/app/analytics.py new file mode 100644 index 0000000..1e0642b --- /dev/null +++ b/src/app/analytics.py @@ -0,0 +1,267 @@ +import os +import requests +from collections import defaultdict +import datetime + +class AMM(object): + + def __init__(self, high, low): + + self.high = high + self.low = low + self.long = 0 + self.short = 0 + self.transactions = [] + + def process_transaction(self, transaction): + ''' + Processes the transactions in the AMM + ''' + + delta_long, delta_short = transaction.transferAmount() + if (delta_long, delta_short) == (0, 0): + return None + + if transaction.isCompleteTransaction(): + transactionInfo = transaction.getTransactionInfo() + else: + print(transaction.all_transactions[0]['timeStamp']) + transactionInfo = { + 'time': str(datetime.datetime.fromtimestamp(int(transaction.all_transactions[0]['timeStamp']))) + } + + self.long += delta_long + self.short += delta_short + + longPrice = self.short / (self.long + self.short) + forecastedValue = float(self.low) + longPrice * float(self.high - self.low) + + transactionInfo['longPrice'] = longPrice + transactionInfo['forecastedValue'] = forecastedValue + transactionInfo['longBalance'] = self.long + transactionInfo['shortBalance'] = self.short + + self.transactions.append(transactionInfo) + + +class Transaction(object): + ''' + This is an example of how to calculate the the + ''' + + def __init__(self, amm, collateral, long, short, _hash): + + self.collateral_transfer = None + self.long_mint = None + self.short_mint = None + self.return_transfer = None + + self.amm_address = amm + self.collateral_address = collateral + self.long_address = long + self.short_address = short + self.all_transactions = [] + self.hash = _hash + + self.null_address = '0x0000000000000000000000000000000000000000' + + def isCompleteTransaction(self): + ''' + Checks if the internal transactions occured within this transaction to be complete + ''' + + if all((self.collateral_transfer, self.long_mint, self.short_mint, self.return_transfer)): + return True + else: + return False + + def isCollateralTransfer(self, txn): + ''' + Checks if this is a collateral transfer transaction from a user to the amm + ''' + + if (self.collateral_address == txn['contractAddress'] and + self.amm_address == txn['to']): + return True + else: + return False + + def isLongMint(self, txn): + ''' + Check if this the minting of the long token + ''' + + if (self.null_address == txn['from'] and self.amm_address == txn['to'] + and txn['contractAddress'] == self.long_address): + return True + else: + return False + + def isShortMint(self, txn): + ''' + Check if this is the minting of the short token + ''' + + if (self.null_address == txn['from'] and self.amm_address == txn['to'] + and txn['contractAddress'] == self.short_address): + return True + else: + return False + + def isReturnTransfer(self, txn): + ''' + Checks if the user is returning the tokens + ''' + + if (self.amm_address == txn['from'] and txn['contractAddress'] in (self.long_address, self.short_address)): + return True + else: + return False + + def classifyTransactionType(self, txn): + ''' + Classifies the transaction type + ''' + + if self.isCollateralTransfer(txn): + self.collateral_transfer = txn + elif self.isLongMint(txn): + self.long_mint = txn + elif self.isShortMint(txn): + self.short_mint = txn + elif self.isReturnTransfer(txn): + 
self.return_transfer = txn + self.all_transactions.append(txn) + + def getTransactionInfo(self): + ''' + Calcualtes information about the transaction based on info about the transaction + ''' + + assert self.isCompleteTransaction(), 'Incomplete transaction' + + time = str(datetime.datetime.fromtimestamp(int(self.collateral_transfer['timeStamp']))) + investmentAmount = int(self.collateral_transfer['value']) + orderType = 'long' if self.return_transfer['contractAddress'] == self.long_address else 'short' + + try: + pricePerToken = float(investmentAmount) / int(self.return_transfer['value']) + except ZeroDivisionError: + pricePerToken = 0 + + return { + 'account': self.collateral_transfer['from'], + 'time': time, + 'investmentAmount': investmentAmount, + 'orderType': orderType, + 'pricePerToken': pricePerToken, + 'transactionType': 'buy', + 'returnAmount': int(self.return_transfer['value']), + 'isComplete': 'True' + } + + def transferAmount(self): + ''' + Calculates the long and short tokens that at transfered in and out of the AMM for the short and long tokens + ''' + + long, short = 0, 0 + for txn in self.all_transactions: + if txn.get('from') == self.amm_address: + if txn.get('contractAddress') == self.long_address: + long -= int(txn['value']) + elif txn.get('contractAddress') == self.short_address: + short -= int(txn['value']) + + elif txn.get('to') == self.amm_address: + if txn.get('contractAddress') == self.long_address: + long += int(txn['value']) + elif txn.get('contractAddress') == self.short_address: + short += int(txn['value']) + + return long, short + +def calc_pnl(transactions, long_price=None, short_price=None): + ''' + Calculates the PnL from the transactions for a market for each address + + Arguments: + transactions (list): Output from get_historical_transactions for a market + long_price (float): Price of the long token + short_price (float): Price of the short token + + Returns: + dict: Dictionary of PnL for each address + + Notes: + If long_price and short_price are not provided, the prices from the last transaction are used + ''' + + pnl = defaultdict(lambda: 0) + balances = {'long': defaultdict(lambda: 0), 'short': defaultdict(lambda: 0)} + + if long_price is None: + long_price, short_price = transactions[-1]['longPrice'], 1 - transactions[-1]['longPrice'] + + for txn in transactions: + account = txn['account'] + if txn.get('isComplete') == 'True': + if txn['transactionType'] == 'buy': + if txn['orderType'] == 'long': + pnl[account] -= txn['investmentAmount'] + balances['long'][account] += txn['returnAmount'] + elif txn['orderType'] == 'short': + pnl[account] -= txn['investmentAmount'] + balances['short'][account] += txn['returnAmount'] + elif txn['transactionType'] == 'sell': + if txn['orderType'] == 'long': + pnl[account] += txn['returnAmount'] + balances['long'][account] -= txn['investmentAmount'] + elif txn['orderType'] == 'short': + pnl[account] += txn['returnAmount'] + balances['long'][account] += txn['investmentAmount'] + + for (acc, bal) in balances['long'].items(): + pnl[acc] += bal * long_price + + for (acc, bal) in balances['short'].items(): + pnl[acc] += bal * short_price + + return pnl + +def get_historical_transactions(amm_address, high, low, collateral, long, short, api_token, date=None): + + amm = AMM(high, low) + transaction_dict = call_polyscan_api(amm_address, api_token) + transaction = Transaction(amm_address, collateral, long, short, transaction_dict['result'][0]['hash']) + for txn in transaction_dict['result']: + # Filter out transacitons before 
date + if date is not None and int(txn['timeStamp']) < date: + continue + + if txn['hash'] != transaction.hash: + amm.process_transaction(transaction) + transaction = Transaction(amm_address, collateral, long, short, txn['hash']) + + transaction.classifyTransactionType(txn) + + amm.process_transaction(transaction) + + return amm.transactions + + +def call_polyscan_api(token_address, api_token): + ''' + Queries polyscan for the historical transactions of the AMM + ''' + + print(f'Getting Issuances for {token_address}') + url = ( + "https://api.polygonscan.com/api?module=account&action=tokentx&" + f"address={token_address}&sort=asc&" + f"apikey={api_token}" + ) + + print(url) + resp = requests.get(url).json() + return resp \ No newline at end of file diff --git a/src/app/auth.py b/src/app/auth.py old mode 100644 new mode 100755 index e504ed4..c859126 --- a/src/app/auth.py +++ b/src/app/auth.py @@ -1,9 +1,19 @@ -from flask import Blueprint, jsonify, request, Response +import os +import time + +from flask import Blueprint, jsonify, request, current_app from datetime import timedelta -from flask_jwt_extended import create_access_token, current_user, jwt_required +from flask_jwt_extended import create_access_token, current_user, jwt_required, decode_token +from jwt.exceptions import ExpiredSignatureError, DecodeError, InvalidTokenError from flask_jwt_extended import JWTManager +from sqlalchemy.exc import MultipleResultsFound +from web3.auto import w3 +from eth_account.messages import defunct_hash_message +from eth_keys.exceptions import BadSignature, ValidationError from .models import RequestAccess, User, db +from .utilities import (send_verification_email, send_resetpassword_email, + subscribe_to_mailchimp_async) jwt = JWTManager() @@ -12,8 +22,12 @@ @jwt.user_lookup_loader def user_lookup_callback(_jwt_header, jwt_data): - email = jwt_data["sub"] - return User.query.filter_by(email=email).one_or_none() + address = jwt_data["sub"] + + if isinstance(address, int) or len(address) < 42: + return None + + return User.query.filter_by(address=address).one_or_none() @jwt.expired_token_loader def exipred_token_callback(jwt_header, jwt_payload): @@ -52,13 +66,88 @@ def login(): email = request.json.get('email') password = request.json.get('password') - user = User.query.filter_by(email=email).one_or_none() + user = User.query.filter_by(email=email.lower()).one_or_none() if not user or not user.check_password(password): return jsonify(dict(message='Wrong username or password')), 401 else: access_token = create_access_token( - identity=email, + identity=user.id, + expires_delta=timedelta(days=7) + ) + + return jsonify(dict(message='Success', access_token=access_token)) + +@auth_bp.route('/login2', methods=['POST']) +def login2(): + ''' + Logs a user into chainedmetrics with web3 signature and retuns a JWT token valid for 7 days + + Endpoint for Logging into chained metrics using a web3 signature and their \ + address. A JWT token is returned and is then required to be in \ + the header of all restricted endpoints. 
+ + --- + requestBody: + required: true + description: The user's email and passsword for chained metrics + content: + application/json: + schema: + type: object + properties: + address: + type: string + signature: + type: string + message: + type: string + responses: + 200: + description: Successful response with the JWT as access_token + 401: + description: Incorrect address or signature + ''' + + address = request.json.get('address') + signature = request.json.get('signature') + user_message = request.json.get('message') + + if not address or not signature: + return jsonify(dict(message='No signature provided')), 401 + + # Construct the expected message hash from the address and signature + now = time.time() + rounded_now = now - (now % 600) + domain = request.headers.get('Host') + message = 'By signing this message you are signing into {} at {:.0f}.'.format(domain, rounded_now) + message_hash = defunct_hash_message(text=message) + print('expected_message:', message) + print('user_message:', user_message) + print('messages equal:', message == user_message) + print('user_address:', address) + # Verify the signature matches the address and message hash + try: + signer = w3.eth.account.recoverHash(message_hash, signature=signature) + except (BadSignature, ValidationError): + return jsonify(dict(message='Invalid signature')), 401 + + print('signer_address:', signer) + if signer != address: + return jsonify(dict(message='Wrong signature')), 401 + else: + try: + user = User.query.filter_by(address=address).one_or_none() + except MultipleResultsFound: + return jsonify(dict(message='Multiple users found with this address')), 401 + + if not user: + user = User(address=address, active=True) + db.session.add(user) + db.session.commit() + + access_token = create_access_token( + identity=user.address, expires_delta=timedelta(days=7) ) @@ -86,16 +175,21 @@ def user(): first_name=current_user.first_name, last_name=current_user.last_name, active=current_user.active, - created_on=current_user.created_on + created_on=current_user.created_on, + address=current_user.address, + matic_recieved=current_user.matic_recieved, + matic_recieved_date=current_user.matic_recieved_date, + notifications_portfolio_events=current_user.notifications_portfolio_events, + notifications_market_events=current_user.notifications_market_events ) @auth_bp.route('/user', methods=['POST']) -@jwt_required() def add_user(): ''' - REQUIRES ADMIN PRIVLEGES FOR TESTING ONLY: Endpoint to create a new user + Add new user + Requests to add a user and if the payload is correct an email is sent to the user which requires them to verify the email address. - Enables programatic access to create new users. 
Must be authenticated with a user with admin privleges + Used to initially create the account --- requestBody: required: true @@ -109,14 +203,15 @@ def add_user(): type: string password: type: string - first_name: + username: type: string - last_name: + address: type: string - admin: + notifications_portfolio_events: type: boolean - security: - - bearerAuth: [] + notifications_market_events: + type: boolean + responses: 200: description: Success message @@ -126,73 +221,333 @@ def add_user(): description: Not an Admin user ''' - if not current_user.admin: - return jsonify(message="User does not have admin privleges to create users"), 401 + email = request.json.get('email').lower().strip() + password = request.json.get('password') + username = request.json.get('username', '') + address = request.json.get('address', '').strip() + notifications_market_events = request.json.get('notifications_market_events', False) + notifications_portfolio_events = request.json.get('notifications_portfolio_events', False) + if not all((email, password)): + return jsonify(message="All required arguments must be filled out"), 400 + elif not all((isinstance(notifications_market_events, bool), isinstance(notifications_portfolio_events, bool))): + return jsonify(message="Notification flags muss be boolean") + elif User.query.filter_by(email=email).one_or_none(): + return jsonify(message="An account with this email already exists"), 400 else: - email = request.json.get('email') - admin = request.json.get('admin') - first_name = request.json.get('first_name') - last_name = request.json.get('last_name') - password = request.json.get('password') - - if not all((email, first_name, last_name, password)): - return jsonify(message="All required arguments must be filled out"), 400 - elif admin not in (True, False): - return jsonify(message="All required arguments must be filled out"), 400 - - user = User(email=email, admin=admin, active=True, first_name=first_name, last_name=last_name) + user = User( + email=email, admin=False, active=False, username=username, + address=address, + notifications_market_events = notifications_market_events, + notifications_portfolio_events=notifications_portfolio_events, + ) + user.set_password(password) db.session.add(user) db.session.commit() - return jsonify(message='Success') + verify_token = create_access_token(identity=email, expires_delta=timedelta(days=1)) + send_verification_email(email, verify_token) + subscribe_to_mailchimp_async(email) + + return jsonify(message=f'Email verification sent to {email}') -@auth_bp.route('/requestaccess', methods=['POST']) -def request_access(): +@auth_bp.route('/user', methods=['PUT']) +@jwt_required() +def update_user(): ''' - Endpoint for requesting access to the Exchange UI + Update User Settings + Requests to update a user's settings - This endpoint is used to request access to chainedmetrics.com + Used to initially create the account --- requestBody: - description: Information about the user that is requesting access + required: true + description: Required fields for the user content: application/json: schema: type: object properties: email: - required: true type: string - full_name: - required: true + password: type: string - reason: + username: type: string - company: + address: type: string + notifications_portfolio_events: + type: boolean + notifications_market_events: + type: boolean + responses: 200: - description: Success Response + description: Success message 400: - description: Validation error on arguments. 
See Response + description: Validation Error + 401: + description: Not an Admin user ''' - full_name = request.json.get('full_name') email = request.json.get('email') - reason = request.json.get('reason') - company = request.json.get('company') + username = request.json.get('username') + address = request.json.get('address') + notifications_market_events = request.json.get('notifications_market_events') + notifications_portfolio_events = request.json.get('notifications_portfolio_events') + + if email: + email = email.lower().strip() + if User.query.filter_by(email=email).one_or_none() and current_user.email != email: + return jsonify(message='Email is already in use'), 400 + current_user.email = email + + if username: + username = username.strip().lower() + if User.query.filter_by(username=username).one_or_none() and current_user.username != username: + return jsonify(message='Username is already in use'), 400 + current_user.username = username - if email and full_name: + if address: + address = address.strip() + if User.query.filter_by(address=address).one_or_none() and current_user.address != address: + return jsonify(message='Address is already in use'), 400 + current_user.address = address - request_access = RequestAccess(full_name=full_name, email=email, reason=reason, company=company) - db.session.add(request_access) - db.session.commit() + if isinstance(notifications_market_events, bool): + current_user.notifications_market_events = notifications_market_events + + if isinstance(notifications_portfolio_events, bool): + current_user.notifications_portfolio_events = notifications_portfolio_events + + db.session.commit() + + return jsonify(message='Updated user settings') + + +@auth_bp.route('/user//find', methods=['GET']) +def find_user(wallet_address): + ''' + Check if a record exists for a user in a database table + Returns True if a user record exists in the user table and False if it does not + + It DOES NOT require an valid token to be in the header in order to see this information + --- + responses: + 200: + description: True if the user does exist in the table + 204: + description: False value if the user does not exist in the table + ''' + + user = User.query.filter_by(address=wallet_address).one_or_none() + + if user: + return jsonify(message=True), 200 + + return jsonify(message=False), 204 + + +@auth_bp.route('/verifyuser', methods=['POST']) +def verify_user(): + ''' + Used to verify a user after they authenticate validate their email address. They were given a token that lasts for 24 hours + and if this token is returned to this API it will return a 200 and a valid JWT token just like the one returned + from the login endpoint + + Verifies the user's email is accurate and activates the user + --- + requestBody: + required: true + description: The token body + content: + application/json: + schema: + type: object + properties: + verifytoken: + type: string + responses: + 200: + description: Success message with the JWT token + 400: + description: Verify token is not found in the payload + 404: + description: There is not a user registered for this email yet. Possibly malicious + 410: + description: The user already verified this account but is valid + 403: + description: The token is expired or invalid. 
See message + ''' + + verifytoken = request.json.get('verifytoken') + if not verifytoken: + return jsonify(message="verifytoken is missing"), 400 + + try: + email = decode_token(verifytoken)['sub'] + + access_token = create_access_token( + identity=email, + expires_delta=timedelta(days=7) + ) + + user = User.query.filter_by(email=email).one_or_none() + if not user: + return jsonify('No registered user for this email'), 404 + elif user.active: + return jsonify('User has has already activated account'), 410 + else: + + user.active = True + db.session.commit() + + return jsonify(dict(message='Success', access_token=access_token)) + + except ExpiredSignatureError: + return jsonify(message='Expired verifytoken'), 403 + except (DecodeError, InvalidTokenError): + return jsonify(message='Invalied verifytoken'), 403 + +@auth_bp.route('/forgotpassword', methods=['POST']) +def forgot_password(): + ''' + Requests an email to be sent to request a password reset + + Sends an email to reset the password to the email if it exists + --- + requestBody: + required: true + description: The token body + content: + application/json: + schema: + type: object + properties: + email: + type: string + responses: + 200: + description: Success confirmation that the email is + 400: + description: Verify token is not found in the payload + ''' + + email = request.json.get('email').lower().strip() + if not email: + return jsonify(message="Email is missing"), 400 + + user = User.query.filter_by(email=email).one_or_none() + if not user: + print('No User found') + pass + else: + ('email sent') + reset_token = create_access_token(identity=email, expires_delta=timedelta(hours=1)) + send_resetpassword_email(email, reset_token) - return jsonify(dict(message='Success')) + return jsonify(message="If the email exists, a reset password link has been sent") + + +@auth_bp.route('/resetpassword', methods=['POST']) +def reset_password(): + ''' + Endpoint to reset the password. It requires both the token and the new password and runs validation on both. If successful it returns the valid JWT token + + Checks if the reset password is valid and if so resets the password to the new password + --- + requestBody: + required: true + description: The token body + content: + application/json: + schema: + type: object + properties: + resettoken: + type: string + password: + type: string + responses: + 200: + description: Success message with the JWT token + 400: + description: Verify token is not found in the payload + 404: + description: There is not a user registered for this email yet. Possibly malicious + 410: + description: The user already verified this account but is valid + 403: + description: The token is expired or invalid. 
See message + ''' + + resettoken = request.json.get('resettoken') + password = request.json.get('password') + + if not resettoken: + return jsonify(message="resettoken is missing"), 400 + elif not password: + return jsonify(message="missing password") + + try: + email = decode_token(resettoken)['sub'] + + user = User.query.filter_by(email=email).one_or_none() + if not user: + return jsonify('No registered user for this email'), 404 + else: + user.set_password(password) + db.session.commit() + + access_token = create_access_token( + identity=email, + expires_delta=timedelta(days=7) + ) + + return jsonify(message='Success', token=access_token) + + except ExpiredSignatureError: + return jsonify(message='Expired verifytoken'), 403 + except (DecodeError, InvalidTokenError): + return jsonify(message='Invalied verifytoken'), 403 + + +@auth_bp.route('/subscribe', methods=['POST']) +def subscribe(): + ''' + Endpoint for subscribing to the newsletter using MailChimp + + Subscribes to MailChimp NewsLetter from Chained Metrics team + --- + requestBody: + description: Information about the user that is requesting access + content: + application/json: + schema: + type: object + properties: + email: + required: true + type: string + + responses: + 200: + description: Success Response + 400: + description: Validation error on arguments. See Response + ''' + + email = request.json.get('email') + if not email: + return jsonify(dict(message="Email is missing")) else: + subscription_result = subscribe_to_mailchimp(email) - return jsonify(dict(message='Email and Full Name must be filled out.')), 400 + if subscription_result is True: + return jsonify(dict(message="Success")), 200 + else: + return jsonify(dict(message=subscription_result)), 400 + \ No newline at end of file diff --git a/src/app/faucet.py b/src/app/faucet.py new file mode 100755 index 0000000..5e09808 --- /dev/null +++ b/src/app/faucet.py @@ -0,0 +1,72 @@ +import os + +from flask import Blueprint, jsonify, request +from datetime import timedelta +from flask_jwt_extended import current_user, jwt_required +from sqlalchemy.exc import IntegrityError + +from .models import User, MaticFaucetQueue, db +from .auth import jwt + + +faucet_bp = Blueprint('faucet', __name__) + + +@faucet_bp.route('/requestmatic', methods=['POST']) +@jwt_required() +def user(): + ''' + A one time request of MATIC for this account + This is a one time requst for MATIC for this account. The address will be stored for future notification and associated with this account + + It does require an valid token to be in the header in order to see this information + --- + security: + - bearerAuth: [] + requestBody: + required: true + description: Required fields for the faucet + content: + application/json: + schema: + type: object + properties: + address: + type: string + + responses: + 200: + description: The request has been submitted successfully + 400: + description: User has already recieved MATIC or other issue. 
See message + 401: + description: Invalid token + 406: + description: User already recieved MATIC + ''' + + address = request.json.get('address') + if not address or not isinstance(address, str) or len(address) != 42: + return jsonify(message='Invalid or address'), 400 + + elif current_user.matic_recieved: + return jsonify(message='User already recieved matic'), 406 + + elif not current_user.active: + return jsonify(message='User has not verified email address'), 406 + + else: + try: + queue_entry = MaticFaucetQueue( + email=current_user.email, + address=address + ) + + db.session.add(queue_entry) + db.session.execute(f"NOTIFY faucet_request, '{current_user.email}';") + db.session.commit() + + return jsonify(message="Successfully requested Matic Faucet"), 200 + + except IntegrityError: + return jsonify(message='Request is already pending. If problem persists over 24hrs message us on Discord'), 406 diff --git a/src/app/markets.py b/src/app/markets.py index 313857f..4541c38 100644 --- a/src/app/markets.py +++ b/src/app/markets.py @@ -1,6 +1,11 @@ -from flask import Blueprint, jsonify +from flask import Blueprint, jsonify, current_app, request from cachetools import cached, TTLCache from .models import Market +from .analytics import get_historical_transactions, calc_pnl +from collections import defaultdict +from dateutil.parser import parse +from random import randint +from datetime import datetime, timedelta markets_bp = Blueprint('markets', __name__) @@ -20,6 +25,185 @@ def metrics(): return jsonify(dict(markets=markets)) +@markets_bp.route('////') +def get_metric_value(market, ticker, fiscal_period, metric): + ''' + Endpoint for returning KPI values + --- + parameters: + - name: market + in: path + type: integer + required: true + - name: ticker + in: path + type: string + required: true + - name: fiscal_period + in: path + type: string + required: true + - name: metric + in: path + type: string + required: true + responses: + 200: + description: The value of the KPI + 202: + description: The market does exist but the Value has not been set yet + 400: + description: The request arguments in the URL were invalid + 404: + description: The requested market does not exist + ''' + + if not all([market, ticker, fiscal_period, metric]): + return jsonify(message='All paramaters must be specified'), 400 + + value = lookup_result(market, ticker, fiscal_period, metric) + if value is None: + return jsonify(message='This KPI Market does not exist'), 404 + elif value is False: + return jsonify(messsage='This KPI Market has not resolved', value=False), 202 + else: + return jsonify(message='Success', value=value) + + +@markets_bp.route('/historical_prices/') +def historical_prices(market): + ''' + Endpoint for returning KPI values + --- + parameters: + - name: market + in: path + type: integer + required: true + responses: + 200: + description: An array of historical prices and transactions + 202: + description: The market does exist but the Value has not been set yet + 400: + description: The request arguments in the URL were invalid + 404: + description: The requested market does not exist + ''' + + if not market: + return jsonify(message='All paramaters must be specified'), 400 + + market = Market.query.filter_by(id=market).one_or_none() + if market is None: + return jsonify(message='This KPI Market does not exist'), 404 + else: + historical_data = get_fake_historical_transactions( + market.high, + market.low, + market.beat_price + ) + return jsonify(message='Success', value=historical_data) + # 
Comment out historical trasnsaction to better simulate data + # historical_data = get_historical_transactions( + # market.broker_address.strip().lower(), + # market.high, + # market.low, + # '0x79ec35384829ba7a75759a057693ce103b077bb1', #collateral_token + # market.beat_address.strip().lower(), + # market.miss_address.strip().lower(), + # current_app.config['POLYGONSCAN_TOKEN']) + + # return jsonify(message='Success', value=historical_data) + +@markets_bp.route('/pnl') +def pnl(): + ''' + Endpoint for querrying PNL values based on different filters such as ticker, date and market + --- + parameters: + - name: ticker + in: query + type: string + required: false + - name: date + in: query + type: string + required: false + - name: marketId + in: query + type: integer + required: false + responses: + 200: + description: A sorted array of PNL based on the filter with the address and PnL + 400: + description: The request arguments in the URL were invalid + 404: + description: The requested market does not exist + ''' + + ticker = request.args.get('ticker') + date = request.args.get('date') + marketId = request.args.get('marketId') + + print(ticker, date, marketId) + + if date: + try: + date = parse(date).timestamp() + except ValueError: + return jsonify(message='Invalid date format'), 400 + + markets = Market.query.filter(Market.low > 0) + if ticker: + markets = markets.filter(Market.ticker==ticker) + if marketId: + markets = markets.filter(Market.id==marketId) + + pnl = defaultdict(lambda: 0) + for market in markets: + historical_data = get_historical_transactions( + market.broker_address.strip().lower(), + market.high, + market.low, + '0x79ec35384829ba7a75759a057693ce103b077bb1', #collateral_token + market.beat_address.strip().lower(), + market.miss_address.strip().lower(), + current_app.config['POLYGONSCAN_TOKEN'], + date) + + print(historical_data) + + transaction_pnl = calc_pnl(historical_data) + for address, pnl_value in transaction_pnl.items(): + pnl[address] += pnl_value + + results = sorted(pnl.items(), key=lambda x: x[1], reverse=True) + + return jsonify(message='Success', value=results) + +# set ttl = 5 minutes (300 seconds) +@cached(cache=TTLCache(maxsize=10, ttl=300)) +def lookup_result(market, ticker, fiscal_period, metric): + ''' + Returns the value for the metric + + Ags: + None + + Retrns: + metric_value (float): A singleton float if the KPI is resolved or None + ''' + + market = Market.query.filter_by(id=market, ticker=ticker, fiscal_period=fiscal_period, metric=metric).one_or_none() + if market is None: + return None + if market.resolved_value is None: + return False + else: + return market.resolved_value + # set ttl = 5 minutes (300 seconds) @cached(cache=TTLCache(maxsize=10, ttl=300)) def get_markets(): @@ -48,6 +232,76 @@ def get_markets(): ''' markets = Market.query.all() + markets = Market.query.filter(Market.beat_address.isnot(None)).all() markets = [{k: v for k, v in row.__dict__.items() if not k.startswith('_')} for row in markets] + for m in markets: + m['chartData'] = get_historical_data_for_spark() return markets + +def get_historical_data_for_spark(): + ''' + Retuns random data for the spark charts on the web page + ''' + + lst = [50] + for i in range(100): + if randint(0, 4) == 0: + lst.append(lst[-1] + randint(-35, 30)) + else: + lst.append(lst[-1] + randint(-6, 10)) + return lst + +def get_fake_historical_transactions(high, low, beat_price): + ''' + Generates fake historical transaction data for more realistic simulations + + ''' + high = float(high) + low = 
float(low) + start = datetime.now() - timedelta(hours=randint(1, 3)) + transactions = [{ + 'time': start.strftime('%Y-%m-%d %T'), + 'forecastedValue': beat_price, + 'investmentAmount': randint(1000, 50000) + }] + + for i in range(50): + + up = randint(0, 9) <= 3 + for i in range(7): + next_trans = transactions[-1] + next_time = parse(next_trans['time']) + next_value = float(next_trans['forecastedValue']) + + if randint(0, 6) == 0: + last_time = next_time - timedelta(hours=randint(2,48), minutes=randint(1, 15)) + else: + last_time = next_time - timedelta(minutes=randint(10,120)) + + percent = (randint(1, 4) / 100) + if up: + last_value = (high - next_value) * percent + next_value + else: + last_value = next_value - (next_value - low) * percent + + investment_amount = int(150000 * percent) + + transactions.append({ + 'time': last_time.strftime('%Y-%m-%d %T'), + 'forecastedValue': last_value, + 'investmentAmount': investment_amount + }) + print(percent, up, last_time, last_value, investment_amount) + + transactions.reverse() + return transactions + + + + + + + + + diff --git a/src/app/models.py b/src/app/models.py old mode 100644 new mode 100755 index 27b41fd..c7f6cc7 --- a/src/app/models.py +++ b/src/app/models.py @@ -1,4 +1,6 @@ from sqlalchemy.sql.expression import null +from sqlalchemy import CheckConstraint +from sqlalchemy.sql.schema import ForeignKey from flask_sqlalchemy import SQLAlchemy from werkzeug.security import generate_password_hash, check_password_hash @@ -12,7 +14,7 @@ class Market(db.Model): fiscal_period = db.Column(db.String, nullable=False) metric = db.Column(db.String, nullable=False) ticker = db.Column(db.String, nullable=False) - value_string = db.Column(db.String, nullable=False) + value_string = db.Column(db.String, nullable=True) value = db.Column(db.Numeric) beat_address = db.Column(db.String) beat_price = db.Column(db.Numeric) @@ -21,6 +23,25 @@ class Market(db.Model): miss_price = db.Column(db.Numeric) issued = db.Column(db.Integer) highlight_market = db.Column(db.Boolean, nullable=False, default=False) + resolved_value = db.Column(db.Numeric) + metric_symbol = db.Column(db.String) + expected_reporting_date = db.Column(db.Date) + high = db.Column(db.Numeric) + high_string = db.Column(db.String) + low = db.Column(db.Numeric) + low_string = db.Column(db.String) + tick_size = db.Column(db.Numeric) + unit_abbr = db.Column(db.String) + unit_desc = db.Column(db.String) + company_name = db.Column(db.String) + about = db.Column(db.String) + + + CheckConstraint( + '''(high is not null and high_string is not null and low is not null and low_string is not null and value is null and value_string is null) + OR (value_string is not null and value is not null and high is null and high_string is null and low is null and low_string is null)''', + 'binary_vs_scalar_check' + ) def __repr__(self): return f' {self.ticker} | {self.fiscal_period} | {self.metric}' @@ -43,11 +64,16 @@ class User(db.Model): email = db.Column(db.Text, nullable=False, unique=True) admin = db.Column(db.Boolean, default=False, nullable=False) password = db.Column(db.String(255), nullable=False) - first_name = db.Column(db.Text, nullable=False) - last_name = db.Column(db.Text, nullable=False) + first_name = db.Column(db.Text) + last_name = db.Column(db.Text) active = db.Column(db.Boolean, nullable=False) created_on = db.Column(db.DateTime, nullable=True) - + address = db.Column(db.String, nullable=True) + matic_recieved = db.Column(db.Numeric, nullable=True) + matic_recieved_date = db.Column(db.DateTime, 
nullable=True) + notifications_portfolio_events = db.Column(db.Boolean, default=False) + notifications_market_events = db.Column(db.Boolean, default=False) + username = db.Column(db.Text, nullable=True) def set_password(self, password): """Create hashed password.""" @@ -64,3 +90,13 @@ def check_password(self, password): def __repr__(self): return ''.format(self.email) +class MaticFaucetQueue(db.Model): + + id = db.Column(db.BigInteger, primary_key=True, autoincrement=True) + email = db.Column(db.Text, db.ForeignKey('user.email'), nullable=False, unique=True) + address = db.Column(db.String, nullable=False) + error_msg = db.Column(db.Text, nullable=True) + error_time = db.Column(db.DateTime, nullable=True) + + def __repr__(self): + return f'' diff --git a/src/app/utilities.py b/src/app/utilities.py deleted file mode 100644 index aec0ec0..0000000 --- a/src/app/utilities.py +++ /dev/null @@ -1,40 +0,0 @@ -import decimal -import json - -from datetime import date, datetime - - -class CustomJSONEncoder(json.JSONEncoder): - def default(self, obj): - - if isinstance(obj, date) or isinstance(obj, datetime): - return obj.isoformat() - elif isinstance(obj, decimal.Decimal): - return float(obj) - else: - return json.JSONEncoder.default(self, obj) - -SWAGGER_TEMPLATE = { - "swagger": "3.4.0", - "info": { - "title": "Chained Metrics API", - "description": ("This is the backend API for all REST API calls. " - "For all requests that require authentication, " - "a JWT token needs to be added to the header. See the /auth/login endpoint for details"), - "contact": { - "responsibleOrganization": "ChainedMetrics", - "responsibleDeveloper": "Michael Watson", - "email": "michael@chainedmetrics.com" - }, - "termsOfService": "https://chainedmetrics.com/termsofservice", - }, - "components": { - "securitySchemes": { - "bearerAuth": { # arbitrary name for the security scheme - "type": "http", - "scheme": "bearer", - "bearerFormat": "JWT" - } - } - } -} \ No newline at end of file diff --git a/src/app/utilities/__init__.py b/src/app/utilities/__init__.py new file mode 100644 index 0000000..6acb726 --- /dev/null +++ b/src/app/utilities/__init__.py @@ -0,0 +1,4 @@ +from .utilities import ( + mail, send_verification_email, send_verification_email, send_resetpassword_email, + subscribe_to_mailchimp_async, subscribe_to_mailchimp, CustomJSONEncoder, SWAGGER_TEMPLATE +) \ No newline at end of file diff --git a/src/app/utilities/templates/email-verification.html b/src/app/utilities/templates/email-verification.html new file mode 100644 index 0000000..a86bd54 --- /dev/null +++ b/src/app/utilities/templates/email-verification.html @@ -0,0 +1,168 @@ + + + + + + + + Email Verification + + + + + +

+<!-- email-verification.html (visible copy; full HTML markup omitted) -->
+Verify your email
+Please click the button below to complete the verification process.
+[Verify email]
+Or copy and paste this link directly into your browser.
+https://chainedmetrics.com/verifyemail?token=fsdljkfhsdklfsd87f6sd
+Please note this link will expire in 24 hours.
+If you did not create an account recently, please feel free to ignore this message.
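The link in this template sends the user to the web app, which still has to redeem the token against the `POST /auth/verifyuser` route added in `src/app/auth.py`. A minimal sketch of that exchange, assuming the production API base URL and a plain `requests` client (neither is part of this diff):

```python
import requests

API_BASE = "https://api.chainedmetrics.com"  # assumed deployment URL

def redeem_verification_token(verify_token: str) -> str:
    """Exchange the 24-hour e-mail verification token for a 7-day JWT."""
    resp = requests.post(f"{API_BASE}/auth/verifyuser", json={"verifytoken": verify_token})
    if resp.status_code == 200:
        return resp.json()["access_token"]
    # 400 missing token, 403 expired/invalid, 404 unknown email, 410 already verified
    raise RuntimeError(f"verification failed ({resp.status_code}): {resp.text}")
```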
+ + diff --git a/src/app/utilities/templates/images/chained-metrics-light.png b/src/app/utilities/templates/images/chained-metrics-light.png new file mode 100644 index 0000000..66e3b0b Binary files /dev/null and b/src/app/utilities/templates/images/chained-metrics-light.png differ diff --git a/src/app/utilities/templates/notification-summary.html b/src/app/utilities/templates/notification-summary.html new file mode 100644 index 0000000..1fb67c0 --- /dev/null +++ b/src/app/utilities/templates/notification-summary.html @@ -0,0 +1,217 @@ + + + + + + + + Notification Summary + + + + + +

+<!-- notification-summary.html (visible copy; full HTML markup omitted) -->
+Notifications Summary
+A summary of email subscription notifications appears below:
+
+  Type                  Description                  Date
+  Beat Win!             SNAP exceeds 10MM signups    2021-12-27
+  Beat Loss!            TSLA exceeds 10MM signups    2021-12-27
+  Basic notification    A regular notification      2021-12-27
+  Basic notification    A regular notification      2021-12-27
+  Basic notification    A regular notification      2021-12-27
+
+Please click here if you no longer wish to receive these notifications.
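The unsubscribe line above maps to the `notifications_portfolio_events` and `notifications_market_events` columns added to the `User` model, and the `PUT /auth/user` route in `src/app/auth.py` is what toggles them. A rough client-side sketch, assuming the production base URL and a bearer token obtained from the web3 login (`/auth/login2`), since the JWT user lookup resolves identities by wallet address:

```python
import requests

API_BASE = "https://api.chainedmetrics.com"  # assumed deployment URL

def set_notification_prefs(jwt_token: str, portfolio: bool, market: bool) -> None:
    """Toggle the e-mail notification flags for the logged-in user."""
    resp = requests.put(
        f"{API_BASE}/auth/user",
        headers={"Authorization": f"Bearer {jwt_token}"},  # JWT_TOKEN_LOCATION = "headers"
        json={
            "notifications_portfolio_events": portfolio,
            "notifications_market_events": market,
        },
    )
    resp.raise_for_status()  # 400 is returned for email/username/address conflicts
```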
+ + diff --git a/src/app/utilities/utilities.py b/src/app/utilities/utilities.py new file mode 100644 index 0000000..5328e95 --- /dev/null +++ b/src/app/utilities/utilities.py @@ -0,0 +1,197 @@ +import decimal +import json +import requests +import os + +from threading import Thread +from datetime import date, datetime +from flask_mail import Mail, Message +from flask import current_app + +mail = Mail() + +def send_async_email(app, msg): + + with app.app_context(): + mail.send(msg) + +def send_verification_email(email, token): + ''' + Sends an email for verification of the email address when signing up + + Args: + email (str): The email to send to + token (str): The JWT token created for this verificaiton + + Returns: + None + ''' + msg = Message() + + msg.subject = "Verify Chained Metrics Email" + msg.recipients = [email] + msg.sender = 'serviceaccount@chainedmetrics.com' + + verify_url = current_app.config["URL"] + f"/verify?verifytoken={token}" + + msg.body = ('Thank you for signing up for Chained metrics. Please go to the below url ' + f'in your browser to verify your email\n\n{verify_url}') + + + msg.html = ( + '

Thank you for joining Chained Metrics!

' + '

Click here to verify your email and begin trading KPIs! This link expires in 24 hours.

' + f'

{verify_url}

' + ) + + Thread(target=send_async_email, args=(current_app._get_current_object(), msg)).start() + +def test_message(email): + ''' + Sends an email for verification of the email address when signing up + + Args: + email (str): The email to send to + token (str): The JWT token created for this verificaiton + + Returns: + None + ''' + msg = Message() + + msg.subject = "Verify Chained Metrics Email" + msg.recipients = [email] + msg.sender = 'serviceaccount@chainedmetrics.com' + + template_path = os.path.join(os.path.dirname(__file__), 'templates', 'email-verification.html') + with open(template_path) as fil: + html = fil.read() + + msg.html = html + + image_name = 'chained-metrics-light.png' + image_location = os.path.join(os.path.dirname(__file__), 'templates', 'images', image_name) + with open(image_location, 'rb') as fil: + image_content = fil.read() + + msg.attach(image_name, 'image/png', image_content, 'inline', headers=(('Content-ID', ''),)) + + msg.body = ('Thank you for signing up for Chained metrics. Please go to the below url ' + f'in your browser to verify your email\n\n') + + + Thread(target=send_async_email, args=(current_app._get_current_object(), msg)).start() + +def send_resetpassword_email(email, token): + ''' + Email used to reset a user's password + + Args: + email (str): The email to send to + token (str): The JWT token created for this verificaiton + + Returns: + None + ''' + + msg = Message() + + msg.subject = "Reset Chained Metrics Password" + msg.recipients = [email] + msg.sender = 'serviceaccount@chainedmetrics.com' + + verify_url = current_app.config["URL"] + f"/resetpassword?resettoken={token}" + + msg.body = ('Follow the link below to reset your password. If you did not request this please email' + ' us at info@chainedmetrics.com\n\n{verify_url}') + + + msg.html = ( + '

Click here to reset your password.

' + '

If you did not request a password reset, email us at info@chainedmetrics.com

' + f'

{verify_url}

' + ) + + Thread(target=send_async_email, args=(current_app._get_current_object(), msg)).start() + + +def subscribe_to_mailchimp_async(email): + ''' + Async call to subscribe to Mailchimp with the email which happens when a user signs up + + Args: + email (str): The email to signup with + ''' + + Thread(target=subscribe_to_mailchimp, args=(email,)).start() + + +def subscribe_to_mailchimp(email): + ''' + Adds a user's email to the Mail Chimp News letter + + Args: + email (str): The email to Add + + Returns: + result (True or Error msg): True if successful or a string of an error message + ''' + + mailchimp_url = '{url}/lists/{list}/members'.format( + url=current_app.config['MAILCHIMP_URL'], + list=current_app.config['MAILCHIMP_LIST'] + ) + + r = requests.post( + mailchimp_url, + auth=('key', current_app.config['MAILCHIMP_API_KEY']), + json=dict( + email_address=email, + status='subscribed' + ), + timeout=1 + ) + + if r.status_code < 300: + return True + else: + data = r.json() + if 'detail' in data: + return data['detail'] + else: + return 'Unable to subscribe at this time, please try again later' + +class CustomJSONEncoder(json.JSONEncoder): + def default(self, obj): + + if isinstance(obj, date) or isinstance(obj, datetime): + return obj.isoformat() + elif isinstance(obj, decimal.Decimal): + return float(obj) + else: + return json.JSONEncoder.default(self, obj) + +SWAGGER_TEMPLATE = { + "swagger": "3.4.0", + "info": { + "title": "Chained Metrics API", + "description": ("This is the backend API for all REST API calls. " + "For all requests that require authentication, " + "a JWT token needs to be added to the header. See the /auth/login endpoint for details"), + "contact": { + "responsibleOrganization": "ChainedMetrics", + "responsibleDeveloper": "Michael Watson", + "email": "michael@chainedmetrics.com" + }, + "termsOfService": "https://chainedmetrics.com/termsofservice", + }, + "components": { + "securitySchemes": { + "bearerAuth": { # arbitrary name for the security scheme + "type": "http", + "scheme": "bearer", + "bearerFormat": "JWT" + } + } + } +} diff --git a/src/flask_config.py b/src/flask_config.py index 91afa12..4a82903 100644 --- a/src/flask_config.py +++ b/src/flask_config.py @@ -4,12 +4,31 @@ class Config(object): DEBUG = True CHAINEDMETRICS_ENV = os.environ.get('CHAINEDMETRICS_ENV', 'Development') SQLALCHEMY_TRACK_MODIFICATIONS = False + MAILCHIMP_URL = 'https://us5.api.mailchimp.com/3.0' + MAILCHIMP_LIST = '10165fe090' + MAILCHIMP_API_KEY = os.environ['MAILCHIMP_API_KEY'] + + MAIL_SERVER = 'smtp.gmail.com' + MAIL_PORT = 465 + MAIL_USE_SSL = True + MAIL_USERNAME = 'serviceaccount@chainedmetrics.com' + MAIL_PASSWORD = os.environ['GMAIL_PASS'] + POLYGONSCAN_TOKEN = os.environ['POLYGONSCAN_TOKEN'] + class Production(Config): DEVELOPMENT = False DEBUG = False - DB_HOST = 'my.production.database' + DB_HOST = 'chainedmetrics-prod-do-user-9754357-0.b.db.ondigitalocean.com' + DB_USER = 'flask_app' + DB_PORT = 25060 + DATABASE = 'metrics' + DB_PASS = os.environ['DB_PASS'] JWT_SECRET_KEY = os.environ['JWT_SECRET_KEY'] + JWT_TOKEN_LOCATION = "headers" + URL = 'https://chainedmetrics.com' + + SQLALCHEMY_DATABASE_URI = f'postgresql://{DB_USER}:{DB_PASS}@{DB_HOST}:{DB_PORT}/{DATABASE}' class Development(Config): DEVELOPMENT = True @@ -21,5 +40,6 @@ class Development(Config): DB_PASS = os.environ['DB_PASS'] JWT_SECRET_KEY = os.environ['JWT_SECRET_KEY'] JWT_TOKEN_LOCATION = "headers" + URL = 'https://dev.chainedmetrics.com' SQLALCHEMY_DATABASE_URI = 
f'postgresql://{DB_USER}:{DB_PASS}@{DB_HOST}:{DB_PORT}/{DATABASE}'
diff --git a/src/migrations/versions/1d7f92b5108f_add_in_support_for_scalar_markets_with_.py b/src/migrations/versions/1d7f92b5108f_add_in_support_for_scalar_markets_with_.py new file mode 100755 index 0000000..b50087c --- /dev/null +++ b/src/migrations/versions/1d7f92b5108f_add_in_support_for_scalar_markets_with_.py @@ -0,0 +1,50 @@
+"""Add in support for scalar markets with a high and low value
+
+Revision ID: 1d7f92b5108f
+Revises: 4586c849120f
+Create Date: 2021-12-17 17:54:55.847417
+
+"""
+from alembic import op
+import sqlalchemy as sa
+import os
+
+
+# revision identifiers, used by Alembic.
+revision = '1d7f92b5108f'
+down_revision = '4586c849120f'
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.add_column('market', sa.Column('high', sa.Numeric(), nullable=True))
+    op.add_column('market', sa.Column('high_string', sa.String(), nullable=True))
+    op.add_column('market', sa.Column('low', sa.Numeric(), nullable=True))
+    op.add_column('market', sa.Column('low_string', sa.String(), nullable=True))
+
+    # This fails in SQLite, which is used for testing
+    if os.getenv('TEST') != 'true':
+        op.create_check_constraint(
+            'binary_vs_scalar_check',
+            'market',
+            '''(high is not null and high_string is not null and low is not null and low_string is not null and value is null and value_string is null)
+            OR (value_string is not null and value is not null and high is null and high_string is null and low is null and low_string is null)'''
+
+        )
+    # ### end Alembic commands ###
+
+
+def downgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+
+    # This fails in SQLite, which is used for testing
+    if os.getenv('TEST') != 'true':
+        op.drop_constraint('binary_vs_scalar_check', 'market')
+
+    op.drop_column('market', 'low_string')
+    op.drop_column('market', 'low')
+    op.drop_column('market', 'high_string')
+    op.drop_column('market', 'high')
+    # ### end Alembic commands ###
diff --git a/src/migrations/versions/4586c849120f_add_expected_reporting_date.py b/src/migrations/versions/4586c849120f_add_expected_reporting_date.py new file mode 100644 index 0000000..8d391f9 --- /dev/null +++ b/src/migrations/versions/4586c849120f_add_expected_reporting_date.py @@ -0,0 +1,28 @@
+"""Add expected reporting date
+
+Revision ID: 4586c849120f
+Revises: 8de1ab3414af
+Create Date: 2021-10-18 19:44:47.813301
+
+"""
+from alembic import op
+import sqlalchemy as sa
+
+
+# revision identifiers, used by Alembic.
+revision = '4586c849120f'
+down_revision = '8de1ab3414af'
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.add_column('market', sa.Column('expected_reporting_date', sa.Date(), nullable=True))
+    # ### end Alembic commands ###
+
+
+def downgrade():
+    # ### commands auto generated by Alembic - please adjust!
### + op.drop_column('market', 'expected_reporting_date') + # ### end Alembic commands ### diff --git a/src/migrations/versions/48ce5780e562_add_user_db.py b/src/migrations/versions/48ce5780e562_add_user_db.py index 65df846..624661e 100755 --- a/src/migrations/versions/48ce5780e562_add_user_db.py +++ b/src/migrations/versions/48ce5780e562_add_user_db.py @@ -23,29 +23,17 @@ def upgrade(): sa.Column('email', sa.Text(), nullable=False), sa.Column('admin', sa.Boolean(), nullable=False), sa.Column('password', sa.String(length=255), nullable=False), - sa.Column('first_name', sa.Text(), nullable=False), - sa.Column('last_name', sa.Text(), nullable=False), + sa.Column('first_name', sa.Text()), + sa.Column('last_name', sa.Text()), sa.Column('active', sa.Boolean(), nullable=False), sa.Column('created_on', sa.DateTime(), nullable=True), sa.PrimaryKeyConstraint('id'), sa.UniqueConstraint('email') ) - op.alter_column('request_access', 'full_name', - existing_type=sa.VARCHAR(length=500), - nullable=False) - op.alter_column('request_access', 'email', - existing_type=sa.VARCHAR(length=100), - nullable=False) # ### end Alembic commands ### def downgrade(): # ### commands auto generated by Alembic - please adjust! ### - op.alter_column('request_access', 'email', - existing_type=sa.VARCHAR(length=100), - nullable=True) - op.alter_column('request_access', 'full_name', - existing_type=sa.VARCHAR(length=500), - nullable=True) op.drop_table('user') # ### end Alembic commands ### diff --git a/src/migrations/versions/4c2be755eff7_add_resolved_value_column.py b/src/migrations/versions/4c2be755eff7_add_resolved_value_column.py new file mode 100644 index 0000000..858c1cc --- /dev/null +++ b/src/migrations/versions/4c2be755eff7_add_resolved_value_column.py @@ -0,0 +1,28 @@ +"""Add resolved_value column + +Revision ID: 4c2be755eff7 +Revises: 599c122f2c73 +Create Date: 2021-10-06 18:14:54.387954 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = '4c2be755eff7' +down_revision = '599c122f2c73' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.add_column('market', sa.Column('resolved_value', sa.Numeric(), nullable=True)) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.drop_column('market', 'resolved_value') + # ### end Alembic commands ### diff --git a/src/migrations/versions/65d911b95a1f_.py b/src/migrations/versions/65d911b95a1f_.py new file mode 100644 index 0000000..fb7fb54 --- /dev/null +++ b/src/migrations/versions/65d911b95a1f_.py @@ -0,0 +1,28 @@ +"""empty message + +Revision ID: 65d911b95a1f +Revises: 9baea9dac1a5 +Create Date: 2022-01-04 18:48:15.751491 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = '65d911b95a1f' +down_revision = '9baea9dac1a5' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.add_column('user', sa.Column('username', sa.Text(), nullable=True)) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! 
### + op.drop_column('user', 'username') + # ### end Alembic commands ### diff --git a/src/migrations/versions/8ab5c8a42848_.py b/src/migrations/versions/8ab5c8a42848_.py index 7ff5c4d..256d7e1 100644 --- a/src/migrations/versions/8ab5c8a42848_.py +++ b/src/migrations/versions/8ab5c8a42848_.py @@ -24,7 +24,7 @@ def upgrade(): sa.Column('fiscal_period', sa.String(), nullable=False), sa.Column('metric', sa.String(), nullable=False), sa.Column('ticker', sa.String(), nullable=False), - sa.Column('value_string', sa.String(), nullable=False), + sa.Column('value_string', sa.String()), sa.Column('value', sa.Numeric(), nullable=True), sa.Column('beat_address', sa.String(), nullable=True), sa.Column('beat_price', sa.Numeric(), nullable=True), diff --git a/src/migrations/versions/8de1ab3414af_add_a_symbol_column_for_each_market.py b/src/migrations/versions/8de1ab3414af_add_a_symbol_column_for_each_market.py new file mode 100755 index 0000000..154847e --- /dev/null +++ b/src/migrations/versions/8de1ab3414af_add_a_symbol_column_for_each_market.py @@ -0,0 +1,28 @@ +"""Add a symbol column for each market and expected Reporting Date + +Revision ID: 8de1ab3414af +Revises: 4c2be755eff7 +Create Date: 2021-10-18 16:33:28.578703 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = '8de1ab3414af' +down_revision = '4c2be755eff7' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.add_column('market', sa.Column('metric_symbol', sa.String(), nullable=True)) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.drop_column('market', 'metric_symbol') + # ### end Alembic commands ### diff --git a/src/migrations/versions/9baea9dac1a5_.py b/src/migrations/versions/9baea9dac1a5_.py new file mode 100644 index 0000000..2eeda72 --- /dev/null +++ b/src/migrations/versions/9baea9dac1a5_.py @@ -0,0 +1,30 @@ +"""empty message + +Revision ID: 9baea9dac1a5 +Revises: c864421f8ab1 +Create Date: 2022-01-04 18:23:37.351499 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = '9baea9dac1a5' +down_revision = 'c864421f8ab1' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.add_column('user', sa.Column('notifications_portfolio_events', sa.Boolean(), nullable=True)) + op.add_column('user', sa.Column('notifications_market_events', sa.Boolean(), nullable=True)) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.drop_column('user', 'notifications_market_events') + op.drop_column('user', 'notifications_portfolio_events') + # ### end Alembic commands ### diff --git a/src/migrations/versions/c864421f8ab1_.py b/src/migrations/versions/c864421f8ab1_.py new file mode 100755 index 0000000..89eb1d7 --- /dev/null +++ b/src/migrations/versions/c864421f8ab1_.py @@ -0,0 +1,43 @@ +"""empty message + +Revision ID: c864421f8ab1 +Revises: 1d7f92b5108f +Create Date: 2021-12-22 16:56:18.913612 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = 'c864421f8ab1' +down_revision = '1d7f92b5108f' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! 
### + op.create_table('matic_faucet_queue', + sa.Column('id', sa.BigInteger(), autoincrement=True, nullable=False), + sa.Column('email', sa.Text(), nullable=False), + sa.Column('address', sa.String(), nullable=False), + sa.Column('error_msg', sa.Text(), nullable=True), + sa.Column('error_time', sa.DateTime(), nullable=True), + sa.ForeignKeyConstraint(['email'], ['user.email'], ), + sa.PrimaryKeyConstraint('id'), + sa.UniqueConstraint('email') + ) + op.add_column('user', sa.Column('address', sa.String(), nullable=True)) + op.add_column('user', sa.Column('matic_recieved', sa.Numeric(), nullable=True)) + op.add_column('user', sa.Column('matic_recieved_date', sa.DateTime(), nullable=True)) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.drop_column('user', 'matic_recieved_date') + op.drop_column('user', 'matic_recieved') + op.drop_column('user', 'address') + op.drop_table('matic_faucet_queue') + # ### end Alembic commands ### diff --git a/src/migrations/versions/cf2a212b5ceb_.py b/src/migrations/versions/cf2a212b5ceb_.py new file mode 100755 index 0000000..599d45d --- /dev/null +++ b/src/migrations/versions/cf2a212b5ceb_.py @@ -0,0 +1,36 @@ +"""empty message + +Revision ID: cf2a212b5ceb +Revises: 65d911b95a1f +Create Date: 2022-03-03 18:18:45.852564 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = 'cf2a212b5ceb' +down_revision = '65d911b95a1f' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.add_column('market', sa.Column('tick_size', sa.Numeric(), nullable=True)) + op.add_column('market', sa.Column('unit_abbr', sa.String(), nullable=True)) + op.add_column('market', sa.Column('unit_desc', sa.String(), nullable=True)) + op.add_column('market', sa.Column('company_name', sa.String(), nullable=True)) + op.add_column('market', sa.Column('about', sa.String(), nullable=True)) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! 
### + op.drop_column('market', 'about') + op.drop_column('market', 'company_name') + op.drop_column('market', 'unit_desc') + op.drop_column('market', 'unit_abbr') + op.drop_column('market', 'tick_size') + # ### end Alembic commands ### diff --git a/src/notifications/binary_market_abi.json b/src/notifications/binary_market_abi.json new file mode 100644 index 0000000..bdc6323 --- /dev/null +++ b/src/notifications/binary_market_abi.json @@ -0,0 +1 @@ +[{"inputs": [{"internalType": "uint256", "name": "_strikeValue", "type": "uint256"}, {"internalType": "string", "name": "_chainedMetricUrl", "type": "string"}, {"internalType": "address", "name": "_oracle", "type": "address"}, {"internalType": "string", "name": "_jobId", "type": "string"}, {"internalType": "uint256", "name": "_feeDecimals", "type": "uint256"}, {"internalType": "address", "name": "_chainLinkAddress", "type": "address"}, {"internalType": "address", "name": "_tokenAddress", "type": "address"}, {"internalType": "address", "name": "_beatAddress", "type": "address"}, {"internalType": "address", "name": "_missAddress", "type": "address"}], "stateMutability": "nonpayable", "type": "constructor", "name": "constructor"}, {"anonymous": false, "inputs": [{"indexed": true, "internalType": "bytes32", "name": "id", "type": "bytes32"}], "name": "ChainlinkCancelled", "type": "event"}, {"anonymous": false, "inputs": [{"indexed": true, "internalType": "bytes32", "name": "id", "type": "bytes32"}], "name": "ChainlinkFulfilled", "type": "event"}, {"anonymous": false, "inputs": [{"indexed": true, "internalType": "bytes32", "name": "id", "type": "bytes32"}], "name": "ChainlinkRequested", "type": "event"}, {"anonymous": false, "inputs": [{"indexed": false, "internalType": "string", "name": "message", "type": "string"}, {"indexed": false, "internalType": "string", "name": "url", "type": "string"}, {"indexed": false, "internalType": "uint256", "name": "value", "type": "uint256"}], "name": "GetMetricLog", "type": "event"}, {"anonymous": false, "inputs": [{"indexed": true, "internalType": "uint256", "name": "eventType", "type": "uint256"}, {"indexed": true, "internalType": "address", "name": "orderAddress", "type": "address"}, {"indexed": true, "internalType": "bool", "name": "beat", "type": "bool"}, {"indexed": false, "internalType": "uint256", "name": "price", "type": "uint256"}, {"indexed": false, "internalType": "uint256", "name": "quantity", "type": "uint256"}], "name": "OrderEvent", "type": "event"}, {"inputs": [], "name": "beatAddress", "outputs": [{"internalType": "address", "name": "", "type": "address"}], "stateMutability": "view", "type": "function"}, {"inputs": [], "name": "beatContract", "outputs": [{"internalType": "contract MetricInterface", "name": "", "type": "address"}], "stateMutability": "view", "type": "function"}, {"inputs": [{"internalType": "bool", "name": "beat", "type": "bool"}, {"internalType": "uint256", "name": "price", "type": "uint256"}, {"internalType": "uint256", "name": "quantity", "type": "uint256"}], "name": "cancelOrder", "outputs": [], "stateMutability": "nonpayable", "type": "function"}, {"inputs": [], "name": "chainLinkAddress", "outputs": [{"internalType": "address", "name": "", "type": "address"}], "stateMutability": "view", "type": "function"}, {"inputs": [], "name": "chainedMetricUrl", "outputs": [{"internalType": "string", "name": "", "type": "string"}], "stateMutability": "view", "type": "function"}, {"inputs": [], "name": "creatorAddress", "outputs": [{"internalType": "address", "name": "", "type": "address"}], 
"stateMutability": "view", "type": "function"}, {"inputs": [], "name": "fee", "outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}], "stateMutability": "view", "type": "function"}, {"inputs": [{"internalType": "bytes32", "name": "_requestId", "type": "bytes32"}, {"internalType": "uint256", "name": "_value", "type": "uint256"}], "name": "fulfill", "outputs": [], "stateMutability": "nonpayable", "type": "function"}, {"inputs": [], "name": "getMetricData", "outputs": [{"internalType": "bytes32", "name": "requestId", "type": "bytes32"}], "stateMutability": "nonpayable", "type": "function"}, {"inputs": [], "name": "getOutcome", "outputs": [{"internalType": "uint16", "name": "result", "type": "uint16"}], "stateMutability": "nonpayable", "type": "function"}, {"inputs": [], "name": "isScalar", "outputs": [{"internalType": "bool", "name": "", "type": "bool"}], "stateMutability": "view", "type": "function"}, {"inputs": [], "name": "jobId", "outputs": [{"internalType": "string", "name": "", "type": "string"}], "stateMutability": "view", "type": "function"}, {"inputs": [], "name": "maxOrderDepth", "outputs": [{"internalType": "uint16", "name": "", "type": "uint16"}], "stateMutability": "view", "type": "function"}, {"inputs": [], "name": "metricValue", "outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}], "stateMutability": "view", "type": "function"}, {"inputs": [], "name": "missAddress", "outputs": [{"internalType": "address", "name": "", "type": "address"}], "stateMutability": "view", "type": "function"}, {"inputs": [], "name": "missContract", "outputs": [{"internalType": "contract MetricInterface", "name": "", "type": "address"}], "stateMutability": "view", "type": "function"}, {"inputs": [{"internalType": "address", "name": "userAddress", "type": "address"}, {"internalType": "uint256[]", "name": "prices", "type": "uint256[]"}], "name": "openOrders", "outputs": [{"components": [{"internalType": "address", "name": "orderAddress", "type": "address"}, {"internalType": "uint256", "name": "quantity", "type": "uint256"}, {"internalType": "uint256", "name": "price", "type": "uint256"}, {"internalType": "bool", "name": "beat", "type": "bool"}], "internalType": "struct ChainedLibrary.OpenOrder[20]", "name": "addressOrders", "type": "tuple[20]"}], "stateMutability": "view", "type": "function"}, {"inputs": [], "name": "oracle", "outputs": [{"internalType": "address", "name": "", "type": "address"}], "stateMutability": "view", "type": "function"}, {"inputs": [{"internalType": "bool", "name": "", "type": "bool"}, {"internalType": "uint256", "name": "", "type": "uint256"}, {"internalType": "uint256", "name": "", "type": "uint256"}], "name": "orders", "outputs": [{"internalType": "address", "name": "orderAddress", "type": "address"}, {"internalType": "uint256", "name": "quantity", "type": "uint256"}], "stateMutability": "view", "type": "function"}, {"inputs": [{"internalType": "bool", "name": "", "type": "bool"}, {"internalType": "uint256", "name": "", "type": "uint256"}], "name": "ordersQuantity", "outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}], "stateMutability": "view", "type": "function"}, {"inputs": [], "name": "outcome", "outputs": [{"internalType": "uint8", "name": "", "type": "uint8"}], "stateMutability": "view", "type": "function"}, {"inputs": [], "name": "payout", "outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}], "stateMutability": "view", "type": "function"}, {"inputs": [{"internalType": "bool", "name": "beat", "type": 
"bool"}], "name": "resolveContract", "outputs": [{"internalType": "uint16", "name": "outcome", "type": "uint16"}], "stateMutability": "nonpayable", "type": "function"}, {"inputs": [], "name": "strikeValue", "outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}], "stateMutability": "view", "type": "function"}, {"inputs": [{"internalType": "bool", "name": "beat", "type": "bool"}, {"internalType": "uint256", "name": "price", "type": "uint256"}, {"internalType": "uint256", "name": "quantity", "type": "uint256"}], "name": "submitOrder", "outputs": [{"internalType": "bool", "name": "success", "type": "bool"}], "stateMutability": "nonpayable", "type": "function"}, {"inputs": [{"internalType": "bool", "name": "beat", "type": "bool"}, {"internalType": "uint256", "name": "price", "type": "uint256"}, {"internalType": "uint256", "name": "quantity", "type": "uint256"}], "name": "submitOrderPrice", "outputs": [{"internalType": "bool", "name": "success", "type": "bool"}], "stateMutability": "nonpayable", "type": "function"}, {"inputs": [], "name": "tokenAddress", "outputs": [{"internalType": "address", "name": "", "type": "address"}], "stateMutability": "view", "type": "function"}, {"inputs": [], "name": "tokenContract", "outputs": [{"internalType": "contract tokenERC20Interface", "name": "", "type": "address"}], "stateMutability": "view", "type": "function"}, {"inputs": [], "name": "tokenConversion", "outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}], "stateMutability": "view", "type": "function"}, {"inputs": [], "name": "valueSet", "outputs": [{"internalType": "bool", "name": "", "type": "bool"}], "stateMutability": "view", "type": "function"}] \ No newline at end of file diff --git a/src/notifications/conda_env.yaml b/src/notifications/conda_env.yaml new file mode 100755 index 0000000..877c72d --- /dev/null +++ b/src/notifications/conda_env.yaml @@ -0,0 +1,103 @@ +name: app-worker +channels: + - conda-forge + - defaults +dependencies: + - _libgcc_mutex=0.1=main + - _openmp_mutex=4.5=1_gnu + - alembic=1.6.5=pyhd8ed1ab_0 + - attrs=21.2.0=pyhd8ed1ab_0 + - backcall=0.2.0=pyhd3eb1b0_0 + - blinker=1.4=py_1 + - brotlipy=0.7.0=py39h27cfd23_1003 + - ca-certificates=2021.7.5=h06a4308_1 + - cachetools=4.2.2=pyhd3eb1b0_0 + - certifi=2021.5.30=py39h06a4308_0 + - cffi=1.14.6=py39h400218f_0 + - chardet=4.0.0=py39h06a4308_1003 + - click=8.0.1=pyhd3eb1b0_0 + - conda=4.10.3=py39hf3d152e_0 + - conda-package-handling=1.7.3=py39h27cfd23_1 + - cryptography=3.4.7=py39hd23ed53_0 + - decorator=5.0.9=pyhd3eb1b0_0 + - flasgger=0.9.5=pyhd8ed1ab_1 + - flask=1.1.2=pyhd3eb1b0_0 + - flask-jwt-extended=4.3.0=pyhd8ed1ab_0 + - flask-mail=0.9.1=py_2 + - flask-migrate=3.1.0=pyhd8ed1ab_0 + - flask-sqlalchemy=2.5.1=pyhd3eb1b0_0 + - greenlet=1.1.1=py39h295c915_0 + - gunicorn=20.1.0=py39h06a4308_0 + - idna=2.10=pyhd3eb1b0_0 + - importlib-metadata=3.10.0=py39h06a4308_0 + - iniconfig=1.1.1=pyhd3eb1b0_0 + - ipython=7.26.0=py39hb070fc8_0 + - ipython_genutils=0.2.0=pyhd3eb1b0_1 + - itsdangerous=2.0.1=pyhd3eb1b0_0 + - jedi=0.18.0=py39h06a4308_1 + - jinja2=3.0.1=pyhd3eb1b0_0 + - jsonschema=3.2.0=pyhd8ed1ab_3 + - krb5=1.19.2=hac12032_0 + - ld_impl_linux-64=2.35.1=h7274673_9 + - libedit=3.1.20210714=h7f8727e_0 + - libffi=3.3=he6710b0_2 + - libgcc-ng=9.3.0=h5101ec6_17 + - libgomp=9.3.0=h5101ec6_17 + - libpq=12.2=h553bfba_1 + - libstdcxx-ng=9.3.0=hd4cf53a_17 + - mako=1.1.5=pyhd8ed1ab_0 + - markupsafe=2.0.1=py39h27cfd23_0 + - matplotlib-inline=0.1.2=pyhd3eb1b0_2 + - mistune=0.8.4=py39h3811e60_1004 + - 
more-itertools=8.8.0=pyhd3eb1b0_0 + - ncurses=6.2=he6710b0_1 + - openssl=1.1.1l=h7f8727e_0 + - packaging=21.0=pyhd3eb1b0_0 + - parso=0.8.2=pyhd3eb1b0_0 + - pexpect=4.8.0=pyhd3eb1b0_3 + - pickleshare=0.7.5=pyhd3eb1b0_1003 + - pip=21.1.3=py39h06a4308_0 + - pluggy=0.13.1=py39h06a4308_0 + - prompt-toolkit=3.0.17=pyh06a4308_0 + - psycopg2=2.8.6=py39h3c74f83_1 + - ptyprocess=0.7.0=pyhd3eb1b0_2 + - py=1.10.0=pyhd3eb1b0_0 + - pycosat=0.6.3=py39h27cfd23_0 + - pycparser=2.20=py_2 + - pygments=2.10.0=pyhd3eb1b0_0 + - pyjwt=2.1.0=pyhd8ed1ab_0 + - pyopenssl=20.0.1=pyhd3eb1b0_1 + - pyparsing=2.4.7=pyhd3eb1b0_0 + - pyrsistent=0.17.3=py39h3811e60_2 + - pysocks=1.7.1=py39h06a4308_0 + - pytest=6.2.4=py39h06a4308_2 + - python=3.9.5=h12debd9_4 + - python-dateutil=2.8.2=pyhd8ed1ab_0 + - python-editor=1.0.4=py_0 + - python_abi=3.9=2_cp39 + - pyyaml=5.4.1=py39h3811e60_0 + - readline=8.1=h27cfd23_0 + - requests=2.25.1=pyhd3eb1b0_0 + - requests-mock=1.9.3=pyhd8ed1ab_0 + - ruamel_yaml=0.15.100=py39h27cfd23_0 + - setuptools=52.0.0=py39h06a4308_0 + - six=1.16.0=pyhd3eb1b0_0 + - sqlalchemy=1.4.22=py39h7f8727e_0 + - sqlite=3.36.0=hc218d9a_0 + - tk=8.6.10=hbc83047_0 + - toml=0.10.2=pyhd3eb1b0_0 + - tqdm=4.61.2=pyhd3eb1b0_1 + - traitlets=5.0.5=pyhd3eb1b0_0 + - tzdata=2021a=h52ac0ba_0 + - urllib3=1.26.6=pyhd3eb1b0_1 + - wcwidth=0.2.5=py_0 + - werkzeug=1.0.1=pyhd3eb1b0_0 + - wheel=0.36.2=pyhd3eb1b0_0 + - xz=5.2.5=h7b6447c_0 + - yaml=0.2.5=h7b6447c_0 + - zipp=3.5.0=pyhd3eb1b0_0 + - zlib=1.2.11=h7b6447c_3 + - pip: + - eth-brownie==1.17.2 + +prefix: /opt/conda/envs/app diff --git a/src/notifications/contract_analyzer.py b/src/notifications/contract_analyzer.py new file mode 100644 index 0000000..34aaa05 --- /dev/null +++ b/src/notifications/contract_analyzer.py @@ -0,0 +1,183 @@ +import logging +import sys +import os +import json +import psycopg2 +import requests +import logging + +from brownie import accounts, network, project, Contract +from collections import defaultdict + +logging.basicConfig( + stream=sys.stdout, level=logging.INFO, + format='%(asctime)s.%(msecs)03d %(levelname)s: %(message)s', + datefmt='%Y-%m-%d %H:%M:%S' +) + +DEV_DATABASE = "chainedmetrics-dev-do-user-9754357-0.b.db.ondigitalocean.com" +PROD_DATABASE = "chainedmetrics-prod-do-user-9754357-0.b.db.ondigitalocean.com" +MAX_DEPTH = 20 +POLYSCAN_API_KEY = "WC476C15XFJK19P3HS9UQFBYY38W4JGBFZ" + +def open_orders(broker): + ''' + Returns the open orders for the broker + + Arguments: + broker (brownie.Contract): A broker for a given exchange + + Returns: + orders (dict): Open orders for each beat/miss at each price including with + the corresponding address and quantity + + eg. 
+ orders = {True:{ + 10: [0x8..98f4, 100], {0x7..8fa2, 30}}, + 50: [0x8f2..b4cc, 70] + }, + False:{ + 10: [0x6..32bc, 200] + } + ''' + + logging.info(f'Getting Open Orders for {broker}') + max_depth = MAX_DEPTH if MAX_DEPTH else broker.maxOrderDepth() + prices = [10, 20, 30, 40, 50, 60, 70, 80, 90] + + order_book = defaultdict(lambda: defaultdict(list)) + + orders = broker.orders + for beat in (True, False): + for p in prices: + for i in range(MAX_DEPTH): + try: + address, quantity = orders(beat, p, i) + order_book[beat][p].append((address, quantity)) + except ValueError: + break + + return order_book + + +def issued_tokens(token_address): + ''' + Returns what addresses own the corresponding tokens and how much + + Arguments: + token_address (string): The address for the token + + Returns: + owners (dict): A dictionary of addresses and the quantity owned + + Notes: + Cannot hit this endpoint more thean 5x/second + ''' + + logging.info(f'Getting Issuances for {token_address}') + url = ( + "https://api.polygonscan.com/api?module=account&action=tokentx&" + f"contractaddress={token_address}&sort=asc&" + f"apikey={POLYSCAN_API_KEY}" + ) + + resp = requests.get(url) + resp.raise_for_status() + transaction_dict = resp.json() + + if len(transaction_dict['result']) > 1000: + raise Exception('Need to iterate :)') + + transaction_history = defaultdict(lambda: 0) + for txn in transaction_dict['result']: + if txn['from'] == '0x0000000000000000000000000000000000000000': + transaction_history[txn['to']] += int(txn['value']) + + return transaction_history + + +def get_all_markets(conn): + + cursor = conn.cursor() + cursor.execute('SELECT * FROM market') + + headers = [desc[0] for desc in cursor.description] + market_dict = [dict(zip(headers, row)) for row in cursor] + + return market_dict + +def setup_network(): + + account = accounts.add(os.environ['PRIVATE_KEY']) + project.load('.', 'NotificationAnalysis') + network.connect('polygon-main') + + return account + +def get_contracts(broker_address, beat_address, miss_address, market='binary', broker_abi=None, erc20_abi=None): + + abi_dict = { + 'binary': 'broker_abi.json', + 'scalar': 'scalar_abi.json' + } + + assert market in abi_dict, 'market must be binary OR scalar' + + if not broker_abi: + with open(abi_dict[market]) as fil: + broker_abi = json.load(fil) + + if not erc20_abi: + with open('erc20_abi.json') as fil: + erc20_abi = json.load(fil) + + broker = Contract.from_abi('Broker', broker_address, broker_abi) + beat = Contract.from_abi('Beat', beat_address, erc20_abi) + miss = Contract.from_abi('Miss', miss_address, erc20_abi) + + + return broker, beat, miss + +def get_connection(): + + host = PROD_DATABASE if os.getenv('CHAINDEMETRICS_ENV') == 'Production' else DEV_DATABASE + + logging.info(f'Connecting to {host}') + + conn = psycopg2.connect( + host=host, + port=25060, + dbname="metrics", + user="flask_app", + password=os.getenv("DB_PASS") + ) + + return conn + +def main(): + + logging.info('Begginning analysis and connecting to database') + conn = get_connection() + + logging.info('Accessing All Markets') + markets = get_all_markets(conn) + + logging.info('Connecting to polygon network') + account = setup_network() + + logging.info('Iterating over all Markets') + all_data = [] + for m in markets: + market_data = {} + market_data['meta_data'] = m + if m['broker_address']: + broker, beat, miss = get_contracts( + m['broker_address'], m['beat_address'], m['miss_address'] + ) + market_data['data'] = (open_orders(broker), issued_tokens(beat), 
issued_tokens(miss)) + else: + market_data['data'] = None + + all_data.append(market_data) + + return all_data \ No newline at end of file diff --git a/src/notifications/erc20_abi.json b/src/notifications/erc20_abi.json new file mode 100644 index 0000000..668d697 --- /dev/null +++ b/src/notifications/erc20_abi.json @@ -0,0 +1,222 @@ +[ + { + "constant": true, + "inputs": [], + "name": "name", + "outputs": [ + { + "name": "", + "type": "string" + } + ], + "payable": false, + "stateMutability": "view", + "type": "function" + }, + { + "constant": false, + "inputs": [ + { + "name": "_spender", + "type": "address" + }, + { + "name": "_value", + "type": "uint256" + } + ], + "name": "approve", + "outputs": [ + { + "name": "", + "type": "bool" + } + ], + "payable": false, + "stateMutability": "nonpayable", + "type": "function" + }, + { + "constant": true, + "inputs": [], + "name": "totalSupply", + "outputs": [ + { + "name": "", + "type": "uint256" + } + ], + "payable": false, + "stateMutability": "view", + "type": "function" + }, + { + "constant": false, + "inputs": [ + { + "name": "_from", + "type": "address" + }, + { + "name": "_to", + "type": "address" + }, + { + "name": "_value", + "type": "uint256" + } + ], + "name": "transferFrom", + "outputs": [ + { + "name": "", + "type": "bool" + } + ], + "payable": false, + "stateMutability": "nonpayable", + "type": "function" + }, + { + "constant": true, + "inputs": [], + "name": "decimals", + "outputs": [ + { + "name": "", + "type": "uint8" + } + ], + "payable": false, + "stateMutability": "view", + "type": "function" + }, + { + "constant": true, + "inputs": [ + { + "name": "_owner", + "type": "address" + } + ], + "name": "balanceOf", + "outputs": [ + { + "name": "balance", + "type": "uint256" + } + ], + "payable": false, + "stateMutability": "view", + "type": "function" + }, + { + "constant": true, + "inputs": [], + "name": "symbol", + "outputs": [ + { + "name": "", + "type": "string" + } + ], + "payable": false, + "stateMutability": "view", + "type": "function" + }, + { + "constant": false, + "inputs": [ + { + "name": "_to", + "type": "address" + }, + { + "name": "_value", + "type": "uint256" + } + ], + "name": "transfer", + "outputs": [ + { + "name": "", + "type": "bool" + } + ], + "payable": false, + "stateMutability": "nonpayable", + "type": "function" + }, + { + "constant": true, + "inputs": [ + { + "name": "_owner", + "type": "address" + }, + { + "name": "_spender", + "type": "address" + } + ], + "name": "allowance", + "outputs": [ + { + "name": "", + "type": "uint256" + } + ], + "payable": false, + "stateMutability": "view", + "type": "function" + }, + { + "payable": true, + "stateMutability": "payable", + "type": "fallback" + }, + { + "anonymous": false, + "inputs": [ + { + "indexed": true, + "name": "owner", + "type": "address" + }, + { + "indexed": true, + "name": "spender", + "type": "address" + }, + { + "indexed": false, + "name": "value", + "type": "uint256" + } + ], + "name": "Approval", + "type": "event" + }, + { + "anonymous": false, + "inputs": [ + { + "indexed": true, + "name": "from", + "type": "address" + }, + { + "indexed": true, + "name": "to", + "type": "address" + }, + { + "indexed": false, + "name": "value", + "type": "uint256" + } + ], + "name": "Transfer", + "type": "event" + } +] \ No newline at end of file diff --git a/src/notifications/scalar_market_abi.json b/src/notifications/scalar_market_abi.json new file mode 100644 index 0000000..8568777 --- /dev/null +++ b/src/notifications/scalar_market_abi.json @@ -0,0 +1 @@ 
+[{"inputs": [{"internalType": "uint256", "name": "_high", "type": "uint256"}, {"internalType": "uint256", "name": "_low", "type": "uint256"}, {"internalType": "string", "name": "_chainedMetricUrl", "type": "string"}, {"internalType": "address", "name": "_oracle", "type": "address"}, {"internalType": "string", "name": "_jobId", "type": "string"}, {"internalType": "uint256", "name": "_feeDecimals", "type": "uint256"}, {"internalType": "address", "name": "_chainLinkAddress", "type": "address"}, {"internalType": "address", "name": "_tokenAddress", "type": "address"}, {"internalType": "address", "name": "_beatAddress", "type": "address"}, {"internalType": "address", "name": "_missAddress", "type": "address"}], "stateMutability": "nonpayable", "type": "constructor", "name": "constructor"}, {"anonymous": false, "inputs": [{"indexed": true, "internalType": "bytes32", "name": "id", "type": "bytes32"}], "name": "ChainlinkCancelled", "type": "event"}, {"anonymous": false, "inputs": [{"indexed": true, "internalType": "bytes32", "name": "id", "type": "bytes32"}], "name": "ChainlinkFulfilled", "type": "event"}, {"anonymous": false, "inputs": [{"indexed": true, "internalType": "bytes32", "name": "id", "type": "bytes32"}], "name": "ChainlinkRequested", "type": "event"}, {"anonymous": false, "inputs": [{"indexed": false, "internalType": "string", "name": "message", "type": "string"}, {"indexed": false, "internalType": "string", "name": "url", "type": "string"}, {"indexed": false, "internalType": "uint256", "name": "value", "type": "uint256"}], "name": "GetMetricLog", "type": "event"}, {"anonymous": false, "inputs": [{"indexed": true, "internalType": "uint256", "name": "eventType", "type": "uint256"}, {"indexed": true, "internalType": "address", "name": "orderAddress", "type": "address"}, {"indexed": true, "internalType": "bool", "name": "beat", "type": "bool"}, {"indexed": false, "internalType": "uint256", "name": "price", "type": "uint256"}, {"indexed": false, "internalType": "uint256", "name": "quantity", "type": "uint256"}], "name": "OrderEvent", "type": "event"}, {"inputs": [], "name": "beatAddress", "outputs": [{"internalType": "address", "name": "", "type": "address"}], "stateMutability": "view", "type": "function"}, {"inputs": [], "name": "beatContract", "outputs": [{"internalType": "contract MetricInterface", "name": "", "type": "address"}], "stateMutability": "view", "type": "function"}, {"inputs": [{"internalType": "bool", "name": "beat", "type": "bool"}, {"internalType": "uint256", "name": "price", "type": "uint256"}, {"internalType": "uint256", "name": "quantity", "type": "uint256"}], "name": "cancelOrder", "outputs": [], "stateMutability": "nonpayable", "type": "function"}, {"inputs": [], "name": "chainLinkAddress", "outputs": [{"internalType": "address", "name": "", "type": "address"}], "stateMutability": "view", "type": "function"}, {"inputs": [], "name": "chainedMetricUrl", "outputs": [{"internalType": "string", "name": "", "type": "string"}], "stateMutability": "view", "type": "function"}, {"inputs": [], "name": "creatorAddress", "outputs": [{"internalType": "address", "name": "", "type": "address"}], "stateMutability": "view", "type": "function"}, {"inputs": [], "name": "fee", "outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}], "stateMutability": "view", "type": "function"}, {"inputs": [{"internalType": "bytes32", "name": "_requestId", "type": "bytes32"}, {"internalType": "uint256", "name": "_value", "type": "uint256"}], "name": "fulfill", "outputs": [], 
"stateMutability": "nonpayable", "type": "function"}, {"inputs": [], "name": "getMetricData", "outputs": [{"internalType": "bytes32", "name": "requestId", "type": "bytes32"}], "stateMutability": "nonpayable", "type": "function"}, {"inputs": [{"internalType": "bool", "name": "beat", "type": "bool"}, {"internalType": "uint256", "name": "metricValue", "type": "uint256"}], "name": "getScalarPayout", "outputs": [{"internalType": "uint256", "name": "scalarPayout", "type": "uint256"}], "stateMutability": "view", "type": "function"}, {"inputs": [], "name": "high", "outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}], "stateMutability": "view", "type": "function"}, {"inputs": [], "name": "isScalar", "outputs": [{"internalType": "bool", "name": "", "type": "bool"}], "stateMutability": "view", "type": "function"}, {"inputs": [], "name": "jobId", "outputs": [{"internalType": "string", "name": "", "type": "string"}], "stateMutability": "view", "type": "function"}, {"inputs": [], "name": "low", "outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}], "stateMutability": "view", "type": "function"}, {"inputs": [], "name": "maxOrderDepth", "outputs": [{"internalType": "uint16", "name": "", "type": "uint16"}], "stateMutability": "view", "type": "function"}, {"inputs": [], "name": "metricValue", "outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}], "stateMutability": "view", "type": "function"}, {"inputs": [], "name": "missAddress", "outputs": [{"internalType": "address", "name": "", "type": "address"}], "stateMutability": "view", "type": "function"}, {"inputs": [], "name": "missContract", "outputs": [{"internalType": "contract MetricInterface", "name": "", "type": "address"}], "stateMutability": "view", "type": "function"}, {"inputs": [{"internalType": "address", "name": "userAddress", "type": "address"}, {"internalType": "uint256[]", "name": "prices", "type": "uint256[]"}], "name": "openOrders", "outputs": [{"components": [{"internalType": "address", "name": "orderAddress", "type": "address"}, {"internalType": "uint256", "name": "quantity", "type": "uint256"}, {"internalType": "uint256", "name": "price", "type": "uint256"}, {"internalType": "bool", "name": "beat", "type": "bool"}], "internalType": "struct ChainedLibrary.OpenOrder[20]", "name": "addressOrders", "type": "tuple[20]"}], "stateMutability": "view", "type": "function"}, {"inputs": [], "name": "oracle", "outputs": [{"internalType": "address", "name": "", "type": "address"}], "stateMutability": "view", "type": "function"}, {"inputs": [{"internalType": "bool", "name": "", "type": "bool"}, {"internalType": "uint256", "name": "", "type": "uint256"}, {"internalType": "uint256", "name": "", "type": "uint256"}], "name": "orders", "outputs": [{"internalType": "address", "name": "orderAddress", "type": "address"}, {"internalType": "uint256", "name": "quantity", "type": "uint256"}], "stateMutability": "view", "type": "function"}, {"inputs": [{"internalType": "bool", "name": "", "type": "bool"}, {"internalType": "uint256", "name": "", "type": "uint256"}], "name": "ordersQuantity", "outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}], "stateMutability": "view", "type": "function"}, {"inputs": [], "name": "outcome", "outputs": [{"internalType": "uint8", "name": "", "type": "uint8"}], "stateMutability": "view", "type": "function"}, {"inputs": [], "name": "payout", "outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}], "stateMutability": "view", "type": "function"}, 
{"inputs": [{"internalType": "bool", "name": "beat", "type": "bool"}], "name": "resolveContract", "outputs": [{"internalType": "bool", "name": "success", "type": "bool"}], "stateMutability": "nonpayable", "type": "function"}, {"inputs": [{"internalType": "bool", "name": "beat", "type": "bool"}, {"internalType": "uint256", "name": "price", "type": "uint256"}, {"internalType": "uint256", "name": "quantity", "type": "uint256"}], "name": "submitOrder", "outputs": [{"internalType": "bool", "name": "success", "type": "bool"}], "stateMutability": "nonpayable", "type": "function"}, {"inputs": [{"internalType": "bool", "name": "beat", "type": "bool"}, {"internalType": "uint256", "name": "price", "type": "uint256"}, {"internalType": "uint256", "name": "quantity", "type": "uint256"}], "name": "submitOrderPrice", "outputs": [{"internalType": "bool", "name": "success", "type": "bool"}], "stateMutability": "nonpayable", "type": "function"}, {"inputs": [], "name": "tokenAddress", "outputs": [{"internalType": "address", "name": "", "type": "address"}], "stateMutability": "view", "type": "function"}, {"inputs": [], "name": "tokenContract", "outputs": [{"internalType": "contract tokenERC20Interface", "name": "", "type": "address"}], "stateMutability": "view", "type": "function"}, {"inputs": [], "name": "tokenConversion", "outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}], "stateMutability": "view", "type": "function"}, {"inputs": [], "name": "valueSet", "outputs": [{"internalType": "bool", "name": "", "type": "bool"}], "stateMutability": "view", "type": "function"}] \ No newline at end of file diff --git a/src/tests/__init__.py b/src/tests/__init__.py index 52847d7..e69de29 100644 --- a/src/tests/__init__.py +++ b/src/tests/__init__.py @@ -1 +0,0 @@ -from . 
import test_models \ No newline at end of file diff --git a/src/tests/test_markets.csv b/src/tests/test_markets.csv new file mode 100755 index 0000000..3466b7b --- /dev/null +++ b/src/tests/test_markets.csv @@ -0,0 +1,5 @@ +id,closed,fiscal_period,metric,ticker,value_string,value,beat_address,beat_price,miss_address,broker_address,miss_price,issued,resolved_value,highlight_market,metric_symbol,expected_reporting_date,tick_size,unit_abbr,unit_desc,company_name,about +3,False,"""FQ2 2022""","""Global Subscribers""","""NFLX""","""2.3 M""",2300000,"""XYZ""",0,"""ABC""","""123""",0,10,,True,"""NFLX/Q222/GS""",2021-12-31,0.001,B,Billions,Apple,This is a long form description +2,False,"""FQ1 2022""","""Revenue""","""GOOG""","""$1,000 B""",1000000000,"""XYZ""",0,"""ABC""","""123""",0,10,,False,"""GOOG/Q122/R""",2021-12-31,0.001,M,Millions,Microsoft,That can be any length +1,False,"""FQ1 2022""","""Revenue""","""AMZN""","""$1,000 B""",1000000000,"""XYZ""",0,"""ABC""","""123""",0,10,1100000000,False,"""AMZN/Q122/R""",2021-12-31,1,USD,US Dollars,Swiss Cheese,And it describes this market +4,False,"""FQ3 2022""","""Revenue""","""AMZN""","""$1,000 B""",1000000000,,0,,,0,10,1100000000,False,"""AMZN/Q322/R""",2021-12-31,100,People,People,General Motors,With plenty of details \ No newline at end of file diff --git a/src/tests/test_markets.py b/src/tests/test_markets.py new file mode 100755 index 0000000..0977bae --- /dev/null +++ b/src/tests/test_markets.py @@ -0,0 +1,128 @@ +import os +import csv +import pytest + +from dateutil import parser +from app.markets import get_markets + + + +from app import create_app +from app.models import db, Market +from flask_migrate import Migrate, command + +config_dict = dict( + MAILCHIMP_URL = 'https://na.com', + MAILCHIMP_LIST = 'NA', + MAILCHIMP_API_KEY = 'NA', + DEVELOPMENT = True, + DEBUG = True, + DB_HOST = 'NA', + DB_USER = 'NA', + DB_PORT = 123, + DATABASE = 'metrics', + DB_PASS = 'NA', + JWT_SECRET_KEY = 'NA', + JWT_TOKEN_LOCATION = "headers", + URL = 'https://dev.chainedmetrics.com', + SQLALCHEMY_DATABASE_URI = f'sqlite:///:memory:', +) + +@pytest.fixture +def client(): + + app = create_app(config_dict) + m = Migrate(app) + db.init_app(app) + with app.app_context(): + command.upgrade(m.get_config(), 'head') + for market in get_test_markets(): + db.session.add(market) + db.session.commit() + + with app.test_client() as client: + yield client + +def test_basic_request(client): + """Request markets""" + + resp = client.get('/markets/') + + for k in ('ticker', 'fiscal_period', 'id', 'metric', 'value', 'value_string', 'broker_address'): + for m in resp.json['markets']: + assert k in m, f'{k} is missing from {m}' + +def test_missing_broker_address_are_skipped(client): + """Ensure that the test case for "AMZN/Q322/R" is not returned by + the markets call because the broker address is missing + """ + + resp = client.get('/markets/') + + assert len(resp.json['markets']) == 3, 'Three markets are expected' + for m in resp.json['markets']: + assert m['metric_symbol'] != "AMZN/Q322/R" + +def test_market_resolution_without_market(client): + """Request markets""" + + resp = client.get('/markets/99999/AMZN/FQ1 2020/Revenue') + + assert resp.status_code == 404 + assert resp.json['message'] == 'This KPI Market does not exist' + +def test_market_resolution_with_invalid_market_type(client): + """Request markets""" + + resp = client.get('/markets/a/AMZN/FQ1 2020/Revenue') + + assert resp.status_code == 404 + +def test_market_resolution(client): + """Request markets""" + + resp = 
client.get('/markets/1/AMZN/FQ1 2022/Revenue') + + print(resp.json) + assert resp.status_code == 200 + assert resp.json['value'] == 1100000000 + +def test_unresolved_market(client): + """Request markets""" + + resp = client.get('/markets/2/GOOG/FQ1 2022/Revenue') + + print(resp.json) + assert resp.status_code == 202 + assert resp.json['value'] == False + +def get_test_markets(): + '''Returns a set of markets to add''' + + market_list = [] + + dir_path = os.path.dirname(os.path.abspath(__file__)) + with open(os.path.join(dir_path, 'test_markets.csv')) as csv_fil: + + headers = [h.strip() for h in csv_fil.readlines(1)[0].split(',')] + date_indices = [i for i, h in enumerate(headers) if h in ('expected_reporting_date')] + for row in csv.reader(csv_fil, quotechar='"'): + values = [] + for i, v in enumerate(row): + if not v: + values.append(None) + elif i in date_indices: + values.append(parser.parse(v.strip()).date()) + else: + try: + values.append(eval(v.strip())) + except (NameError, SyntaxError): + values.append(v.strip()) + + row_dict = dict(zip(headers,values)) + row_dict = {k:v for k, v in row_dict.items() if v is not None} + + market = Market(**row_dict) + market_list.append(market) + + return market_list \ No newline at end of file diff --git a/src/workers/Dockerfile b/src/workers/Dockerfile new file mode 100644 index 0000000..ff6cba4 --- /dev/null +++ b/src/workers/Dockerfile @@ -0,0 +1,8 @@ +FROM python:3.8.12-buster +RUN pip install eth-brownie==1.17.2 +RUN pip install psycopg2==2.8.6 + +COPY . /src/ +WORKDIR /src/ + +CMD ["python", "-u", "faucet_worker.py"] \ No newline at end of file diff --git a/src/workers/README.md b/src/workers/README.md new file mode 100644 index 0000000..e51e231 --- /dev/null +++ b/src/workers/README.md @@ -0,0 +1,6 @@ +## Matic Faucet Workers + +This process sends matic to a requesting user via the MaticFaucet.sol contract. 
It is deployed in Digital Ocean at: + +Dev - root@104.131.12.211 +Prod - TBD diff --git a/src/workers/__init__.py b/src/workers/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/src/workers/faucet_worker.py b/src/workers/faucet_worker.py new file mode 100644 index 0000000..1b3ecde --- /dev/null +++ b/src/workers/faucet_worker.py @@ -0,0 +1,207 @@ +import asyncio +import psycopg2 +import traceback +import brownie +import os +import json +import logging +import sys + + +DEV_ADDRESS = '0x3BEBd505f2418ba2Fac94110F4E1D76b01B262a7' +PROD_ADDRESS = '0x35FE7a74668EC648038E615659710f140e591B82' +DEV_PAYOUT = 0.00025 +PROD_PAYOUT = 0.2 +ACCOUNT = brownie.accounts.add(os.environ['PRIVATE_KEY']) + +def clear_queue(connection): + ''' + Iterates through the queue until nothing is left and then + it returns + ''' + + while True: + cursor = connection.cursor() + cursor.execute('BEGIN') + try: + id, email, address = select_and_lock_row(cursor) + except Exception as e: + cursor.execute('ROLLBACK TRANSACTION;') + raise + + if not id: + cursor.execute('ROLLBACK TRANSACTION;') + return + try: + logging.info(f'Processing {id} {email}') + success, msg, transfer_quantity = process_row(id, email, address) + except Exception as e: + success = False + msg = traceback.format_exc() + logging.exception(f"Error processing row for {id} {email} {address}") + + if success: + update_user_and_delete_row(cursor, id, email, transfer_quantity) + else: + update_row_with_error(cursor, id, msg) + + +def get_matic_contract_and_send(email, address): + + faucet = get_faucet() + faucet.requestMatic(email.lower(), address, {'from': ACCOUNT}) + + payout = PROD_PAYOUT if os.getenv('CHAINEDMETRICS_ENV') == 'Production' else DEV_PAYOUT + return (True, '', payout) + +def get_faucet(): + + if os.getenv('CHAINEDMETRICS_ENV') == 'Production': + matic_address = PROD_ADDRESS + else: + matic_address = DEV_ADDRESS + + with open('matic_faucet_abi.json') as fil: + faucet_abi = json.load(fil) + + faucet = brownie.Contract.from_abi('MaticFaucet', matic_address, faucet_abi) + + return faucet + +def select_and_lock_row(cursor): + ''' + Selects and locks a row to process in the queue and returns + the corresponding email and address + + Arguments: + None + + Returns: + email (str): The email that is being processed + address (str): The address that is being processed + ''' + + cursor.execute(''' + SELECT id, email, address FROM public.matic_faucet_queue + WHERE error_msg is null + ORDER BY id + LIMIT 1 + FOR UPDATE SKIP LOCKED + ''') + + result = cursor.fetchone() + + if result: + return result + else: + return (None, None, None) + +def process_row(id, email, address): + + success, msg, amount_transfered = get_matic_contract_and_send(email, address) + + return (success, msg, amount_transfered) + + +def update_user_and_delete_row(cursor, id, email, quantity): + ''' + Updates the users table to indicate the user recieved matic and + deletes the row after it has been processed from the queue. Once + this is done it commits the transaction. 
+ + Arguments: + cursor (sqlalchemy.conection.cursor): The currsor for the current connection + id (int): The id for the reccord to delete in queue table + email (str): The email in the user table + quantity (float): The quantity of matic sent + + Returns: + None + ''' + + cursor.execute(f''' + UPDATE public.user + SET matic_recieved = %s, matic_recieved_date = NOW() + WHERE email = %s; + + DELETE FROM public.matic_faucet_queue + WHERE id = %s; + COMMIT; + ''', (quantity, email, id) + ) + +def update_row_with_error(cursor, id, msg): + ''' + Updates a record if there is an error + + Arguments: + cursor (sqlalchemy.conection.cursor): The currsor for the current connection + id (int): The id for the reccord to delete + msg (string): The error string to store in the table + + Returns: + None + ''' + + cursor.execute(''' + UPDATE public.matic_faucet_queue + SET error_msg = %s, error_time = NOW() + WHERE id = %s; + COMMIT; + ''', (msg, id) + ) + +def handle_notify(): + logging.info('handle notify') + + try: + conn.poll() + except psycopg2.OperationalError as e: + logging.exception('Saw error, clearing queue, and exiting') + clear_queue(conn) + return + + for notify in conn.notifies: + clear_queue(conn) + + conn.notifies.clear() + logging.info('Exiting notification handling') + +if __name__ == "__main__": + + logging.basicConfig( + stream=sys.stdout, level=logging.INFO, + format='%(asctime)s.%(msecs)03d %(levelname)s: %(message)s', + datefmt='%Y-%m-%d %H:%M:%S' + ) + logging.info("Starting Worker and Connecting to Database") + # dbname should be the same for the notifying process + conn = psycopg2.connect( + host="chainedmetrics-dev-do-user-9754357-0.b.db.ondigitalocean.com", + port=25060, + dbname="metrics", + user="flask_app", + password=os.getenv("DB_PASS") + ) + + conn.set_isolation_level(psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT) + + logging.info('Loading project and connecting to network') + brownie.project.load('.', 'MaticFaucetProject') + brownie.network.connect('polygon-main') + logging.info('Clearing queue') + clear_queue(conn) + + logging.info('Queue cleared') + + cursor = conn.cursor() + cursor.execute(f"LISTEN faucet_request;") + + logging.info('Starting server') + loop = asyncio.get_event_loop() + loop.add_reader(conn, handle_notify) + try: + loop.run_forever() + except Exception: + logging.exception('Closing Connection') + conn.close() \ No newline at end of file diff --git a/src/workers/matic_faucet_abi.json b/src/workers/matic_faucet_abi.json new file mode 100644 index 0000000..57e7c1c --- /dev/null +++ b/src/workers/matic_faucet_abi.json @@ -0,0 +1 @@ +[{"inputs": [{"internalType": "uint256", "name": "_transferAmount", "type": "uint256"}], "stateMutability": "nonpayable", "type": "constructor", "name": "constructor"}, {"inputs": [], "name": "drainFaucet", "outputs": [], "stateMutability": "nonpayable", "type": "function"}, {"inputs": [], "name": "fundFaucet", "outputs": [], "stateMutability": "payable", "type": "function"}, {"inputs": [{"internalType": "string", "name": "email", "type": "string"}, {"internalType": "address payable", "name": "recieverAddress", "type": "address"}], "name": "requestMatic", "outputs": [], "stateMutability": "nonpayable", "type": "function"}, {"inputs": [{"internalType": "string", "name": "source", "type": "string"}], "name": "stringToBytes32", "outputs": [{"internalType": "bytes32", "name": "result", "type": "bytes32"}], "stateMutability": "pure", "type": "function"}, {"inputs": [], "name": "transferAmount", "outputs": [{"internalType": 
"uint256", "name": "", "type": "uint256"}], "stateMutability": "view", "type": "function"}, {"inputs": [{"internalType": "uint256", "name": "_transferAmount", "type": "uint256"}], "name": "updateTransferAmount", "outputs": [], "stateMutability": "nonpayable", "type": "function"}] \ No newline at end of file diff --git a/src/workers/run_worker.sh b/src/workers/run_worker.sh new file mode 100755 index 0000000..1b5bf72 --- /dev/null +++ b/src/workers/run_worker.sh @@ -0,0 +1,7 @@ +docker run \ + -i --log-driver=none -a stdin -a stdout -a stderr \ + -e CHAINEDMETRICS_ENV=$CHAINEDMETRICS_ENV \ + -e PRIVATE_KEY=$PRIVATE_KEY \ + -e WEB3_INFURA_PROJECT_ID=$WEB3_INFURA_PROJECT_ID \ + -e DB_PASS=$DEV_DB_PASS \ + faucet_worker:1 \ No newline at end of file diff --git a/src/wsgi.py b/src/wsgi.py new file mode 100644 index 0000000..44ddc0b --- /dev/null +++ b/src/wsgi.py @@ -0,0 +1,3 @@ +from app import create_app + +application = create_app() \ No newline at end of file