Skip to content

Commit

Permalink
Merge pull request #201 from dom-jiang/main
Browse files Browse the repository at this point in the history
Add burrow liquidate interface
  • Loading branch information
WillaGao1 authored Sep 14, 2024
2 parents 5caf103 + bf93051 commit 9d7263c
Show file tree
Hide file tree
Showing 3 changed files with 251 additions and 4 deletions.
118 changes: 114 additions & 4 deletions app.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,14 +14,14 @@
from redis_provider import list_pools_by_id_list, list_token_metadata, list_pools_by_tokens, get_pool, list_token_metadata_v2
from redis_provider import list_token_price_by_id_list, get_proposal_hash_by_id, get_24h_pool_volume, get_account_pool_assets
from redis_provider import get_dcl_pools_volume_list, get_24h_pool_volume_list, get_dcl_pools_tvl_list, get_token_price_ratio_report, get_history_token_price_report, get_market_token_price
from utils import combine_pools_info, compress_response_content, get_ip_address, pools_filter, get_tx_id, combine_dcl_pool_log, handle_dcl_point_bin, handle_point_data, handle_top_bin_fee, handle_dcl_point_bin_by_account, get_circulating_supply, get_lp_lock_info
from utils import combine_pools_info, compress_response_content, get_ip_address, pools_filter, is_base64, combine_dcl_pool_log, handle_dcl_point_bin, handle_point_data, handle_top_bin_fee, handle_dcl_point_bin_by_account, get_circulating_supply, get_lp_lock_info
from config import Cfg
from db_provider import get_history_token_price, query_limit_order_log, query_limit_order_swap, get_liquidity_pools, get_actions, query_dcl_pool_log
from db_provider import get_history_token_price, query_limit_order_log, query_limit_order_swap, get_liquidity_pools, get_actions, query_dcl_pool_log, query_burrow_liquidate_log, update_burrow_liquidate_log
from db_provider import query_recent_transaction_swap, query_recent_transaction_dcl_swap, \
query_recent_transaction_liquidity, query_recent_transaction_dcl_liquidity, query_recent_transaction_limit_order, query_dcl_points, query_dcl_points_by_account, \
query_dcl_user_unclaimed_fee, query_dcl_user_claimed_fee, query_dcl_user_unclaimed_fee_24h, query_dcl_user_claimed_fee_24h, \
query_dcl_user_tvl, query_dcl_user_change_log, query_burrow_log, get_history_token_price_by_token, add_orderly_trading_data, \
add_liquidation_result, get_liquidation_result, update_liquidation_result, add_user_wallet_info
add_liquidation_result, get_liquidation_result, update_liquidation_result, add_user_wallet_info, get_pools_volume_24h
import re
# from flask_limiter import Limiter
from loguru import logger
Expand All @@ -30,8 +30,9 @@
from auth.crypto_utl import decrypt
import time
import bleach
import requests

service_version = "20240812.01"
service_version = "20240913.01"
Welcome = 'Welcome to ref datacenter API server, version ' + service_version + ', indexer %s' % \
Cfg.NETWORK[Cfg.NETWORK_ID]["INDEXER_HOST"][-3:]
# Instantiation, which can be regarded as fixed format
Expand Down Expand Up @@ -1004,6 +1005,115 @@ def handel_user_wallet():
return ret


@app.route('/get-total-fee', methods=['GET'])
def handel_get_total_fee():
    """Return the total 24h swap fee across all pools.

    Combines the cached per-pool 24h volume with each pool's fee rate
    (stored in basis points). Pools missing from the local pool list are
    resolved through the public ref.finance search API, which reports
    `fee_volume_24h` directly.

    Returns a JSON envelope: {"code": 0, "msg": "success",
    "data": {"total_fee": "<float as string>"}} or a code -1 error body.
    """
    total_fee = 0
    # Pool ids whose fee rate is not known locally; resolved remotely below.
    missing_pool_ids = []
    try:
        pool_volume_data = get_pools_volume_24h(Cfg.NETWORK_ID)
        pool_list = list_pools(Cfg.NETWORK_ID)
        # pool id -> fee rate; `total_fee` on a pool is in basis points.
        pool_fee_rate = {pool["id"]: pool["total_fee"] / 10000 for pool in pool_list}
        for pool_volume in pool_volume_data:
            pool_id = pool_volume["pool_id"]
            if pool_id in pool_fee_rate:
                total_fee += float(pool_volume["volume_24h"]) * pool_fee_rate[pool_id]
            else:
                missing_pool_ids.append(pool_id)
        if missing_pool_ids:
            # BUG FIX: the previous string concatenation produced a leading
            # comma ("pool_id_list=,id1,id2"); join the ids cleanly instead.
            url = "https://api.ref.finance/pool/search?pool_id_list=" \
                  + ",".join(str(pool_id) for pool_id in missing_pool_ids)
            # BUG FIX: always set a timeout so a slow upstream cannot hang
            # this request handler indefinitely.
            search_pool_json = requests.get(url, timeout=10).text
            search_pool_data = json.loads(search_pool_json)
            for search_pool in search_pool_data["data"]["list"]:
                total_fee += float(search_pool["fee_volume_24h"])
        ret_data = {
            "code": 0,
            "msg": "success",
            "data": {
                "total_fee": str(total_fee),
            }
        }
    except Exception as e:
        logger.info("handel_get_total_fee error:{}", e.args)
        ret_data = {
            "code": -1,
            "msg": "error",
            "data": e.args
        }
    return jsonify(ret_data)


@app.route('/get-total-revenue', methods=['GET'])
def handel_get_total_revenue():
    """Return protocol revenue: 20% of the 24h total fee.

    Delegates to the /get-total-fee handler and propagates its error
    payload unchanged when that call fails.
    """
    fee_response = json.loads(handel_get_total_fee().data)
    if fee_response["code"] == 0:
        revenue = float(fee_response["data"]["total_fee"]) * 0.2
        payload = {
            "code": 0,
            "msg": "success",
            "data": {"total_revenue": str(revenue)}
        }
    else:
        payload = {
            "code": -1,
            "msg": "error",
            "data": fee_response["data"]
        }
    return jsonify(payload)


@app.route('/get-burrow-liquidate-records', methods=['GET'])
def handle_burrow_liquidate_records():
    """Return one page of Burrow liquidation records for an account.

    Query params: account_id (required), page_number (default 1),
    page_size (default 10). Responds with the page plus pagination
    metadata and the account's unread-record count.
    """
    account_id = request.args.get("account_id")
    page_number = request.args.get("page_number", type=int, default=1)
    page_size = request.args.get("page_size", type=int, default=10)
    if not account_id or page_size == 0:
        return ""
    records, total_size, unread_count = query_burrow_liquidate_log(
        Cfg.NETWORK_ID, account_id, page_number, page_size)
    # Ceiling division: a partial last page still counts as a page.
    total_page = int(total_size / page_size) + (0 if total_size % page_size == 0 else 1)
    return jsonify({
        "record_list": records,
        "page_number": page_number,
        "page_size": page_size,
        "total_page": total_page,
        "total_size": total_size,
        "unread": unread_count
    })


@app.route('/set_liquidation_info', methods=['POST', 'PUT'])
def handle_liquidation_info():
    """Mark Burrow liquidation records (by receipt id) as read.

    Expects a JSON body {"receipt_ids": [...]}. Each id is validated
    before it reaches the SQL layer; on any invalid id the whole request
    is rejected with code -1.
    """
    try:
        ret = {
            "code": 0,
            "msg": "success",
            "data": []
        }
        json_data = request.get_json()
        if "receipt_ids" in json_data and len(json_data["receipt_ids"]) > 0:
            receipt_ids = json_data["receipt_ids"]
            for receipt_id in receipt_ids:
                # Reject ids that are not well-formed base64 text.
                # NOTE(review): assumes receipt ids are base64-like — confirm
                # the actual id format against the indexer.
                if not is_base64(receipt_id):
                    ret = {
                        "code": -1,
                        "msg": "Id incorrect",
                        "data": [receipt_id]
                    }
                    return jsonify(ret)
            update_burrow_liquidate_log(Cfg.NETWORK_ID, receipt_ids)
            ret["data"] = receipt_ids
        return jsonify(ret)
    except Exception as e:
        logger.error("update liquidate data error:{}", e)
        # BUG FIX: the original fell through with no return here, so Flask
        # raised "view function did not return a valid response".
        return jsonify({
            "code": -1,
            "msg": "error",
            "data": []
        })


# Route application logs to a per-day file, e.g. "app-2024-09-13.log".
# NOTE(review): the date is captured once at import time; a long-running
# process keeps writing to the start-date file rather than rotating daily.
current_date = datetime.datetime.now().strftime("%Y-%m-%d")
log_file = "app-%s.log" % current_date
logger.add(log_file)
Expand Down
122 changes: 122 additions & 0 deletions db_provider.py
Original file line number Diff line number Diff line change
Expand Up @@ -1372,6 +1372,128 @@ def add_user_wallet_info(network_id, account_id, wallet_address):
cursor.close()


def get_pools_volume_24h(network_id):
    """Fetch the most recent 24h-volume snapshot for every pool.

    Finds the latest `time` in go_volume_24h_pool and returns all rows
    recorded at that time as dicts with keys `pool_id` and `volume_24h`.

    Returns an empty list when the table is empty or on any DB error, so
    callers can always iterate the result safely (the original returned
    None in those cases, which crashed iterating callers).
    """
    db_conn = get_db_connect(network_id)
    max_time_sql = "select time from go_volume_24h_pool order by time desc limit 1"
    sql = "select pool_id, volume_24h from go_volume_24h_pool where time = %s"
    cursor = db_conn.cursor(cursor=pymysql.cursors.DictCursor)
    try:
        cursor.execute(max_time_sql)
        max_time_data = cursor.fetchone()
        if max_time_data is None:
            # Empty table: no snapshot exists yet.
            return []
        # Pass parameters as a tuple, the documented pymysql calling form.
        cursor.execute(sql, (int(max_time_data["time"]),))
        return cursor.fetchall()
    except Exception as e:
        print("query go_volume_24h_pool to db error:", e)
        return []
    finally:
        cursor.close()


def query_burrow_liquidate_log(network_id, account_id, page_number, page_size):
    """Return one page of Burrow liquidation data for `account_id`.

    Returns a 3-tuple:
      (grouped liquidation records, total 'liquidate' event count,
       unread 'liquidate' event count).
    On any DB error returns ([], 0, 0) so callers can unpack safely
    (the original returned None, breaking tuple unpacking at the caller).
    """
    start_number = handel_page_number(page_number, page_size)
    db_conn = get_db_connect(network_id)
    receipt_sql = "select receipt_id from burrow_event_log where liquidation_account_id = %s and " \
                  "`event` = 'liquidate' order by `timestamp` desc limit %s, %s"
    sql_count = "select count(*) as total_number from burrow_event_log where liquidation_account_id = %s " \
                "and `event` = 'liquidate'"
    not_read_sql_count = "select count(*) as total_number from burrow_event_log where liquidation_account_id = %s " \
                         "and `event` = 'liquidate' and is_read = '0'"
    cursor = db_conn.cursor(cursor=pymysql.cursors.DictCursor)
    try:
        cursor.execute(receipt_sql, (account_id, start_number, page_size))
        receipt_ids = [entry['receipt_id'] for entry in cursor.fetchall()]
        if receipt_ids:
            # SECURITY FIX: bind the ids as parameters instead of splicing
            # them into the SQL text (the original quoted/joined them by hand).
            placeholders = ','.join(['%s'] * len(receipt_ids))
            liquidate_sql = "select `event`, amount, token_id, `timestamp`, receipt_id, is_read, " \
                            "update_time, position from burrow_event_log " \
                            "where receipt_id in (%s)" % placeholders
            cursor.execute(liquidate_sql, receipt_ids)
            liquidate_log_list = cursor.fetchall()
        else:
            liquidate_log_list = []
        cursor.execute(sql_count, account_id)
        burrow_log_count = cursor.fetchone()
        cursor.execute(not_read_sql_count, account_id)
        not_read_count = cursor.fetchone()
        ret_liquidate_log = handel_liquidate_log_data(liquidate_log_list, account_id)
        return ret_liquidate_log, burrow_log_count["total_number"], not_read_count["total_number"]
    except Exception as e:
        print("query burrow_liquidate_log to db error:", e)
        return [], 0, 0
    finally:
        cursor.close()


def handel_liquidate_log_data(liquidate_log_list, account_id):
    """Group raw burrow_event_log rows by receipt into liquidation records.

    For each receipt, 'borrow' rows become RepaidAssets and
    'withdraw_started' rows become LiquidatedAssets. Per-receipt metadata
    (timestamps, position, read flag) is taken from the first row seen for
    that receipt. `timestamp` is in nanoseconds; createdAt is reported in
    whole seconds.

    Returns a list of record dicts, one per receipt, in first-seen order.
    """
    receipt_data = {}
    for burrow_log in liquidate_log_list:
        receipt_id = burrow_log["receipt_id"]
        if receipt_id not in receipt_data:
            receipt_data[receipt_id] = {
                "liquidation_account_id": account_id,
                # nanoseconds -> seconds
                "createdAt": int(int(burrow_log["timestamp"]) / 1000000000),
                "isRead": False if burrow_log["is_read"] == "0" else True,
                "updatedAt": int(burrow_log["update_time"].timestamp()),
                "RepaidAssets": [],
                "LiquidatedAssets": [],
                "position": burrow_log["position"]
            }
        entry = receipt_data[receipt_id]
        asset = {"amount": burrow_log["amount"], "token_id": burrow_log["token_id"]}
        if burrow_log["event"] == "borrow":
            entry["RepaidAssets"].append(asset)
        elif burrow_log["event"] == "withdraw_started":
            entry["LiquidatedAssets"].append(asset)

    liquidate_list = []
    for receipt_id, v in receipt_data.items():
        liquidate_data = {
            "healthFactor_after": None,
            "RepaidAssets": v["RepaidAssets"],
            "isRead": v["isRead"],
            "createdAt": v["createdAt"],
            # BUG FIX: the original read the stale loop variable `position`
            # from the first loop, so every record got the position of the
            # LAST processed row instead of its own receipt's position.
            "position": v["position"],
            "liquidation_type": "liquidate",
            "account_id": v["liquidation_account_id"],
            "healthFactor_before": None,
            "LiquidatedAssets": v["LiquidatedAssets"],
            "isDeleted": False,
            "updatedAt": v["updatedAt"],
            "receipt_id": receipt_id,
        }
        liquidate_list.append(liquidate_data)
    return liquidate_list


def update_burrow_liquidate_log(network_id, receipt_ids):
    """Mark the given liquidation receipts as read (is_read = '1').

    `receipt_ids` is an iterable of receipt id strings; the ids are bound
    as query parameters. Commits on success; errors are logged and
    swallowed (best-effort update).
    """
    # ROBUSTNESS: an empty list would render "IN ()", which is a MySQL
    # syntax error — and there is nothing to update anyway.
    if not receipt_ids:
        return
    placeholders = ','.join(['%s'] * len(receipt_ids))
    sql = f"UPDATE burrow_event_log SET is_read = '1' WHERE receipt_id IN ({placeholders})"
    db_conn = get_db_connect(network_id)
    cursor = db_conn.cursor()
    try:
        cursor.execute(sql, receipt_ids)
        db_conn.commit()
    except Exception as e:
        print("update burrow_liquidate_log to db error:", e)
    finally:
        cursor.close()


if __name__ == '__main__':
print("#########MAINNET###########")
# clear_token_price()
Expand Down
15 changes: 15 additions & 0 deletions utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -769,6 +769,21 @@ def get_lp_lock_info(network_id):
print("Error: ", e)


def is_base64(s):
    """Return True if `s` is a non-empty, well-formed base64 string.

    Checks in order: the alphabet (which also rejects the empty string),
    the length (must be a multiple of 4), and finally the decoder's own
    padding validation.
    """
    import base64
    import binascii
    import re
    if not re.fullmatch(r'[A-Za-z0-9+/=]+', s):
        return False
    if len(s) % 4 != 0:
        return False
    try:
        base64.b64decode(s, validate=True)
        return True
    # BUG FIX: catch binascii.Error directly — the original spelled it
    # base64.binascii.Error, which depends on base64's private internal
    # import of binascii rather than the public module.
    except (binascii.Error, ValueError):
        return False


if __name__ == '__main__':
# from config import Cfg
# from redis_provider import list_token_price, list_pools_by_id_list, list_token_metadata
Expand Down

0 comments on commit 9d7263c

Please sign in to comment.