Skip to content

Commit

Permalink
Merge remote-tracking branch 'origin/dev' into kk/add-manual-deploy
Browse files Browse the repository at this point in the history
  • Loading branch information
halaprix committed May 29, 2024
2 parents 552bf2d + 79cdeda commit 98659ab
Show file tree
Hide file tree
Showing 21 changed files with 639 additions and 59 deletions.
3 changes: 2 additions & 1 deletion .env.template
Original file line number Diff line number Diff line change
Expand Up @@ -7,4 +7,5 @@ SST_USER=
DEBANK_API_KEY=
DEBANK_API_URL=
RAYS_DB_WRITE_CONNECTION_STRING=postgres://user:password@localhost:5500/rays
RAYS_DB_READ_CONNECTION_STRING=postgres://user:password@localhost:5500/rays
RAYS_DB_READ_CONNECTION_STRING=postgres://user:password@localhost:5500/rays
BORROW_DB_READ_CONNECTION_STRING=
1 change: 1 addition & 0 deletions .github/workflows/deploy-production.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -34,6 +34,7 @@ jobs:
SECURITY_GROUP_ID: ${{ secrets.SECURITY_GROUP_ID }}
RAYS_DB_WRITE_CONNECTION_STRING: ${{ secrets.RAYS_DB_WRITE_CONNECTION_STRING }}
RAYS_DB_READ_CONNECTION_STRING: ${{ secrets.RAYS_DB_READ_CONNECTION_STRING }}
BORROW_DB_READ_CONNECTION_STRING: ${{ secrets.BORROW_DB_READ_CONNECTION_STRING }}
steps:
- name: Git clone the repository
uses: actions/checkout@v3
Expand Down
1 change: 1 addition & 0 deletions .github/workflows/deploy-sdk-production.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -33,6 +33,7 @@ jobs:
SECURITY_GROUP_ID: ${{ secrets.SECURITY_GROUP_ID }}
RAYS_DB_WRITE_CONNECTION_STRING: ${{ secrets.RAYS_DB_WRITE_CONNECTION_STRING }}
RAYS_DB_READ_CONNECTION_STRING: ${{ secrets.RAYS_DB_READ_CONNECTION_STRING }}
BORROW_DB_READ_CONNECTION_STRING: ${{ secrets.BORROW_DB_READ_CONNECTION_STRING }}
steps:
- name: Git clone the repository
uses: actions/checkout@v3
Expand Down
1 change: 1 addition & 0 deletions .github/workflows/deploy-sdk-staging.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -33,6 +33,7 @@ jobs:
SECURITY_GROUP_ID: ${{ secrets.SECURITY_GROUP_ID }}
RAYS_DB_WRITE_CONNECTION_STRING: ${{ secrets.RAYS_DB_WRITE_CONNECTION_STRING }}
RAYS_DB_READ_CONNECTION_STRING: ${{ secrets.RAYS_DB_READ_CONNECTION_STRING }}
BORROW_DB_READ_CONNECTION_STRING: ${{ secrets.BORROW_DB_READ_CONNECTION_STRING }}
steps:
- name: Git clone the repository
uses: actions/checkout@v3
Expand Down
1 change: 1 addition & 0 deletions .github/workflows/deploy-staging.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -35,6 +35,7 @@ jobs:
SECURITY_GROUP_ID: ${{ secrets.SECURITY_GROUP_ID }}
RAYS_DB_WRITE_CONNECTION_STRING: ${{ secrets.RAYS_DB_WRITE_CONNECTION_STRING }}
RAYS_DB_READ_CONNECTION_STRING: ${{ secrets.RAYS_DB_READ_CONNECTION_STRING }}
BORROW_DB_READ_CONNECTION_STRING: ${{ secrets.BORROW_DB_READ_CONNECTION_STRING }}
steps:
- name: Git clone the repository
uses: actions/checkout@v3
Expand Down
1 change: 1 addition & 0 deletions background-jobs/update-rays-cron-function/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,7 @@
"zod": "^3.22.4",
"@summerfi/summer-events-subgraph": "workspace:*",
"@summerfi/rays-db": "workspace:*",
"@summerfi/borrow-db": "workspace:*",
"@summerfi/serverless-shared": "workspace:*"
},
"devDependencies": {
Expand Down
132 changes: 95 additions & 37 deletions background-jobs/update-rays-cron-function/src/index.ts
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
import type { Context, EventBridgeEvent } from 'aws-lambda'
import { Logger } from '@aws-lambda-powertools/logger'
import { Database, getRaysDB } from '@summerfi/rays-db'
import { getBorrowDB } from '@summerfi/borrow-db'
import process from 'node:process'
import { getSummerPointsSubgraphClient } from '@summerfi/summer-events-subgraph'
import { ChainId } from '@summerfi/serverless-shared'
Expand Down Expand Up @@ -33,6 +34,7 @@ enum OngoingPointDistribution {
OPEN_POSITION = 'OPEN_POSITION',
MIGRATION = 'MIGRATION',
SWAP = 'SWAP',
REFERRAL = 'REFERRAL',
}

enum PositionMultiplier {
Expand Down Expand Up @@ -91,11 +93,15 @@ export const handler = async (
event: EventBridgeEvent<'Scheduled Event', never>,
context: Context,
): Promise<void> => {
const { SUBGRAPH_BASE, RAYS_DB_CONNECTION_STRING } = process.env
const { SUBGRAPH_BASE, RAYS_DB_CONNECTION_STRING, BORROW_DB_READ_CONNECTION_STRING } = process.env

logger.addContext(context)
logger.info('Hello World!')

if (!BORROW_DB_READ_CONNECTION_STRING) {
logger.error('BORROW_DB_READ_CONNECTION_STRING is not set')
return
}
if (!SUBGRAPH_BASE) {
logger.error('SUBGRAPH_BASE is not set')
return
Expand All @@ -111,6 +117,10 @@ export const handler = async (
logger,
}
const { db, services } = await getRaysDB(dbConfig)
const { db: borrowDb } = await getBorrowDB({
connectionString: BORROW_DB_READ_CONNECTION_STRING,
logger,
})

const mainnetSubgraphClient = getSummerPointsSubgraphClient({
logger,
Expand Down Expand Up @@ -194,31 +204,49 @@ export const handler = async (
startTimestamp,
endTimestamp,
)
const sortedAccruedPointsFromSnapshot = accruedPointsFromSnapshot.sort((a, b) =>
a.positionId.localeCompare(b.positionId),

// Get all unique addresses and positions from all chunks
const allUniqueUsers: Set<string> = new Set()
const uniqueUserAddressesFromSnapshot = Array.from(
new Set(accruedPointsFromSnapshot.map((c) => c.user)),
)

await checkMigrationEligibility(db, sortedAccruedPointsFromSnapshot)
await checkOpenedPositionEligibility(db, sortedAccruedPointsFromSnapshot)
await insertAllMissingUsers(sortedAccruedPointsFromSnapshot, db)
const allPositions = Array.from(new Set(accruedPointsFromSnapshot.map((c) => c.positionId)))

const chunkedPoints: PositionPoints[] = createChunksOfUserPointsDistributions(
sortedAccruedPointsFromSnapshot,
30,
)
// addresses in borrow db are stored as checksummed addresses
const usersFromReferralsTable = (
await borrowDb
.selectFrom('user')
.where(borrowDb.fn('lower', ['user.address']), 'in', uniqueUserAddressesFromSnapshot)
.selectAll()
.execute()
).map((u) => ({
address: u.address.toLowerCase(),
accepted: u.accepted,
timestamp: u.timestamp,
user_that_referred_address: u.userThatReferredAddress
? u.userThatReferredAddress.toLowerCase()
: null,
}))

for (const user of usersFromReferralsTable) {
if (user.user_that_referred_address) {
allUniqueUsers.add(user.user_that_referred_address)
}
}
for (const user of uniqueUserAddressesFromSnapshot) {
allUniqueUsers.add(user)
}
const allUniqueUserAddresses = Array.from(allUniqueUsers)

// Get all unique addresses and positions from all chunks
const uniqueAddressesFromSnapshot = Array.from(
new Set(chunkedPoints.flatMap((chunk) => chunk.map((c) => c.user))),
)
const allPositions = Array.from(
new Set(chunkedPoints.flatMap((chunk) => chunk.map((c) => c.positionId))),
)
await checkMigrationEligibility(db, accruedPointsFromSnapshot)
await checkOpenedPositionEligibility(db, accruedPointsFromSnapshot)
await insertAllMissingUsers(db, allUniqueUserAddresses)

// Fetch all necessary data for all chunks at once
const uniqueUserAddressesFromDatabase = await db
.selectFrom('userAddress')
.where('address', 'in', uniqueAddressesFromSnapshot)
.where('address', 'in', allUniqueUserAddresses)
.selectAll()
.execute()

Expand All @@ -231,7 +259,7 @@ export const handler = async (
const usersMultipliersFromDatabase = await db
.selectFrom('multiplier')
.innerJoin('userAddress', 'multiplier.userAddressId', 'userAddress.id')
.where('userAddress.address', 'in', uniqueAddressesFromSnapshot)
.where('userAddress.address', 'in', allUniqueUserAddresses)
.select([
'multiplier.value',
'multiplier.type',
Expand All @@ -253,9 +281,14 @@ export const handler = async (
])
.execute()

const chunkedPoints: PositionPoints[] = createChunksOfUserPointsDistributions(
accruedPointsFromSnapshot,
30,
)

await db.transaction().execute(async (transaction) => {
await addOrUpdateUserMultipliers(
uniqueAddressesFromSnapshot,
uniqueUserAddressesFromSnapshot,
uniqueUserAddressesFromDatabase,
accruedPointsFromSnapshot,
usersMultipliersFromDatabase,
Expand Down Expand Up @@ -305,6 +338,26 @@ export const handler = async (
type: OngoingPointDistribution.OPEN_POSITION,
})
.executeTakeFirstOrThrow()

const isUserReferred = usersFromReferralsTable.find((u) => u.address === record.user)
if (isUserReferred && isUserReferred.user_that_referred_address) {
const referringUser = uniqueUserAddressesFromDatabase.find(
(ua) => ua.address === isUserReferred.user_that_referred_address,
)
if (!referringUser) {
throw new Error('Referring user not found')
}

await transaction
.insertInto('pointsDistribution')
.values({
description: 'Points for referred user',
points: record.points.openPositionsPoints * 0.05,
userAddressId: referringUser.id,
type: OngoingPointDistribution.REFERRAL,
})
.executeTakeFirstOrThrow()
}
}

const currentDate = new Date()
Expand Down Expand Up @@ -580,33 +633,38 @@ async function addOrUpdateUserMultipliers(
* @param db - The database instance.
* @returns A Promise that resolves when all missing users are inserted.
*/
async function insertAllMissingUsers(sortedPoints: PositionPoints, db: Kysely<Database>) {
const uniqueUsers = new Set(sortedPoints.map((p) => p.user))
async function insertAllMissingUsers(db: Kysely<Database>, allUniqueUsers: string[]) {
const uniqueUserAddressesFromDatabase = await db
.selectFrom('userAddress')
.where('address', 'in', Array.from(uniqueUsers))
.where('address', 'in', Array.from(allUniqueUsers))
.selectAll()
.execute()

const uniqueMissingUsers = allUniqueUsers.filter(
(userAddress) => !uniqueUserAddressesFromDatabase.some((ua) => ua.address === userAddress),
)

await db.transaction().execute(async (transaction) => {
for (const user of uniqueUsers) {
const userAddress = uniqueUserAddressesFromDatabase.find((ua) => ua.address === user)
if (!userAddress) {
const result = await transaction
.insertInto('blockchainUser')
.values({ category: null })
.returning(['id'])
.executeTakeFirstOrThrow()
await transaction
.insertInto('userAddress')
.values({ address: user, userId: result.id })
.returningAll()
.executeTakeFirstOrThrow()
}
for (const userAddress of uniqueMissingUsers) {
await insertNewUser(transaction, userAddress)
}
})
}

/**
 * Registers a brand-new blockchain user together with its wallet address.
 *
 * Runs two dependent inserts on the caller-supplied transaction: first a
 * `blockchainUser` row (category left null), then a `userAddress` row that
 * links the given address to the freshly generated user id. Because both
 * statements share the transaction, they commit or roll back together with
 * the caller's other work.
 *
 * @param transaction - Open database transaction to execute both inserts on.
 * @param userAddress - Wallet address to attach to the new user.
 * @returns The `{ id }` row of the newly inserted blockchain user.
 */
async function insertNewUser(transaction: Transaction<Database>, userAddress: string) {
  // Create the user first so we have an id to reference from the address row.
  const createdUser = await transaction
    .insertInto('blockchainUser')
    .values({ category: null })
    .returning(['id'])
    .executeTakeFirstOrThrow()

  // Link the address to the user we just created.
  await transaction
    .insertInto('userAddress')
    .values({ address: userAddress, userId: createdUser.id })
    .returningAll()
    .executeTakeFirstOrThrow()

  return createdUser
}

/**
* Creates chunks of user points distributions based on a given chunk length.
*
Expand Down
1 change: 1 addition & 0 deletions packages/borrow-db/.env.template
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
DATABASE_URL=postgres://user:password@localhost:5500/borrowdb
6 changes: 6 additions & 0 deletions packages/borrow-db/.eslintrc.cjs
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@
/** @type {import('eslint').Linter.Config} */
// ESLint configuration for the borrow-db package.
module.exports = {
  // Stop ESLint from looking for config files in parent directories.
  root: true,
  // Reuse the monorepo's shared library ruleset.
  extends: ['@summerfi/eslint-config/library.cjs'],
  // Parse TypeScript sources.
  parser: '@typescript-eslint/parser',
}
19 changes: 19 additions & 0 deletions packages/borrow-db/jest.config.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,19 @@
/** @type {import('ts-jest').JestConfigWithTsJest} */
// Jest configuration for the borrow-db package (ts-jest, Node environment).
module.exports = {
  preset: 'ts-jest',
  // Only discover tests under src/.
  roots: ['<rootDir>/src'],
  // Match files in __tests__ folders plus *.spec.* / *.test.* files.
  testMatch: ['**/__tests__/**/*.+(ts|tsx|js)', '**/?(*.)+(spec|test).+(ts|tsx|js)'],
  // Suppress console output from tests.
  silent: true,
  // Run test files serially — presumably to avoid concurrent access to a
  // shared database; TODO confirm.
  maxWorkers: 1,
  // Per-test timeout in milliseconds.
  testTimeout: 10000,
  testEnvironment: 'node',
  // Skip build output and dependencies.
  testPathIgnorePatterns: ['dist', 'node_modules'],
  transform: {
    // Transpile TypeScript with ts-jest using the test-specific tsconfig.
    '^.+\\.(ts|tsx)$': [
      'ts-jest',
      {
        tsconfig: '<rootDir>/tsconfig.test.json',
      },
    ],
  },
}
29 changes: 29 additions & 0 deletions packages/borrow-db/package.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,29 @@
{
"name": "@summerfi/borrow-db",
"version": "1.0.0",
"main": "dist/index.js",
"types": "dist/index.d.ts",
"scripts": {
"test": "jest --passWithNoTests",
"build": "tsc -b -v tsconfig.build.json",
"dev": "tsc -b -w tsconfig.build.json",
"lint": "eslint .",
"lint:fix": "eslint . --fix",
"codegen": "kysely-codegen --camel-case --out-file ./src/database-types.ts"
},
"dependencies": {
"@summerfi/abstractions": "workspace:*",
"kysely": "^0.27.3",
"kysely-postgres-js": "^2.0.0",
"pg": "^8.11.5",
"postgres": "^3.4.4"
},
"devDependencies": {
"@summerfi/eslint-config": "workspace:*",
"@summerfi/typescript-config": "workspace:*",
"dotenv": "^16.4.5",
"eslint": "^8.57.0",
"kysely-codegen": "^0.15.0",
"tsx": "^4.9.0"
}
}
Loading

0 comments on commit 98659ab

Please sign in to comment.