diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 0a8cb5f8f..937083543 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -297,47 +297,7 @@ jobs: with: name: nextjs-build path: ${{ github.workspace }}/apps/next/.next - # - name: Get Supabase Database Branch - # if: github.base_ref == 'dev' - # uses: 0xbigboss/supabase-branch-gh-action@v1 - # id: supabase-branch - # with: - # supabase-access-token: ${{ secrets.SUPABASE_EXPERIMENTAL_ACCESS_TOKEN }} - # supabase-project-id: ${{ secrets.STAGING_SUPABASE_PROJECT_ID }} - # wait-for-migrations: false # Optional. Default is false. - # timeout: 60 # Optional. Default is 60. - # - name: Add SMS provider to Supabase branch - # if: github.base_ref == 'dev' - # uses: 0xbigboss/supabase-manager-script-gh-action@v1 - # id: add-sms-provider - # with: - # supabase-access-token: ${{ secrets.SUPABASE_EXPERIMENTAL_ACCESS_TOKEN }} - # script: | - # const parentAuthConfig = await supabaseManager.projectsConfig.getV1AuthConfig({ - # ref: process.env.SUPABASE_PARENT_PROJECT_ID, - # }); - # core.info('Enabling Twilio verify external phone auth provider'); - - # await supabaseManager.projectsConfig.updateV1AuthConfig({ - # ref: process.env.SUPABASE_PROJECT_ID, - # requestBody: { - # external_phone_enabled: true, - # sms_provider: parentAuthConfig.sms_provider, - # sms_twilio_verify_account_sid: - # parentAuthConfig.sms_twilio_verify_account_sid, - # sms_twilio_verify_auth_token: parentAuthConfig.sms_twilio_verify_auth_token, - # sms_twilio_verify_message_service_sid: - # parentAuthConfig.sms_twilio_verify_message_service_sid, - # }, - # }); - - # core.info('Done'); - - # return "success"; - # env: - # SUPABASE_PROJECT_ID: ${{ steps.supabase-branch.outputs.project_ref }} - # SUPABASE_PARENT_PROJECT_ID: ${{ steps.supabase-branch.outputs.parent_project_ref }} - name: Extract branch name id: extract-branch uses: ./.github/actions/extract-branch @@ -352,34 +312,34 @@ jobs: vercel-token: ${{ secrets.VERCEL_TOKEN }} public-hostname: ${{ steps.public-hostname.outputs.public-hostname }} deploy-preview-extra-args: >- - -e SUPABASE_DB_URL="postgresql://${{steps.supabase-branch.outputs.db_user}}.${{steps.supabase-branch.outputs.project_ref}}:${{steps.supabase-branch.outputs.db_pass}}@fly-0-iad.pooler.supabase.com:${{steps.supabase-branch.outputs.db_port}}/postgres" - -e SUPABASE_JWT_SECRET="${{steps.supabase-branch.outputs.jwt_secret}}" - -e SUPABASE_SERVICE_ROLE="${{ steps.supabase-branch.outputs.service_role_key }}" - -e NEXT_PUBLIC_SUPABASE_URL="https://${{ steps.supabase-branch.outputs.project_ref }}.supabase.co" - -e NEXT_PUBLIC_SUPABASE_PROJECT_ID="${{steps.supabase-branch.outputs.project_ref}}" - -e NEXT_PUBLIC_SUPABASE_GRAPHQL_URL="${{steps.supabase-branch.outputs.graphql_url}}" + -e SUPABASE_DB_URL="${{ secrets.STAGING_SUPABASE_DB_URL }}" + -e SUPABASE_JWT_SECRET="${{ secrets.STAGING_SUPABASE_JWT_SECRET }}" + -e SUPABASE_SERVICE_ROLE="${{ secrets.STAGING_SUPABASE_SERVICE_ROLE }}" + -e NEXT_PUBLIC_SUPABASE_URL="https://${{ secrets.STAGING_SUPABASE_PROJECT_ID }}.supabase.co" + -e NEXT_PUBLIC_SUPABASE_PROJECT_ID="${{ secrets.STAGING_SUPABASE_PROJECT_ID }}" + -e NEXT_PUBLIC_SUPABASE_GRAPHQL_URL="https://${{ secrets.STAGING_SUPABASE_PROJECT_ID }}.supabase.co/graphql" -e NEXT_PUBLIC_BASE_CHAIN_ID="84532" -e NEXT_PUBLIC_MAINNET_CHAIN_ID="11155111" -e NEXT_PUBLIC_BASE_RPC_URL="${{ secrets.BASE_SEPOLIA_RPC_URL }}" -e NEXT_PUBLIC_MAINNET_RPC_URL="https://ethereum-sepolia-rpc.publicnode.com" - -e NEXT_PUBLIC_SUPABASE_ANON_KEY="${{ 
steps.supabase-branch.outputs.anon_key }}" + -e NEXT_PUBLIC_SUPABASE_ANON_KEY="eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpc3MiOiJzdXBhYmFzZSIsInJlZiI6InVncXRvdWxleGh2YWhldnN5c3VxIiwicm9sZSI6ImFub24iLCJpYXQiOjE2OTMwOTE5MzUsImV4cCI6MjAwODY2NzkzNX0.RL8W-jw2rsDhimYl8KklF2B9bNTPQ-Kj5zZA0XlufUA" env: VERCEL_ORG_ID: ${{ secrets.VERCEL_ORG_ID }} VERCEL_PROJECT_ID: ${{ secrets.VERCEL_PROJECT_ID }} VERCEL_GIT_COMMIT_SHA: ${{ github.sha }} VERCEL_GIT_COMMIT_REF: ${{ github.head_ref }} VERCEL_GIT_PULL_REQUEST_ID: ${{ github.event.pull_request.number }} - SUPABASE_DB_URL: postgresql://${{steps.supabase-branch.outputs.db_user}}.${{steps.supabase-branch.outputs.project_ref}}:${{steps.supabase-branch.outputs.db_pass}}@fly-0-iad.pooler.supabase.com:${{steps.supabase-branch.outputs.db_port}}/postgres - SUPABASE_JWT_SECRET: ${{steps.supabase-branch.outputs.jwt_secret}} - SUPABASE_SERVICE_ROLE: ${{ steps.supabase-branch.outputs.service_role_key }} - NEXT_PUBLIC_SUPABASE_URL: https://${{ steps.supabase-branch.outputs.project_ref }}.supabase.co - NEXT_PUBLIC_SUPABASE_PROJECT_ID: ${{steps.supabase-branch.outputs.project_ref}} - NEXT_PUBLIC_SUPABASE_GRAPHQL_URL: ${{steps.supabase-branch.outputs.graphql_url}} + SUPABASE_DB_URL: ${{ secrets.STAGING_SUPABASE_DB_URL }} + SUPABASE_JWT_SECRET: ${{ secrets.STAGING_SUPABASE_JWT_SECRET }} + SUPABASE_SERVICE_ROLE: ${{ secrets.STAGING_SUPABASE_SERVICE_ROLE }} + NEXT_PUBLIC_SUPABASE_URL: https://${{ secrets.STAGING_SUPABASE_PROJECT_ID }}.supabase.co + NEXT_PUBLIC_SUPABASE_PROJECT_ID: ${{ secrets.STAGING_SUPABASE_PROJECT_ID }} + NEXT_PUBLIC_SUPABASE_GRAPHQL_URL: https://${{ secrets.STAGING_SUPABASE_PROJECT_ID }}.supabase.co/graphql NEXT_PUBLIC_BASE_CHAIN_ID: 84532 NEXT_PUBLIC_MAINNET_CHAIN_ID: 11155111 NEXT_PUBLIC_BASE_RPC_URL: ${{ secrets.BASE_SEPOLIA_RPC_URL }} NEXT_PUBLIC_MAINNET_RPC_URL: https://ethereum-sepolia-rpc.publicnode.com - NEXT_PUBLIC_SUPABASE_ANON_KEY: ${{ steps.supabase-branch.outputs.anon_key }} + NEXT_PUBLIC_SUPABASE_ANON_KEY: eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpc3MiOiJzdXBhYmFzZSIsInJlZiI6InVncXRvdWxleGh2YWhldnN5c3VxIiwicm9sZSI6ImFub24iLCJpYXQiOjE2OTMwOTE5MzUsImV4cCI6MjAwODY2NzkzNX0.RL8W-jw2rsDhimYl8KklF2B9bNTPQ-Kj5zZA0XlufUA - name: Vercel Deploy Preview if: github.base_ref != 'dev' id: vercel-deploy-preview diff --git a/apps/distributor/README.md b/apps/distributor/README.md index 8c73207ef..5e104360f 100644 --- a/apps/distributor/README.md +++ b/apps/distributor/README.md @@ -1,4 +1,3 @@ - # Send Token Distributor This is a singleton Fastify app that analyzes Ethereum mainnet blocks for Send token transfers and re-calculates the distribution shares for each Send token holder. 
@@ -20,7 +19,7 @@ if (err.error) { // send post request to distributor running at localhost:3050 -const response = await fetch('http://localhost:3050/distributor', { +const response = await fetch('http://localhost:3050/distributor/v2', { method: 'POST', body: JSON.stringify({ id: 1 }), headers: { @@ -32,7 +31,6 @@ const response = await fetch('http://localhost:3050/distributor', { // get the response body const body = await response.json() console.log(body) - ``` ## Getting Started with [Fastify-CLI](https://www.npmjs.com/package/fastify-cli) diff --git a/apps/distributor/src/app.ts b/apps/distributor/src/app.ts index 1ba277506..d02e90e67 100644 --- a/apps/distributor/src/app.ts +++ b/apps/distributor/src/app.ts @@ -1,9 +1,10 @@ import express, { type Request, type Response, Router } from 'express' import pino from 'pino' -import { DistributorWorker } from './distributor' +import { DistributorV1Worker } from './distributor' import { StandardMerkleTree } from '@openzeppelin/merkle-tree' import { selectAll } from 'app/utils/supabase/selectAll' import { supabaseAdmin } from './supabase' +import { DistributorV2Worker } from './distributorv2' const logger = pino({ level: process.env.LOG_LEVEL || 'info', @@ -11,7 +12,8 @@ const logger = pino({ }) // Initialize DistributorWorker -const distributorWorker = new DistributorWorker(logger) +const distributorV1Worker = new DistributorV1Worker(logger, false) +const distributorV2Worker = new DistributorV2Worker(logger) // Initialize Express app const app = express() @@ -25,10 +27,17 @@ app.get('/', (req, res) => { const distributorRouter = Router() -distributorRouter.get('/', async (req: Request, res: Response) => { +distributorRouter.get('/v1', async (req: Request, res: Response) => { res.json({ distributor: true, - ...distributorWorker.toJSON(), + ...distributorV1Worker.toJSON(), + }) +}) + +distributorRouter.get('/v2', async (req: Request, res: Response) => { + res.json({ + distributor: true, + ...distributorV2Worker.toJSON(), }) }) @@ -97,11 +106,11 @@ distributorRouter.post('/merkle', checkAuthorization, async (req: Request, res: res.json(result) }) -distributorRouter.post('/', checkAuthorization, async (req, res) => { +distributorRouter.post('/v1', checkAuthorization, async (req, res) => { const { id } = req.body as { id: string } logger.info({ id }, 'Received request to calculate distribution') try { - await distributorWorker.calculateDistribution(id) + await distributorV1Worker.calculateDistribution(id) } catch (err) { logger.error(err, 'Error while calculating distribution') res.status(500).json({ @@ -117,6 +126,22 @@ distributorRouter.post('/', checkAuthorization, async (req, res) => { }) }) +distributorRouter.post('/v2', checkAuthorization, async (req, res) => { + const { id } = req.body as { id: string } + logger.info({ id }, 'Received request to calculate distribution') + try { + await distributorV2Worker.calculateDistribution(id) + } catch (err) { + logger.error(err, 'Error while calculating distribution') + throw err + } + + res.json({ + distributor: true, + id: id, + }) +}) + app.use('/distributor', distributorRouter) export default app diff --git a/apps/distributor/src/distributor.test.ts b/apps/distributor/src/distributor.test.ts index a905d4af4..9f4ba1eac 100644 --- a/apps/distributor/src/distributor.test.ts +++ b/apps/distributor/src/distributor.test.ts @@ -6,7 +6,7 @@ import request from 'supertest' import app from './app' import { supabaseAdmin } from './supabase' import pino from 'pino' -import { DistributorWorker } from 
'./distributor' +import { DistributorV1Worker } from './distributor' import type { Tables } from '@my/supabase/database.types' describe('Root Route', () => { @@ -20,19 +20,19 @@ describe('Root Route', () => { describe('Distributor Route', () => { it('should reject unauthorized requests', async () => { - const res = await request(app).post('/distributor') + const res = await request(app).post('/distributor/v1') expect(res.statusCode).toBe(401) expect(res.body).toEqual('Unauthorized') }) it('should handle authorization correctly', async () => { - const res = await request(app).get('/distributor') + const res = await request(app).get('/distributor/v1') expect(res.statusCode).toBe(200) expect(res.body).toMatchObject({ distributor: true, - running: true, + running: false, }) }) @@ -58,7 +58,7 @@ describe('Distributor Route', () => { expect(distribution).toBeDefined() const res = await request(app) - .post('/distributor') + .post('/distributor/v1') .set('Content-Type', 'application/json') .set('Authorization', `Bearer ${process.env.SUPABASE_SERVICE_ROLE}`) .send({ id: distribution.number }) @@ -210,7 +210,7 @@ describe('Distributor Worker', () => { const logger = pino({ level: 'silent', }) - const distributor = new DistributorWorker(logger, false) + const distributor = new DistributorV1Worker(logger, false) await distributor.calculateDistribution('4') const expectedShares = [ diff --git a/apps/distributor/src/distributor.ts b/apps/distributor/src/distributor.ts index 7558186ca..3bb33060b 100644 --- a/apps/distributor/src/distributor.ts +++ b/apps/distributor/src/distributor.ts @@ -28,7 +28,7 @@ const jsonBigint = (key, value) => { return value } -export class DistributorWorker { +export class DistributorV1Worker { private log: Logger private running: boolean private id: string diff --git a/apps/distributor/src/distributorv2.test.ts b/apps/distributor/src/distributorv2.test.ts new file mode 100644 index 000000000..de0cf8eba --- /dev/null +++ b/apps/distributor/src/distributorv2.test.ts @@ -0,0 +1,318 @@ +// @ts-expect-error set __DEV__ for code shared between server and client +globalThis.__DEV__ = true + +import { describe, expect, it, mock } from 'bun:test' +import request from 'supertest' +import app from './app' +import { supabaseAdmin } from './supabase' +import pino from 'pino' +import { DistributorV2Worker } from './distributorv2' +import type { Tables } from '@my/supabase/database.types' + +describe('Root Route', () => { + it('should return correct response for the root route', async () => { + const res = await request(app).get('/') + + expect(res.statusCode).toBe(200) + expect(res.body).toEqual({ root: true }) + }) +}) + +describe('Distributor Route', () => { + it('should reject unauthorized requests', async () => { + const res = await request(app).post('/distributor/v2') + + expect(res.statusCode).toBe(401) + expect(res.body).toEqual('Unauthorized') + }) + + it('should handle authorization correctly', async () => { + const res = await request(app).get('/distributor/v2') + + expect(res.statusCode).toBe(200) + expect(res.body).toMatchObject({ + distributor: true, + running: true, + }) + }) + + it.skip('should perform distributor logic correctly', async () => { + const { data: distribution, error } = await supabaseAdmin + .from('distributions') + .select( + `*, + distribution_verification_values (*)` + ) + .order('number', { ascending: false }) + .limit(1) + .single() + + if (error) { + throw error + } + + if (!distribution) { + throw new Error('No distributions found') + } + + 
expect(distribution).toBeDefined() + + const res = await request(app) + .post('/distributor/v2') + .set('Content-Type', 'application/json') + .set('Authorization', `Bearer ${process.env.SUPABASE_SERVICE_ROLE}`) + .send({ id: distribution.number }) + + expect(res.statusCode).toBe(200) + expect(res.body).toMatchObject({ + distributor: true, + id: distribution.id, + }) + }) + + it.skip('should return a merkle root', async () => { + const res = await request(app) + .post('/distributor/merkle') + .set('Authorization', `Bearer ${process.env.SUPABASE_SERVICE_ROLE}`) + .send({ id: '4' }) + + expect(res.statusCode).toBe(200) + expect({ + root: res.body.root, + total: res.body.total, + }).toMatchSnapshot('distribution 4 merkle root') + }) +}) + +describe('Distributor V2 Worker', () => { + it('should calculate distribution shares', async () => { + const distribution = { + id: 4, + number: 4, + amount: 10000, + hodler_pool_bips: 10000, + bonus_pool_bips: 0, + fixed_pool_bips: 10000, + name: 'Distribution #4', + description: 'Fourth distributions of 900,000,000 SEND tokens to early hodlers', + qualification_start: '2024-04-08T00:00:00+00:00', + qualification_end: '2024-04-21T00:00:00+00:00', + claim_end: '2024-05-31T23:59:59+00:00', + hodler_min_balance: 100000, + created_at: '2024-04-06T16:49:02.569245+00:00', + updated_at: '2024-04-06T16:49:02.569245+00:00', + snapshot_block_num: 13261327, + chain_id: 845337, + distribution_verification_values: [ + { + type: 'tag_referral', + fixed_value: 50, + bips_value: 0, + multiplier_min: 1.5, + multiplier_max: 2.5, + multiplier_step: 0.1, + distribution_id: 4, + }, + { + type: 'total_tag_referrals', + fixed_value: 0, + bips_value: 0, + multiplier_min: 1.0, + multiplier_max: 2.0, + multiplier_step: 0.01, + distribution_id: 4, + }, + { + type: 'create_passkey', + fixed_value: 200, + bips_value: 0, + distribution_id: 4, + }, + { + type: 'tag_registration', + fixed_value: 100, + bips_value: 0, + distribution_id: 4, + created_at: '2024-04-06T16:49:02.569245+00:00', + updated_at: '2024-04-06T16:49:02.569245+00:00', + }, + { + type: 'send_ten', + fixed_value: 100, + bips_value: 0, + distribution_id: 4, + created_at: '2024-04-06T16:49:02.569245+00:00', + updated_at: '2024-04-06T16:49:02.569245+00:00', + }, + { + type: 'send_one_hundred', + fixed_value: 200, + bips_value: 0, + distribution_id: 4, + created_at: '2024-04-06T16:49:02.569245+00:00', + updated_at: '2024-04-06T16:49:02.569245+00:00', + }, + ], + } as Tables<'distributions'> & { + distribution_verification_values: Tables<'distribution_verification_values'>[] + } + const user_id = crypto.randomUUID() + const user_id2 = crypto.randomUUID() + const bobAddr = '0xb0b0000000000000000000000000000000000000' + const aliceAddr = '0xalice000000000000000000000000000000000000' + + const createDistributionShares = mock( + (distributionId: number, shares: Tables<'distribution_shares'>[]) => { + return Promise.resolve({ + data: null, + error: null, + }) + } + ) + + mock.module('./supabase', () => ({ + fetchDistribution: mock((id: string) => { + return Promise.resolve({ + data: distribution, + error: null, + }) + }), + /* + Back of the napkin + Pool = 10,000 + Fixed + Bobs = 200 + 200 + 100 + 100 + 50 = 650 * 1.5 * 1.01 = 985 + Alices = 100 + 100 * 1.05 = 205 + Hodlers = 10,000 - 985 - 205 = 8810 + Bobs = 8810 * 1,000,000 /1,500,000 = 5873 + Alices = 8810 * 500,000 /1,500,000 = 2937 + */ + fetchAllVerifications: mock((distributionId: number) => { + return Promise.resolve({ + data: [ + { user_id, type: 'create_passkey' }, + { + 
user_id, + type: 'tag_referral', + }, + + { + user_id, + type: 'tag_registration', + }, + { + user_id, + type: 'send_ten', + }, + { + user_id, + type: 'send_one_hundred', + }, + { + user_id, + type: 'total_tag_referrals', + metadata: { + value: 2, + }, + }, + // alice only has tag_registration + { + user_id: user_id2, + type: 'tag_registration', + }, + { + user_id: user_id2, + type: 'send_ten', + }, + { + user_id: user_id2, + type: 'total_tag_referrals', + metadata: { + value: 5, + }, + }, + ], + count: 9, + error: null, + }) + }), + fetchAllHodlers: mock((distributionId: number) => { + return Promise.resolve({ + data: [ + { + address: bobAddr, + created_at: '2024-04-06T16:49:02.569245+00:00', + user_id, + }, + { + address: aliceAddr, + created_at: '2024-04-06T16:49:02.569245+00:00', + user_id: user_id2, + }, + ], + error: null, + }) + }), + createDistributionShares, + })) + + mock.module('./wagmi', () => ({ + fetchAllBalances: mock(({ addresses, distribution }) => { + return [ + Promise.resolve({ + user_id, + address: bobAddr, + balance: '1000000', + }), + // alice has half of the balance of bob + Promise.resolve({ + user_id: user_id2, + address: aliceAddr, + balance: '500000', + }), + ] + }), + isMerkleDropActive: mock((distribution) => { + return Promise.resolve(false) + }), + })) + + const logger = pino({ + level: 'silent', + }) + const distributor = new DistributorV2Worker(logger, false) + await distributor.calculateDistribution('4') + + //Expected values are a little different than back of the napkin because of rounding + //Keep an eye on this, may need to investigate if we see distro problems + const expectedShares = [ + { + address: bobAddr, + distribution_id: 4, + user_id, + amount: '6856', + bonus_pool_amount: '0', // Always 0 in V2 + fixed_pool_amount: '984', + hodler_pool_amount: '5872', + }, + { + address: aliceAddr, + distribution_id: 4, + user_id: user_id2, + amount: '3144', + bonus_pool_amount: '0', // Always 0 in V2 + fixed_pool_amount: '208', + hodler_pool_amount: '2936', + }, + ] + expect(createDistributionShares).toHaveBeenCalled() + + // @ts-expect-error supabase-js does not support bigint + expect(createDistributionShares.mock.calls[0]).toEqual([distribution.id, expectedShares]) + + // expected share amounts cannot exceed the total distribution amount + const totalDistributionAmount = BigInt(distribution.amount) + const totalShareAmounts = expectedShares.reduce((acc, share) => acc + BigInt(share.amount), 0n) + expect(totalShareAmounts).toBeLessThanOrEqual(totalDistributionAmount) + }) +}) diff --git a/apps/distributor/src/distributorv2.ts b/apps/distributor/src/distributorv2.ts new file mode 100644 index 000000000..61fc82cc5 --- /dev/null +++ b/apps/distributor/src/distributorv2.ts @@ -0,0 +1,529 @@ +import { cpus } from 'node:os' +import type { Database, Tables } from '@my/supabase/database.types' +import type { Logger } from 'pino' +import { + createDistributionShares, + fetchAllHodlers, + fetchAllVerifications, + fetchDistribution, + supabaseAdmin, +} from './supabase' +import { fetchAllBalances, isMerkleDropActive } from './wagmi' +import { calculateWeights, PERC_DENOM } from './weights' + +type Multiplier = { + value: number + min: number + max: number + step: number +} + +const sleep = (ms: number) => new Promise((resolve) => setTimeout(resolve, ms)) + +const cpuCount = cpus().length + +const inBatches = (array: T[], batchSize = Math.max(8, cpuCount - 1)) => { + return Array.from({ length: Math.ceil(array.length / batchSize) }, (_, i) => + array.slice(i * 
batchSize, (i + 1) * batchSize) + ) +} + +const jsonBigint = (key, value) => { + if (typeof value === 'bigint') { + return value.toString() + } + return value +} + +/** + * Changes from V1: + * Fixed Pool Calculation: In V2, fixed pool amounts are calculated first from the total distribution amount, whereas V1 calculated hodler, bonus, and fixed pools separately. + * Removal of Bips: V2 no longer uses holder and bonus bips (basis points) for calculations, simplifying the distribution logic. + * Bonus Shares Elimination: In V2, bonus shares are always 0, effectively removing the bonus pool concept that existed in V1. + * Multiplier System: V2 introduces a new multiplier system, particularly for referrals and certain verification types + */ + +export class DistributorV2Worker { + private log: Logger + private running: boolean + private id: string + private lastDistributionId: number | null = null + private workerPromise: Promise + + constructor(log: Logger, start = true) { + this.id = Math.random().toString(36).substring(7) + this.log = log.child({ module: 'distributor', id: this.id }) + if (start) { + this.running = true + this.workerPromise = this.worker() + } else { + this.running = false + this.workerPromise = Promise.resolve() + } + } + + /** + * Calculates distribution shares for distributions in qualification period. + */ + private async calculateDistributions() { + this.log.info('Calculating distributions') + + const { data: distributions, error } = await supabaseAdmin + .from('distributions') + .select( + `*, + distribution_verification_values (*)`, + { count: 'exact' } + ) + .lte('qualification_start', new Date().toISOString()) + .gte('qualification_end', new Date().toISOString()) + + if (error) { + this.log.error({ error: error.message, code: error.code }, 'Error fetching distributions.') + throw error + } + + this.log.debug({ distributions }, `Found ${distributions.length} distributions.`) + + if (distributions.length === 0) { + this.log.info('No distributions found.') + return + } + + if (distributions.length > 1) { + this.log.error(`Found ${distributions.length} distributions. Only one is supported.`) + return + } + + const errors: unknown[] = [] + + for (const distribution of distributions) { + await this._calculateDistributionShares(distribution).catch((error) => errors.push(error)) + } + + if (distributions.length > 0) { + const lastDistribution = distributions[distributions.length - 1] + this.lastDistributionId = lastDistribution?.id ?? null + } else { + this.lastDistributionId = null + } + this.log.info( + { lastDistributionId: this.lastDistributionId }, + 'Finished calculating distributions.' + ) + + if (errors.length > 0) { + this.log.error(`Error calculating distribution shares. Encountered ${errors.length} errors.`) + throw errors[0] + } + } + + private async _calculateDistributionShares( + distribution: Tables<'distributions'> & { + distribution_verification_values: Tables<'distribution_verification_values'>[] + } + ): Promise { + const log = this.log.child({ distribution_id: distribution.id }) + + if (await isMerkleDropActive(distribution)) { + throw new Error('Tranche is active. 
Cannot calculate distribution shares.') + } + + log.info({ distribution_id: distribution.id }, 'Calculating distribution shares.') + + const { + data: verifications, + error: verificationsError, + count, + } = await fetchAllVerifications(distribution.id) + + if (verificationsError) { + throw verificationsError + } + + if (verifications === null || verifications.length === 0) { + log.warn('No verifications found. Skipping distribution.') + return + } + + if (count !== verifications.length) { + throw new Error('Verifications count does not match expected count') + } + + log.info(`Found ${verifications.length} verifications.`) + if (log.isLevelEnabled('debug')) { + await Bun.write( + 'dist/verifications.json', + JSON.stringify(verifications, jsonBigint, 2) + ).catch((e) => { + log.error(e, 'Error writing verifications.json') + }) + } + + const verificationValues = distribution.distribution_verification_values.reduce( + (acc, verification) => { + acc[verification.type] = { + fixedValue: BigInt(verification.fixed_value), + bipsValue: BigInt(verification.bips_value), + multiplier_min: verification.multiplier_min, + multiplier_max: verification.multiplier_max, + multiplier_step: verification.multiplier_step, + } + return acc + }, + {} as Record< + Database['public']['Enums']['verification_type'], + { + fixedValue?: bigint + bipsValue?: bigint + multiplier_min: number + multiplier_max: number + multiplier_step: number + } + > + ) + const verificationsByUserId = verifications.reduce( + (acc, verification) => { + acc[verification.user_id] = acc[verification.user_id] || [] + acc[verification.user_id]?.push(verification) + return acc + }, + {} as Record + ) + + log.info(`Found ${Object.keys(verificationsByUserId).length} users with verifications.`) + if (log.isLevelEnabled('debug')) { + await Bun.write( + 'dist/verificationsByUserId.json', + JSON.stringify(verificationsByUserId, jsonBigint, 2) + ).catch((e) => { + log.error(e, 'Error writing verificationsByUserId.json') + }) + } + + const { data: hodlerAddresses, error: hodlerAddressesError } = await fetchAllHodlers( + distribution.id + ) + + if (hodlerAddressesError) { + throw hodlerAddressesError + } + + if (hodlerAddresses === null || hodlerAddresses.length === 0) { + throw new Error('No hodler addresses found') + } + + const hodlerAddressesByUserId = hodlerAddresses.reduce( + (acc, address) => { + acc[address.user_id] = address + return acc + }, + {} as Record + ) + const hodlerUserIdByAddress = hodlerAddresses.reduce( + (acc, address) => { + acc[address.address] = address.user_id + return acc + }, + {} as Record + ) + + log.info(`Found ${hodlerAddresses.length} addresses.`) + if (log.isLevelEnabled('debug')) { + await Bun.write( + 'dist/hodlerAddresses.json', + JSON.stringify(hodlerAddresses, jsonBigint, 2) + ).catch((e) => { + log.error(e, 'Error writing hodlerAddresses.json') + }) + } + + // lookup balances of all hodler addresses in qualification period + const batches = inBatches(hodlerAddresses).flatMap(async (addresses) => { + return await Promise.all( + fetchAllBalances({ + addresses, + distribution, + }) + ) + }) + + // Filter out hodler with not enough send token balance + let minBalanceAddresses: { user_id: string; address: `0x${string}`; balance: string }[] = [] + for await (const batch of batches) { + minBalanceAddresses = minBalanceAddresses.concat(...batch) + } + + log.info(`Found ${minBalanceAddresses.length} balances.`) + + // Filter out hodler with not enough send token balance + minBalanceAddresses = minBalanceAddresses.filter( 
+ ({ balance }) => BigInt(balance) >= BigInt(distribution.hodler_min_balance) + ) + + log.info( + `Found ${minBalanceAddresses.length} balances after filtering hodler_min_balance of ${distribution.hodler_min_balance}` + ) + + if (log.isLevelEnabled('debug')) { + await Bun.write( + 'dist/balances.json', + JSON.stringify(minBalanceAddresses, jsonBigint, 2) + ).catch((e) => { + log.error(e, 'Error writing balances.json') + }) + } + + // Calculate fixed pool share weights + const distAmt = BigInt(distribution.amount) + const fixedPoolAvailableAmount = distAmt + + const minBalanceByAddress: Record = minBalanceAddresses.reduce( + (acc, balance) => { + acc[balance.address] = BigInt(balance.balance) + return acc + }, + {} as Record + ) + + let fixedPoolAllocatedAmount = 0n + const fixedPoolAmountsByAddress: Record = {} + + for (const [userId, verifications] of Object.entries(verificationsByUserId)) { + const hodler = hodlerAddressesByUserId[userId] + if (!hodler || !hodler.address) continue + const { address } = hodler + if (!minBalanceByAddress[address]) continue + + let userFixedAmount = 0n + let totalReferrals = 0 + const multipliers: Record = {} + + for (const verification of verifications) { + const verificationValue = verificationValues[verification.type] + if (!verificationValue) continue + + // Calculate fixed amount + if (verificationValue.fixedValue) { + userFixedAmount += verificationValue.fixedValue + } + + // Initialize or update multiplier info + if (!multipliers[verification.type] && verificationValue.multiplier_min) { + multipliers[verification.type] = { + value: 1.0, + min: verificationValue.multiplier_min, + max: verificationValue.multiplier_max, + step: verificationValue.multiplier_step, + } + } + const multiplierInfo = multipliers[verification.type] + if (!multiplierInfo) continue + + // Calculate multipliers + switch (verification.type) { + case 'total_tag_referrals': { + // @ts-expect-error this is json + totalReferrals = verification.metadata?.value ?? 0 + // Minus 1 from the count so 1 = multiplier min + if (totalReferrals > 0n) { + multiplierInfo.value = Math.min( + multiplierInfo.min + (totalReferrals - 1) * multiplierInfo.step, + multiplierInfo.max + ) + } else { + multiplierInfo.value = 0 + } + + break + } + case 'tag_referral': { + multiplierInfo.value = Math.max(multiplierInfo.value, multiplierInfo.min) + // Count tag_referral verifications + const tagReferralCount = verifications.filter((v) => v.type === 'tag_referral').length + // Increase multiplier for each additional tag_referral. 
Minus 1 from the count so 1 = multiplier min + for (let i = 1; i < tagReferralCount; i++) { + multiplierInfo.value = Math.min( + multiplierInfo.min + (tagReferralCount - 1) * multiplierInfo.step, + multiplierInfo.max + ) + } + break + } + } + } + + // Calculate the final multiplier + const finalMultiplier = Object.values(multipliers).reduce( + (acc, info) => acc * info.value, + 1.0 + ) + + // Apply the multiplier to the fixed amount + userFixedAmount = + (userFixedAmount * BigInt(Math.round(finalMultiplier * Number(PERC_DENOM)))) / PERC_DENOM + + if ( + userFixedAmount > 0n && + fixedPoolAllocatedAmount + userFixedAmount <= fixedPoolAvailableAmount + ) { + fixedPoolAmountsByAddress[address] = + (fixedPoolAmountsByAddress[address] || 0n) + userFixedAmount + fixedPoolAllocatedAmount += userFixedAmount + + // Log or save the multipliers for each verification type + log.debug({ userId, address, multipliers, finalMultiplier }, 'User multipliers') + } + } + + // Calculate hodler pool share weights + const hodlerPoolAvailableAmount = distAmt - fixedPoolAllocatedAmount + + let hodlerShares: { address: string; amount: bigint }[] = [] + if (hodlerPoolAvailableAmount > 0n) { + const { weightedShares } = calculateWeights(minBalanceAddresses, hodlerPoolAvailableAmount) + hodlerShares = Object.values(weightedShares) + } + + let totalAmount = 0n + let totalHodlerPoolAmount = 0n + const totalBonusPoolAmount = 0n + let totalFixedPoolAmount = 0n + + if (log.isLevelEnabled('debug')) { + await Bun.write('dist/hodlerShares.json', JSON.stringify(hodlerShares, jsonBigint, 2)).catch( + (e) => { + log.error(e, 'Error writing hodlerShares.json') + } + ) + await Bun.write( + 'dist/fixedPoolAmountsByAddress.json', + JSON.stringify(fixedPoolAmountsByAddress, jsonBigint, 2) + ).catch((e) => { + log.error(e, 'Error writing fixedPoolAmountsByAddress.json') + }) + } + + const shares = hodlerShares + .map((share) => { + const userId = hodlerUserIdByAddress[share.address] + const hodlerPoolAmount = share.amount + const fixedPoolAmount = fixedPoolAmountsByAddress[share.address] || 0n + const amount = hodlerPoolAmount + fixedPoolAmount + totalAmount += amount + totalHodlerPoolAmount += hodlerPoolAmount + totalFixedPoolAmount += fixedPoolAmount + + if (!userId) { + log.debug({ share }, 'Hodler not found for address. Skipping share.') + return null + } + + // log.debug( + // { + // address: share.address, + // balance: balancesByAddress[share.address], + // amount: amount, + // bonusBips, + // hodlerPoolAmount, + // bonusPoolAmount, + // fixedPoolAmount, + // }, + // 'Calculated share.' 
+ // ) + + // @ts-expect-error supabase-js does not support bigint + return { + address: share.address, + distribution_id: distribution.id, + user_id: userId, + amount: amount.toString(), + fixed_pool_amount: fixedPoolAmount.toString(), + hodler_pool_amount: hodlerPoolAmount.toString(), + bonus_pool_amount: '0', + } as Tables<'distribution_shares'> + }) + .filter(Boolean) as Tables<'distribution_shares'>[] + + log.info( + { + totalAmount, + totalHodlerPoolAmount, + hodlerPoolAvailableAmount, + totalBonusPoolAmount, + totalFixedPoolAmount, + fixedPoolAllocatedAmount, + fixedPoolAvailableAmount, + name: distribution.name, + shares: shares.length, + }, + 'Distribution totals' + ) + log.info(`Calculated ${shares.length} shares.`) + if (log.isLevelEnabled('debug')) { + await Bun.write('dist/shares.json', JSON.stringify(shares, jsonBigint, 2)).catch((e) => { + log.error(e, 'Error writing shares.json') + }) + } + + if (totalFixedPoolAmount > fixedPoolAvailableAmount) { + log.warn( + 'Fixed pool amount is greater than available amount. This is not a problem, but it means the fixed pool is exhausted.' + ) + } + + const totalShareAmounts = shares.reduce((acc, share) => acc + BigInt(share.amount), 0n) + if (totalShareAmounts > distAmt) { + throw new Error('Share amounts exceed total distribution amount') + } + + const { error } = await createDistributionShares(distribution.id, shares) + if (error) { + log.error({ error: error.message, code: error.code }, 'Error saving shares.') + throw error + } + } + + private async worker() { + this.log.info('Starting distributor...', { id: this.id }) + + while (this.running) { + try { + await this.calculateDistributions() + } catch (error) { + this.log.error(error, `Error processing block. ${(error as Error).message}`) + } + await sleep(60_000) + } + + this.log.info('Distributor stopped.') + } + + public async stop() { + this.log.info('Stopping distributor...') + this.running = false + return await this.workerPromise + } + + public async calculateDistribution(id: string) { + const { data: distribution, error } = await fetchDistribution(id) + if (error) { + this.log.error({ error: error.message, code: error.code }, 'Error fetching distribution.') + throw error + } + try { + return this._calculateDistributionShares(distribution) + } catch (error) { + this.log.error(error, 'Error calculating distribution.') + throw error + } + } + + public toJSON() { + return { + id: this.id, + running: this.running, + lastDistributionId: this.lastDistributionId, + } + } +} diff --git a/apps/distributor/src/weights.ts b/apps/distributor/src/weights.ts index 0b3b18ef1..9ef3b507e 100644 --- a/apps/distributor/src/weights.ts +++ b/apps/distributor/src/weights.ts @@ -66,6 +66,22 @@ export function calculateWeights( } } + //@todo: this is a hack to ensure the total distributed amount is equal to the amount + // We really should handle these rounding errors instead + let totalDistributed = 0n + for (const share of Object.values(weightedShares)) { + totalDistributed += share.amount + } + + if (totalDistributed !== amount) { + const difference = amount - totalDistributed + // Add or subtract the difference from the largest share + const largestShare = Object.values(weightedShares).reduce((a, b) => + a.amount > b.amount ? 
a : b + ) + largestShare.amount += difference + } + return { totalWeight, weightPerSend, poolWeights, weightedShares } } diff --git a/apps/next/pages/account/rewards/activity.tsx b/apps/next/pages/account/rewards/activity.tsx new file mode 100644 index 000000000..c5b643d84 --- /dev/null +++ b/apps/next/pages/account/rewards/activity.tsx @@ -0,0 +1,32 @@ +import { ActivityRewardsScreen } from 'app/features/account/rewards/activity/screen' +import Head from 'next/head' +import { userProtectedGetSSP } from 'utils/userProtected' +import type { NextPageWithLayout } from 'next-app/pages/_app' +import { HomeLayout } from 'app/features/home/layout.web' +import { ButtonOption, TopNav } from 'app/components/TopNav' +import { MobileButtonRowLayout } from 'app/components/MobileButtonRowLayout' + +export const Page: NextPageWithLayout = () => { + return ( + <> + + Send | Activity Rewards + + + + ) +} + +export const getServerSideProps = userProtectedGetSSP() + +Page.getLayout = (children) => ( + + } + > + {children} + + +) + +export default Page diff --git a/apps/next/pages/account/rewards.tsx b/apps/next/pages/account/rewards/index.tsx similarity index 62% rename from apps/next/pages/account/rewards.tsx rename to apps/next/pages/account/rewards/index.tsx index 24db3ef10..a20eed54d 100644 --- a/apps/next/pages/account/rewards.tsx +++ b/apps/next/pages/account/rewards/index.tsx @@ -17,22 +17,8 @@ export const Page: NextPageWithLayout = () => { } export const getServerSideProps = userProtectedGetSSP() - -const subheader = - 'Register at least 1 Sendtag, maintain the minimum balance, avoid selling, and refer others for a bonus multiplier. ' - Page.getLayout = (children) => ( - - } - > + }> {children} ) diff --git a/packages/app/components/MobileButtonRowLayout.tsx b/packages/app/components/MobileButtonRowLayout.tsx index a80ffea43..e895f1a6c 100644 --- a/packages/app/components/MobileButtonRowLayout.tsx +++ b/packages/app/components/MobileButtonRowLayout.tsx @@ -6,6 +6,8 @@ import { AnimatePresence, LinearGradient, usePwa, + Paragraph, + H3, } from '@my/ui' import { useSendAccount } from 'app/utils/send-accounts' import { useSendAccountBalances } from 'app/utils/useSendAccountBalances' @@ -17,7 +19,10 @@ import { useScrollDirection } from '../provider/scroll' import { ProfileButtons } from 'app/features/profile/ProfileButtons' import { useUser } from 'app/utils/useUser' import { useProfileLookup } from 'app/utils/useProfileLookup' -import { useProfileScreenParams } from 'app/routers/params' +import { useProfileScreenParams, useRewardsScreenParams } from 'app/routers/params' +import { useMonthlyDistributions } from 'app/utils/distributions' +import { DistributionClaimButton } from 'app/features/account/rewards/components/DistributionClaimButton' +import formatAmount from 'app/utils/formatAmount' const Row = styled(XStack, { w: '100%', @@ -75,6 +80,7 @@ export const Home = ({ children, ...props }: XStackProps) => { fullscreen colors={['transparent', '$background']} $gtLg={{ display: 'none' }} + pointerEvents="none" /> @@ -128,6 +134,7 @@ export const Profile = ( animateOnly={['scale', 'transform', 'opacity']} enterStyle={{ opacity: 0, scale: 0.9 }} exitStyle={{ opacity: 0, scale: 0.95 }} + pointerEvents="none" > { + const isPwa = usePwa() + const [queryParams] = useRewardsScreenParams() + const { data: distributions, isLoading } = useMonthlyDistributions() + const distribution = + distributions?.find((d) => d.number === queryParams.distribution) ?? 
distributions?.[0] + const shareAmount = distribution?.distribution_shares?.[0]?.amount + const { direction } = useScrollDirection() + + const isVisible = distribution !== undefined && shareAmount !== undefined && shareAmount > 0 + const distributionMonth = distribution?.qualification_end.toLocaleString('default', { + month: 'long', + }) + + const now = new Date() + const isQualificationOver = + distribution?.qualification_end !== undefined && distribution.qualification_end < now + + return ( + <> + {children} + + {!isLoading && isVisible && direction !== 'down' && ( + + + +

+ {isQualificationOver + ? `Total ${distributionMonth} Rewards` + : `Estimated ${distributionMonth} Rewards`} +

+ + {shareAmount === undefined ? '' : `${formatAmount(shareAmount, 10, 0)} SEND`} + + + + +
+
+ )} +
+ + ) +} + export const MobileButtonRowLayout = { Home: Home, Profile: Profile, + ActivityRewards: ActivityRewards, } diff --git a/packages/app/components/TopNav.tsx b/packages/app/components/TopNav.tsx index 442684691..98f0fbcef 100644 --- a/packages/app/components/TopNav.tsx +++ b/packages/app/components/TopNav.tsx @@ -224,7 +224,6 @@ export function TopNav({ fontWeight={'300'} col="$color10" lineHeight={32} - $gtLg={{ ml: isSubRoute ? '$4' : '$0' }} display={selectedCoin ? 'none' : 'flex'} als={'center'} > @@ -239,7 +238,7 @@ export function TopNav({ fontWeight={'300'} col="$color10" lineHeight={32} - $gtLg={{ ml: isSubRoute ? '$4' : '$0' }} + //ml= {isSubRoute ? '$4' : '$0' } display={isSubRoute ? 'flex' : 'none'} als={'center'} > @@ -268,7 +267,7 @@ export function TopNav({ lineHeight={24} py="$3" $gtSm={{ py: '$6' }} - $gtLg={{ pl: '$1', pb: '$6', pt: '$0', ...{ ml: isSubRoute ? '$4' : '$1' } }} + $gtLg={{ pl: '$1', pb: '$6', pt: '$0' }} col="$color10" > {subheader} diff --git a/packages/app/features/account/rewards/__snapshots__/screen.test.tsx.snap b/packages/app/features/account/rewards/__snapshots__/screen.test.tsx.snap deleted file mode 100644 index 5bb7dff91..000000000 --- a/packages/app/features/account/rewards/__snapshots__/screen.test.tsx.snap +++ /dev/null @@ -1,692 +0,0 @@ -// Jest Snapshot v1, https://goo.gl/fbAQLP - -exports[`EarnTokensScreen renders: EarnTokensScreen 1`] = ` - - - - - - ROUND - - - - - # - 1 - - - - - - - Expired Jul 12, 2024 - - - - - Status - - - OPEN - - - - - - - - - - - - Snapshot Send Balance - - - 0 SEND - - - - - - - - - Min Balance required - - - ? - - - - - - - Referrals - - - 123 - - - - - - - - - - - Rewards - - - - 1 SEND - - - 1.00 USD - - - - - - - - - - - - - - - - # 1 - - - - - - - -`; diff --git a/packages/app/features/account/rewards/activity/__snapshots__/screen.test.tsx.snap b/packages/app/features/account/rewards/activity/__snapshots__/screen.test.tsx.snap new file mode 100644 index 000000000..a6270a257 --- /dev/null +++ b/packages/app/features/account/rewards/activity/__snapshots__/screen.test.tsx.snap @@ -0,0 +1,1355 @@ +// Jest Snapshot v1, https://goo.gl/fbAQLP + +exports[`ActivityRewardsScreen renders: ActivityRewardsScreen 1`] = ` +[ + + + + + + + + + Unlock +
+ Extra Rewards +
+ + Register at least 1 Sendtag, maintain the minimum balance, +
+ avoid selling, and refer others for a bonus multiplier. +
+
+
+ + + 1,721,001,600,000 Rewards + + + + 1,721,001,600,000 1,721,001,600,000 + + + + + + + + + + + + + + + Your SEND Balance + + + 0 SEND + + + + + + Min. Balance + + + + + + + + + + + + Sendtag Registered + + + + + + + + + + + + + + + Perks + + + + + + Multiplier + + + + + + Estimated 1,721,001,600,000 Rewards + + + + + 1 SEND + + + + + + , + + + + + + + Select Month + + + + + + + + + + + + + + + + + 1,721,001,600,000 1,721,001,600,000 + + + + + + + + + + + + + + + , +] +`; diff --git a/packages/app/features/account/rewards/screen.test.tsx b/packages/app/features/account/rewards/activity/screen.test.tsx similarity index 74% rename from packages/app/features/account/rewards/screen.test.tsx rename to packages/app/features/account/rewards/activity/screen.test.tsx index b02ecf7c6..8ada92fcf 100644 --- a/packages/app/features/account/rewards/screen.test.tsx +++ b/packages/app/features/account/rewards/activity/screen.test.tsx @@ -1,13 +1,12 @@ import { Wrapper } from 'app/utils/__mocks__/Wrapper' -import { RewardsScreen } from './screen' +import { ActivityRewardsScreen } from './screen' import { act, render, screen } from '@testing-library/react-native' -jest.mock('app/utils/useUser') jest.mock('app/utils/distributions', () => ({ - useDistributions: () => ({ + useMonthlyDistributions: () => ({ data: [ { - number: 1, + number: 7, chain_id: 845337, qualification_end: Date.UTC(2024, 6, 15), distribution_shares: [ @@ -41,12 +40,13 @@ jest.mock('app/utils/distributions', () => ({ }), })) -jest.mock('app/utils/useChainAddresses', () => ({ - useChainAddresses: jest.fn().mockReturnValue({ data: { address: '0x123' } }), -})) jest.mock('app/routers/params', () => ({ useRewardsScreenParams: () => [{ distributionNumber: 1 }, jest.fn()], })) + +jest.mock('app/utils/useChainAddresses', () => ({ + useChainAddresses: jest.fn().mockReturnValue({ data: { address: '0x123' } }), +})) jest.mock('wagmi') jest.mock('@web3modal/wagmi/react', () => ({ useWeb3Modal: jest.fn().mockReturnValue({ open: jest.fn() }), @@ -70,27 +70,43 @@ jest.mock('@my/wagmi', () => ({ error: null, }), })) -jest.mock('app/utils/coin-gecko', () => ({ - useSendPrice: jest - .fn() - .mockReturnValue({ data: { 'send-token': { usd: 1 } }, isSuccess: true, error: null }), -})) + jest.mock('app/utils/tags', () => ({ useConfirmedTags: jest.fn().mockReturnValue({ data: [{ name: 'tag1' }, { name: 'tag2' }] }), })) -describe('EarnTokensScreen', () => { + +jest.mock('app/utils/send-accounts', () => ({ + useSendAccount: jest.fn().mockReturnValue({ + data: { + avatar_url: 'https://avatars.githubusercontent.com/u/123', + name: 'test', + about: 'test', + refcode: 'test', + tag: 'test', + address: '0x123', + phone: 'test', + chain_id: 1, + is_public: true, + sendid: 1, + all_tags: ['test'], + }, + }), +})) + +describe('ActivityRewardsScreen', () => { it('renders', async () => { jest.useFakeTimers() jest.setSystemTime(Date.UTC(2024, 6, 12)) render( - + ) await act(async () => { jest.advanceTimersByTime(2000) }) - expect(screen.toJSON()).toMatchSnapshot('EarnTokensScreen') + expect(screen.getByTestId('SelectDistributionDate')).toBeVisible() + expect(screen.toJSON()).toMatchSnapshot('ActivityRewardsScreen') }) }) diff --git a/packages/app/features/account/rewards/activity/screen.tsx b/packages/app/features/account/rewards/activity/screen.tsx new file mode 100644 index 000000000..50aaea9d2 --- /dev/null +++ b/packages/app/features/account/rewards/activity/screen.tsx @@ -0,0 +1,573 @@ +import { + YStack, + H1, + Paragraph, + XStack, + Button, + Image, + 
LinearGradient, + Stack, + Spinner, + Select, + H3, + Adapt, + Sheet, + type SelectItemProps, + Card, + Label, + Theme, + type CardProps, +} from '@my/ui' +import { type sendTokenAddress, useReadSendTokenBalanceOf } from '@my/wagmi' +import { CheckCircle2, ChevronDown, ChevronUp, Dot } from '@tamagui/lucide-icons' +import { IconAccount, IconInfoCircle, IconX } from 'app/components/icons' +import { useRewardsScreenParams } from 'app/routers/params' +import { useMonthlyDistributions, type UseDistributionsResultData } from 'app/utils/distributions' +import formatAmount from 'app/utils/formatAmount' +import { zeroAddress } from 'viem' +import { type PropsWithChildren, useRef, useId, useState } from 'react' +import { DistributionClaimButton } from '../components/DistributionClaimButton' + +//@todo get this from the db +const verificationTypesAndTitles = [ + ['create_passkey', 'Create a Passkey'], + ['tag_registration', 'Register a Sendtag', '(per tag)'], + ['send_ten', '10+ Sends'], + ['send_one_hundred', '100+ Sends'], + ['tag_referral', 'Referrals'], + ['total_tag_referrals', 'Total Referrals'], +] as const + +export function ActivityRewardsScreen() { + const [queryParams, setRewardsScreenParams] = useRewardsScreenParams() + const { data: distributions, isLoading } = useMonthlyDistributions() + const [isOpen, setIsOpen] = useState(false) + const id = useId() + + const selectTriggerRef = useRef(null) + + const initialDistributionIndex = distributions?.findIndex( + (d) => d.number === queryParams.distribution + ) + + const [selectedDistributionIndex, setSelectedDistributionIndex] = useState( + !initialDistributionIndex || initialDistributionIndex === -1 ? 0 : initialDistributionIndex + ) + + if (isLoading) + return ( + +
+ + + + + ) + if (!distributions || !distributions[selectedDistributionIndex]) + return ( + +
+ + + No rewards available + + + + ) + + const distributionDates = distributions.map( + (d) => + `${d.qualification_end.toLocaleString('default', { + month: 'long', + })} ${d.qualification_end.toLocaleString('default', { year: 'numeric' })}` + ) + + const onValueChange = (value: string) => { + setSelectedDistributionIndex(Number(value)) + setRewardsScreenParams( + { distribution: distributions[Number(value)]?.number }, + { webBehavior: 'replace' } + ) + } + + return ( + +
+ +

+ {`${distributionDates[selectedDistributionIndex]?.split(' ')[0]} Rewards`} +

+ +
+ + + + + + + + ) +} + +const Header = () => ( + + + + + +

+ Unlock
+ Extra Rewards +

+ + Register at least 1 Sendtag, maintain the minimum balance, +
avoid selling, and refer others for a bonus multiplier. +
+
+
+) + +const DistributionRequirementsCard = ({ + distribution, +}: { distribution: UseDistributionsResultData[number] }) => { + const { + data: snapshotBalance, + isLoading: isLoadingSnapshotBalance, + error: snapshotBalanceError, + } = useReadSendTokenBalanceOf({ + chainId: distribution.chain_id as keyof typeof sendTokenAddress, + args: [distribution.distribution_shares.at(0)?.address ?? zeroAddress], + blockNumber: distribution.snapshot_block_num + ? BigInt(distribution.snapshot_block_num) + : undefined, + query: { + enabled: !!distribution.distribution_shares.at(0)?.address, + }, + }) + + if (snapshotBalanceError) throw snapshotBalanceError + + const sendTagRegistrations = + distribution.distribution_verifications_summary.at(0)?.tag_registrations + + return ( + + + + + {isLoadingSnapshotBalance ? ( + + ) : ( + + + {`${formatAmount(snapshotBalance?.toString() ?? 0, 9, 0)} SEND`} + + + )} + + + + + Min. Balance {formatAmount(distribution.hodler_min_balance, 9, 0)} + + {(() => { + switch (true) { + case isLoadingSnapshotBalance: + return + case distribution.hodler_min_balance === undefined || + distribution.hodler_min_balance > (snapshotBalance ?? 0): + return ( + + + + ) + default: + return ( + + ) + } + })()} + + + + Sendtag Registered + {sendTagRegistrations && sendTagRegistrations > 0 ? ( + + ) : ( + + + + )} + + + + + ) +} + +const SendPerksCards = ({ distribution }: { distribution: UseDistributionsResultData[number] }) => { + const verificationValues = + distribution.distribution_verifications_summary.at(0)?.verification_values + + const now = new Date() + const isQualificationOver = distribution.qualification_end < now + + return ( + +

+ Perks +

+ + {verificationTypesAndTitles + .filter( + ([verificationType]) => + (verificationValues?.[verificationType].fixed_value > 0 && !isQualificationOver) || + (isQualificationOver && + verificationValues?.[verificationType].count !== 0 && + verificationValues?.[verificationType].fixed_value > 0) + ) + .map(([verificationType, title, details]) => ( + + +

+ {title} +

+ + + {verificationValues?.[verificationType]?.fixed_value.toLocaleString() ?? 0} SEND{' '} + {details ?? ''} + +
+
+ ))} +
+
+ ) +} + +const PerkCard = ({ + isCompleted, + children, +}: PropsWithChildren & { isCompleted: boolean }) => { + return ( + + + {isCompleted ? ( + <> + + Completed + + ) : ( + <> + + + + Pending + + )} + + {children} + + ) +} + +const MultiplierCards = ({ + distribution, + distributionDate, +}: { + distribution: UseDistributionsResultData[number] + distributionDate?: string +}) => { + const multipliers = distribution.distribution_verifications_summary[0]?.multipliers + + return ( + +

+ Multiplier +

+ + {verificationTypesAndTitles + .filter(([verificationType]) => multipliers?.[verificationType].multiplier_step > 0) + .map(([verificationType, title]) => ( + + + +

+ {verificationType === 'tag_referral' + ? distributionDate?.split(' ')[0] ?? 'Monthly' + : ''}{' '} + {title} +

+
+ + X {multipliers?.[verificationType].value ?? 1} + +
+ ))} +
+
+ ) +} + +const MultiplierCard = ({ children }: PropsWithChildren) => { + return ( + + + {children} + + + ) +} + +const ClaimableRewardsCard = ({ + distribution, +}: { distribution: UseDistributionsResultData[number] }) => { + const shareAmount = distribution.distribution_shares?.[0]?.amount + if (shareAmount === undefined || shareAmount === 0) return null + const now = new Date() + const isQualificationOver = distribution.qualification_end < now + + const distributionMonth = distribution.qualification_end.toLocaleString('default', { + month: 'long', + }) + + return ( + +

+ {isQualificationOver + ? `Total ${distributionMonth} Rewards` + : `Estimated ${distributionMonth} Rewards`} +

+ + + + {shareAmount === undefined ? 'N/A' : `${formatAmount(shareAmount, 10, 0)} SEND`} + + + + +
+ ) +} + +const DistributionItem = ({ + isActive, + value, + index, + children, + ...props +}: { + isActive: boolean +} & SelectItemProps) => { + return ( + + + + {children} + + {isActive && ( + + + + )} + + + ) +} diff --git a/packages/app/features/account/rewards/components/DistributionClaimButton.tsx b/packages/app/features/account/rewards/components/DistributionClaimButton.tsx index fec720f29..a6da184bf 100644 --- a/packages/app/features/account/rewards/components/DistributionClaimButton.tsx +++ b/packages/app/features/account/rewards/components/DistributionClaimButton.tsx @@ -1,14 +1,6 @@ -import { - Anchor, - Button as ButtonOg, - ButtonText, - Paragraph, - ScrollView, - Spinner, - type ButtonProps, - Stack, -} from '@my/ui' +import { Button as ButtonOg, Paragraph, ScrollView, Spinner, type ButtonProps, Stack } from '@my/ui' import type { sendMerkleDropAddress } from '@my/wagmi' +import { IconDollar } from 'app/components/icons' import { assert } from 'app/utils/assert' import { type UseDistributionsResultData, @@ -16,17 +8,8 @@ import { useSendMerkleDropIsClaimed, useSendMerkleDropTrancheActive, } from 'app/utils/distributions' -import { shorten } from 'app/utils/strings' -import { - useAccount, - useConnect, - useWriteContract, - useSwitchChain, - useWaitForTransactionReceipt, -} from 'wagmi' -import { OpenConnectModalWrapper } from 'app/utils/OpenConnectModalWrapper' -import { useWeb3Modal } from '@web3modal/wagmi/react' +import { useWriteContract, useWaitForTransactionReceipt } from 'wagmi' interface DistributionsClaimButtonProps { distribution: UseDistributionsResultData[number] @@ -58,10 +41,7 @@ export const DistributionClaimButton = ({ distribution }: DistributionsClaimButt tranche: trancheId, index: share?.index !== undefined ? BigInt(share.index) : undefined, }) - const { isConnected, address: account, chain: accountChain } = useAccount() - const { open: openConnectModal } = useWeb3Modal() - const { error: connectError } = useConnect() - const { chains, switchChain, error: switchError } = useSwitchChain() + const { data: claimWriteConfig, error: claimWriteConfigError, @@ -94,21 +74,9 @@ export const DistributionClaimButton = ({ distribution }: DistributionsClaimButt // If the user is eligible but has already claimed, show the claim button disabled if (isClaimed) { return ( - <> - - Already claimed - {claimReceiptSuccess && ( - - - {shorten(claimWriteHash)} - - - )} - - + + Claimed + ) } @@ -124,7 +92,10 @@ export const DistributionClaimButton = ({ distribution }: DistributionsClaimButt return ( <> ) - if (!isConnected) { - return ( - <> - - - - - {connectError ? ( - connectError.message?.includes('Connector not found') ? ( - - ) : ( - - ) - ) : null} - - ) - } - - if (distribution.chain_id !== accountChain?.id) { - const distributionChain = chains.find((c) => c.id === distribution.chain_id) - assert(!!distributionChain, `No chain found for ${distribution.chain_id}`) - return ( - <> - - - - ) - } - - // If the user is eligible but has already claimed, show the claim button disabled - if (isClaimed) { - return ( - - - Already claimed - {claimReceiptSuccess && ( - - - {shorten(claimWriteHash)} - - - )} - - - ) - } - - if (account !== share?.address) { - return ( - <> - - Please switch to the address you verified previously to claim,{' '} - - {shorten(share?.address)} - - . 
- - - Connected address:{' '} - - {shorten(account)} - - - - ) - } - if (claimWriteConfigError) { return ( <> @@ -289,17 +158,6 @@ export const DistributionClaimButton = ({ distribution }: DistributionsClaimButt error={`Error claiming. Please try again later. ${writeClaimError.message}`} /> )} - {claimReceiptSuccess && ( - - Claimed!{' '} - - {shorten(claimWriteHash)} - - - )} ) } diff --git a/packages/app/features/account/rewards/screen.tsx b/packages/app/features/account/rewards/screen.tsx index c65e33d4d..ce67db6a4 100644 --- a/packages/app/features/account/rewards/screen.tsx +++ b/packages/app/features/account/rewards/screen.tsx @@ -1,594 +1,159 @@ +import { YStack, H1, Paragraph, XStack, LinkableButton, Button, Image, Stack } from '@my/ui' +import type { sendMerkleDropAddress } from '@my/wagmi' +import { IconArrowRight, IconSend } from 'app/components/icons' import { - Button, - ButtonText, - Card, - H1, - H2, - H3, - Label, - Link, - Paragraph, - ScrollView, - Spinner, - Stack, - Text, - Theme, - View, - XStack, - YStack, - useThemeName, -} from '@my/ui' -import { - type UseDistributionsResultData, - useDistributions, + useMonthlyDistributions, + useSendMerkleDropIsClaimed, useSendMerkleDropTrancheActive, } from 'app/utils/distributions' -import { useRewardsScreenParams } from 'app/routers/params' -import { type TimeRemaining, useTimeRemaining } from 'app/utils/useTimeRemaining' -import { useChainAddresses } from 'app/utils/useChainAddresses' -import { DistributionClaimButton } from './components/DistributionClaimButton' -import { type sendMerkleDropAddress, sendTokenAddress, useReadSendTokenBalanceOf } from '@my/wagmi' -import { assert } from 'app/utils/assert' -import formatAmount from 'app/utils/formatAmount' -import { useSendPrice } from 'app/utils/coin-gecko' -import { useConfirmedTags } from 'app/utils/tags' -import { IconPlus } from 'app/components/icons' export function RewardsScreen() { - const { data: distributions, isLoading } = useDistributions() - const sortedDistributions = distributions?.sort((a, b) => a.number - b.number) - - const [queryParams] = useRewardsScreenParams() - const selectedDistributionIndex = queryParams.distribution - ? queryParams.distribution - 1 - : sortedDistributions - ? sortedDistributions.length - 1 - : 0 - - const selectedDistribution = sortedDistributions?.at(selectedDistributionIndex) - - if (isLoading) - return ( - - - - ) - - return ( - - {selectedDistribution ? ( - <> - - - - - - ) : ( - -

No distributions available

-
- )} -
- ) -} - -const DistributionRewardsSection = ({ - distribution, -}: { distribution: UseDistributionsResultData[number] }) => { - const trancheId = BigInt(distribution.number - 1) // tranches are 0-indexed - const chainId = distribution.chain_id as keyof typeof sendMerkleDropAddress - const { - data: isTrancheActive, - isLoading: isTrancheActiveLoading, - error: isTrancheActiveError, - } = useSendMerkleDropTrancheActive({ + const { data: distributions, isLoading: isLoadingDistributions } = useMonthlyDistributions() + const currentDistribution = distributions?.[0] + const trancheId = BigInt((currentDistribution?.number ?? 0) - 1) // tranches are 0-indexed + const chainId = currentDistribution?.chain_id as keyof typeof sendMerkleDropAddress + const share = currentDistribution?.distribution_shares?.[0] + + // find out if the tranche is active using SendMerkleDrop.trancheActive(uint256 _tranche) + const { data: isTrancheActive, isLoading: isTrancheActiveLoading } = + useSendMerkleDropTrancheActive({ + tranche: trancheId, + chainId: chainId, + }) + // find out if user is eligible onchain using SendMerkleDrop.isClaimed(uint256 _tranche, uint256 _index) + const { data: isClaimed, isLoading: isClaimedLoading } = useSendMerkleDropIsClaimed({ + chainId, tranche: trancheId, - chainId: chainId, + index: share?.index !== undefined ? BigInt(share.index) : undefined, }) - const shareAmount = distribution.distribution_shares?.[0]?.amount - - const now = new Date() - const isBeforeQualification = now < distribution.qualification_start - const isDuringQualification = - now >= distribution.qualification_start && now <= distribution.qualification_end - const isAfterQualification = now > distribution.qualification_end - const isClaimable = now > distribution.qualification_end && now <= distribution.claim_end - - const confirmedTags = useConfirmedTags() - - const timeRemaining = useTimeRemaining( - isDuringQualification - ? distribution.qualification_end - : isClaimable - ? distribution.claim_end - : now - ) return ( - - - - - - -

- #{distribution.number} -

-
-
- - + + +

- - - {(() => { - switch (true) { - case isBeforeQualification: - return ( - - Round has not started - - ) - case shareAmount === undefined || - shareAmount === 0 || - confirmedTags?.length === 0: - return ( - - Not eligible - - ) - case isDuringQualification: - return ( - <> - - - - - - ) - case isTrancheActiveLoading: - return ( - - Checking claimability... - - ) - case !!isTrancheActiveError: - return ( - - Error checking claimability. Please try again later - - ) - case isAfterQualification && !isTrancheActive: - return ( - - Rewards will be available soon - - ) - - case isClaimable: - return ( - - Claim Rewards - - ) - default: - return ( - - {`Expired ${now.toLocaleDateString(undefined, { - year: 'numeric', - month: 'short', - day: 'numeric', - })}`} - - ) - } - })()} - - - - - - - - - - - - {confirmedTags?.length === -1 ? ( - - - Register a Sendtag to unlock rewards + Invest Time, EARN Send +

+ + Participate in the Send Ecosystem and earn Send Tokens. Your Network! Your Rewards! - - - - - - SENDTAGS - - - - - - ) : ( - - - - - - - - - - - - - - - )} +
+ + + {/* @TODO: href, reward */} +
{ + switch (true) { + case !share || !share.amount: + return undefined + case !isTrancheActive: + return 'Upcoming Reward' + case isClaimed: + return 'Claimed' + default: + return 'Claimable' + } + })()} + /> + + ) } -const DistributionRewardTimer = ({ timeRemaining }: { timeRemaining: TimeRemaining }) => { - return ( - - - {String(timeRemaining.days).padStart(2, '0')}D - - : - - {String(timeRemaining.hours).padStart(2, '0')}Hr - - : - - {String(timeRemaining.minutes).padStart(2, '0')}Min - - : - - {String(timeRemaining.seconds).padStart(2, '0')}Sec - - - ) -} - -const DistributionRewardTimerDigit = ({ children }: { children?: string | string[] }) => ( - - {children} - -) - -const SendBalanceCard = ({ - distribution, -}: { distribution: UseDistributionsResultData[number] }) => { - const { - data: addresses, - isLoading: isLoadingChainAddresses, - error: chainAddressesError, - } = useChainAddresses() - - if (chainAddressesError) throw chainAddressesError - - const address = addresses?.[0]?.address - - const chainId = distribution.chain_id as keyof typeof sendTokenAddress - assert(chainId in sendTokenAddress, 'Chain ID not found in sendTokenAddress') - - const { - data: snapshotBalance, - isLoading: isLoadingSnapshotBalance, - error: snapshotBalanceError, - } = useReadSendTokenBalanceOf({ - chainId, - args: address ? [address] : undefined, - blockNumber: distribution.snapshot_block_num - ? BigInt(distribution.snapshot_block_num) - : undefined, - query: { - enabled: !!address, - }, - }) - - if (snapshotBalanceError) throw snapshotBalanceError - +const Section = ({ + title, + href, + reward, + isLoading = false, + claimStatus, +}: { + //@todo: using props like this is weird, better to pass children so we don't have to pass as much state + title: string + href: string + reward: string + isLoading?: boolean + claimStatus?: 'Claimable' | 'Claimed' | 'Upcoming Reward' +}) => { return ( - - - - {isLoadingSnapshotBalance || isLoadingChainAddresses ? ( - - ) : ( - - - {(() => { - switch (true) { - case snapshotBalance === undefined: - return 'Error fetching SEND balance' - default: - return `${formatAmount(snapshotBalance.toString(), 9, 0)} SEND` - } - })()} + + + {title} + + + + + + {isLoading ? '' : claimStatus} - - )} - - - ) -} -const MinBalanceCard = ({ hodler_min_balance }: { hodler_min_balance: number }) => ( - - - - - {hodler_min_balance ? `${formatAmount(hodler_min_balance, 9, 0)} SEND` : '?'} - - - -) - -const ReferralsCard = ({ referrals }: { referrals: number | null }) => ( - - - - - - {referrals !== null ? referrals : '---'} - - - -) - -const SendRewardsCard = ({ - distribution, -}: { distribution: UseDistributionsResultData[number] }) => { - const shareAmount = distribution.distribution_shares?.[0]?.amount - const { data: sendPrice } = useSendPrice() - const pricePerSend = sendPrice?.['send-token'].usd - const rewardValue = pricePerSend && shareAmount ? shareAmount * pricePerSend : undefined - - return ( - - - - - - - - {shareAmount === undefined ? 'N/A' : `${formatAmount(shareAmount, 10, 0)} SEND`} - - - {rewardValue && ( - - {`${rewardValue.toFixed(2)} USD`} - - )} - - - - - - ) -} - -const DistributionStatus = ({ - distribution, -}: { distribution: UseDistributionsResultData[number] }) => { - const isClaimActive = distribution.qualification_end > new Date() - return ( -

- {isClaimActive ? 'OPEN' : 'CLOSED'} -

- ) -} - -const DistributionRewardsList = ({ - distributions, -}: { distributions?: (UseDistributionsResultData[number] | undefined)[] }) => { - const { isLoading, error } = useDistributions() - const [queryParams, setParams] = useRewardsScreenParams() - - const isDark = useThemeName().includes('dark') - - if (error) throw error - - if (isLoading) return - - if (!distributions) return - - return ( - - - - {distributions?.map((distribution, i) => { - return distribution?.id === undefined ? ( - + + {isLoading ? ( + + ) : ( - {`# ${i + 1}`} + {reward === '' ? '' : `${reward} SEND`} - - ) : queryParams.distribution === distribution?.number || - (queryParams.distribution === undefined && - distribution?.number === distributions?.length) ? ( - - - - - ) : ( - - ) - })} + )} + + + + + + + - - + + ) } - -const DistributionRewardsSkeleton = () => { - return null -} diff --git a/packages/app/utils/distributions.ts b/packages/app/utils/distributions.ts index 689adc31a..cc31a8cac 100644 --- a/packages/app/utils/distributions.ts +++ b/packages/app/utils/distributions.ts @@ -14,6 +14,7 @@ import { type UseQueryResult, useQuery } from '@tanstack/react-query' import { useSupabase } from 'app/utils/supabase/useSupabase' import { useBalance, useSimulateContract } from 'wagmi' import { api } from './api' +import { useSendAccount } from './send-accounts' export const DISTRIBUTION_INITIAL_POOL_AMOUNT = BigInt(20e9) @@ -38,6 +39,39 @@ export const useDistributions = (): UseQueryResult ({ + ...distribution, + qualification_end: new Date(distribution.qualification_end), + qualification_start: new Date(distribution.qualification_start), + claim_end: new Date(distribution.claim_end), + })) + }, + }) +} + +//@todo: make a Zod type for the JSON in distribution_verifications_summary +/* +After distribution 6 we switched to monthly distributions +This function cuts out the first 6 distributions +*/ +export const useMonthlyDistributions = () => { + const supabase = useSupabase() + const { data: sendAccount } = useSendAccount() + return useQuery({ + queryKey: ['monthly_distributions', sendAccount?.created_at], + queryFn: async () => { + const { data, error } = await supabase + .from('distributions') + .select('*, distribution_shares(*), distribution_verifications_summary(*)') + .gt('number', 6) + .gt('qualification_end', sendAccount?.created_at) + .order('number', { ascending: false }) if (error) { throw error diff --git a/packages/contracts/package.json b/packages/contracts/package.json index 9a2bbe90e..c4bb998a0 100644 --- a/packages/contracts/package.json +++ b/packages/contracts/package.json @@ -11,7 +11,7 @@ ], "scripts": { "build": "forge build", - "gen-dist-merkle-tree": "bun run ./script/gen-dist-merkle-tree.ts", + "gen-dist-merkle-tree": "bun run --env-file=../../.env ./script/gen-dist-merkle-tree.ts", "forge": "forge", "clean": "forge clean", "test": "forge test", diff --git a/packages/playwright/tests/account-rewards.onboarded.spec.ts b/packages/playwright/tests/account-rewards.onboarded.spec.ts index 33ff3af7c..250b65626 100644 --- a/packages/playwright/tests/account-rewards.onboarded.spec.ts +++ b/packages/playwright/tests/account-rewards.onboarded.spec.ts @@ -12,5 +12,6 @@ test.beforeEach(async ({ page }) => { test('can visit rewards page', async ({ page }) => { await page.goto('/account/rewards') await expect(page).toHaveURL('/account/rewards') - await expect(page.getByRole('heading', { name: 'Send Rewards', exact: true })).toBeVisible() + await expect(page.getByText('Rewards', { exact: true 
})).toBeVisible() + await expect(page.getByRole('heading', { name: 'Claim Your Network Benefits' })).toBeVisible() }) diff --git a/packages/snaplet/.gitignore b/packages/snaplet/.gitignore new file mode 100644 index 000000000..6f685ede8 --- /dev/null +++ b/packages/snaplet/.gitignore @@ -0,0 +1 @@ +.snaplet/snapshots/* diff --git a/packages/snaplet/.snaplet/dataModel.json b/packages/snaplet/.snaplet/dataModel.json index 82febd6a1..e702d5be7 100644 --- a/packages/snaplet/.snaplet/dataModel.json +++ b/packages/snaplet/.snaplet/dataModel.json @@ -9637,4 +9637,4 @@ ] } } -} \ No newline at end of file +} diff --git a/packages/snaplet/README.md b/packages/snaplet/README.md index fa8ef7735..fd6af5387 100644 --- a/packages/snaplet/README.md +++ b/packages/snaplet/README.md @@ -8,9 +8,21 @@ It seeds our database using two main methods: [`seed.ts`](./seed.ts): This resets the database and seeds it with a set of default data. It is used to seed the database for local development esepcially for data that is not available in the production database. +## Capturing snapshots + +`bunx @snaplet/snapshot snapshot capture`: This captures a snapshot of the database and saves it locally. It can then be shared with other developers by uploading the snapshot to snaplet's cloud storage. + +**⚠️ Capturing snapshots** requires access to the production database. + +```shell +# set to production database url +export SNAPLET_SOURCE_DATABASE_URL=postgresql://postgres:postgres@127.0.0.1:54322/postgres +bunx @snaplet/snapshot snapshot capture +``` + ## Restoring from snapshot -`bunx snaplet snapshot restore --no-reset`: This restores the database from a snapshot hosted in snaplet's cloud storage. It is used to restore the database for local development and restores production-like data. This is useful for testing and debugging. +`bunx @snaplet/snapshot snapshot restore --no-reset`: This restores the database from a snapshot hosted in snaplet's cloud storage. It is used to restore the database for local development and restores production-like data. This is useful for testing and debugging. **⚠️ When restoring from snapshot** migrations are not run and can make your local database inconsistent with the production database or even fail to restore some data. To mitigate this, remove any migrations that are not in production yet. See below for how to remove migrations to overcome this. @@ -26,22 +38,12 @@ git diff --name-only --diff-filter=A origin/main..HEAD -- supabase/migrations | ### Restore ```shell +# set target database url to local development database +export SNAPLET_TARGET_DATABASE_URL=$SUPABASE_DB_URL # now run the snapshot restore command bunx supabase db reset && \ -bunx snaplet snapshot restore --no-reset --latest && \ +bunx @snaplet/snapshot snapshot restore --no-reset --latest && \ git checkout ./supabase/migrations && \ bunx supabase db push --local --include-all psql $SUPABASE_DB_URL -c "insert into send_accounts (user_id, address, chain_id, init_code) select u.id as user_id, c.address, '845337' as chain_id, CONCAT( '\\x00', upper( CONCAT( md5(random() :: text), md5(random() :: text), md5(random() :: text), md5(random() :: text) ) ) ) :: bytea as init_code from auth.users u join chain_addresses c on c.user_id = u.id where user_id not in ( select user_id from send_accounts );" ``` - -## Capturing snapshots - -`bunx snaplet snapshot capture`: This captures a snapshot of the database and saves it locally. It can then be shared with other developers by uploading the snapshot to snaplet's cloud storage. 
- -**⚠️ Capturing snapshots** requires access to the production database. - -```shell -export SNAPLET_SOURCE_DATABASE_URL=postgresql://postgres:postgres@127.0.0.1:54322/postgres -bunx snaplet snapshot capture -bunx snaplet snapshot share -``` diff --git a/packages/snaplet/bin/snaplet.ts b/packages/snaplet/bin/snaplet.ts index fe820a1a2..77d352a79 100644 --- a/packages/snaplet/bin/snaplet.ts +++ b/packages/snaplet/bin/snaplet.ts @@ -74,10 +74,12 @@ if (argv.restore) { process.exit(1) }) // restore the database from the latest snapshot - await $`bunx snaplet snapshot restore --no-reset --latest`.catch((e) => { - console.log(chalk.red('Error restoring database:'), e) - process.exit(1) - }) + await $`env SNAPLET_TARGET_DATABASE_URL=$SUPABASE_DB_URL bunx @snaplet/snapshot snapshot restore --no-reset --latest`.catch( + (e) => { + console.log(chalk.red('Error restoring database:'), e.stderr) + process.exit(1) + } + ) if (rmMigs) { // now migrate the database with the latest migrations await $`git checkout ${prjRoot}/supabase/migrations`.catch((e) => { diff --git a/packages/snaplet/snaplet.config.ts b/packages/snaplet/snaplet.config.ts index 01c95f8eb..6f272ffe4 100644 --- a/packages/snaplet/snaplet.config.ts +++ b/packages/snaplet/snaplet.config.ts @@ -51,8 +51,27 @@ export default defineConfig({ shovel: false, // @ts-ignore pgtap: false, + public: { + // activity: false, + }, }, + // TODO: figure out how much data we need to snapshot + // subset: { + // targets: [ + // // { + // // table: "public.activity", + // // orderBy: `"activity"."created_at" desc`, + // // percent: 10 + // // }, + // // { + // // table: "public.send_account_transfers", + // // orderBy: `"send_account_transfers"."block_num" desc`, + // // percent: 5 + // // } + // ], + // }, transform: { + $mode: 'auto', auth: { users: ({ row }) => { let phone: string diff --git a/supabase/database-generated.types.ts b/supabase/database-generated.types.ts index 3d39b0284..7288e40a4 100644 --- a/supabase/database-generated.types.ts +++ b/supabase/database-generated.types.ts @@ -196,6 +196,9 @@ export type Database = { created_at: string distribution_id: number fixed_value: number + multiplier_max: number + multiplier_min: number + multiplier_step: number type: Database["public"]["Enums"]["verification_type"] updated_at: string } @@ -204,6 +207,9 @@ export type Database = { created_at?: string distribution_id: number fixed_value: number + multiplier_max?: number + multiplier_min?: number + multiplier_step?: number type: Database["public"]["Enums"]["verification_type"] updated_at?: string } @@ -212,6 +218,9 @@ export type Database = { created_at?: string distribution_id?: number fixed_value?: number + multiplier_max?: number + multiplier_min?: number + multiplier_step?: number type?: Database["public"]["Enums"]["verification_type"] updated_at?: string } @@ -1106,9 +1115,12 @@ export type Database = { distribution_verifications_summary: { Row: { distribution_id: number | null + multipliers: Json | null tag_referrals: number | null tag_registrations: number | null + total_tag_referrals: number | null user_id: string | null + verification_values: Json | null } Relationships: [ { @@ -1200,7 +1212,12 @@ export type Database = { } Returns: { address: string + chain_id: number created_at: string + deleted_at: string | null + id: string + init_code: string + updated_at: string user_id: string }[] } @@ -1299,7 +1316,13 @@ export type Database = { key_type_enum: "ES256" lookup_type_enum: "sendid" | "tag" | "refcode" | "address" | "phone" tag_status: 
"pending" | "confirmed" - verification_type: "tag_registration" | "tag_referral" + verification_type: + | "tag_registration" + | "tag_referral" + | "create_passkey" + | "send_ten" + | "send_one_hundred" + | "total_tag_referrals" } CompositeTypes: { activity_feed_user: { diff --git a/supabase/migrations/20241009042110_alter_db_for_monthly_distributions.sql b/supabase/migrations/20241009042110_alter_db_for_monthly_distributions.sql new file mode 100644 index 000000000..84e91f241 --- /dev/null +++ b/supabase/migrations/20241009042110_alter_db_for_monthly_distributions.sql @@ -0,0 +1,73 @@ +ALTER TABLE public.distribution_verification_values + ADD COLUMN multiplier_min NUMERIC(10, 4) NOT NULL DEFAULT 1.0, + ADD COLUMN multiplier_max NUMERIC(10, 4) NOT NULL DEFAULT 1.0, + ADD COLUMN multiplier_step NUMERIC(10, 4) NOT NULL DEFAULT 0.0; + +ALTER TYPE public.verification_type + ADD VALUE IF NOT EXISTS 'create_passkey'; + +ALTER TYPE public.verification_type + ADD VALUE IF NOT EXISTS 'send_ten'; + +ALTER TYPE public.verification_type + ADD VALUE IF NOT EXISTS 'send_one_hundred'; + +ALTER TYPE public.verification_type + ADD VALUE IF NOT EXISTS 'total_tag_referrals'; + +DROP FUNCTION IF EXISTS public.distribution_hodler_addresses(integer); + +CREATE OR REPLACE FUNCTION public.distribution_hodler_addresses(distribution_id integer) + RETURNS SETOF send_accounts + LANGUAGE plpgsql + SECURITY DEFINER + SET search_path TO 'public' + AS $function$ +BEGIN + -- get the distribution + IF( + SELECT + 1 + FROM + distributions + WHERE + id = distribution_id + LIMIT 1) IS NULL THEN + RAISE EXCEPTION 'Distribution not found.'; + END IF; + -- return the hodler addresses that had no sells during the qualification period and have verifications + RETURN query WITH sellers AS( + -- find sellers during the qualification period + SELECT + lower(concat('0x', encode(f, 'hex')))::citext AS seller + FROM + distributions + JOIN send_token_transfers ON to_timestamp(send_token_transfers.block_time) >= distributions.qualification_start + AND to_timestamp(send_token_transfers.block_time) <= distributions.qualification_end + JOIN send_liquidity_pools ON send_liquidity_pools.address = send_token_transfers.t + WHERE + distributions.id = $1) + -- the hodler addresses that had no sells during the qualification period and have verifications + SELECT DISTINCT + send_accounts.* + FROM + distributions + JOIN distribution_verifications ON distribution_verifications.distribution_id = distributions.id + JOIN send_accounts ON send_accounts.user_id = distribution_verifications.user_id + WHERE + distributions.id = $1 + AND send_accounts.address NOT IN( + SELECT + seller + FROM + sellers); +END; +$function$; + +-- only service role can execute this function +REVOKE EXECUTE ON FUNCTION "public"."distribution_hodler_addresses"(integer) FROM PUBLIC; + +REVOKE EXECUTE ON FUNCTION "public"."distribution_hodler_addresses"(integer) FROM anon; + +REVOKE EXECUTE ON FUNCTION "public"."distribution_hodler_addresses"(integer) FROM authenticated; + diff --git a/supabase/migrations/20241015041504_app_usage_metrics_in_distribution_verification_view.sql b/supabase/migrations/20241015041504_app_usage_metrics_in_distribution_verification_view.sql new file mode 100644 index 000000000..5100960aa --- /dev/null +++ b/supabase/migrations/20241015041504_app_usage_metrics_in_distribution_verification_view.sql @@ -0,0 +1,55 @@ +CREATE OR REPLACE VIEW "public"."distribution_verifications_summary" WITH ( security_barrier +) AS +WITH base_counts AS ( + SELECT + 
distribution_id, + user_id, + type, + count(*) AS type_count, + MAX( + CASE WHEN type = 'total_tag_referrals'::public.verification_type THEN + (metadata ->> 'value')::int + ELSE + NULL + END) AS total_referrals + FROM + distribution_verifications + WHERE + user_id = auth.uid() + GROUP BY + distribution_id, + user_id, + type +) +SELECT + bc.distribution_id, + bc.user_id, + SUM( + CASE WHEN bc.type = 'tag_registration'::public.verification_type THEN + bc.type_count + ELSE + 0 + END)::bigint AS tag_registrations, + SUM( + CASE WHEN bc.type = 'tag_referral'::public.verification_type THEN + bc.type_count + ELSE + 0 + END)::bigint AS tag_referrals, + MAX(bc.total_referrals)::bigint AS total_referrals, + BOOL_OR(bc.type = 'send_ten'::public.verification_type) AS has_send_ten, + BOOL_OR(bc.type = 'send_one_hundred'::public.verification_type) AS has_send_one_hundred, + BOOL_OR(bc.type = 'create_passkey'::public.verification_type) AS has_create_passkey, + jsonb_object_agg(bc.type, jsonb_build_object('value', CASE WHEN bc.type_count = 0 THEN + 0 + ELSE + -- @todo double check that when count = 1, this value = min + LEAST(dvv.multiplier_min +(bc.type_count - 1) * dvv.multiplier_step, dvv.multiplier_max) + END, 'multiplier_min', dvv.multiplier_min, 'multiplier_max', dvv.multiplier_max, 'multiplier_step', dvv.multiplier_step)) AS multipliers +FROM + base_counts bc + JOIN distribution_verification_values dvv ON bc.distribution_id = dvv.distribution_id + AND bc.type = dvv.type +GROUP BY + bc.distribution_id, + bc.user_id diff --git a/supabase/migrations/20241016050821_insert_distribution_seven.sql b/supabase/migrations/20241016050821_insert_distribution_seven.sql new file mode 100644 index 000000000..f89a88550 --- /dev/null +++ b/supabase/migrations/20241016050821_insert_distribution_seven.sql @@ -0,0 +1,377 @@ +-- Round #7 +-- 300m $send +-- Opens Sept 01th 00:00 UTC +-- Closes Sept 30th 11:59 UTC +-- 100k minimum +-- Create the seventh distribution +INSERT INTO public.distributions( + number, + name, + description, + amount, + hodler_pool_bips, + bonus_pool_bips, + fixed_pool_bips, + qualification_start, + qualification_end, + hodler_min_balance, + claim_end, + chain_id, + snapshot_block_num) +VALUES ( + 7, + 'Distribution #7', + 'Seventh distributions of 300,000,000 SEND tokens to early hodlers', + 300000000, + -- 300,000,000 SEND + 10000, + 0, + 10000,( + SELECT + '2024-09-01T00:00:00Z'::timestamp with time zone), +( + SELECT + '2024-10-01T00:00:00Z'::timestamp with time zone - interval '1 second'), + -- 100,000 SEND + 100000,('infinity'), + 8453, -- Base chain + 20475726 -- Sept 30 11:59:59 UTC +); + +INSERT INTO public.distribution_verification_values( + type, + fixed_value, + bips_value, + distribution_id) +VALUES ( + 'tag_registration' ::public.verification_type, + 10000, + 0, +( + SELECT + id + FROM + distributions + WHERE + "number" = 7 + LIMIT 1)); + +INSERT INTO public.distribution_verification_values( + type, + fixed_value, + bips_value, + distribution_id) +VALUES ( + 'create_passkey' ::public.verification_type, + 50000, + 0, +( + SELECT + id + FROM + distributions + WHERE + "number" = 7 + LIMIT 1)); + +INSERT INTO public.distribution_verification_values( + type, + fixed_value, + bips_value, + distribution_id) +VALUES ( + 'send_ten' ::public.verification_type, + 100000, + 0, +( + SELECT + id + FROM + distributions + WHERE + "number" = 7 + LIMIT 1)); + +INSERT INTO public.distribution_verification_values( + type, + fixed_value, + bips_value, + distribution_id) +VALUES ( + 'send_one_hundred' 
::public.verification_type, + 100000, + 0, +( + SELECT + id + FROM + distributions + WHERE + "number" = 7 + LIMIT 1)); + +INSERT INTO public.distribution_verification_values( + type, + fixed_value, + bips_value, + distribution_id, + multiplier_min, + multiplier_max, + multiplier_step) +VALUES ( + 'total_tag_referrals' ::public.verification_type, + 0, + 0, +( + SELECT + id + FROM + distributions + WHERE + "number" = 7 + LIMIT 1), + 1.0, + 5.0, + 0.05); + +INSERT INTO public.distribution_verification_values( + type, + fixed_value, + bips_value, + distribution_id, + multiplier_min, + multiplier_max, + multiplier_step) +VALUES ( + 'tag_referral' ::public.verification_type, + 0, + 0, +( + SELECT + id + FROM + distributions + WHERE + "number" = 7 + LIMIT 1), + 1.5, + 5, + 0.25); + +-- Add create_passkey verifications for all existing Send accounts +INSERT INTO public.distribution_verifications( + distribution_id, + user_id, + type, + metadata, + created_at) +SELECT + ( + SELECT + id + FROM + distributions + WHERE + "number" = 7 + LIMIT 1) AS distribution_id, +sa.user_id, +'create_passkey'::public.verification_type AS type, +jsonb_build_object('account_created_at', sa.created_at) AS metadata, +LEAST(sa.created_at,( + SELECT + qualification_end + FROM distributions + WHERE + "number" = 7 LIMIT 1)) AS created_at +FROM + send_accounts sa +WHERE + sa.created_at <=( + SELECT + qualification_end + FROM + distributions + WHERE + "number" = 7 + LIMIT 1); + +-- Add existing tags to distribution_verifications +INSERT INTO public.distribution_verifications( + distribution_id, + user_id, + type, + metadata, + created_at) +SELECT + ( + SELECT + id + FROM + distributions + WHERE + "number" = 7 + LIMIT 1), +user_id, +'tag_registration'::public.verification_type, +jsonb_build_object('tag', "name"), +created_at +FROM + tags +WHERE + status = 'confirmed'::public.tag_status; + +-- Add month referrals to distribution_verifications +INSERT INTO public.distribution_verifications( + distribution_id, + user_id, + type, + metadata, + created_at) +SELECT + ( + SELECT + id + FROM + distributions + WHERE + "number" = 7 + LIMIT 1), +referrer_id, +'tag_referral'::public.verification_type, +jsonb_build_object('referred_id', referred_id, 'tag', tag), +tags.created_at +FROM + referrals + JOIN tags ON tags.name = referrals.tag +WHERE + created_at <( + SELECT + qualification_end + FROM + distributions + WHERE + "number" = 7 + LIMIT 1) + AND created_at >( + SELECT + qualification_start + FROM + distributions + WHERE + "number" = 7 + LIMIT 1); + +-- Add total_tag_referrals to distribution_verifications +INSERT INTO public.distribution_verifications( + distribution_id, + user_id, + type, + metadata, + created_at) +WITH distribution_info AS ( + SELECT + id, + qualification_end + FROM + distributions + WHERE + "number" = 7 + LIMIT 1), +total_referrals AS ( + SELECT + r.referrer_id, + COUNT(*) AS total_referrals, + MAX(t.created_at) AS last_referral_date + FROM + referrals r + JOIN tags t ON t.name = r.tag + WHERE + t.created_at <=( + SELECT + qualification_end + FROM + distribution_info) + GROUP BY + r.referrer_id +) +SELECT + ( + SELECT + id + FROM + distribution_info) AS distribution_id, + tr.referrer_id AS user_id, + 'total_tag_referrals'::public.verification_type AS type, + jsonb_build_object('value', tr.total_referrals) AS metadata, + LEAST(tr.last_referral_date,( + SELECT + qualification_end + FROM distribution_info)) AS created_at +FROM + total_referrals tr +WHERE + tr.total_referrals > 0; + +-- Add send_ten or send_one_hundred 
to distribution_verifications based on user activity +INSERT INTO public.distribution_verifications( + distribution_id, + user_id, + type, + metadata, + created_at) +WITH distribution_info AS ( + SELECT + id, + qualification_start, + qualification_end + FROM + distributions + WHERE + "number" = 7 + LIMIT 1), +transfer_counts AS ( + SELECT + from_user_id AS user_id, + COUNT(*) AS transfer_count, + MAX(created_at) AS last_transfer_date + FROM + activity + WHERE + event_name = 'send_account_transfers' + AND created_at >( + SELECT + qualification_start + FROM + distribution_info) + AND created_at <( + SELECT + qualification_end + FROM + distribution_info) + AND from_user_id IS NOT NULL -- Add this line + GROUP BY + from_user_id +) + SELECT + ( + SELECT + id + FROM + distribution_info) AS distribution_id, + tc.user_id, + CASE WHEN tc.transfer_count >= 100 THEN + 'send_one_hundred'::public.verification_type + WHEN tc.transfer_count >= 10 THEN + 'send_ten'::public.verification_type + END AS type, + jsonb_build_object('transfer_count', tc.transfer_count) AS metadata, + LEAST(tc.last_transfer_date,( + SELECT + qualification_end + FROM distribution_info)) AS created_at +FROM + transfer_counts tc +WHERE + tc.transfer_count >= 10 + AND tc.user_id IS NOT NULL; + diff --git a/supabase/migrations/20241018010646_update_activity_metrics_with_values.sql b/supabase/migrations/20241018010646_update_activity_metrics_with_values.sql new file mode 100644 index 000000000..c9b69a72c --- /dev/null +++ b/supabase/migrations/20241018010646_update_activity_metrics_with_values.sql @@ -0,0 +1,58 @@ +DROP VIEW IF EXISTS "public"."distribution_verifications_summary"; + +CREATE OR REPLACE VIEW "public"."distribution_verifications_summary" WITH ( security_barrier +) AS +WITH base_counts AS ( + SELECT + distribution_id, + user_id, + type, + count(*) AS type_count, + MAX( + CASE WHEN type = 'total_tag_referrals'::public.verification_type THEN + (metadata ->> 'value')::int + ELSE + 0 + END) AS total_referrals + FROM + distribution_verifications + WHERE + user_id = auth.uid() + GROUP BY + distribution_id, + user_id, + type +) +SELECT + dvv.distribution_id, + COALESCE(bc.user_id, auth.uid()) AS user_id, + SUM( + CASE WHEN dvv.type = 'tag_registration'::public.verification_type THEN + COALESCE(bc.type_count, 0) + ELSE + 0 + END)::bigint AS tag_registrations, + SUM( + CASE WHEN dvv.type = 'tag_referral'::public.verification_type THEN + COALESCE(bc.type_count, 0) + ELSE + 0 + END)::bigint AS tag_referrals, + MAX(COALESCE(bc.total_referrals, 0))::bigint AS total_tag_referrals, + jsonb_object_agg(dvv.type, jsonb_build_object('count', COALESCE(bc.type_count, 0), 'fixed_value', dvv.fixed_value, 'bips_value', dvv.bips_value)) AS verification_values, + -- @todo set value to null if multiplier is unused + jsonb_object_agg(dvv.type, jsonb_build_object('value', CASE WHEN COALESCE(bc.type_count, 0) = 0 THEN + 1 + WHEN dvv.type = 'total_tag_referrals'::public.verification_type THEN + LEAST(dvv.multiplier_min +(COALESCE(bc.total_referrals, 0) * dvv.multiplier_step), dvv.multiplier_max) + ELSE + LEAST(dvv.multiplier_min +((COALESCE(bc.type_count, 0) - 1) * dvv.multiplier_step), dvv.multiplier_max) + END, 'multiplier_min', dvv.multiplier_min, 'multiplier_max', dvv.multiplier_max, 'multiplier_step', dvv.multiplier_step)) AS multipliers +FROM + distribution_verification_values dvv + LEFT JOIN base_counts bc ON bc.distribution_id = dvv.distribution_id + AND bc.type = dvv.type +GROUP BY + dvv.distribution_id, + COALESCE(bc.user_id, auth.uid()); + 
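
A hedged sketch of how the app side might consume the rebuilt `distribution_verifications_summary` view. Only the view, column, and multiplier field names come from the migrations above; `fetchEffectiveMultiplier`, its `Multiplier` type, and the choice to multiply the per-type values together are illustrative assumptions, since how shares are ultimately weighted is not shown in this diff.

```typescript
import type { SupabaseClient } from '@supabase/supabase-js'

// Shape of one entry in the view's `multipliers` jsonb aggregate (per the migration).
type Multiplier = {
  value: number
  multiplier_min: number
  multiplier_max: number
  multiplier_step: number
}

// Fetch the current user's summary row for one distribution and fold the
// per-type multipliers into a single factor. The view already restricts rows
// to auth.uid(), so this must run with an authenticated client.
export async function fetchEffectiveMultiplier(
  supabase: SupabaseClient,
  distributionId: number
): Promise<number> {
  const { data, error } = await supabase
    .from('distribution_verifications_summary')
    .select('multipliers, verification_values')
    .eq('distribution_id', distributionId)
    .maybeSingle()
  if (error) throw error
  if (!data?.multipliers) return 1

  // One plausible combination rule (an assumption): multiply the per-type values.
  // Each `value` mirrors LEAST(multiplier_min + count * multiplier_step, multiplier_max)
  // as computed in the view above.
  return Object.values(data.multipliers as Record<string, Multiplier>).reduce(
    (acc, m) => acc * (m?.value ?? 1),
    1
  )
}
```
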
diff --git a/supabase/migrations/20241018010738_update_distribution_seven.sql b/supabase/migrations/20241018010738_update_distribution_seven.sql new file mode 100644 index 000000000..9e536a41f --- /dev/null +++ b/supabase/migrations/20241018010738_update_distribution_seven.sql @@ -0,0 +1,8 @@ +-- Update hodler_min_balance for distribution #7 +UPDATE + public.distributions +SET + hodler_min_balance = 300000 +WHERE + number = 7; + diff --git a/supabase/tests/distribution_hodler_addresses_test.sql b/supabase/tests/distribution_hodler_addresses_test.sql index 2dbc881da..b327ce8c1 100644 --- a/supabase/tests/distribution_hodler_addresses_test.sql +++ b/supabase/tests/distribution_hodler_addresses_test.sql @@ -1,29 +1,27 @@ -begin; - -select plan(5); - -create extension "basejump-supabase_test_helpers"; -- noqa: RF05 - -grant usage on schema tests to service_role; - -grant execute on all functions in schema tests to service_role; - +BEGIN; +SELECT + plan(5); +CREATE EXTENSION "basejump-supabase_test_helpers"; +-- noqa: RF05 +GRANT usage ON SCHEMA tests TO service_role; +GRANT EXECUTE ON ALL functions IN SCHEMA tests TO service_role; \set hodler_address '\'f39Fd6e51aad88F6F4ce6aB8827279cffFb92266\'' - -- 1. Test when provided distribution_id does not exist -select throws_ok( - $$SELECT * - FROM distribution_hodler_addresses(999999) $$, 'Distribution not found.', - 'Should raise exception if distribution does not exist' -); - -select tests.create_supabase_user('hodler'); - +SELECT + throws_ok($$ + SELECT + * FROM distribution_hodler_addresses(999999) $$, 'Distribution not found.', 'Should raise exception if distribution does not exist'); +SELECT + tests.create_supabase_user('hodler'); -- create a liquidity pool -insert into send_liquidity_pools (address, chain_id) -values (decode('a1b2457c0b627f97f6cc892946a382451e979014', 'hex'), 8453); - -insert into distributions ( +INSERT INTO send_liquidity_pools( + address, + chain_id) +VALUES ( + decode( + 'a1b2457c0b627f97f6cc892946a382451e979014', 'hex'), + 8453); +INSERT INTO distributions( number, name, description, @@ -35,9 +33,8 @@ insert into distributions ( qualification_end, hodler_min_balance, claim_end, - chain_id -) -values ( + chain_id) +VALUES ( 123, 'distribution #123', 'Description', @@ -49,11 +46,9 @@ values ( '2023-01-31T00:00:00.000Z', 1e6::bigint, '2023-02-28T00:00:00.000Z', - 8453 -); - + 8453); -- 2. 
Test when there are eligible hodler addresses -insert into public.send_token_transfers ( +INSERT INTO public.send_token_transfers( "f", "t", "v", @@ -66,50 +61,66 @@ insert into public.send_token_transfers ( chain_id, log_addr, tx_hash, - abi_idx -) -values ( - (select address from send_liquidity_pools limit 1), + abi_idx) +VALUES (( + SELECT + address + FROM + send_liquidity_pools + LIMIT 1), decode(:hodler_address, 'hex'), -- noqa: LT01 1000000, 'send_token_transfers', 'send_token_transfers', 18181005, - extract(epoch from '2023-01-21 01:32:59.000000 +00:00'::timestamp), + extract(epoch FROM '2023-01-21 01:32:59.000000 +00:00'::timestamp), 1, 158, 8453, '\xf39Fd6e51aad88F6F4ce6aB8827279cffFb92266', '\x1234', - 0 -); - -insert into chain_addresses (address, user_id) -values ( - concat('0x',:hodler_address), -- noqa: LT01 - tests.get_supabase_uid('hodler') -); - -insert into distribution_verifications (user_id, distribution_id, type) -values ( - tests.get_supabase_uid('hodler'), - (select id from distributions where number = 123), - 'tag_registration' + 0); +INSERT INTO send_accounts( + address, + user_id, + chain_id, + init_code) +VALUES ( + concat( + '0x', :hodler_address), -- noqa: LT01 + tests.get_supabase_uid( + 'hodler'), + '8453', + CONCAT( + '\\x00', upper( + CONCAT( + md5( + random() ::text), md5( + random() ::text), md5( + random() ::text), md5( + random() ::text)))) ::bytea); +INSERT INTO distribution_verifications( + user_id, + distribution_id, + type) +VALUES ( + tests.get_supabase_uid( + 'hodler'), +( + SELECT + id + FROM + distributions + WHERE + number = 123), 'tag_registration'); +SET ROLE TO service_role; +SELECT results_eq( + $$SELECT address, user_id FROM distribution_hodler_addresses((SELECT id FROM distributions WHERE number = 123))$$, + $$SELECT address, user_id FROM send_accounts WHERE user_id = tests.get_supabase_uid('hodler')$$, + 'Should return the eligible hodler addresses' ); - -set role to service_role; - -select results_eq($$SELECT - address, - user_id - FROM distribution_hodler_addresses((select id from distributions where number = 123)) $$, $$ - SELECT address, user_id from chain_addresses - WHERE user_id = tests.get_supabase_uid('hodler') - $$, 'Should return the eligible hodler addresses'); - -- 3. 
Test paper hands are excluded - -insert into public.send_token_transfers ( +INSERT INTO public.send_token_transfers( "f", "t", "v", @@ -122,53 +133,36 @@ insert into public.send_token_transfers ( chain_id, log_addr, tx_hash, - abi_idx -) -values ( - decode(:hodler_address, 'hex'), -- noqa: LT01 - (select address from send_liquidity_pools limit 1), + abi_idx) +VALUES ( + decode( + :hodler_address, 'hex'), -- noqa: LT01 +( + SELECT + address + FROM send_liquidity_pools LIMIT 1), 64509, 'send_token_transfers', 'send_token_transfers', 18180534, - extract(epoch from '2023-01-20 23:58:35.000000 +00:00'::timestamp), + extract(epoch FROM '2023-01-20 23:58:35.000000 +00:00'::timestamp), 1, 182, 8453, '\xf39Fd6e51aad88F6F4ce6aB8827279cffFb92266', '\x1234', - 0 -); - -select is_empty($$SELECT * - FROM distribution_hodler_addresses((select id from distributions where number = 123)) $$, --- empty result -'Should return empty result when the distribution exists but user has sold'); - -select tests.authenticate_as('hodler'); - + 0); +SELECT is_empty($$SELECT * FROM distribution_hodler_addresses((SELECT id FROM distributions WHERE number = 123))$$, 'Should return empty result when the distribution exists but user has sold'); +SELECT + tests.authenticate_as('hodler'); -- verify only service_role can call this function -select throws_ok( - $$ - SELECT * - FROM distribution_hodler_addresses((select id from distributions where number = 123)) - $$, - 'permission denied for function distribution_hodler_addresses', - 'Should raise exception if user is not service_role' -); - -select tests.clear_authentication(); - -select throws_ok( - $$ - SELECT * - FROM distribution_hodler_addresses((select id from distributions where number = 123)) - $$, - 'permission denied for function distribution_hodler_addresses', - 'Should raise exception if user is not authenticated' -); - - -SELECT finish(); +SELECT + throws_ok($$SELECT * FROM distribution_hodler_addresses((SELECT id FROM distributions WHERE number = 123)) $$, 'permission denied for function distribution_hodler_addresses', 'Should raise exception if user is not service_role'); +SELECT + tests.clear_authentication(); +SELECT + throws_ok($$SELECT * FROM distribution_hodler_addresses((SELECT id FROM distributions WHERE number = 123)) $$, 'permission denied for function distribution_hodler_addresses', 'Should raise exception if user is not authenticated'); +SELECT + finish(); +ROLLBACK; -rollback;
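
Because the migration revokes EXECUTE from `anon` and `authenticated`, `distribution_hodler_addresses` is only reachable with the service-role key, which is exactly what the test above asserts. A minimal, hypothetical caller for a server-side job: the function name, argument name, and `send_accounts` return shape come from the migration, while the client setup and environment variable names are assumptions.

```typescript
import { createClient } from '@supabase/supabase-js'

// Server-side only: the service-role key is the only role allowed to execute
// distribution_hodler_addresses after the REVOKEs in the migration.
const admin = createClient(
  process.env.SUPABASE_URL as string, // assumed env var names; never ship the service role to clients
  process.env.SUPABASE_SERVICE_ROLE as string
)

export async function getHodlerAddresses(distributionId: number) {
  // Equivalent to: SELECT * FROM distribution_hodler_addresses($1);
  // returns send_accounts rows (address, user_id, chain_id, ...) per the new signature.
  const { data, error } = await admin.rpc('distribution_hodler_addresses', {
    distribution_id: distributionId,
  })
  if (error) throw error
  return data
}
```
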