From cf58bf0583415b61d04c5249215af37316ed0a87 Mon Sep 17 00:00:00 2001 From: youngkidwarrior Date: Wed, 28 Aug 2024 17:57:55 -0700 Subject: [PATCH 01/58] Create transfer tempora workflow --- apps/workers/src/client.ts | 86 ++++++++++++------- apps/workers/src/worker.ts | 28 ++++-- package.json | 1 + packages/workflows/src/all-activities.ts | 3 +- packages/workflows/src/all-workflows.ts | 3 +- .../src/distribution-workflow/activities.ts | 2 +- .../src/distribution-workflow/workflow.ts | 4 +- .../src/transfer-workflow/activities.ts | 82 ++++++++++++++++++ .../src/transfer-workflow/supabase.ts | 10 +++ .../workflows/src/transfer-workflow/wagmi.ts | 58 +++++++++++++ .../src/transfer-workflow/workflow.ts | 47 ++++++++++ 11 files changed, 279 insertions(+), 45 deletions(-) create mode 100644 packages/workflows/src/transfer-workflow/activities.ts create mode 100644 packages/workflows/src/transfer-workflow/supabase.ts create mode 100644 packages/workflows/src/transfer-workflow/wagmi.ts create mode 100644 packages/workflows/src/transfer-workflow/workflow.ts diff --git a/apps/workers/src/client.ts b/apps/workers/src/client.ts index fdec13f87..f3eb01622 100644 --- a/apps/workers/src/client.ts +++ b/apps/workers/src/client.ts @@ -1,43 +1,71 @@ import { Connection, Client } from '@temporalio/client' -import { - // WorkflowA, WorkflowB, +import { TransferWorkflow } from '@my/workflows/workflows' +import type { UserOperation } from 'permissionless' - DistributionsWorkflow, -} from '@my/workflows/all-workflows' +// async function runDistributionWorkflow() { +// const connection = await Connection.connect() // Connect to localhost with default ConnectionOptions. +// // In production, pass options to the Connection constructor to configure TLS and other settings. +// // This is optional but we leave this here to remind you there is a gRPC connection being established. 
-export async function runWorkflow(): Promise { - const connection = await Connection.connect() // Connect to localhost with default ConnectionOptions. - // In production, pass options to the Connection constructor to configure TLS and other settings. - // This is optional but we leave this here to remind you there is a gRPC connection being established. +// const client = new Client({ +// connection, +// // In production you will likely specify `namespace` here; it is 'default' if omitted +// }) +// // Invoke the `DistributionWorkflow` Workflow, only resolved when the workflow completes +// const handle = await client.workflow.start(DistributionsWorkflow, { +// taskQueue: 'dev', +// workflowId: 'distributions-workflow', // TODO: remember to replace this with a meaningful business ID +// args: [], // type inference works! args: [name: string] +// }) +// console.log('Started handle', handle.workflowId) +// // optional: wait for client result +// const result = await handle.result() + +// return result +// } + +async function runTransferWorkflow(userOp: UserOperation<'v0.7'>) { + const connection = await Connection.connect() const client = new Client({ connection, - // In production you will likely specify `namespace` here; it is 'default' if omitted }) - // Invoke the `DistributionWorkflow` Workflow, only resolved when the workflow completes - const handle = await client.workflow.start(DistributionsWorkflow, { - taskQueue: 'dev', - workflowId: 'distributions-workflow', // TODO: remember to replace this with a meaningful business ID - args: [], // type inference works! 
args: [name: string] + const handle = await client.workflow.start(TransferWorkflow, { + taskQueue: 'monorepo', + workflowId: `transfers-workflow-${userOp.sender}-${userOp.nonce.toString()}`, // TODO: remember to replace this with a meaningful business ID + args: [userOp], }) - console.log('Started handle', handle) - // // Invoke the `WorkflowA` Workflow, only resolved when the workflow completes - // const result = await client.workflow.execute(WorkflowA, { - // taskQueue: 'monorepo', - // workflowId: `workflow-a-${Date.now()}`, // TODO: remember to replace this with a meaningful business ID - // args: ['Temporal'], // type inference works! args: [name: string] - // }) - // // Starts the `WorkflowB` Workflow, don't wait for it to complete - // await client.workflow.start(WorkflowB, { - // taskQueue: 'monorepo', - // workflowId: `workflow-b-${Date.now()}`, // TODO: remember to replace this with a meaningful business ID - // }) - // console.log(result) // // [api-server] A: Hello, Temporal!, B: Hello, Temporal! 
- // return result + console.log('Started handle', handle.workflowId) + // optional: wait for client result + const result = await handle.result() + console.log('result: ', result) + + return result } -runWorkflow().catch((err) => { +// runDistributionWorkflow().catch((err) => { +// console.error(err) +// process.exit(1) +// }) + +runTransferWorkflow({ + callData: + '0x34fcd5be000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000020000000000000000000000000833589fcd6edb6e08f4c7c32d4f71b54bda02913000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000600000000000000000000000000000000000000000000000000000000000000044a9059cbb000000000000000000000000713ddc85a615beaec95333736d80c406732f6d7600000000000000000000000000000000000000000000000000000000000f424000000000000000000000000000000000000000000000000000000000', + callGasLimit: 100000n, + maxFeePerGas: 1000000110n, + maxPriorityFeePerGas: 1000000000n, + nonce: 1n, + paymaster: '0x592e1224D203Be4214B15e205F6081FbbaCFcD2D', + paymasterData: '0x', + paymasterPostOpGasLimit: 100000n, + paymasterVerificationGasLimit: 150000n, + preVerificationGas: 70000n, + sender: '0x713ddC85a615BEaec95333736D80C406732f6d76', + signature: + 
'0x01000066ce986500000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000000c000000000000000000000000000000000000000000000000000000000000001200000000000000000000000000000000000000000000000000000000000000017000000000000000000000000000000000000000000000000000000000000000193e778a488b82629b608dabe2a0979742f065662e670ca4b3e365162bff5457e6fd8931f1d72ab0ba388a92725cf7dba903799639c4cffb45bc232ef9dcb1da2000000000000000000000000000000000000000000000000000000000000002549960de5880e8c687434170f6476605b8fe4aeb9a28632c7995cf3ba831d97631d00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000008f7b2274797065223a22776562617574686e2e676574222c226368616c6c656e6765223a22415141415a7336595a66796251683649754d4d6a4e774f35657171626f3930573058594f644b714d33345742314e4c35484c4250222c226f726967696e223a22687474703a2f2f6c6f63616c686f73743a33303030222c2263726f73734f726967696e223a66616c73657d0000000000000000000000000000000000', + verificationGasLimit: 550000n, +}).catch((err) => { console.error(err) process.exit(1) }) diff --git a/apps/workers/src/worker.ts b/apps/workers/src/worker.ts index a8abc47ce..e9daa07f5 100644 --- a/apps/workers/src/worker.ts +++ b/apps/workers/src/worker.ts @@ -1,5 +1,8 @@ import { Worker } from '@temporalio/worker' -import { createActivities } from '@my/workflows/all-activities' +import { + createTransferActivities, + createDistributionActivities, +} from '@my/workflows/all-activities' import { URL, fileURLToPath } from 'node:url' import path from 'node:path' @@ -8,20 +11,27 @@ async function run() { `../../../packages/workflows/src/all-workflows${path.extname(import.meta.url)}`, import.meta.url ) - // Step 1: Register Workflows and Activities with the Worker and connect to // the Temporal server. 
- const worker = await Worker.create({ + const transferWorker = await Worker.create({ workflowsPath: fileURLToPath(workflowsPathUrl), - activities: createActivities( - process.env.NEXT_PUBLIC_SUPABASE_URL, - process.env.SUPABASE_SERVICE_ROLE - ), - taskQueue: 'dev', + activities: { + ...createTransferActivities( + process.env.NEXT_PUBLIC_SUPABASE_URL, + process.env.SUPABASE_SERVICE_ROLE + ), + // ...createDistributionActivities( + // process.env.NEXT_PUBLIC_SUPABASE_URL, + // process.env.SUPABASE_SERVICE_ROLE + // ), + }, + namespace: 'default', + taskQueue: 'monorepo', bundlerOptions: { ignoreModules: ['@supabase/supabase-js'], }, }) + // Worker connects to localhost by default and uses console.error for logging. // Customize the Worker by passing more options to create(): // https://typescript.temporal.io/api/classes/worker.Worker @@ -30,7 +40,7 @@ async function run() { // https://github.com/temporalio/samples-typescript/tree/main/hello-world-mtls // Step 2: Start accepting tasks on the `monorepo` queue - await worker.run() + await transferWorker.run() // You may create multiple Workers in a single process in order to poll on multiple task queues. 
} diff --git a/package.json b/package.json index 3ae346903..a8bc7bc9b 100644 --- a/package.json +++ b/package.json @@ -33,6 +33,7 @@ "playwright": "yarn workspace @my/playwright", "distributor": "yarn workspace distributor", "snaplet": "yarn workspace @my/snaplet", + "workers": "yarn workspace workers", "shovel": "yarn workspace @my/shovel", "clean": "yarn workspaces foreach --all -pi run clean" }, diff --git a/packages/workflows/src/all-activities.ts b/packages/workflows/src/all-activities.ts index d11c74d46..efba65ba6 100644 --- a/packages/workflows/src/all-activities.ts +++ b/packages/workflows/src/all-activities.ts @@ -1,3 +1,2 @@ -// export * from './workflowA/activities/activitiesA' -// export * from './workflowA/activities/activitiesB' +export * from './transfer-workflow/activities' export * from './distribution-workflow/activities' diff --git a/packages/workflows/src/all-workflows.ts b/packages/workflows/src/all-workflows.ts index 381c6989a..2f3b1486c 100644 --- a/packages/workflows/src/all-workflows.ts +++ b/packages/workflows/src/all-workflows.ts @@ -1,3 +1,2 @@ -// export * from './workflowA/workflow' -// export * from './workflowB/workflow' +export * from './transfer-workflow/workflow' export * from './distribution-workflow/workflow' diff --git a/packages/workflows/src/distribution-workflow/activities.ts b/packages/workflows/src/distribution-workflow/activities.ts index 5b984cb61..1db8f8a34 100644 --- a/packages/workflows/src/distribution-workflow/activities.ts +++ b/packages/workflows/src/distribution-workflow/activities.ts @@ -19,7 +19,7 @@ const inBatches = (array: T[], batchSize = Math.max(8, cpuCount - 1)) => { ) } -export function createActivities(supabaseUrl: string, supabaseKey: string) { +export function createDistributionActivities(supabaseUrl: string, supabaseKey: string) { globalThis.process = globalThis.process || {} globalThis.process.env.SUPABASE_URL = supabaseUrl // HACK: set the supabase url in the environment 
globalThis.process.env.SUPABASE_SERVICE_ROLE = supabaseKey // HACK: set the supabase key in the environment diff --git a/packages/workflows/src/distribution-workflow/workflow.ts b/packages/workflows/src/distribution-workflow/workflow.ts index 64f028d3a..17cf82ddb 100644 --- a/packages/workflows/src/distribution-workflow/workflow.ts +++ b/packages/workflows/src/distribution-workflow/workflow.ts @@ -1,12 +1,12 @@ // workflows.ts import { proxyActivities, log, ApplicationFailure } from '@temporalio/workflow' -import type { createActivities } from './activities' +import type { createDistributionActivities } from './activities' const { calculateDistributionSharesActivity, fetchDistributionActivity, fetchAllOpenDistributionsActivity, -} = proxyActivities>({ +} = proxyActivities>({ startToCloseTimeout: '30 seconds', }) diff --git a/packages/workflows/src/transfer-workflow/activities.ts b/packages/workflows/src/transfer-workflow/activities.ts new file mode 100644 index 000000000..cd662fd78 --- /dev/null +++ b/packages/workflows/src/transfer-workflow/activities.ts @@ -0,0 +1,82 @@ +import { log, ApplicationFailure } from '@temporalio/activity' +import { fetchTransfer } from './supabase' +import { + simulateUserOperation, + sendUserOperation, + waitForTransactionReceipt, + generateTransferUserOp, +} from './wagmi' +import type { TransferWorkflowArgs } from './workflow' +import { isAddress, type Hex } from 'viem' +export function createTransferActivities(supabaseUrl: string, supabaseKey: string) { + globalThis.process = globalThis.process || {} + globalThis.process.env.SUPABASE_URL = supabaseUrl // HACK: set the supabase url in the environment + globalThis.process.env.SUPABASE_SERVICE_ROLE = supabaseKey // HACK: set the supabase key in the environment + + return { + sendUserOpActivity, + fetchTransferActivity, + waitForTransactionReceiptActivity, + } +} + +async function sendUserOpActivity(args: TransferWorkflowArgs) { + const { sender, to, token, amount, nonce } = args + 
const parsedAmount = BigInt(amount) + const parsedNonce = BigInt(nonce) + if (!!sender && !isAddress(sender)) + throw ApplicationFailure.nonRetryable('Invalid send account address') + if (!!to && !isAddress(to)) throw ApplicationFailure.nonRetryable('Invalid to address') + if (!token || !isAddress(token)) throw ApplicationFailure.nonRetryable('Invalid token address') + if (typeof parsedAmount !== 'bigint' || parsedAmount <= 0n) + throw ApplicationFailure.nonRetryable('Invalid amount') + if (typeof parsedNonce !== 'bigint' || parsedNonce < 0n) + throw ApplicationFailure.nonRetryable('Invalid nonce') + try { + const userOp = await generateTransferUserOp({ + sender, + to, + token, + amount: parsedAmount, + nonce: parsedNonce, + }) + userOp.signature = args.signature + console.log('userOp: ', userOp) + + const hash = await sendUserOperation(userOp) + console.log('hash: ', hash) + log.info('sendUserOperationActivity', { hash, userOp }) + return hash + } catch (error) { + throw ApplicationFailure.nonRetryable('Error sending user operation', error.code, error) + } +} + +async function waitForTransactionReceiptActivity(hash: `0x${string}`) { + try { + const receipt = await waitForTransactionReceipt(hash) + if (!receipt.success) + throw ApplicationFailure.nonRetryable('Tx failed', receipt.sender, receipt.userOpHash) + log.info('waitForTransactionReceiptActivity', { receipt }) + return receipt + } catch (error) { + throw ApplicationFailure.nonRetryable('Error waiting for tx receipt', error.code, error) + } +} + +async function fetchTransferActivity(hash: `0x${string}`) { + const { data: transfer, error } = await fetchTransfer(hash) + if (error) { + if (error.code === 'PGRST116') { + log.info('fetchTransferActivity', { error }) + return null + } + throw ApplicationFailure.nonRetryable( + 'Error fetching transfer from activity column.', + error.code, + error + ) + } + log.info('fetchTransferActivity', { transfer }) + return transfer +} diff --git 
a/packages/workflows/src/transfer-workflow/supabase.ts b/packages/workflows/src/transfer-workflow/supabase.ts new file mode 100644 index 000000000..8f5bb2c6a --- /dev/null +++ b/packages/workflows/src/transfer-workflow/supabase.ts @@ -0,0 +1,10 @@ +import { hexToBytea } from 'app/utils/hexToBytea' +import { supabaseAdmin } from 'app/utils/supabase/admin' + +export async function fetchTransfer(hash: `0x${string}`) { + return await supabaseAdmin + .from('send_account_transfers') + .select('*', { count: 'exact', head: true }) + .eq('tx_hash', hexToBytea(hash)) + .single() +} diff --git a/packages/workflows/src/transfer-workflow/wagmi.ts b/packages/workflows/src/transfer-workflow/wagmi.ts new file mode 100644 index 000000000..c009fde4b --- /dev/null +++ b/packages/workflows/src/transfer-workflow/wagmi.ts @@ -0,0 +1,58 @@ +import { log, ApplicationFailure } from '@temporalio/activity' +import type { UserOperation } from 'permissionless' +import { + baseMainnetBundlerClient, + baseMainnetClient, + sendAccountAbi, + tokenPaymasterAddress, + entryPointAddress, +} from '@my/wagmi' +import { encodeFunctionData, erc20Abi, isAddress, type Hex } from 'viem' + +/** + * default user op with preset gas values that work will probably need to move this to the database. + * Paymaster post-op gas limit could be set dynamically based on the status of the paymaster if the price cache is + * outdated, otherwise, a lower post op gas limit around only 50K is needed. In case of needing to update cached price, + * the post op uses around 75K gas. 
+ * + * - [example no update price](https://www.tdly.co/shared/simulation/a0122fae-a88c-47cd-901c-02de87901b45) + * - [Failed due to OOG](https://www.tdly.co/shared/simulation/c259922c-8248-4b43-b340-6ebbfc69bcea) + */ +export const defaultUserOp: Pick< + UserOperation<'v0.7'>, + | 'callGasLimit' + | 'verificationGasLimit' + | 'preVerificationGas' + | 'maxFeePerGas' + | 'maxPriorityFeePerGas' + | 'paymasterVerificationGasLimit' + | 'paymasterPostOpGasLimit' +> = { + callGasLimit: 100000n, + verificationGasLimit: 550000n, + preVerificationGas: 70000n, + maxFeePerGas: 10000000n, + maxPriorityFeePerGas: 10000000n, + paymasterVerificationGasLimit: 150000n, + paymasterPostOpGasLimit: 100000n, +} + +export async function simulateUserOperation(userOp: UserOperation<'v0.7'>) { + return await baseMainnetClient.call({ + account: entryPointAddress[baseMainnetClient.chain.id], + to: userOp.sender, + data: userOp.callData, + }) +} + +export async function sendUserOperation(userOp: UserOperation<'v0.7'>) { + const hash = await baseMainnetBundlerClient.sendUserOperation({ + userOperation: userOp, + }) + return hash +} + +export async function waitForTransactionReceipt(hash: `0x${string}`) { + const receipt = await baseMainnetBundlerClient.waitForUserOperationReceipt({ hash }) + return receipt +} diff --git a/packages/workflows/src/transfer-workflow/workflow.ts b/packages/workflows/src/transfer-workflow/workflow.ts new file mode 100644 index 000000000..caab455e8 --- /dev/null +++ b/packages/workflows/src/transfer-workflow/workflow.ts @@ -0,0 +1,47 @@ +import { proxyActivities, ApplicationFailure, defineQuery, setHandler } from '@temporalio/workflow' +import type { createTransferActivities } from './activities' +import type { Hex } from 'viem' + +const { sendUserOpActivity, waitForTransactionReceiptActivity, fetchTransferActivity } = + proxyActivities>({ + startToCloseTimeout: '30 seconds', + }) + +type simulating = { status: 'simulating'; data: { userOp: UserOperation<'v0.7'> } } 
+type sending = { status: 'sending'; data: { userOp: UserOperation<'v0.7'> } } +type waiting = { status: 'waiting'; data: { hash: string; userOp: UserOperation<'v0.7'> } } +type indexing = { + status: 'indexing' + data: { receipt: GetUserOperationReceiptReturnType; userOp: UserOperation<'v0.7'> } +} +type confirmed = { + status: 'confirmed' + data: { receipt: GetUserOperationReceiptReturnType; userOp: UserOperation<'v0.7'> } +} + +export type transferState = simulating | sending | waiting | indexing | confirmed + +export const getTransferStateQuery = defineQuery('getTransferState') + +export type TransferWorkflowArgs = { + sender: Hex + to: Hex + token?: Hex + amount: string + nonce: string + signature: Hex +} + +export async function TransferWorkflow(args: TransferWorkflowArgs) { + const hash = await sendUserOpActivity(args) + if (!hash) throw ApplicationFailure.nonRetryable('No hash returned from sendUserOperation') + setHandler(getTransferStateQuery, () => ({ status: 'waiting', data: { userOp, hash } })) + const receipt = await waitForTransactionReceiptActivity(hash) + if (!receipt) + throw ApplicationFailure.nonRetryable('No receipt returned from waitForTransactionReceipt') + setHandler(getTransferStateQuery, () => ({ status: 'indexing', data: { userOp, receipt } })) + const transfer = await fetchTransferActivity(receipt.userOpHash) + if (!transfer) throw ApplicationFailure.retryable('Transfer not yet indexed in db') + setHandler(getTransferStateQuery, () => ({ status: 'confirmed', data: { userOp, receipt } })) + return transfer +} From 88c3222adc76b365c35b9cb2a0bc425d365a2b10 Mon Sep 17 00:00:00 2001 From: youngkidwarrior Date: Wed, 28 Aug 2024 17:59:18 -0700 Subject: [PATCH 02/58] New trpc route that starts transfer workflow --- packages/api/package.json | 2 + packages/api/src/routers/_app.ts | 2 + packages/api/src/routers/transfer.ts | 104 +++++++++++++++++++++++++++ packages/temporal/package.json | 38 ++++++++++ tilt/apps.Tiltfile | 1 + tilt/deps.Tiltfile | 
11 +++ yarn.lock | 2 + 7 files changed, 160 insertions(+) create mode 100644 packages/api/src/routers/transfer.ts create mode 100644 packages/temporal/package.json diff --git a/packages/api/package.json b/packages/api/package.json index 078a480df..48699556b 100644 --- a/packages/api/package.json +++ b/packages/api/package.json @@ -5,7 +5,9 @@ "private": true, "dependencies": { "@my/supabase": "workspace:*", + "@my/temporal": "workspace:*", "@my/wagmi": "workspace:*", + "@my/workflows": "workspace:*", "@supabase/supabase-js": "2.44.2", "@tanstack/react-query": "^5.18.1", "@trpc/client": "11.0.0-next-beta.264", diff --git a/packages/api/src/routers/_app.ts b/packages/api/src/routers/_app.ts index b7f8b326f..311fe3629 100644 --- a/packages/api/src/routers/_app.ts +++ b/packages/api/src/routers/_app.ts @@ -6,6 +6,7 @@ import { distributionRouter } from './distribution' import { tagRouter } from './tag' import { secretShopRouter } from './secretShop' import { sendAccountRouter } from './sendAccount' +import { transferRouter } from './transfer' import { accountRecoveryRouter } from './account-recovery/router' import { referralsRouter } from './referrals' @@ -18,6 +19,7 @@ export const appRouter = createTRPCRouter({ secretShop: secretShopRouter, sendAccount: sendAccountRouter, referrals: referralsRouter, + transfer: transferRouter, }) export type AppRouter = typeof appRouter diff --git a/packages/api/src/routers/transfer.ts b/packages/api/src/routers/transfer.ts new file mode 100644 index 000000000..c616a2df2 --- /dev/null +++ b/packages/api/src/routers/transfer.ts @@ -0,0 +1,104 @@ +import { TRPCError } from '@trpc/server' +import debug from 'debug' +import { z } from 'zod' +import { createTRPCRouter, protectedProcedure } from '../trpc' +import { client } from '@my/temporal/client' +import type { UserOperation } from 'permissionless' +import { TransferWorkflow, type transferState } from '@my/workflows' +import type { coinsDict } from 'app/data/coins' + +const log = 
debug('api:transfer') + +export const transferRouter = createTRPCRouter({ + withUserOp: protectedProcedure + .input( + z.object({ + userOp: z.custom>(), + token: z.custom(), //@ todo: might be safer to decode the token from the userOp, to ensure we don't apply the wrong token + }) + ) + .mutation(async ({ input: { token, userOp } }) => { + const { sender, nonce } = userOp + try { + const handle = await client.workflow.start(TransferWorkflow, { + taskQueue: 'monorepo', + workflowId: `transfer-workflow-${token}-${sender}-${nonce}`, + args: [userOp], + }) + log('Started transfer handle', handle.workflowId) + // optional: wait for client result + return await handle.workflowId + } catch (error) { + throw new TRPCError({ + code: 'INTERNAL_SERVER_ERROR', + message: error instanceof Error ? error.message : 'Unknown error', + }) + } + }), + getState: protectedProcedure.input(z.string()).query(async ({ input: workflowId }) => { + try { + const handle = await client.workflow.getHandle(workflowId) + const state = await handle.query('getTransferState') + return state + } catch (error) { + throw new TRPCError({ + code: 'INTERNAL_SERVER_ERROR', + message: error instanceof Error ? error.message : 'Unknown error', + }) + } + }), + getPending: protectedProcedure + .input( + z.object({ + token: z.custom(), + sender: z.string(), + }) + ) + .query(async ({ input: { token, sender } }) => { + try { + const states: transferState[] = [] + const workflows = await client.workflow.list({ + query: `ExecutionStatus = "Running" AND WorkflowId BETWEEN "transfer-workflow-${token}-${sender}-" AND "transfer-workflow-${token}-${sender}-~"`, + }) + for await (const workflow of workflows) { + const handle = await client.workflow.getHandle(workflow.workflowId) + + const state = await handle.query('getTransferState') + states.push(state) + } + return states + } catch (error) { + throw new TRPCError({ + code: 'INTERNAL_SERVER_ERROR', + message: error instanceof Error ? 
error.message : 'Unknown error', + }) + } + }), + getFailed: protectedProcedure + .input( + z.object({ + token: z.custom(), + sender: z.string(), + }) + ) + .query(async ({ input: { token, sender } }) => { + try { + const states: transferState[] = [] + const workflows = await client.workflow.list({ + query: `ExecutionStatus = "Failed" AND WorkflowId BETWEEN "transfer-workflow-${token}-${sender}-" AND "transfer-workflow-${token}-${sender}-~"`, + }) + for await (const workflow of workflows) { + const handle = await client.workflow.getHandle(workflow.workflowId) + const state = await handle.query('getTransferState') + states.push(state) + } + return states + } catch (error) { + throw new TRPCError({ + code: 'INTERNAL_SERVER_ERROR', + message: error instanceof Error ? error.message : 'Unknown error', + cause: error, + }) + } + }), +}) diff --git a/packages/temporal/package.json b/packages/temporal/package.json new file mode 100644 index 000000000..d47c437c3 --- /dev/null +++ b/packages/temporal/package.json @@ -0,0 +1,38 @@ +{ + "version": "0.0.0", + "name": "@my/temporal", + "type": "module", + "files": [ + "package.json", + "src" + ], + "exports": { + "./payload-converter": { + "types": "./src/payload-converter.ts", + "require": "./build/payload-converter.cjs", + "default": "./src/payload-converter.ts" + }, + "./client": { + "types": "./src/client.ts", + "default": "./src/client.ts" + } + }, + "scripts": { + "lint": "tsc", + "server": "temporal server start-dev --db-filename ./var/temporal.db", + "build": "esbuild --bundle --outfile=build/payload-converter.cjs --target=esnext --platform=node --external:@temporalio/common --external:@bufbuild/protobuf src/payload-converter.ts" + }, + "dependencies": { + "@temporalio/client": "^1.10.1", + "@temporalio/common": "^1.11.1", + "superjson": "^2.2.1" + }, + "peerDependencies": { + "typescript": "^5.5.3" + }, + "devDependencies": { + "esbuild": "^0.23.1", + "temporal": "^0.7.1", + "typescript": "^5.5.3" + } +} diff --git 
a/tilt/apps.Tiltfile b/tilt/apps.Tiltfile index 054aae685..6ef49b0c5 100644 --- a/tilt/apps.Tiltfile +++ b/tilt/apps.Tiltfile @@ -115,6 +115,7 @@ local_resource( "supabase", "supabase:generate", "wagmi:generate", + "temporal:build", "temporal", ], serve_cmd = "yarn workspace workers start", diff --git a/tilt/deps.Tiltfile b/tilt/deps.Tiltfile index cb48f6ccb..42d24a05f 100644 --- a/tilt/deps.Tiltfile +++ b/tilt/deps.Tiltfile @@ -247,6 +247,17 @@ local_resource( ), ) +local_resource( + name = "temporal:build", + allow_parallel = True, + cmd = "yarn workspace @my/temporal build", + labels = labels, + resource_deps = [ + "yarn:install", + ], + deps = ui_files, +) + local_resource( name = "shovel:generate-config", allow_parallel = True, diff --git a/yarn.lock b/yarn.lock index f120643cc..41208f9f9 100644 --- a/yarn.lock +++ b/yarn.lock @@ -6326,7 +6326,9 @@ __metadata: resolution: "@my/api@workspace:packages/api" dependencies: "@my/supabase": "workspace:*" + "@my/temporal": "workspace:*" "@my/wagmi": "workspace:*" + "@my/workflows": "workspace:*" "@supabase/supabase-js": "npm:2.44.2" "@tanstack/react-query": "npm:^5.18.1" "@trpc/client": "npm:11.0.0-next-beta.264" From c8eb1d051fb5b3bcc3ff555e6e992dea9cccc3ff Mon Sep 17 00:00:00 2001 From: youngkidwarrior Date: Mon, 2 Sep 2024 16:17:18 -0700 Subject: [PATCH 03/58] Encapsulate temporal into a package --- apps/workers/src/worker.ts | 6 +--- .../src/transfer-workflow/supabase.ts | 18 ++++++++-- .../src/transfer-workflow/workflow.ts | 33 +++++++++---------- 3 files changed, 33 insertions(+), 24 deletions(-) diff --git a/apps/workers/src/worker.ts b/apps/workers/src/worker.ts index e9daa07f5..71ec4354b 100644 --- a/apps/workers/src/worker.ts +++ b/apps/workers/src/worker.ts @@ -7,10 +7,6 @@ import { URL, fileURLToPath } from 'node:url' import path from 'node:path' async function run() { - const workflowsPathUrl = new URL( - `../../../packages/workflows/src/all-workflows${path.extname(import.meta.url)}`, - import.meta.url - 
) // Step 1: Register Workflows and Activities with the Worker and connect to // the Temporal server. const transferWorker = await Worker.create({ @@ -40,7 +36,7 @@ async function run() { // https://github.com/temporalio/samples-typescript/tree/main/hello-world-mtls // Step 2: Start accepting tasks on the `monorepo` queue - await transferWorker.run() + await worker.run() // You may create multiple Workers in a single process in order to poll on multiple task queues. } diff --git a/packages/workflows/src/transfer-workflow/supabase.ts b/packages/workflows/src/transfer-workflow/supabase.ts index 8f5bb2c6a..1760f1219 100644 --- a/packages/workflows/src/transfer-workflow/supabase.ts +++ b/packages/workflows/src/transfer-workflow/supabase.ts @@ -1,10 +1,24 @@ +import { log, ApplicationFailure } from '@temporalio/activity' import { hexToBytea } from 'app/utils/hexToBytea' import { supabaseAdmin } from 'app/utils/supabase/admin' -export async function fetchTransfer(hash: `0x${string}`) { - return await supabaseAdmin +export async function isTransferIndexed(hash: `0x${string}`) { + const { data, error } = await supabaseAdmin .from('send_account_transfers') .select('*', { count: 'exact', head: true }) .eq('tx_hash', hexToBytea(hash)) .single() + + if (error) { + if (error.code === 'PGRST116') { + log.info('isTransferIndexedActivity', { error }) + return null + } + throw ApplicationFailure.nonRetryable( + 'Error reading transfer from send_account_transfers column.', + error.code, + error + ) + } + return data !== null } diff --git a/packages/workflows/src/transfer-workflow/workflow.ts b/packages/workflows/src/transfer-workflow/workflow.ts index caab455e8..340a210d6 100644 --- a/packages/workflows/src/transfer-workflow/workflow.ts +++ b/packages/workflows/src/transfer-workflow/workflow.ts @@ -1,11 +1,16 @@ import { proxyActivities, ApplicationFailure, defineQuery, setHandler } from '@temporalio/workflow' import type { createTransferActivities } from './activities' -import type 
{ Hex } from 'viem' +import type { UserOperation, GetUserOperationReceiptReturnType } from 'permissionless' -const { sendUserOpActivity, waitForTransactionReceiptActivity, fetchTransferActivity } = - proxyActivities>({ - startToCloseTimeout: '30 seconds', - }) +const { + simulateUserOpActivity, + sendUserOpActivity, + waitForTransactionReceiptActivity, + isTransferIndexedActivity, +} = proxyActivities>({ + // TODO: make this configurable + startToCloseTimeout: '45 seconds', +}) type simulating = { status: 'simulating'; data: { userOp: UserOperation<'v0.7'> } } type sending = { status: 'sending'; data: { userOp: UserOperation<'v0.7'> } } @@ -23,24 +28,18 @@ export type transferState = simulating | sending | waiting | indexing | confirme export const getTransferStateQuery = defineQuery('getTransferState') -export type TransferWorkflowArgs = { - sender: Hex - to: Hex - token?: Hex - amount: string - nonce: string - signature: Hex -} - -export async function TransferWorkflow(args: TransferWorkflowArgs) { - const hash = await sendUserOpActivity(args) +export async function TransferWorkflow(userOp: UserOperation<'v0.7'>) { + setHandler(getTransferStateQuery, () => ({ status: 'simulating', data: { userOp } })) + await simulateUserOpActivity(userOp) + setHandler(getTransferStateQuery, () => ({ status: 'sending', data: { userOp } })) + const hash = await sendUserOpActivity(userOp) if (!hash) throw ApplicationFailure.nonRetryable('No hash returned from sendUserOperation') setHandler(getTransferStateQuery, () => ({ status: 'waiting', data: { userOp, hash } })) const receipt = await waitForTransactionReceiptActivity(hash) if (!receipt) throw ApplicationFailure.nonRetryable('No receipt returned from waitForTransactionReceipt') setHandler(getTransferStateQuery, () => ({ status: 'indexing', data: { userOp, receipt } })) - const transfer = await fetchTransferActivity(receipt.userOpHash) + const transfer = await isTransferIndexedActivity(receipt.userOpHash) if (!transfer) throw 
ApplicationFailure.retryable('Transfer not yet indexed in db') setHandler(getTransferStateQuery, () => ({ status: 'confirmed', data: { userOp, receipt } })) return transfer From c1dca9b6e28a527e0ac89b98fe120652fe0eed33 Mon Sep 17 00:00:00 2001 From: youngkidwarrior Date: Wed, 28 Aug 2024 18:00:15 -0700 Subject: [PATCH 04/58] Call transfer trpc mutation from frontend --- packages/api/src/routers/transfer.ts | 8 +- packages/app/features/send/confirm/screen.tsx | 142 ++++++------------ packages/app/features/send/screen.test.tsx | 10 ++ .../app/utils/useUserOpTransferMutation.ts | 1 + 4 files changed, 62 insertions(+), 99 deletions(-) diff --git a/packages/api/src/routers/transfer.ts b/packages/api/src/routers/transfer.ts index c616a2df2..5c7ece373 100644 --- a/packages/api/src/routers/transfer.ts +++ b/packages/api/src/routers/transfer.ts @@ -5,7 +5,7 @@ import { createTRPCRouter, protectedProcedure } from '../trpc' import { client } from '@my/temporal/client' import type { UserOperation } from 'permissionless' import { TransferWorkflow, type transferState } from '@my/workflows' -import type { coinsDict } from 'app/data/coins' +import type { allCoins } from 'app/data/coins' const log = debug('api:transfer') @@ -14,7 +14,7 @@ export const transferRouter = createTRPCRouter({ .input( z.object({ userOp: z.custom>(), - token: z.custom(), //@ todo: might be safer to decode the token from the userOp, to ensure we don't apply the wrong token + token: z.custom(), //@ todo: might be safer to decode the token from the userOp, to ensure we don't apply the wrong token }) ) .mutation(async ({ input: { token, userOp } }) => { @@ -50,7 +50,7 @@ export const transferRouter = createTRPCRouter({ getPending: protectedProcedure .input( z.object({ - token: z.custom(), + token: z.custom(), sender: z.string(), }) ) @@ -77,7 +77,7 @@ export const transferRouter = createTRPCRouter({ getFailed: protectedProcedure .input( z.object({ - token: z.custom(), + token: z.custom(), sender: z.string(), 
}) ) diff --git a/packages/app/features/send/confirm/screen.tsx b/packages/app/features/send/confirm/screen.tsx index fafe828ce..b35db9063 100644 --- a/packages/app/features/send/confirm/screen.tsx +++ b/packages/app/features/send/confirm/screen.tsx @@ -2,7 +2,6 @@ import { Avatar, Button, ButtonText, - isWeb, Label, LinkableAvatar, Paragraph, @@ -15,42 +14,37 @@ import { type TamaguiElement, type YStackProps, } from '@my/ui' -import { baseMainnet } from '@my/wagmi' +import { baseMainnet, baseMainnetClient, entryPointAddress } from '@my/wagmi' import { useQueryClient } from '@tanstack/react-query' import { IconAccount } from 'app/components/icons' -import { useTokenActivityFeed } from 'app/features/home/utils/useTokenActivityFeed' +import { IconCoin } from 'app/components/icons/IconCoin' import { useSendScreenParams } from 'app/routers/params' import { assert } from 'app/utils/assert' import formatAmount, { localizeAmount } from 'app/utils/formatAmount' -import { hexToBytea } from 'app/utils/hexToBytea' import { useSendAccount } from 'app/utils/send-accounts' import { shorten } from 'app/utils/strings' import { throwIf } from 'app/utils/throwIf' import { useProfileLookup } from 'app/utils/useProfileLookup' import { useUSDCFees } from 'app/utils/useUSDCFees' -import { - useGenerateTransferUserOp, - useUserOpTransferMutation, -} from 'app/utils/useUserOpTransferMutation' +import { useGenerateTransferUserOp } from 'app/utils/useUserOpTransferMutation' import { useAccountNonce } from 'app/utils/userop' -import { - type Activity, - isSendAccountReceiveEvent, - isSendAccountTransfersEvent, -} from 'app/utils/zod/activity' import { useEffect, useRef, useState } from 'react' import { useRouter } from 'solito/router' -import { formatUnits, type Hex, isAddress } from 'viem' +import { formatUnits, isAddress } from 'viem' import { useEstimateFeesPerGas } from 'wagmi' import { useCoin } from 'app/provider/coins' import { useCoinFromSendTokenParam } from 
'app/utils/useCoinFromTokenParam' import { allCoinsDict } from 'app/data/coins' -import { IconCoin } from 'app/components/icons/IconCoin' import debug from 'debug' import { useTokenPrices } from 'app/utils/useTokenPrices' const log = debug('app:features:send:confirm:screen') +import { api } from 'app/utils/api' +import { TRPCClientError } from '@trpc/client' +import { getUserOperationHash } from 'permissionless' +import { signUserOp } from 'app/utils/signUserOp' +import { byteaToBase64 } from 'app/utils/byteaToBase64' export function SendConfirmScreen() { const [queryParams] = useSendScreenParams() @@ -81,6 +75,11 @@ export function SendConfirm() { const submitButtonRef = useRef(null) const [queryParams] = useSendScreenParams() const { sendToken, recipient, idType, amount } = queryParams + const { + mutateAsync: transfer, + isPending: isTransferPending, + isError: isTransferError, + } = api.transfer.withUserOp.useMutation() const queryClient = useQueryClient() const { data: sendAccount, isLoading: isSendAccountLoading } = useSendAccount() @@ -100,7 +99,6 @@ export function SendConfirm() { sendAccount?.send_account_credentials .filter((c) => !!c.webauthn_credentials) .map((c) => c.webauthn_credentials as NonNullable) ?? [] - const [sentTxHash, setSentTxHash] = useState() const router = useRouter() @@ -135,21 +133,9 @@ export function SendConfirm() { } = useEstimateFeesPerGas({ chainId: baseMainnet.id, }) - const { - mutateAsync: sendUserOp, - isPending: isTransferPending, - isError: isTransferError, - submittedAt, - } = useUserOpTransferMutation() const [error, setError] = useState() - const { data: transfers, error: tokenActivityError } = useTokenActivityFeed({ - address: sendToken === 'eth' ? undefined : hexToBytea(sendToken), - refetchInterval: sentTxHash ? 1000 : undefined, // refetch every second if we have sent a tx - enabled: !!sentTxHash, - }) - const hasEnoughBalance = selectedCoin?.balance && selectedCoin.balance >= BigInt(amount ?? 
'0') const gas = usdcFees ? usdcFees.baseFee + usdcFees.gasFees : BigInt(Number.MAX_SAFE_INTEGER) const hasEnoughGas = @@ -194,6 +180,7 @@ export function SendConfirm() { assert(nonce !== undefined, 'Nonce is not available') throwIf(feesPerGasError) assert(!!feesPerGas, 'Fees per gas is not available') + assert(!!profile?.address, 'Could not resolve recipients send account') assert(selectedCoin?.balance >= BigInt(amount ?? '0'), 'Insufficient balance') const sender = sendAccount?.address as `0x${string}` @@ -204,15 +191,38 @@ export function SendConfirm() { maxPriorityFeePerGas: feesPerGas.maxPriorityFeePerGas, } - log('gasEstimate', usdcFees) - log('feesPerGas', feesPerGas) - log('userOp', _userOp) - const receipt = await sendUserOp({ - userOp: _userOp, - webauthnCreds, + console.log('gasEstimate', usdcFees) + console.log('feesPerGas', feesPerGas) + console.log('userOp', _userOp) + const chainId = baseMainnetClient.chain.id + const entryPoint = entryPointAddress[chainId] + const userOpHash = getUserOperationHash({ + userOperation: userOp, + entryPoint, + chainId, + }) + const signature = await signUserOp({ + userOpHash, + allowedCredentials: + webauthnCreds?.map((c) => ({ + id: byteaToBase64(c.raw_credential_id), + userHandle: c.name, + })) ?? 
[], + }) + userOp.signature = signature + + const { data: workflowId, error } = await transfer({ + token: selectedCoin.token, + userOp, + }).catch((e) => { + console.error("Couldn't send the userOp", e) + if (e instanceof TRPCClientError) { + return { data: undefined, error: { message: e.message } } + } + return { data: undefined, error: { message: e.message } } }) - assert(receipt.success, 'Failed to send user op') - setSentTxHash(receipt.receipt.transactionHash) + console.log('workflowId', workflowId) + console.log('error', error) if (selectedCoin?.token === 'eth') { await ethQuery.refetch() } else { @@ -225,55 +235,6 @@ export function SendConfirm() { } } - useEffect(() => { - if (!submittedAt) return - - if (sentTxHash) { - log('sent tx hash', { sentTxHash }) - const tfr = transfers?.pages.some((page) => - page.some((activity: Activity) => { - if (isSendAccountTransfersEvent(activity)) { - return activity.data.tx_hash === sentTxHash - } - if (isSendAccountReceiveEvent(activity)) { - return activity.data.tx_hash === sentTxHash - } - return false - }) - ) - - if (tokenActivityError) { - console.error(tokenActivityError) - } - - // found the transfer or we waited too long or we got an error 😢 - // or we are sending eth since event logs are not always available for eth - // (when receipient is not a send account or contract) - if (tfr || tokenActivityError || (sentTxHash && sendToken === 'eth')) { - router.replace({ pathname: '/', query: { token: sendToken } }) - } - } - - // create a window unload event on web - const eventHandlersToRemove: (() => void)[] = [] - if (isWeb) { - const unloadHandler = (e: BeforeUnloadEvent) => { - // prevent unload if we have a tx hash or a submitted at - if (submittedAt || sentTxHash) { - e.preventDefault() - } - } - window.addEventListener('beforeunload', unloadHandler) - eventHandlersToRemove.push(() => window.removeEventListener('beforeunload', unloadHandler)) - } - - return () => { - for (const remove of eventHandlersToRemove) { 
- remove() - } - } - }, [sentTxHash, transfers, router, sendToken, tokenActivityError, submittedAt]) - useEffect(() => { if (submitButtonRef.current) { submitButtonRef.current.focus() @@ -422,7 +383,7 @@ export function SendConfirm() { onPress={onSubmit} br={'$4'} disabledStyle={{ opacity: 0.7, cursor: 'not-allowed', pointerEvents: 'none' }} - disabled={!canSubmit || isTransferPending || !!sentTxHash} + disabled={!canSubmit || isTransferPending} gap={4} py={'$5'} width={'100%'} @@ -444,15 +405,6 @@ export function SendConfirm() { Sending... ) - case sentTxHash !== undefined: - return ( - <> - - - - Confirming... - - ) case !hasEnoughBalance: return Insufficient Balance case !hasEnoughGas: diff --git a/packages/app/features/send/screen.test.tsx b/packages/app/features/send/screen.test.tsx index a894bbbfa..f6f270fe4 100644 --- a/packages/app/features/send/screen.test.tsx +++ b/packages/app/features/send/screen.test.tsx @@ -6,6 +6,16 @@ jest.mock('expo-router', () => ({ usePathname: jest.fn().mockReturnValue('/send'), })) +jest.mock('app/utils/api', () => ({ + transfer: { + withUserOp: jest.fn().mockReturnValue({ + useMutation: jest.fn().mockReturnValue({ + mutateAsync: jest.fn().mockReturnValue(Promise.resolve('123')), + }), + }), + }, +})) + jest.mock('app/provider/coins', () => ({ useCoins: jest.fn().mockReturnValue({ coins: [ diff --git a/packages/app/utils/useUserOpTransferMutation.ts b/packages/app/utils/useUserOpTransferMutation.ts index 204fbfae2..87d5864c9 100644 --- a/packages/app/utils/useUserOpTransferMutation.ts +++ b/packages/app/utils/useUserOpTransferMutation.ts @@ -136,6 +136,7 @@ export async function sendUserOpTransfer({ throwNiceError(e) } } + export function useGenerateTransferUserOp({ sender, to, From 0ea5b0acd7a285358f3ff653597e646061d08040 Mon Sep 17 00:00:00 2001 From: youngkidwarrior Date: Wed, 28 Aug 2024 17:57:55 -0700 Subject: [PATCH 05/58] Create transfer temporal workflow --- apps/workers/src/worker.ts | 2 +- 
.../src/distribution-workflow/activities.ts | 13 +++---------- 2 files changed, 4 insertions(+), 11 deletions(-) diff --git a/apps/workers/src/worker.ts b/apps/workers/src/worker.ts index 71ec4354b..e93cd7599 100644 --- a/apps/workers/src/worker.ts +++ b/apps/workers/src/worker.ts @@ -36,7 +36,7 @@ async function run() { // https://github.com/temporalio/samples-typescript/tree/main/hello-world-mtls // Step 2: Start accepting tasks on the `monorepo` queue - await worker.run() + await transferWorker.run() // You may create multiple Workers in a single process in order to poll on multiple task queues. } diff --git a/packages/workflows/src/distribution-workflow/activities.ts b/packages/workflows/src/distribution-workflow/activities.ts index 1db8f8a34..93b4a3df3 100644 --- a/packages/workflows/src/distribution-workflow/activities.ts +++ b/packages/workflows/src/distribution-workflow/activities.ts @@ -10,6 +10,7 @@ import { } from './supabase' import { fetchAllBalances, isMerkleDropActive } from './wagmi' import { calculatePercentageWithBips, calculateWeights, PERC_DENOM } from './weights' +import { bootstrap } from '@my/workflows/utils' const cpuCount = cpus().length @@ -19,16 +20,8 @@ const inBatches = (array: T[], batchSize = Math.max(8, cpuCount - 1)) => { ) } -export function createDistributionActivities(supabaseUrl: string, supabaseKey: string) { - globalThis.process = globalThis.process || {} - globalThis.process.env.SUPABASE_URL = supabaseUrl // HACK: set the supabase url in the environment - globalThis.process.env.SUPABASE_SERVICE_ROLE = supabaseKey // HACK: set the supabase key in the environment - - return { - calculateDistributionSharesActivity, - fetchDistributionActivity, - fetchAllOpenDistributionsActivity, - } +export function createDistributionActivities(env: Record) { + bootstrap(env) } async function fetchAllOpenDistributionsActivity() { From 22d3d3bfc698266f4e0c66640e465bdec61b0b3e Mon Sep 17 00:00:00 2001 From: youngkidwarrior Date: Mon, 2 Sep 2024 
16:17:18 -0700 Subject: [PATCH 06/58] Encapsulate temporal into a package --- apps/workers/src/worker.ts | 2 +- apps/workers/tsconfig.json | 3 ++- packages/workflows/package.json | 2 +- 3 files changed, 4 insertions(+), 3 deletions(-) diff --git a/apps/workers/src/worker.ts b/apps/workers/src/worker.ts index e93cd7599..71ec4354b 100644 --- a/apps/workers/src/worker.ts +++ b/apps/workers/src/worker.ts @@ -36,7 +36,7 @@ async function run() { // https://github.com/temporalio/samples-typescript/tree/main/hello-world-mtls // Step 2: Start accepting tasks on the `monorepo` queue - await transferWorker.run() + await worker.run() // You may create multiple Workers in a single process in order to poll on multiple task queues. } diff --git a/apps/workers/tsconfig.json b/apps/workers/tsconfig.json index 487ce956d..6ad8297a0 100644 --- a/apps/workers/tsconfig.json +++ b/apps/workers/tsconfig.json @@ -21,6 +21,7 @@ "./src", "../../packages/workflows/src", "../../globals.d.ts", - "../../environment.d.ts" + "../../environment.d.ts", + "../../packages/temporal/src" ] } diff --git a/packages/workflows/package.json b/packages/workflows/package.json index ac11b461c..3f52db670 100644 --- a/packages/workflows/package.json +++ b/packages/workflows/package.json @@ -5,7 +5,7 @@ "src" ], "exports": { - "./all-activities": { + "./activities": { "types": "./src/all-activities.ts", "default": "./src/all-activities.ts" }, From 2db18f2edffeb35893d435298a5d3fbdc942cb46 Mon Sep 17 00:00:00 2001 From: youngkidwarrior Date: Wed, 28 Aug 2024 17:57:55 -0700 Subject: [PATCH 07/58] Create transfer tempora workflow --- apps/workers/src/worker.ts | 6 +++++- .../workflows/src/transfer-workflow/supabase.ts | 1 - .../workflows/src/transfer-workflow/workflow.ts | 13 ++++--------- 3 files changed, 9 insertions(+), 11 deletions(-) diff --git a/apps/workers/src/worker.ts b/apps/workers/src/worker.ts index 71ec4354b..e9daa07f5 100644 --- a/apps/workers/src/worker.ts +++ b/apps/workers/src/worker.ts @@ -7,6 
+7,10 @@ import { URL, fileURLToPath } from 'node:url' import path from 'node:path' async function run() { + const workflowsPathUrl = new URL( + `../../../packages/workflows/src/all-workflows${path.extname(import.meta.url)}`, + import.meta.url + ) // Step 1: Register Workflows and Activities with the Worker and connect to // the Temporal server. const transferWorker = await Worker.create({ @@ -36,7 +40,7 @@ async function run() { // https://github.com/temporalio/samples-typescript/tree/main/hello-world-mtls // Step 2: Start accepting tasks on the `monorepo` queue - await worker.run() + await transferWorker.run() // You may create multiple Workers in a single process in order to poll on multiple task queues. } diff --git a/packages/workflows/src/transfer-workflow/supabase.ts b/packages/workflows/src/transfer-workflow/supabase.ts index 1760f1219..4842eea95 100644 --- a/packages/workflows/src/transfer-workflow/supabase.ts +++ b/packages/workflows/src/transfer-workflow/supabase.ts @@ -1,4 +1,3 @@ -import { log, ApplicationFailure } from '@temporalio/activity' import { hexToBytea } from 'app/utils/hexToBytea' import { supabaseAdmin } from 'app/utils/supabase/admin' diff --git a/packages/workflows/src/transfer-workflow/workflow.ts b/packages/workflows/src/transfer-workflow/workflow.ts index 340a210d6..d1d5535f9 100644 --- a/packages/workflows/src/transfer-workflow/workflow.ts +++ b/packages/workflows/src/transfer-workflow/workflow.ts @@ -2,15 +2,10 @@ import { proxyActivities, ApplicationFailure, defineQuery, setHandler } from '@t import type { createTransferActivities } from './activities' import type { UserOperation, GetUserOperationReceiptReturnType } from 'permissionless' -const { - simulateUserOpActivity, - sendUserOpActivity, - waitForTransactionReceiptActivity, - isTransferIndexedActivity, -} = proxyActivities>({ - // TODO: make this configurable - startToCloseTimeout: '45 seconds', -}) +const { sendUserOpActivity, waitForTransactionReceiptActivity, 
fetchTransferActivity } = + proxyActivities>({ + startToCloseTimeout: '30 seconds', + }) type simulating = { status: 'simulating'; data: { userOp: UserOperation<'v0.7'> } } type sending = { status: 'sending'; data: { userOp: UserOperation<'v0.7'> } } From babf28d46b7a68c6cd6fe828a3047d4638fc73c8 Mon Sep 17 00:00:00 2001 From: youngkidwarrior Date: Mon, 2 Sep 2024 16:17:18 -0700 Subject: [PATCH 08/58] Encapsulate temporal into a package --- apps/workers/src/worker.ts | 6 +----- .../workflows/src/transfer-workflow/supabase.ts | 1 + .../workflows/src/transfer-workflow/workflow.ts | 13 +++++++++---- 3 files changed, 11 insertions(+), 9 deletions(-) diff --git a/apps/workers/src/worker.ts b/apps/workers/src/worker.ts index e9daa07f5..71ec4354b 100644 --- a/apps/workers/src/worker.ts +++ b/apps/workers/src/worker.ts @@ -7,10 +7,6 @@ import { URL, fileURLToPath } from 'node:url' import path from 'node:path' async function run() { - const workflowsPathUrl = new URL( - `../../../packages/workflows/src/all-workflows${path.extname(import.meta.url)}`, - import.meta.url - ) // Step 1: Register Workflows and Activities with the Worker and connect to // the Temporal server. const transferWorker = await Worker.create({ @@ -40,7 +36,7 @@ async function run() { // https://github.com/temporalio/samples-typescript/tree/main/hello-world-mtls // Step 2: Start accepting tasks on the `monorepo` queue - await transferWorker.run() + await worker.run() // You may create multiple Workers in a single process in order to poll on multiple task queues. 
} diff --git a/packages/workflows/src/transfer-workflow/supabase.ts b/packages/workflows/src/transfer-workflow/supabase.ts index 4842eea95..1760f1219 100644 --- a/packages/workflows/src/transfer-workflow/supabase.ts +++ b/packages/workflows/src/transfer-workflow/supabase.ts @@ -1,3 +1,4 @@ +import { log, ApplicationFailure } from '@temporalio/activity' import { hexToBytea } from 'app/utils/hexToBytea' import { supabaseAdmin } from 'app/utils/supabase/admin' diff --git a/packages/workflows/src/transfer-workflow/workflow.ts b/packages/workflows/src/transfer-workflow/workflow.ts index d1d5535f9..340a210d6 100644 --- a/packages/workflows/src/transfer-workflow/workflow.ts +++ b/packages/workflows/src/transfer-workflow/workflow.ts @@ -2,10 +2,15 @@ import { proxyActivities, ApplicationFailure, defineQuery, setHandler } from '@t import type { createTransferActivities } from './activities' import type { UserOperation, GetUserOperationReceiptReturnType } from 'permissionless' -const { sendUserOpActivity, waitForTransactionReceiptActivity, fetchTransferActivity } = - proxyActivities>({ - startToCloseTimeout: '30 seconds', - }) +const { + simulateUserOpActivity, + sendUserOpActivity, + waitForTransactionReceiptActivity, + isTransferIndexedActivity, +} = proxyActivities>({ + // TODO: make this configurable + startToCloseTimeout: '45 seconds', +}) type simulating = { status: 'simulating'; data: { userOp: UserOperation<'v0.7'> } } type sending = { status: 'sending'; data: { userOp: UserOperation<'v0.7'> } } From 3bb089fff2dfc88e7e88e6bab83f17840aac10f1 Mon Sep 17 00:00:00 2001 From: Beezy Date: Tue, 1 Oct 2024 03:10:34 +0000 Subject: [PATCH 09/58] Temporal cloud changes --- Makefile | 62 +++++++++++ apps/next/Dockerfile | 1 + apps/next/package.json | 2 +- apps/next/tsconfig.json | 3 +- apps/workers/src/client.ts | 4 +- apps/workers/src/worker.ts | 56 +++++----- package.json | 2 +- .../src/routers/account-recovery/router.ts | 2 +- packages/api/src/routers/transfer.ts | 24 ++-- 
packages/api/tsconfig.json | 3 +- packages/temporal/package.json | 4 + packages/temporal/src/client.ts | 51 +++++++++ packages/temporal/src/payload-converter.ts | 7 ++ .../src/superjson-payload-converter.ts | 83 ++++++++++++++ packages/workflows/package.json | 8 ++ .../src/transfer-workflow/activities.ts | 105 +++++++++--------- .../src/transfer-workflow/supabase.ts | 1 + .../workflows/src/transfer-workflow/wagmi.ts | 28 +++-- .../src/transfer-workflow/workflow.ts | 14 ++- packages/workflows/tsconfig.json | 3 +- yarn.lock | 61 +++++----- 21 files changed, 382 insertions(+), 142 deletions(-) create mode 100644 Makefile create mode 100644 packages/temporal/src/client.ts create mode 100644 packages/temporal/src/payload-converter.ts create mode 100644 packages/temporal/src/superjson-payload-converter.ts diff --git a/Makefile b/Makefile new file mode 100644 index 000000000..a1563864a --- /dev/null +++ b/Makefile @@ -0,0 +1,62 @@ +# Makefile + +# Check if .env.local exists, if not, create it from template +$(shell test -f .env.local || cp .env.local.template .env.local) +include .env.local + +# Export variables from .env.local if not already set in the environment +define read_env + $(eval export $(shell sed -ne 's/ *#.*$$//; /./ s/=.*$$//; s/^/export /; s/$$/?=$$\(shell grep -m1 "^&=" .env.local | cut -d= -f2-\)/' .env.local)) +endef + +$(call read_env) + +IMAGE_NAME = sendapp/next-app +GIT_BRANCH = $(shell git symbolic-ref --short HEAD 2>/dev/null || git rev-parse --short HEAD) +GIT_HASH = $(shell git rev-parse --short=10 HEAD) +NEXT_COMPOSE_IMAGE = "${IMAGE_NAME}-${GIT_BRANCH}-${GIT_HASH}" +DOCKERFILE_PATH = ./apps/next/Dockerfile +BUILD_CONTEXT = . 
+ +# Docker build arguments +BUILD_ARGS = \ + --build-arg CI=${CI} \ + --build-arg DEBUG=${DEBUG} \ + --build-arg NEXT_PUBLIC_SUPABASE_PROJECT_ID=${NEXT_PUBLIC_SUPABASE_PROJECT_ID} \ + --build-arg NEXT_PUBLIC_URL=${NEXT_PUBLIC_URL} \ + --build-arg NEXT_PUBLIC_SUPABASE_URL=${NEXT_PUBLIC_SUPABASE_URL} \ + --build-arg NEXT_PUBLIC_SUPABASE_GRAPHQL_URL=${NEXT_PUBLIC_SUPABASE_GRAPHQL_URL} \ + --build-arg NEXT_PUBLIC_SUPABASE_ANON_KEY=${NEXT_PUBLIC_SUPABASE_ANON_KEY} \ + --build-arg NEXT_PUBLIC_MAINNET_RPC_URL=${NEXT_PUBLIC_MAINNET_RPC_URL} \ + --build-arg NEXT_PUBLIC_BASE_RPC_URL=${NEXT_PUBLIC_BASE_RPC_URL} \ + --build-arg NEXT_PUBLIC_BUNDLER_RPC_URL=${NEXT_PUBLIC_BUNDLER_RPC_URL} \ + --build-arg NEXT_PUBLIC_MAINNET_CHAIN_ID=${NEXT_PUBLIC_MAINNET_CHAIN_ID} \ + --build-arg NEXT_PUBLIC_BASE_CHAIN_ID=${NEXT_PUBLIC_BASE_CHAIN_ID} \ + --build-arg NEXT_PUBLIC_WALLETCONNECT_PROJECT_ID=${NEXT_PUBLIC_WALLETCONNECT_PROJECT_ID} \ + --build-arg NEXT_PUBLIC_TURNSTILE_SITE_KEY=${NEXT_PUBLIC_TURNSTILE_SITE_KEY} + +# Docker secrets +SECRETS = \ + --secret id=SUPABASE_DB_URL \ + --secret id=SUPABASE_SERVICE_ROLE \ + --secret id=TURBO_TOKEN \ + --secret id=TURBO_TEAM + + +# Targets +.PHONY: docker-web docker-web-push +docker-web: + @if [ -z "$$(docker images -q $(NEXT_COMPOSE_IMAGE))" ]; then \ + echo "Image $(NEXT_COMPOSE_IMAGE) does not exist locally. Building..."; \ + docker buildx build --progress=plain --platform linux/amd64 -t $(IMAGE_NAME)-$(GIT_BRANCH):$(GIT_HASH) -t $(IMAGE_NAME)-$(GIT_BRANCH):latest $(BUILD_ARGS) $(SECRETS) -f $(DOCKERFILE_PATH) $(BUILD_CONTEXT) ;\ + else \ + echo "Image $(NEXT_COMPOSE_IMAGE) already exists locally. 
Skipping build."; \ + fi + +docker-web-push: docker-web + docker push $(IMAGE_NAME)-$(GIT_BRANCH):$(GIT_HASH) + docker push $(IMAGE_NAME)-$(GIT_BRANCH):latest + +# Prune docker system images and containers older than 7 days != otterscan +docker-clean: + docker image prune -f --filter "label!=otterscan*" --filter until=168h diff --git a/apps/next/Dockerfile b/apps/next/Dockerfile index 8356a291e..35e7685ee 100644 --- a/apps/next/Dockerfile +++ b/apps/next/Dockerfile @@ -32,6 +32,7 @@ COPY packages/eslint-config-custom/package.json packages/eslint-config-custom/pa COPY packages/playwright/package.json packages/playwright/package.json COPY packages/shovel/package.json packages/shovel/package.json COPY packages/snaplet/package.json packages/snaplet/package.json +COPY packages/temporal/package.json packages/temporal/package.json COPY packages/ui/package.json packages/ui/package.json COPY packages/wagmi/package.json packages/wagmi/package.json COPY packages/webauthn-authenticator/package.json packages/webauthn-authenticator/package.json diff --git a/apps/next/package.json b/apps/next/package.json index 65c27f771..f8cd50bd3 100644 --- a/apps/next/package.json +++ b/apps/next/package.json @@ -11,7 +11,7 @@ "serve": "NODE_ENV=production yarn with-env next start --port 8151", "lint": "next lint", "lint:fix": "next lint --fix", - "with-env": "TAMAGUI_TARGET=web dotenv -e ../../.env -c --" + "with-env": "TAMAGUI_TARGET=web dotenv -e ../../.env.localtemp -c --" }, "dependencies": { "@my/api": "workspace:*", diff --git a/apps/next/tsconfig.json b/apps/next/tsconfig.json index 4191fcaf3..204f3e3c7 100644 --- a/apps/next/tsconfig.json +++ b/apps/next/tsconfig.json @@ -30,7 +30,8 @@ "pathToApp": "." 
} ], - "types": ["node"] + "types": ["node"], + "sourceMap": true }, "include": [ "next-env.d.ts", diff --git a/apps/workers/src/client.ts b/apps/workers/src/client.ts index f3eb01622..6fe55a7ab 100644 --- a/apps/workers/src/client.ts +++ b/apps/workers/src/client.ts @@ -25,13 +25,13 @@ import type { UserOperation } from 'permissionless' // return result // } -async function runTransferWorkflow(userOp: UserOperation<'v0.7'>) { +export async function runTransferWorkflow(userOp: UserOperation<'v0.7'>) { const connection = await Connection.connect() const client = new Client({ connection, }) - const handle = await client.workflow.start(TransferWorkflow, { + const handle = await client.workflow.start(SendTransferWorkflow, { taskQueue: 'monorepo', workflowId: `transfers-workflow-${userOp.sender}-${userOp.nonce.toString()}`, // TODO: remember to replace this with a meaningful business ID args: [userOp], diff --git a/apps/workers/src/worker.ts b/apps/workers/src/worker.ts index 71ec4354b..151de0bbc 100644 --- a/apps/workers/src/worker.ts +++ b/apps/workers/src/worker.ts @@ -1,25 +1,41 @@ -import { Worker } from '@temporalio/worker' +import { Worker, NativeConnection } from '@temporalio/worker' import { createTransferActivities, createDistributionActivities, } from '@my/workflows/all-activities' -import { URL, fileURLToPath } from 'node:url' -import path from 'node:path' +import fs from 'node:fs/promises' +import { createRequire } from 'node:module' +import { dataConverter } from '@my/temporal/payload-converter' +const require = createRequire(import.meta.url) + +const { NODE_ENV = 'development' } = process.env +const isDeployed = ['production', 'staging'].includes(NODE_ENV) async function run() { - // Step 1: Register Workflows and Activities with the Worker and connect to - // the Temporal server. - const transferWorker = await Worker.create({ - workflowsPath: fileURLToPath(workflowsPathUrl), + const connection = isDeployed + ? 
await NativeConnection.connect({ + address: `${process.env.TEMPORAL_NAMESPACE}.tmprl.cloud:7233`, + tls: { + clientCertPair: { + crt: await fs.readFile(process.env.TEMPORAL_MTLS_TLS_CERT ?? '').catch((e) => { + console.error(e) + throw new Error('no cert found. Check the TEMPORAL_MTLS_TLS_CERT env var') + }), + key: await fs.readFile(process.env.TEMPORAL_MTLS_TLS_KEY ?? '').catch((e) => { + console.error(e) + throw new Error('no key found. Check the TEMPORAL_MTLS_TLS_KEY env var') + }), + }, + }, + }) + : undefined + + const worker = await Worker.create({ + connection, + dataConverter: dataConverter, + workflowsPath: require.resolve('@my/workflows'), activities: { - ...createTransferActivities( - process.env.NEXT_PUBLIC_SUPABASE_URL, - process.env.SUPABASE_SERVICE_ROLE - ), - // ...createDistributionActivities( - // process.env.NEXT_PUBLIC_SUPABASE_URL, - // process.env.SUPABASE_SERVICE_ROLE - // ), + ...createTransferActivities(process.env), }, namespace: 'default', taskQueue: 'monorepo', @@ -28,17 +44,7 @@ async function run() { }, }) - // Worker connects to localhost by default and uses console.error for logging. - // Customize the Worker by passing more options to create(): - // https://typescript.temporal.io/api/classes/worker.Worker - - // If you need to configure server connection parameters, see the mTLS example: - // https://github.com/temporalio/samples-typescript/tree/main/hello-world-mtls - - // Step 2: Start accepting tasks on the `monorepo` queue await worker.run() - - // You may create multiple Workers in a single process in order to poll on multiple task queues. 
} run().catch((err) => { diff --git a/package.json b/package.json index a8bc7bc9b..71b32a100 100644 --- a/package.json +++ b/package.json @@ -56,7 +56,7 @@ "check-dependency-version-consistency": "^3.0.3", "eslint": "^8.46.0", "node-gyp": "^9.3.1", - "turbo": "^2.0.3", + "turbo": "^2.1.2", "typescript": "^5.5.3" }, "packageManager": "yarn@4.3.1", diff --git a/packages/api/src/routers/account-recovery/router.ts b/packages/api/src/routers/account-recovery/router.ts index 2c0247908..95ec44556 100644 --- a/packages/api/src/routers/account-recovery/router.ts +++ b/packages/api/src/routers/account-recovery/router.ts @@ -50,7 +50,7 @@ export const accountRecoveryRouter = createTRPCRouter({ .single() if (challengeError || !challengeData) { - logger(`getChallenge:cant-insert-challenge: [${challengeError}]`) + logger(`getChallenge:cant-insert-challenge: [${JSON.stringify(challengeError)}]`) throw new TRPCError({ code: 'INTERNAL_SERVER_ERROR', message: formatErr('Cannot generate challenge: Internal server error'), diff --git a/packages/api/src/routers/transfer.ts b/packages/api/src/routers/transfer.ts index 5c7ece373..be70743cc 100644 --- a/packages/api/src/routers/transfer.ts +++ b/packages/api/src/routers/transfer.ts @@ -2,8 +2,9 @@ import { TRPCError } from '@trpc/server' import debug from 'debug' import { z } from 'zod' import { createTRPCRouter, protectedProcedure } from '../trpc' -import { client } from '@my/temporal/client' +import { getTemporalClient } from '@my/temporal/client' import type { UserOperation } from 'permissionless' + import { TransferWorkflow, type transferState } from '@my/workflows' import type { allCoins } from 'app/data/coins' @@ -20,14 +21,14 @@ export const transferRouter = createTRPCRouter({ .mutation(async ({ input: { token, userOp } }) => { const { sender, nonce } = userOp try { + const client = await getTemporalClient() const handle = await client.workflow.start(TransferWorkflow, { taskQueue: 'monorepo', - workflowId: 
`transfer-workflow-${token}-${sender}-${nonce}`, + workflowId: `send-transfer-workflow-${token}-${sender}-${nonce}`, args: [userOp], }) - log('Started transfer handle', handle.workflowId) - // optional: wait for client result - return await handle.workflowId + log(`Workflow Created: ${handle.workflowId}`) + return handle.workflowId } catch (error) { throw new TRPCError({ code: 'INTERNAL_SERVER_ERROR', @@ -37,7 +38,8 @@ export const transferRouter = createTRPCRouter({ }), getState: protectedProcedure.input(z.string()).query(async ({ input: workflowId }) => { try { - const handle = await client.workflow.getHandle(workflowId) + const client = await getTemporalClient() + const handle = client.workflow.getHandle(workflowId) const state = await handle.query('getTransferState') return state } catch (error) { @@ -57,11 +59,12 @@ export const transferRouter = createTRPCRouter({ .query(async ({ input: { token, sender } }) => { try { const states: transferState[] = [] - const workflows = await client.workflow.list({ + const client = await getTemporalClient() + const workflows = client.workflow.list({ query: `ExecutionStatus = "Running" AND WorkflowId BETWEEN "transfer-workflow-${token}-${sender}-" AND "transfer-workflow-${token}-${sender}-~"`, }) for await (const workflow of workflows) { - const handle = await client.workflow.getHandle(workflow.workflowId) + const handle = client.workflow.getHandle(workflow.workflowId) const state = await handle.query('getTransferState') states.push(state) @@ -84,11 +87,12 @@ export const transferRouter = createTRPCRouter({ .query(async ({ input: { token, sender } }) => { try { const states: transferState[] = [] - const workflows = await client.workflow.list({ + const client = await getTemporalClient() + const workflows = client.workflow.list({ query: `ExecutionStatus = "Failed" AND WorkflowId BETWEEN "transfer-workflow-${token}-${sender}-" AND "transfer-workflow-${token}-${sender}-~"`, }) for await (const workflow of workflows) { - const 
handle = await client.workflow.getHandle(workflow.workflowId) + const handle = client.workflow.getHandle(workflow.workflowId) const state = await handle.query('getTransferState') states.push(state) } diff --git a/packages/api/tsconfig.json b/packages/api/tsconfig.json index 845dd7939..d2e8cdc17 100644 --- a/packages/api/tsconfig.json +++ b/packages/api/tsconfig.json @@ -9,7 +9,8 @@ "app/*": ["../app/*"], "@my/wagmi": ["../wagmi/src"], "@my/wagmi/*": ["../wagmi/src/*"], - "@my/supabase/*": ["../../supabase/*"] + "@my/supabase/*": ["../../supabase/*"], + "@my/workflows": ["./packages/workflows/src/all-workflows.ts"] } }, "references": [] diff --git a/packages/temporal/package.json b/packages/temporal/package.json index d47c437c3..ce6dd2db2 100644 --- a/packages/temporal/package.json +++ b/packages/temporal/package.json @@ -15,6 +15,10 @@ "./client": { "types": "./src/client.ts", "default": "./src/client.ts" + }, + "./superjson-payload-converter": { + "types": "./src/superjson-payload-converter.ts", + "default": "./src/superjson-payload-converter.ts" } }, "scripts": { diff --git a/packages/temporal/src/client.ts b/packages/temporal/src/client.ts new file mode 100644 index 000000000..298224e21 --- /dev/null +++ b/packages/temporal/src/client.ts @@ -0,0 +1,51 @@ +import { Client, Connection } from '@temporalio/client' +import { dataConverter } from './payload-converter' +import { createRequire } from 'node:module' +const require = createRequire(import.meta.url) +import debug from 'debug' +import fs from 'node:fs/promises' +const { NODE_ENV = 'development' } = process.env +const isDeployed = ['production', 'staging'].includes(NODE_ENV) + +const log = debug('api:temporal') +log(`connecting to temporal: ${process.env.TEMPORAL_NAMESPACE} with NODE_ENV: ${NODE_ENV}`) + +let connectionOptions = {} +if (isDeployed) { + connectionOptions = { + address: `${process.env.TEMPORAL_NAMESPACE}.tmprl.cloud:7233`, + tls: { + clientCertPair: { + crt: await 
fs.readFile(process.env.TEMPORAL_MTLS_TLS_CERT ?? '').catch((e) => { + console.error(e) + throw new Error('no cert found. Check the TEMPORAL_MTLS_TLS_CERT env var') + }), + key: await fs.readFile(process.env.TEMPORAL_MTLS_TLS_KEY ?? '').catch((e) => { + console.error(e) + throw new Error('no key found. Check the TEMPORAL_MTLS_TLS_KEY env var') + }), + }, + }, + } +} + +let client: Client | null = null + +export async function getTemporalClient(): Promise { + if (!client) { + const connection = await Connection.connect(connectionOptions) + client = new Client({ + connection, + namespace: process.env.TEMPORAL_NAMESPACE ?? 'default', + dataConverter: dataConverter, + }) + } + return client +} + +export async function closeTemporalClient(): Promise { + if (client) { + await client.connection.close() + client = null + } +} diff --git a/packages/temporal/src/payload-converter.ts b/packages/temporal/src/payload-converter.ts new file mode 100644 index 000000000..fc1cd19d6 --- /dev/null +++ b/packages/temporal/src/payload-converter.ts @@ -0,0 +1,7 @@ +import { CompositePayloadConverter, UndefinedPayloadConverter } from '@temporalio/common' +import { SuperjsonPayloadConverter } from './superjson-payload-converter' + +export const payloadConverter = new CompositePayloadConverter( + new UndefinedPayloadConverter(), + new SuperjsonPayloadConverter() +) diff --git a/packages/temporal/src/superjson-payload-converter.ts b/packages/temporal/src/superjson-payload-converter.ts new file mode 100644 index 000000000..3ab6367d4 --- /dev/null +++ b/packages/temporal/src/superjson-payload-converter.ts @@ -0,0 +1,83 @@ +import { + type EncodingType, + METADATA_ENCODING_KEY, + type Payload, + type PayloadConverterWithEncoding, + PayloadConverterError, +} from '@temporalio/common' +import superjson from 'superjson' +import { decode, encode } from '@temporalio/common/lib/encoding' + +/** + * Converts between values and [SUPERJSON](https://github.com/flightcontrolhq/superjson) Payloads. 
+ */ +export class SuperjsonPayloadConverter implements PayloadConverterWithEncoding { + // Use 'json/plain' so that Payloads are displayed in the UI + public encodingType = 'json/plain' as EncodingType + + public toPayload(value: unknown): Payload | undefined { + if (value === undefined) return undefined + let sjson = '' + try { + sjson = superjson.stringify(value) + } catch (e) { + throw new UnsupportedSuperjsonTypeError( + `Can't run SUPERJSON.stringify on this value: ${value}. Either convert it (or its properties) to SUPERJSON-serializable values (see https://github.com/flightcontrolhq/superjson#readme ), or create a custom data converter. SJSON.stringify error message: ${errorMessage( + e + )}`, + e as Error + ) + } + + return { + metadata: { + [METADATA_ENCODING_KEY]: encode('json/plain'), + // Include an additional metadata field to indicate that this is an SuperJSON payload + format: encode('extended'), + }, + data: encode(sjson), + } + } + + public fromPayload(content: Payload): T { + try { + if (!content.data) { + throw new UnsupportedSuperjsonTypeError( + `Can't run SUPERJSON.parse on this value: ${content.data}. Either convert it (or its properties) to SUPERJSON-serializable values (see https://github.com/flightcontrolhq/superjson#readme ), or create a custom data converter. No data found in payload.` + ) + } + return superjson.parse(decode(content.data)) + } catch (e) { + throw new UnsupportedSuperjsonTypeError( + `Can't run SUPERJSON.parse on this value: ${ + content.data + }. Either convert it (or its properties) to SUPERJSON-serializable values (see https://github.com/flightcontrolhq/superjson#readme ), or create a custom data converter. 
SJSON.parse error message: ${errorMessage( + e + )}`, + e as Error + ) + } + } +} + +export class UnsupportedSuperjsonTypeError extends PayloadConverterError { + public readonly name: string = 'UnsupportedJsonTypeError' + + constructor( + message: string | undefined, + public readonly cause?: Error + ) { + super(message ?? undefined) + } +} +// @@@SNIPEND + +export function errorMessage(error: unknown): string | undefined { + if (typeof error === 'string') { + return error + } + if (error instanceof Error) { + return error.message + } + return undefined +} diff --git a/packages/workflows/package.json b/packages/workflows/package.json index 3f52db670..af90b30c5 100644 --- a/packages/workflows/package.json +++ b/packages/workflows/package.json @@ -9,9 +9,17 @@ "types": "./src/all-activities.ts", "default": "./src/all-activities.ts" }, + "./utils": { + "types": "./src/utils/index.ts", + "default": "./src/utils/index.ts" + }, "./all-workflows": { "types": "./src/all-workflows.ts", "default": "./src/all-workflows.ts" + }, + "/": { + "types": "./src/all-workflows.ts", + "default": "./src/all-workflows.ts" } }, "type": "module", diff --git a/packages/workflows/src/transfer-workflow/activities.ts b/packages/workflows/src/transfer-workflow/activities.ts index cd662fd78..67581eb37 100644 --- a/packages/workflows/src/transfer-workflow/activities.ts +++ b/packages/workflows/src/transfer-workflow/activities.ts @@ -1,58 +1,60 @@ import { log, ApplicationFailure } from '@temporalio/activity' -import { fetchTransfer } from './supabase' -import { - simulateUserOperation, - sendUserOperation, - waitForTransactionReceipt, - generateTransferUserOp, -} from './wagmi' -import type { TransferWorkflowArgs } from './workflow' -import { isAddress, type Hex } from 'viem' -export function createTransferActivities(supabaseUrl: string, supabaseKey: string) { - globalThis.process = globalThis.process || {} - globalThis.process.env.SUPABASE_URL = supabaseUrl // HACK: set the supabase url in the 
environment - globalThis.process.env.SUPABASE_SERVICE_ROLE = supabaseKey // HACK: set the supabase key in the environment +import { isTransferIndexed } from './supabase' +import { simulateUserOperation, sendUserOperation, waitForTransactionReceipt } from './wagmi' +import type { UserOperation } from 'permissionless' +import { bootstrap } from '@my/workflows/utils' + +export const createTransferActivities = (env: Record) => { + bootstrap(env) return { + simulateUserOpActivity, sendUserOpActivity, - fetchTransferActivity, waitForTransactionReceiptActivity, + isTransferIndexedActivity, } } - -async function sendUserOpActivity(args: TransferWorkflowArgs) { - const { sender, to, token, amount, nonce } = args - const parsedAmount = BigInt(amount) - const parsedNonce = BigInt(nonce) - if (!!sender && !isAddress(sender)) - throw ApplicationFailure.nonRetryable('Invalid send account address') - if (!!to && !isAddress(to)) throw ApplicationFailure.nonRetryable('Invalid to address') - if (!token || !isAddress(token)) throw ApplicationFailure.nonRetryable('Invalid token address') - if (typeof parsedAmount !== 'bigint' || parsedAmount <= 0n) - throw ApplicationFailure.nonRetryable('Invalid amount') - if (typeof parsedNonce !== 'bigint' || parsedNonce < 0n) - throw ApplicationFailure.nonRetryable('Invalid nonce') +async function simulateUserOpActivity(userOp: UserOperation<'v0.7'>) { + if (!userOp.signature) { + throw ApplicationFailure.nonRetryable('UserOp signature is required') + } try { - const userOp = await generateTransferUserOp({ - sender, - to, - token, - amount: parsedAmount, - nonce: parsedNonce, - }) - userOp.signature = args.signature - console.log('userOp: ', userOp) + await simulateUserOperation(userOp) + } catch (error) { + throw ApplicationFailure.nonRetryable('Error simulating user operation', error.code, error) + } +} + +async function sendUserOpActivity(userOp: UserOperation<'v0.7'>) { + const creationTime = Date.now() + try { const hash = await 
sendUserOperation(userOp) - console.log('hash: ', hash) - log.info('sendUserOperationActivity', { hash, userOp }) + log.info('UserOperation sent', { + hash, + sendTime: Date.now(), + userOp: JSON.stringify(userOp, null, 2), + }) return hash } catch (error) { - throw ApplicationFailure.nonRetryable('Error sending user operation', error.code, error) + const errorMessage = + error instanceof Error ? `${error.name}: ${error.message}` : 'Unknown error occurred' + + log.error('Error in sendUserOpActivity', { + error: errorMessage, + creationTime, + sendTime: Date.now(), + userOp: JSON.stringify(userOp, null, 2), + }) + + throw ApplicationFailure.nonRetryable(errorMessage) } } async function waitForTransactionReceiptActivity(hash: `0x${string}`) { + if (!hash) { + throw ApplicationFailure.nonRetryable('Invalid hash: hash is undefined') + } try { const receipt = await waitForTransactionReceipt(hash) if (!receipt.success) @@ -60,23 +62,16 @@ async function waitForTransactionReceiptActivity(hash: `0x${string}`) { log.info('waitForTransactionReceiptActivity', { receipt }) return receipt } catch (error) { - throw ApplicationFailure.nonRetryable('Error waiting for tx receipt', error.code, error) + const errorMessage = error instanceof Error ? 
error.message : String(error) + log.error('Error in waitForTransactionReceiptActivity', { hash, error: errorMessage }) + throw ApplicationFailure.nonRetryable('Error waiting for tx receipt', errorMessage) } } - -async function fetchTransferActivity(hash: `0x${string}`) { - const { data: transfer, error } = await fetchTransfer(hash) - if (error) { - if (error.code === 'PGRST116') { - log.info('fetchTransferActivity', { error }) - return null - } - throw ApplicationFailure.nonRetryable( - 'Error fetching transfer from activity column.', - error.code, - error - ) +async function isTransferIndexedActivity(hash: `0x${string}`) { + const isIndexed = await isTransferIndexed(hash) + log.info('isTransferIndexedActivity', { isIndexed }) + if (!isIndexed) { + throw ApplicationFailure.retryable('Transfer not yet indexed in db') } - log.info('fetchTransferActivity', { transfer }) - return transfer + return isIndexed } diff --git a/packages/workflows/src/transfer-workflow/supabase.ts b/packages/workflows/src/transfer-workflow/supabase.ts index 1760f1219..94c0352e0 100644 --- a/packages/workflows/src/transfer-workflow/supabase.ts +++ b/packages/workflows/src/transfer-workflow/supabase.ts @@ -9,6 +9,7 @@ export async function isTransferIndexed(hash: `0x${string}`) { .eq('tx_hash', hexToBytea(hash)) .single() + log.info('isTransferIndexed', { count, error, status, statusText }) if (error) { if (error.code === 'PGRST116') { log.info('isTransferIndexedActivity', { error }) diff --git a/packages/workflows/src/transfer-workflow/wagmi.ts b/packages/workflows/src/transfer-workflow/wagmi.ts index c009fde4b..eefab60ac 100644 --- a/packages/workflows/src/transfer-workflow/wagmi.ts +++ b/packages/workflows/src/transfer-workflow/wagmi.ts @@ -1,13 +1,7 @@ import { log, ApplicationFailure } from '@temporalio/activity' import type { UserOperation } from 'permissionless' -import { - baseMainnetBundlerClient, - baseMainnetClient, - sendAccountAbi, - tokenPaymasterAddress, - entryPointAddress, -} 
from '@my/wagmi' -import { encodeFunctionData, erc20Abi, isAddress, type Hex } from 'viem' +import { baseMainnetBundlerClient, baseMainnetClient, entryPointAddress } from '@my/wagmi' +import type { Hex } from 'viem' /** * default user op with preset gas values that work will probably need to move this to the database. @@ -46,10 +40,20 @@ export async function simulateUserOperation(userOp: UserOperation<'v0.7'>) { } export async function sendUserOperation(userOp: UserOperation<'v0.7'>) { - const hash = await baseMainnetBundlerClient.sendUserOperation({ - userOperation: userOp, - }) - return hash + log.info('Sending UserOperation', { userOp: JSON.stringify(userOp, null, 2) }) + try { + const hash = await baseMainnetBundlerClient.sendUserOperation({ + userOperation: userOp, + }) + log.info('UserOperation sent successfully', { hash }) + return hash + } catch (error) { + log.error('Error in sendUserOperation', { + error: error instanceof Error ? error.message : String(error), + userOp: JSON.stringify(userOp, null, 2), + }) + throw error + } } export async function waitForTransactionReceipt(hash: `0x${string}`) { diff --git a/packages/workflows/src/transfer-workflow/workflow.ts b/packages/workflows/src/transfer-workflow/workflow.ts index 340a210d6..e1b423434 100644 --- a/packages/workflows/src/transfer-workflow/workflow.ts +++ b/packages/workflows/src/transfer-workflow/workflow.ts @@ -1,6 +1,10 @@ import { proxyActivities, ApplicationFailure, defineQuery, setHandler } from '@temporalio/workflow' import type { createTransferActivities } from './activities' import type { UserOperation, GetUserOperationReceiptReturnType } from 'permissionless' +import debug from 'debug' +import superjson from 'superjson' + +const log = debug('workflows:transfer') const { simulateUserOpActivity, @@ -21,8 +25,8 @@ type indexing = { } type confirmed = { status: 'confirmed' - data: { receipt: GetUserOperationReceiptReturnType; userOp: UserOperation<'v0.7'> } -} + receipt: 
GetUserOperationReceiptReturnType | boolean +} & BaseState export type transferState = simulating | sending | waiting | indexing | confirmed @@ -30,17 +34,23 @@ export const getTransferStateQuery = defineQuery('getTransferStat export async function TransferWorkflow(userOp: UserOperation<'v0.7'>) { setHandler(getTransferStateQuery, () => ({ status: 'simulating', data: { userOp } })) + log('SendTransferWorkflow started with userOp:', JSON.stringify(parsedUserOp, null, 2)) await simulateUserOpActivity(userOp) + log('Simulation completed') setHandler(getTransferStateQuery, () => ({ status: 'sending', data: { userOp } })) + log('Sending UserOperation') const hash = await sendUserOpActivity(userOp) if (!hash) throw ApplicationFailure.nonRetryable('No hash returned from sendUserOperation') + log('UserOperation sent, hash:', hash) setHandler(getTransferStateQuery, () => ({ status: 'waiting', data: { userOp, hash } })) const receipt = await waitForTransactionReceiptActivity(hash) if (!receipt) throw ApplicationFailure.nonRetryable('No receipt returned from waitForTransactionReceipt') + log('Receipt received:', superjson.stringify(receipt)) setHandler(getTransferStateQuery, () => ({ status: 'indexing', data: { userOp, receipt } })) const transfer = await isTransferIndexedActivity(receipt.userOpHash) if (!transfer) throw ApplicationFailure.retryable('Transfer not yet indexed in db') + log('Transfer indexed:', superjson.stringify(transfer)) setHandler(getTransferStateQuery, () => ({ status: 'confirmed', data: { userOp, receipt } })) return transfer } diff --git a/packages/workflows/tsconfig.json b/packages/workflows/tsconfig.json index 8da7fcc8d..074ea6248 100644 --- a/packages/workflows/tsconfig.json +++ b/packages/workflows/tsconfig.json @@ -10,7 +10,8 @@ "app/*": ["../app/*"], "@my/wagmi": ["../wagmi/src"], "@my/wagmi/*": ["../wagmi/src/*"], - "@my/api/*": ["../api/src/*"] + "@my/api/*": ["../api/src/*"], + "@my/workflows": ["./packages/workflows/src/all-workflows.ts"] } }, 
"include": [ diff --git a/yarn.lock b/yarn.lock index 41208f9f9..471416289 100644 --- a/yarn.lock +++ b/yarn.lock @@ -31681,7 +31681,8 @@ __metadata: eslint: "npm:^8.46.0" lefthook: "npm:^1.5.5" node-gyp: "npm:^9.3.1" - turbo: "npm:^2.0.3" + snaplet: "npm:^0.42.1" + turbo: "npm:^2.1.2" typescript: "npm:^5.5.3" zx: "npm:^8.1.2" languageName: unknown @@ -34109,58 +34110,58 @@ __metadata: languageName: node linkType: hard -"turbo-darwin-64@npm:2.0.4": - version: 2.0.4 - resolution: "turbo-darwin-64@npm:2.0.4" +"turbo-darwin-64@npm:2.1.3": + version: 2.1.3 + resolution: "turbo-darwin-64@npm:2.1.3" conditions: os=darwin & cpu=x64 languageName: node linkType: hard -"turbo-darwin-arm64@npm:2.0.4": - version: 2.0.4 - resolution: "turbo-darwin-arm64@npm:2.0.4" +"turbo-darwin-arm64@npm:2.1.3": + version: 2.1.3 + resolution: "turbo-darwin-arm64@npm:2.1.3" conditions: os=darwin & cpu=arm64 languageName: node linkType: hard -"turbo-linux-64@npm:2.0.4": - version: 2.0.4 - resolution: "turbo-linux-64@npm:2.0.4" +"turbo-linux-64@npm:2.1.3": + version: 2.1.3 + resolution: "turbo-linux-64@npm:2.1.3" conditions: os=linux & cpu=x64 languageName: node linkType: hard -"turbo-linux-arm64@npm:2.0.4": - version: 2.0.4 - resolution: "turbo-linux-arm64@npm:2.0.4" +"turbo-linux-arm64@npm:2.1.3": + version: 2.1.3 + resolution: "turbo-linux-arm64@npm:2.1.3" conditions: os=linux & cpu=arm64 languageName: node linkType: hard -"turbo-windows-64@npm:2.0.4": - version: 2.0.4 - resolution: "turbo-windows-64@npm:2.0.4" +"turbo-windows-64@npm:2.1.3": + version: 2.1.3 + resolution: "turbo-windows-64@npm:2.1.3" conditions: os=win32 & cpu=x64 languageName: node linkType: hard -"turbo-windows-arm64@npm:2.0.4": - version: 2.0.4 - resolution: "turbo-windows-arm64@npm:2.0.4" +"turbo-windows-arm64@npm:2.1.3": + version: 2.1.3 + resolution: "turbo-windows-arm64@npm:2.1.3" conditions: os=win32 & cpu=arm64 languageName: node linkType: hard -"turbo@npm:^2.0.3": - version: 2.0.4 - resolution: "turbo@npm:2.0.4" - 
dependencies: - turbo-darwin-64: "npm:2.0.4" - turbo-darwin-arm64: "npm:2.0.4" - turbo-linux-64: "npm:2.0.4" - turbo-linux-arm64: "npm:2.0.4" - turbo-windows-64: "npm:2.0.4" - turbo-windows-arm64: "npm:2.0.4" +"turbo@npm:^2.1.2": + version: 2.1.3 + resolution: "turbo@npm:2.1.3" + dependencies: + turbo-darwin-64: "npm:2.1.3" + turbo-darwin-arm64: "npm:2.1.3" + turbo-linux-64: "npm:2.1.3" + turbo-linux-arm64: "npm:2.1.3" + turbo-windows-64: "npm:2.1.3" + turbo-windows-arm64: "npm:2.1.3" dependenciesMeta: turbo-darwin-64: optional: true @@ -34176,7 +34177,7 @@ __metadata: optional: true bin: turbo: bin/turbo - checksum: 10/689b54d58c04ef04c81ade5f91edbab0805ec270d55f8d878f6958024e216ec06a82bea3246e117d631f408e3c2b5dba3e5d58df0fba80470c231cfd5d698793 + checksum: 10/b8e90a38f47dc5c07e5f1c0bd708f9dc6b00b744847a45c06e5de5a5379a32bb155e8ad994eb03e60f697afc87f0815dd02fc680e22c0fad83d65c0a1fb6fc96 languageName: node linkType: hard From 10a0edbd3111d9dae0661f8e5e1922eccbb29d60 Mon Sep 17 00:00:00 2001 From: Beezy Date: Tue, 1 Oct 2024 03:13:39 +0000 Subject: [PATCH 10/58] remove debug env --- apps/next/package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/apps/next/package.json b/apps/next/package.json index f8cd50bd3..65c27f771 100644 --- a/apps/next/package.json +++ b/apps/next/package.json @@ -11,7 +11,7 @@ "serve": "NODE_ENV=production yarn with-env next start --port 8151", "lint": "next lint", "lint:fix": "next lint --fix", - "with-env": "TAMAGUI_TARGET=web dotenv -e ../../.env.localtemp -c --" + "with-env": "TAMAGUI_TARGET=web dotenv -e ../../.env -c --" }, "dependencies": { "@my/api": "workspace:*", From 3acabfa65cca59e1ae4dfd70647d81572d4b7546 Mon Sep 17 00:00:00 2001 From: youngkidwarrior Date: Wed, 11 Sep 2024 16:04:47 -0700 Subject: [PATCH 11/58] Show pending transfers from temporal in UI --- .gitignore | 2 + CONTRIBUTING.md | 6 +- apps/workers/package.json | 6 +- apps/workers/src/client.ts | 4 +- apps/workers/src/worker.ts | 25 +- 
apps/workers/tsconfig.json | 10 +- biome.json | 4 +- environment.d.ts | 7 + packages/api/package.json | 2 +- packages/api/src/routers/transfer.ts | 6 +- packages/api/tsconfig.json | 15 +- .../app/features/home/TokenActivityRow.tsx | 94 +- .../__snapshots__/TokenDetails.test.tsx.snap | 2528 +++++++++++------ packages/app/features/home/screen.tsx | 2 +- .../utils/__mocks__/useTokenActivityFeed.ts | 21 +- .../home/utils/usePendingTransfers.ts | 22 + .../home/utils/useTokenActivityFeed.ts | 65 +- packages/app/features/send/confirm/screen.tsx | 66 +- packages/app/package.json | 2 +- packages/app/tsconfig.json | 10 +- packages/app/utils/decodeTransferUserOp.ts | 23 + packages/app/utils/signUserOp.ts | 2 +- packages/playwright/tsconfig.json | 10 +- packages/temporal/.gitignore | 6 + packages/temporal/build/payload-converter.cjs | 882 ++++++ packages/temporal/package.json | 6 +- packages/temporal/src/client.ts | 6 +- packages/temporal/tsconfig.json | 13 + packages/workflows/.gitignore | 2 + packages/workflows/README.md | 16 + packages/workflows/package.json | 13 +- .../src/distribution-workflow/activities.ts | 546 ++-- .../src/scripts/build-workflow-bundle.ts | 21 + .../src/transfer-workflow/activities.ts | 100 +- .../src/transfer-workflow/supabase.ts | 11 +- .../workflows/src/transfer-workflow/wagmi.ts | 8 +- .../src/transfer-workflow/workflow.ts | 32 +- packages/workflows/src/utils/bootstrap.ts | 30 + packages/workflows/src/utils/index.ts | 1 + packages/workflows/tsconfig.json | 11 +- tilt/apps.Tiltfile | 1 + tilt/deps.Tiltfile | 10 + tsconfig.base.json | 1 + tsconfig.json | 2 + yarn.lock | 332 ++- 45 files changed, 3691 insertions(+), 1291 deletions(-) create mode 100644 packages/app/features/home/utils/usePendingTransfers.ts create mode 100644 packages/app/utils/decodeTransferUserOp.ts create mode 100644 packages/temporal/.gitignore create mode 100644 packages/temporal/build/payload-converter.cjs create mode 100644 packages/temporal/tsconfig.json create mode 100644 
packages/workflows/src/scripts/build-workflow-bundle.ts create mode 100644 packages/workflows/src/utils/bootstrap.ts create mode 100644 packages/workflows/src/utils/index.ts diff --git a/.gitignore b/.gitignore index d215fe6d4..710033e73 100644 --- a/.gitignore +++ b/.gitignore @@ -65,3 +65,5 @@ Brewfile.lock.json # asdf .tool-versions + +var/** \ No newline at end of file diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 91a65df6e..127c4f2d5 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -36,9 +36,13 @@ Here is a quick peek at the send stack. Quickly jump to any of the submodules by │   ├── daimo-expo-passkeys │   ├── eslint-config-customs │   ├── playwright +| ├── shovel +│   ├── snaplet +| ├── temporal │   ├── ui │   ├── wagmi -│   └── webauthn-authenticator +| ├── webauthn-authenticator +│   └── workflows └── supabase diff --git a/apps/workers/package.json b/apps/workers/package.json index 069f7b071..fce90a4b1 100644 --- a/apps/workers/package.json +++ b/apps/workers/package.json @@ -8,11 +8,13 @@ ], "scripts": { "lint": "tsc", - "start": "node --import 'data:text/javascript,import { register } from \"node:module\"; import { pathToFileURL } from \"node:url\"; register(\"ts-node/esm\", pathToFileURL(\"./\"));' src/worker.ts", - "workflow": "node --import 'data:text/javascript,import { register } from \"node:module\"; import { pathToFileURL } from \"node:url\"; register(\"ts-node/esm\", pathToFileURL(\"./\"));' src/client.ts" + "start": "yarn with-env node --import 'data:text/javascript,import { register } from \"node:module\"; import { pathToFileURL } from \"node:url\"; register(\"ts-node/esm\", pathToFileURL(\"./\"));' src/worker.ts", + "workflow": "yarn with-env node --import 'data:text/javascript,import { register } from \"node:module\"; import { pathToFileURL } from \"node:url\"; register(\"ts-node/esm\", pathToFileURL(\"./\"));' src/client.ts", + "with-env": "dotenv -e ../../.env -c --" }, "devDependencies": { "@types/bun": "^1.1.6", + 
"dotenv-cli": "^7.3.0", "ts-node": "^10.9.2", "typescript": "^5.5.3" }, diff --git a/apps/workers/src/client.ts b/apps/workers/src/client.ts index 6fe55a7ab..7b7be2fed 100644 --- a/apps/workers/src/client.ts +++ b/apps/workers/src/client.ts @@ -1,5 +1,5 @@ import { Connection, Client } from '@temporalio/client' -import { TransferWorkflow } from '@my/workflows/workflows' +import { TransferWorkflow } from '@my/workflows/all-workflows' import type { UserOperation } from 'permissionless' // async function runDistributionWorkflow() { @@ -31,7 +31,7 @@ export async function runTransferWorkflow(userOp: UserOperation<'v0.7'>) { connection, }) - const handle = await client.workflow.start(SendTransferWorkflow, { + const handle = await client.workflow.start(TransferWorkflow, { taskQueue: 'monorepo', workflowId: `transfers-workflow-${userOp.sender}-${userOp.nonce.toString()}`, // TODO: remember to replace this with a meaningful business ID args: [userOp], diff --git a/apps/workers/src/worker.ts b/apps/workers/src/worker.ts index 151de0bbc..6a9fa3351 100644 --- a/apps/workers/src/worker.ts +++ b/apps/workers/src/worker.ts @@ -1,16 +1,21 @@ -import { Worker, NativeConnection } from '@temporalio/worker' -import { - createTransferActivities, - createDistributionActivities, -} from '@my/workflows/all-activities' +import { Worker, NativeConnection, bundleWorkflowCode } from '@temporalio/worker' +import { createTransferActivities } from '@my/workflows/all-activities' import fs from 'node:fs/promises' import { createRequire } from 'node:module' -import { dataConverter } from '@my/temporal/payload-converter' const require = createRequire(import.meta.url) const { NODE_ENV = 'development' } = process.env const isDeployed = ['production', 'staging'].includes(NODE_ENV) +const workflowOption = () => + isDeployed + ? 
{ + workflowBundle: { + codePath: require.resolve('@my/workflows/workflow-bundle'), + }, + } + : { workflowsPath: require.resolve('@my/workflows/all-workflows') } + async function run() { const connection = isDeployed ? await NativeConnection.connect({ @@ -32,12 +37,14 @@ async function run() { const worker = await Worker.create({ connection, - dataConverter: dataConverter, - workflowsPath: require.resolve('@my/workflows'), + dataConverter: { + payloadConverterPath: require.resolve('@my/temporal/payload-converter'), + }, + ...workflowOption(), activities: { ...createTransferActivities(process.env), }, - namespace: 'default', + namespace: process.env.TEMPORAL_NAMESPACE ?? 'default', taskQueue: 'monorepo', bundlerOptions: { ignoreModules: ['@supabase/supabase-js'], diff --git a/apps/workers/tsconfig.json b/apps/workers/tsconfig.json index 6ad8297a0..ad6707fd2 100644 --- a/apps/workers/tsconfig.json +++ b/apps/workers/tsconfig.json @@ -12,16 +12,18 @@ "composite": true, "baseUrl": ".", "paths": { - "@my/workflows": ["../../packages/workflows/src/*"], - "@my/workflows/*": ["../../packages/workflows/src/*"] + "@my/workflows": ["../../packages/workflows/src/all-workflows.ts"], + "@my/workflows/*": ["../../packages/workflows/src/*"], + "@my/temporal": ["../../packages/temporal/src"], + "@my/temporal/*": ["../../packages/temporal/src/*"] } }, "references": [], "include": [ "./src", "../../packages/workflows/src", + "../../packages/temporal/src", "../../globals.d.ts", - "../../environment.d.ts", - "../../packages/temporal/src" + "../../environment.d.ts" ] } diff --git a/biome.json b/biome.json index 1cd9d848e..cc7f7359d 100644 --- a/biome.json +++ b/biome.json @@ -23,7 +23,9 @@ "./packages/app/components/img/**", "packages/shovel/etc/config.json", "./supabase/.temp/**", - "./packages/contracts/var/*.json" + "./packages/contracts/var/*.json", + "**/tsconfig.json", + "**/*.tsconfig.json" ] }, "organizeImports": { diff --git a/environment.d.ts b/environment.d.ts index 
198b896e9..81c708568 100644 --- a/environment.d.ts +++ b/environment.d.ts @@ -16,6 +16,13 @@ declare global { NEXT_PUBLIC_SUPABASE_PROJECT_ID: string NEXT_PUBLIC_SUPABASE_GRAPHQL_URL: string NEXT_PUBLIC_MAINNET_RPC_URL: string + /** + * The URL of the ERC 4337 Account Abstraction Bundler RPC endpoint + */ + BUNDLER_RPC_URL: string + /** + * The URL of the ERC 4337 Account Abstraction Bundler RPC endpoint + */ NEXT_PUBLIC_BASE_RPC_URL: string NEXT_PUBLIC_BUNDLER_RPC_URL: string SUPABASE_DB_URL: string diff --git a/packages/api/package.json b/packages/api/package.json index 48699556b..b0a87f8fe 100644 --- a/packages/api/package.json +++ b/packages/api/package.json @@ -21,7 +21,7 @@ "ms": "^2.1.3", "p-queue": "^8.0.1", "permissionless": "^0.1.14", - "superjson": "^1.13.1", + "superjson": "^2.2.1", "viem": "^2.19.0", "zod": "^3.23.8" }, diff --git a/packages/api/src/routers/transfer.ts b/packages/api/src/routers/transfer.ts index be70743cc..5b3b79230 100644 --- a/packages/api/src/routers/transfer.ts +++ b/packages/api/src/routers/transfer.ts @@ -4,8 +4,7 @@ import { z } from 'zod' import { createTRPCRouter, protectedProcedure } from '../trpc' import { getTemporalClient } from '@my/temporal/client' import type { UserOperation } from 'permissionless' - -import { TransferWorkflow, type transferState } from '@my/workflows' +import { TransferWorkflow, type transferState } from '@my/workflows/all-workflows' import type { allCoins } from 'app/data/coins' const log = debug('api:transfer') @@ -24,7 +23,7 @@ export const transferRouter = createTRPCRouter({ const client = await getTemporalClient() const handle = await client.workflow.start(TransferWorkflow, { taskQueue: 'monorepo', - workflowId: `send-transfer-workflow-${token}-${sender}-${nonce}`, + workflowId: `transfer-workflow-${token}-${sender}-${nonce}`, args: [userOp], }) log(`Workflow Created: ${handle.workflowId}`) @@ -65,6 +64,7 @@ export const transferRouter = createTRPCRouter({ }) for await (const workflow of 
workflows) { const handle = client.workflow.getHandle(workflow.workflowId) + console.log('handle: ', handle) const state = await handle.query('getTransferState') states.push(state) diff --git a/packages/api/tsconfig.json b/packages/api/tsconfig.json index d2e8cdc17..6b58d1e2d 100644 --- a/packages/api/tsconfig.json +++ b/packages/api/tsconfig.json @@ -1,6 +1,14 @@ { "extends": "../../tsconfig.base", - "include": ["src", "../app", "../wagmi/src", "../ui/src", "../../supabase"], + "include": [ + "src", + "../app", + "../wagmi/src", + "../ui/src", + "../../supabase", + "../workflows/src", + "../temporal/src" + ], "compilerOptions": { "noEmit": true, "composite": true, @@ -10,7 +18,10 @@ "@my/wagmi": ["../wagmi/src"], "@my/wagmi/*": ["../wagmi/src/*"], "@my/supabase/*": ["../../supabase/*"], - "@my/workflows": ["./packages/workflows/src/all-workflows.ts"] + "@my/workflows": ["../workflows/src/all-workflows.ts"], + "@my/workflows/*": ["../workflows/src/*"], + "@my/temporal": ["../temporal/src"], + "@my/temporal/*": ["../temporal/src/*"] } }, "references": [] diff --git a/packages/app/features/home/TokenActivityRow.tsx b/packages/app/features/home/TokenActivityRow.tsx index 2be625064..bf229715c 100644 --- a/packages/app/features/home/TokenActivityRow.tsx +++ b/packages/app/features/home/TokenActivityRow.tsx @@ -1,4 +1,4 @@ -import { Paragraph, Text, XStack, YStack } from '@my/ui' +import { Avatar, LinkableAvatar, Spinner, Paragraph, Text, XStack, YStack, Stack } from '@my/ui' import { amountFromActivity, eventNameFromActivity, subtextFromActivity } from 'app/utils/activity' import { isSendAccountReceiveEvent, @@ -8,9 +8,14 @@ import { import { ActivityAvatar } from '../activity/ActivityAvatar' import { CommentsTime } from 'app/utils/dateHelper' import { Link } from 'solito/link' - +import type { CoinWithBalance } from 'app/data/coins' import { useUser } from 'app/utils/useUser' import { useHoverStyles } from 'app/utils/useHoverStyles' +import type { transferState } from 
'@my/workflows' +import { sendAccountAbi, erc20Abi } from '@my/wagmi' +import { decodeFunctionData, formatUnits } from 'viem' +import { useProfileLookup } from 'app/utils/useProfileLookup' +import formatAmount from 'app/utils/formatAmount' export function TokenActivityRow({ activity, @@ -109,3 +114,88 @@ export function TokenActivityRow({ ) } + +export function PendingTransferActivityRow({ + coin, + state, +}: { coin: CoinWithBalance; state: transferState }) { + const { userOp } = state + const { args } = decodeFunctionData({ abi: sendAccountAbi, data: userOp.callData }) + + const decodedTokenTransfer = + args?.[0]?.[0].data !== '0x' + ? decodeFunctionData({ abi: erc20Abi, data: args?.[0]?.[0].data }) + : undefined + + const amount = decodedTokenTransfer + ? formatUnits(decodedTokenTransfer.args[1] as bigint, coin.decimals) + : formatAmount(formatUnits(args?.[0]?.[0].value, 18), 5, 5) + + const to = decodedTokenTransfer ? decodedTokenTransfer.args[0] : args?.[0]?.[0].dest + + const { data: profile } = useProfileLookup('address', to) + + return ( + + + + + + + + + + + + + + Sending... + + + {`${amount} ${coin.symbol}`} + + + + + {profile?.name ?? profile?.tag ?? profile?.sendid} + + + + + + + ) +} diff --git a/packages/app/features/home/__snapshots__/TokenDetails.test.tsx.snap b/packages/app/features/home/__snapshots__/TokenDetails.test.tsx.snap index 4e6fc9b4a..b4cb0f2ff 100644 --- a/packages/app/features/home/__snapshots__/TokenDetails.test.tsx.snap +++ b/packages/app/features/home/__snapshots__/TokenDetails.test.tsx.snap @@ -238,36 +238,45 @@ exports[`TokenDetails renders correctly 1`] = ` } } > - - + - Activity - + } + testID="TokenDetailsHistory" + > -
-
-
+ + + - - - - - - - - - - - - - - - Withdraw - - - 10 USDC - - - - - 0x93F...761a - - - 7 mon ago - - + /> -
-
+ - - - - - - - - - - - - - - + - - - Deposit - - - 20 USDC - - - - - 0xa71...0000 - - - 7 mon ago - - - - + } + suppressHighlighting={true} + > + 10 USDC + -
-
- - + - - - - - - - - - - - - - - - + } + suppressHighlighting={true} + > + 7 mon ago + + + + + + + + + + + + + + + - - - Received - - - 30 USDC - - - - - - - /alice - - - - - 7 mon ago - - + /> -
-
-
-
-
+
+ + + + Deposit + + + 20 USDC + + + + + 0xa71...0000 + + + 7 mon ago + + + + + + + + + + + + + + + + + + + + + + + + + Received + + + 30 USDC + + + + + + /alice + + + + 7 mon ago + + + + + + + + + "backgroundColor": "#111f22", + "borderBottomLeftRadius": 16, + "borderBottomRightRadius": 16, + "borderTopLeftRadius": 16, + "borderTopRightRadius": 16, + "flexDirection": "column", + "paddingBottom": 7, + "paddingLeft": 7, + "paddingRight": 7, + "paddingTop": 7, + "position": "relative", + } + } + testID="TokenDetailsHistory" + > + + + + + + + + + + + + + + + + + + + Withdraw + + + 10 USDC + + + + + 0x93F...761a + + + 7 mon ago + + + + + + + + + + + + + + + + + + + + + + + + + Deposit + + + 20 USDC + + + + + 0xa71...0000 + + + 7 mon ago + + + + + + + + + + + + + + + + + + + + + + + + + + + Received + + + 30 USDC + + + + + + + /alice + + + + + 7 mon ago + + + + + + + diff --git a/packages/app/features/home/screen.tsx b/packages/app/features/home/screen.tsx index 9c588a0b2..3f9394fff 100644 --- a/packages/app/features/home/screen.tsx +++ b/packages/app/features/home/screen.tsx @@ -137,7 +137,7 @@ export function HomeScreen() { No send account found ) - case search !== undefined: + case search !== undefined: //@todo remove this return default: return ( diff --git a/packages/app/features/home/utils/__mocks__/useTokenActivityFeed.ts b/packages/app/features/home/utils/__mocks__/useTokenActivityFeed.ts index 59f2b667f..a0d33a661 100644 --- a/packages/app/features/home/utils/__mocks__/useTokenActivityFeed.ts +++ b/packages/app/features/home/utils/__mocks__/useTokenActivityFeed.ts @@ -1,5 +1,6 @@ import { SendAccountTransfersEventSchema } from 'app/utils/zod/activity' import { mockUsdcTransfers } from './mock-usdc-transfers' +import { hexToBytea } from 'app/utils/hexToBytea' const tokenTransfersByLogAddr = { '\\x833589fcd6edb6e08f4c7c32d4f71b54bda02913': mockUsdcTransfers.map((t) => @@ -7,15 +8,23 @@ const tokenTransfersByLogAddr = { ), } -const mockUseTokenActivityFeed = 
jest.fn(({ address }) => { - const pages = tokenTransfersByLogAddr[address] +const mockUseTokenActivityFeed = jest.fn(({ token }) => { + const logAddress = hexToBytea(token) + const pages = tokenTransfersByLogAddr[logAddress] if (!pages) throw new Error('No pages found') return { - data: { - pages: [tokenTransfersByLogAddr[address]], + pendingTransfers: { + data: [], //@todo maybe writes some mock data for temporal? + isLoading: false, + error: null, + }, + activityFeed: { + data: { + pages: [tokenTransfersByLogAddr[logAddress]], + }, + isLoading: false, + error: null, }, - isLoading: false, - error: null, } }) export const useTokenActivityFeed = mockUseTokenActivityFeed diff --git a/packages/app/features/home/utils/usePendingTransfers.ts b/packages/app/features/home/utils/usePendingTransfers.ts new file mode 100644 index 000000000..09a21d4c7 --- /dev/null +++ b/packages/app/features/home/utils/usePendingTransfers.ts @@ -0,0 +1,22 @@ +import type { Address } from 'viem' +import type { allCoins } from 'app/data/coins' +import { api } from 'app/utils/api' + +/** + * Fetch Pending transfers by token and send account address + */ +export function usePendingTransfers(params: { + address: Address + token: allCoins[number]['token'] + refetchInterval?: number + enabled?: boolean +}) { + const { address, token, refetchInterval, enabled } = params + return api.transfer.getPending.useQuery( + { token, sender: address }, + { + refetchInterval, + enabled, + } + ) +} diff --git a/packages/app/features/home/utils/useTokenActivityFeed.ts b/packages/app/features/home/utils/useTokenActivityFeed.ts index 87eeb10a7..33b7d385c 100644 --- a/packages/app/features/home/utils/useTokenActivityFeed.ts +++ b/packages/app/features/home/utils/useTokenActivityFeed.ts @@ -1,20 +1,18 @@ -import type { PgBytea } from '@my/supabase/database.types' import { sendTokenV0LockboxAddress, tokenPaymasterAddress } from '@my/wagmi' -import type { PostgrestError } from '@supabase/postgrest-js' -import { - 
useInfiniteQuery, - type InfiniteData, - type UseInfiniteQueryResult, -} from '@tanstack/react-query' +import { useInfiniteQuery } from '@tanstack/react-query' import { pgAddrCondValues } from 'app/utils/pgAddrCondValues' import { squish } from 'app/utils/strings' import { useSupabase } from 'app/utils/supabase/useSupabase' import { throwIf } from 'app/utils/throwIf' import { EventArraySchema, Events, type Activity } from 'app/utils/zod/activity' -import type { ZodError } from 'zod' +import { usePendingTransfers } from './usePendingTransfers' +import type { Address } from 'viem' +import type { allCoins } from 'app/data/coins' /** - * Infinite query to fetch ERC-20 token activity feed. + * Returns two hooks + * 1. useTokenActivityFeed - Infinite query to fetch ERC-20 token activity feed. + * 2. usePendingTransfers - Returns a list from temporal of pending transfers for the given address and token * * @note does not support ETH transfers. Need to add another shovel integration to handle ETH receives, and another one for ETH sends * @@ -22,11 +20,12 @@ import type { ZodError } from 'zod' */ export function useTokenActivityFeed(params: { pageSize?: number - address?: PgBytea + address: Address + token: allCoins[number]['token'] refetchInterval?: number enabled?: boolean -}): UseInfiniteQueryResult, PostgrestError | ZodError> { - const { pageSize = 10, address, refetchInterval = 30_000, enabled = true } = params +}) { + const { pageSize = 10, token, address, refetchInterval = 30_000, enabled = true } = params const supabase = useSupabase() async function fetchTokenActivityFeed({ pageParam }: { pageParam: number }): Promise { @@ -66,21 +65,29 @@ export function useTokenActivityFeed(params: { return EventArraySchema.parse(data) } - return useInfiniteQuery({ - queryKey: ['token_activity_feed', address], - initialPageParam: 0, - getNextPageParam: (lastPage, _allPages, lastPageParam) => { - if (lastPage !== null && lastPage.length < pageSize) return undefined - return 
lastPageParam + 1 - }, - getPreviousPageParam: (_firstPage, _allPages, firstPageParam) => { - if (firstPageParam <= 1) { - return undefined - } - return firstPageParam - 1 - }, - queryFn: fetchTokenActivityFeed, - refetchInterval, - enabled, - }) + return { + pendingTransfers: usePendingTransfers({ + address: address, + token, + refetchInterval, + enabled, + }), + activityFeed: useInfiniteQuery({ + queryKey: ['token_activity_feed', token], + initialPageParam: 0, + getNextPageParam: (lastPage, _allPages, lastPageParam) => { + if (lastPage !== null && lastPage.length < pageSize) return undefined + return lastPageParam + 1 + }, + getPreviousPageParam: (_firstPage, _allPages, firstPageParam) => { + if (firstPageParam <= 1) { + return undefined + } + return firstPageParam - 1 + }, + queryFn: fetchTokenActivityFeed, + refetchInterval, + enabled, + }), + } } diff --git a/packages/app/features/send/confirm/screen.tsx b/packages/app/features/send/confirm/screen.tsx index b35db9063..abf2785d3 100644 --- a/packages/app/features/send/confirm/screen.tsx +++ b/packages/app/features/send/confirm/screen.tsx @@ -30,7 +30,7 @@ import { useGenerateTransferUserOp } from 'app/utils/useUserOpTransferMutation' import { useAccountNonce } from 'app/utils/userop' import { useEffect, useRef, useState } from 'react' import { useRouter } from 'solito/router' -import { formatUnits, isAddress } from 'viem' +import { formatUnits, isAddress, zeroAddress } from 'viem' import { useEstimateFeesPerGas } from 'wagmi' import { useCoin } from 'app/provider/coins' import { useCoinFromSendTokenParam } from 'app/utils/useCoinFromTokenParam' @@ -41,10 +41,10 @@ import { useTokenPrices } from 'app/utils/useTokenPrices' const log = debug('app:features:send:confirm:screen') import { api } from 'app/utils/api' -import { TRPCClientError } from '@trpc/client' import { getUserOperationHash } from 'permissionless' import { signUserOp } from 'app/utils/signUserOp' import { byteaToBase64 } from 
'app/utils/byteaToBase64' +import { usePendingTransfers } from 'app/features/home/utils/usePendingTransfers' export function SendConfirmScreen() { const [queryParams] = useSendScreenParams() @@ -73,17 +73,27 @@ export function SendConfirmScreen() { export function SendConfirm() { const submitButtonRef = useRef(null) + const router = useRouter() const [queryParams] = useSendScreenParams() const { sendToken, recipient, idType, amount } = queryParams - const { - mutateAsync: transfer, - isPending: isTransferPending, - isError: isTransferError, - } = api.transfer.withUserOp.useMutation() - - const queryClient = useQueryClient() const { data: sendAccount, isLoading: isSendAccountLoading } = useSendAccount() const { coin: selectedCoin, tokensQuery, ethQuery } = useCoinFromSendTokenParam() + + const { mutateAsync: transfer } = api.transfer.withUserOp.useMutation() + const { data: pendingTransfers, isLoading: isPendingTransfersLoading } = usePendingTransfers({ + address: sendAccount?.address ?? zeroAddress, + token: sendToken, + }) + + const [workflowId, setWorkflowId] = useState() + + useEffect(() => { + if (workflowId) { + router.replace({ pathname: '/', query: { token: sendToken } }) + } + }, [workflowId, router, sendToken]) + + const queryClient = useQueryClient() const isUSDCSelected = selectedCoin?.label === 'USDC' const { coin: usdc } = useCoin('USDC') const { data: prices, isLoading: isPricesLoading } = useTokenPrices() @@ -100,8 +110,6 @@ export function SendConfirm() { .filter((c) => !!c.webauthn_credentials) .map((c) => c.webauthn_credentials as NonNullable) ?? [] - const router = useRouter() - const { data: nonce, error: nonceError, @@ -115,7 +123,8 @@ export function SendConfirm() { to: profile?.address ?? recipient, token: sendToken === 'eth' ? undefined : sendToken, amount: BigInt(queryParams.amount ?? '0'), - nonce: nonce, + nonce: + nonce && pendingTransfers !== undefined ? 
nonce + BigInt(pendingTransfers.length) : nonce, }) const { @@ -211,18 +220,8 @@ export function SendConfirm() { }) userOp.signature = signature - const { data: workflowId, error } = await transfer({ - token: selectedCoin.token, - userOp, - }).catch((e) => { - console.error("Couldn't send the userOp", e) - if (e instanceof TRPCClientError) { - return { data: undefined, error: { message: e.message } } - } - return { data: undefined, error: { message: e.message } } - }) - console.log('workflowId', workflowId) - console.log('error', error) + const workflowId = await transfer({ userOp, token: sendToken }) + setWorkflowId(workflowId) if (selectedCoin?.token === 'eth') { await ethQuery.refetch() } else { @@ -241,7 +240,13 @@ export function SendConfirm() { } }, []) - if (isSendAccountLoading || nonceIsLoading || isProfileLoading) + useEffect(() => { + if (submitButtonRef.current) { + submitButtonRef.current.focus() + } + }, []) + + if (nonceIsLoading || isProfileLoading || isSendAccountLoading || isPendingTransfersLoading) return return ( @@ -383,7 +388,7 @@ export function SendConfirm() { onPress={onSubmit} br={'$4'} disabledStyle={{ opacity: 0.7, cursor: 'not-allowed', pointerEvents: 'none' }} - disabled={!canSubmit || isTransferPending} + disabled={!canSubmit || !!workflowId} gap={4} py={'$5'} width={'100%'} @@ -396,15 +401,6 @@ export function SendConfirm() { ) - case isTransferPending && !isTransferError: - return ( - <> - - - - Sending... 
- - ) case !hasEnoughBalance: return Insufficient Balance case !hasEnoughGas: diff --git a/packages/app/package.json b/packages/app/package.json index 15b332eb3..8206dd9a5 100644 --- a/packages/app/package.json +++ b/packages/app/package.json @@ -60,7 +60,7 @@ "react-native-svg": "15.2.0", "react-use-precision-timer": "^3.5.5", "solito": "^4.0.1", - "superjson": "^1.13.1", + "superjson": "^2.2.1", "viem": "^2.19.0", "wagmi": "^2.13.1", "zod": "^3.23.8" diff --git a/packages/app/tsconfig.json b/packages/app/tsconfig.json index f00c7bf5c..c7bc498e5 100644 --- a/packages/app/tsconfig.json +++ b/packages/app/tsconfig.json @@ -21,7 +21,9 @@ "./.eslintrc.cjs", "../../supabase", "../../globals.d.ts", - "../../environment.d.ts" + "../../environment.d.ts", + "../temporal/src", + "../workflows/src" ], "exclude": ["coverage"], "compilerOptions": { @@ -40,7 +42,11 @@ "@my/contracts/out/*": ["../contracts/out/*"], "@my/supabase": ["../../supabase"], "@my/supabase/*": ["../../supabase/*"], - "@daimo/expo-passkeys/*": ["../daimo-expo-passkeys/src/*"] + "@daimo/expo-passkeys/*": ["../daimo-expo-passkeys/src/*"], + "@my/temporal": ["../temporal/src"], + "@my/temporal/*": ["../temporal/src/*"], + "@my/workflows": ["../workflows/src/all-workflows.ts"], + "@my/workflows/*": ["../workflows/src/*"] } }, "references": [] diff --git a/packages/app/utils/decodeTransferUserOp.ts b/packages/app/utils/decodeTransferUserOp.ts new file mode 100644 index 000000000..2856c654e --- /dev/null +++ b/packages/app/utils/decodeTransferUserOp.ts @@ -0,0 +1,23 @@ +import { decodeFunctionData } from 'viem' +import { sendAccountAbi, erc20Abi } from '@my/wagmi' +import type { UserOperation } from 'permissionless' +import type { coinsDict } from 'app/data/coins' + +export function decodeTransferUserOp({ userOp }: { userOp: UserOperation<'v0.7'> }) { + const { args } = decodeFunctionData({ abi: sendAccountAbi, data: userOp.callData }) + + const decodedTokenTransfer = + args?.[0]?.[0].data !== '0x' + ? 
decodeFunctionData({ abi: erc20Abi, data: args?.[0]?.[0].data }) + : undefined + + const amount = ( + decodedTokenTransfer ? decodedTokenTransfer.args[1] : args?.[0]?.[0].value + ) as bigint + + const to = ( + decodedTokenTransfer ? decodedTokenTransfer.args[0] : args?.[0]?.[0].dest + ) as `0x${string}` + const token = (decodedTokenTransfer ? args?.[0]?.[0].dest : 'eth') as keyof coinsDict + return { from: userOp.sender, to, token, amount } +} diff --git a/packages/app/utils/signUserOp.ts b/packages/app/utils/signUserOp.ts index 0db3b437c..9476d2a37 100644 --- a/packages/app/utils/signUserOp.ts +++ b/packages/app/utils/signUserOp.ts @@ -52,7 +52,7 @@ export async function signUserOpHash({ allowedCredentials?: { id: string; userHandle: string }[] }) { version = version ?? USEROP_VERSION - validUntil = validUntil ?? Math.floor((Date.now() + 1000 * 120) / 1000) // default 120 seconds (2 minutes) + validUntil = validUntil ?? Math.floor((Date.now() + 1000 * 35) / 1000) // default 35 seconds) allowedCredentials = allowedCredentials ?? 
[] assert(version === USEROP_VERSION, 'version must be 1') assert(typeof validUntil === 'number', 'validUntil must be a number') diff --git a/packages/playwright/tsconfig.json b/packages/playwright/tsconfig.json index 663daa184..ee2e72832 100644 --- a/packages/playwright/tsconfig.json +++ b/packages/playwright/tsconfig.json @@ -8,7 +8,9 @@ "../wagmi", "../contracts/out", "../webauthn-authenticator", - "../../supabase" + "../../supabase", + "../temporal/src", + "../workflows/src" ], "compilerOptions": { "noEmit": true, @@ -28,7 +30,11 @@ "@0xsend/webauthn-authenticator": ["../webauthn-authenticator/src"], "@0xsend/webauthn-authenticator/*": ["../webauthn-authenticator/src/*"], "@my/supabase": ["../../supabase"], - "@my/supabase/*": ["../../supabase/*"] + "@my/supabase/*": ["../../supabase/*"], + "@my/temporal": ["../temporal/src"], + "@my/temporal/*": ["../temporal/src/*"], + "@my/workflows": ["../workflows/src"], + "@my/workflows/*": ["../workflows/src/*"] } }, "references": [] diff --git a/packages/temporal/.gitignore b/packages/temporal/.gitignore new file mode 100644 index 000000000..c88144480 --- /dev/null +++ b/packages/temporal/.gitignore @@ -0,0 +1,6 @@ +# Finder (MacOS) folder config +.DS_Store + +/build + +var/** \ No newline at end of file diff --git a/packages/temporal/build/payload-converter.cjs b/packages/temporal/build/payload-converter.cjs new file mode 100644 index 000000000..f8bad8dfe --- /dev/null +++ b/packages/temporal/build/payload-converter.cjs @@ -0,0 +1,882 @@ +"use strict"; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of 
__getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/payload-converter.ts +var payload_converter_exports = {}; +__export(payload_converter_exports, { + payloadConverter: () => payloadConverter +}); +module.exports = __toCommonJS(payload_converter_exports); +var import_common2 = require("@temporalio/common"); + +// src/superjson-payload-converter.ts +var import_common = require("@temporalio/common"); + +// ../../node_modules/superjson/dist/double-indexed-kv.js +var DoubleIndexedKV = class { + constructor() { + this.keyToValue = /* @__PURE__ */ new Map(); + this.valueToKey = /* @__PURE__ */ new Map(); + } + set(key, value) { + this.keyToValue.set(key, value); + this.valueToKey.set(value, key); + } + getByKey(key) { + return this.keyToValue.get(key); + } + getByValue(value) { + return this.valueToKey.get(value); + } + clear() { + this.keyToValue.clear(); + this.valueToKey.clear(); + } +}; + +// ../../node_modules/superjson/dist/registry.js +var Registry = class { + constructor(generateIdentifier) { + this.generateIdentifier = generateIdentifier; + this.kv = new DoubleIndexedKV(); + } + register(value, identifier) { + if (this.kv.getByValue(value)) { + return; + } + if (!identifier) { + identifier = this.generateIdentifier(value); + } + this.kv.set(identifier, value); + } + clear() { + this.kv.clear(); + } + getIdentifier(value) { + return this.kv.getByValue(value); + } + getValue(identifier) { + return this.kv.getByKey(identifier); + } +}; + +// ../../node_modules/superjson/dist/class-registry.js +var ClassRegistry = class extends Registry { + constructor() { + super((c) => c.name); + this.classToAllowedProps = /* @__PURE__ */ new Map(); + } + register(value, options) { + if (typeof options === "object") { 
+ if (options.allowProps) { + this.classToAllowedProps.set(value, options.allowProps); + } + super.register(value, options.identifier); + } else { + super.register(value, options); + } + } + getAllowedProps(value) { + return this.classToAllowedProps.get(value); + } +}; + +// ../../node_modules/superjson/dist/util.js +function valuesOfObj(record) { + if ("values" in Object) { + return Object.values(record); + } + const values = []; + for (const key in record) { + if (record.hasOwnProperty(key)) { + values.push(record[key]); + } + } + return values; +} +function find(record, predicate) { + const values = valuesOfObj(record); + if ("find" in values) { + return values.find(predicate); + } + const valuesNotNever = values; + for (let i = 0; i < valuesNotNever.length; i++) { + const value = valuesNotNever[i]; + if (predicate(value)) { + return value; + } + } + return void 0; +} +function forEach(record, run) { + Object.entries(record).forEach(([key, value]) => run(value, key)); +} +function includes(arr, value) { + return arr.indexOf(value) !== -1; +} +function findArr(record, predicate) { + for (let i = 0; i < record.length; i++) { + const value = record[i]; + if (predicate(value)) { + return value; + } + } + return void 0; +} + +// ../../node_modules/superjson/dist/custom-transformer-registry.js +var CustomTransformerRegistry = class { + constructor() { + this.transfomers = {}; + } + register(transformer) { + this.transfomers[transformer.name] = transformer; + } + findApplicable(v) { + return find(this.transfomers, (transformer) => transformer.isApplicable(v)); + } + findByName(name) { + return this.transfomers[name]; + } +}; + +// ../../node_modules/superjson/dist/is.js +var getType = (payload) => Object.prototype.toString.call(payload).slice(8, -1); +var isUndefined = (payload) => typeof payload === "undefined"; +var isNull = (payload) => payload === null; +var isPlainObject = (payload) => { + if (typeof payload !== "object" || payload === null) + return false; + if 
(payload === Object.prototype) + return false; + if (Object.getPrototypeOf(payload) === null) + return true; + return Object.getPrototypeOf(payload) === Object.prototype; +}; +var isEmptyObject = (payload) => isPlainObject(payload) && Object.keys(payload).length === 0; +var isArray = (payload) => Array.isArray(payload); +var isString = (payload) => typeof payload === "string"; +var isNumber = (payload) => typeof payload === "number" && !isNaN(payload); +var isBoolean = (payload) => typeof payload === "boolean"; +var isRegExp = (payload) => payload instanceof RegExp; +var isMap = (payload) => payload instanceof Map; +var isSet = (payload) => payload instanceof Set; +var isSymbol = (payload) => getType(payload) === "Symbol"; +var isDate = (payload) => payload instanceof Date && !isNaN(payload.valueOf()); +var isError = (payload) => payload instanceof Error; +var isNaNValue = (payload) => typeof payload === "number" && isNaN(payload); +var isPrimitive = (payload) => isBoolean(payload) || isNull(payload) || isUndefined(payload) || isNumber(payload) || isString(payload) || isSymbol(payload); +var isBigint = (payload) => typeof payload === "bigint"; +var isInfinite = (payload) => payload === Infinity || payload === -Infinity; +var isTypedArray = (payload) => ArrayBuffer.isView(payload) && !(payload instanceof DataView); +var isURL = (payload) => payload instanceof URL; + +// ../../node_modules/superjson/dist/pathstringifier.js +var escapeKey = (key) => key.replace(/\./g, "\\."); +var stringifyPath = (path) => path.map(String).map(escapeKey).join("."); +var parsePath = (string) => { + const result = []; + let segment = ""; + for (let i = 0; i < string.length; i++) { + let char = string.charAt(i); + const isEscapedDot = char === "\\" && string.charAt(i + 1) === "."; + if (isEscapedDot) { + segment += "."; + i++; + continue; + } + const isEndOfSegment = char === "."; + if (isEndOfSegment) { + result.push(segment); + segment = ""; + continue; + } + segment += char; + } + 
const lastSegment = segment; + result.push(lastSegment); + return result; +}; + +// ../../node_modules/superjson/dist/transformer.js +function simpleTransformation(isApplicable, annotation, transform, untransform) { + return { + isApplicable, + annotation, + transform, + untransform + }; +} +var simpleRules = [ + simpleTransformation(isUndefined, "undefined", () => null, () => void 0), + simpleTransformation(isBigint, "bigint", (v) => v.toString(), (v) => { + if (typeof BigInt !== "undefined") { + return BigInt(v); + } + console.error("Please add a BigInt polyfill."); + return v; + }), + simpleTransformation(isDate, "Date", (v) => v.toISOString(), (v) => new Date(v)), + simpleTransformation(isError, "Error", (v, superJson) => { + const baseError = { + name: v.name, + message: v.message + }; + superJson.allowedErrorProps.forEach((prop) => { + baseError[prop] = v[prop]; + }); + return baseError; + }, (v, superJson) => { + const e = new Error(v.message); + e.name = v.name; + e.stack = v.stack; + superJson.allowedErrorProps.forEach((prop) => { + e[prop] = v[prop]; + }); + return e; + }), + simpleTransformation(isRegExp, "regexp", (v) => "" + v, (regex) => { + const body = regex.slice(1, regex.lastIndexOf("/")); + const flags = regex.slice(regex.lastIndexOf("/") + 1); + return new RegExp(body, flags); + }), + simpleTransformation( + isSet, + "set", + // (sets only exist in es6+) + // eslint-disable-next-line es5/no-es6-methods + (v) => [...v.values()], + (v) => new Set(v) + ), + simpleTransformation(isMap, "map", (v) => [...v.entries()], (v) => new Map(v)), + simpleTransformation((v) => isNaNValue(v) || isInfinite(v), "number", (v) => { + if (isNaNValue(v)) { + return "NaN"; + } + if (v > 0) { + return "Infinity"; + } else { + return "-Infinity"; + } + }, Number), + simpleTransformation((v) => v === 0 && 1 / v === -Infinity, "number", () => { + return "-0"; + }, Number), + simpleTransformation(isURL, "URL", (v) => v.toString(), (v) => new URL(v)) +]; +function 
compositeTransformation(isApplicable, annotation, transform, untransform) { + return { + isApplicable, + annotation, + transform, + untransform + }; +} +var symbolRule = compositeTransformation((s, superJson) => { + if (isSymbol(s)) { + const isRegistered = !!superJson.symbolRegistry.getIdentifier(s); + return isRegistered; + } + return false; +}, (s, superJson) => { + const identifier = superJson.symbolRegistry.getIdentifier(s); + return ["symbol", identifier]; +}, (v) => v.description, (_, a, superJson) => { + const value = superJson.symbolRegistry.getValue(a[1]); + if (!value) { + throw new Error("Trying to deserialize unknown symbol"); + } + return value; +}); +var constructorToName = [ + Int8Array, + Uint8Array, + Int16Array, + Uint16Array, + Int32Array, + Uint32Array, + Float32Array, + Float64Array, + Uint8ClampedArray +].reduce((obj, ctor) => { + obj[ctor.name] = ctor; + return obj; +}, {}); +var typedArrayRule = compositeTransformation(isTypedArray, (v) => ["typed-array", v.constructor.name], (v) => [...v], (v, a) => { + const ctor = constructorToName[a[1]]; + if (!ctor) { + throw new Error("Trying to deserialize unknown typed array"); + } + return new ctor(v); +}); +function isInstanceOfRegisteredClass(potentialClass, superJson) { + if (potentialClass?.constructor) { + const isRegistered = !!superJson.classRegistry.getIdentifier(potentialClass.constructor); + return isRegistered; + } + return false; +} +var classRule = compositeTransformation(isInstanceOfRegisteredClass, (clazz, superJson) => { + const identifier = superJson.classRegistry.getIdentifier(clazz.constructor); + return ["class", identifier]; +}, (clazz, superJson) => { + const allowedProps = superJson.classRegistry.getAllowedProps(clazz.constructor); + if (!allowedProps) { + return { ...clazz }; + } + const result = {}; + allowedProps.forEach((prop) => { + result[prop] = clazz[prop]; + }); + return result; +}, (v, a, superJson) => { + const clazz = superJson.classRegistry.getValue(a[1]); + if 
(!clazz) { + throw new Error(`Trying to deserialize unknown class '${a[1]}' - check https://github.com/blitz-js/superjson/issues/116#issuecomment-773996564`); + } + return Object.assign(Object.create(clazz.prototype), v); +}); +var customRule = compositeTransformation((value, superJson) => { + return !!superJson.customTransformerRegistry.findApplicable(value); +}, (value, superJson) => { + const transformer = superJson.customTransformerRegistry.findApplicable(value); + return ["custom", transformer.name]; +}, (value, superJson) => { + const transformer = superJson.customTransformerRegistry.findApplicable(value); + return transformer.serialize(value); +}, (v, a, superJson) => { + const transformer = superJson.customTransformerRegistry.findByName(a[1]); + if (!transformer) { + throw new Error("Trying to deserialize unknown custom value"); + } + return transformer.deserialize(v); +}); +var compositeRules = [classRule, symbolRule, customRule, typedArrayRule]; +var transformValue = (value, superJson) => { + const applicableCompositeRule = findArr(compositeRules, (rule) => rule.isApplicable(value, superJson)); + if (applicableCompositeRule) { + return { + value: applicableCompositeRule.transform(value, superJson), + type: applicableCompositeRule.annotation(value, superJson) + }; + } + const applicableSimpleRule = findArr(simpleRules, (rule) => rule.isApplicable(value, superJson)); + if (applicableSimpleRule) { + return { + value: applicableSimpleRule.transform(value, superJson), + type: applicableSimpleRule.annotation + }; + } + return void 0; +}; +var simpleRulesByAnnotation = {}; +simpleRules.forEach((rule) => { + simpleRulesByAnnotation[rule.annotation] = rule; +}); +var untransformValue = (json, type, superJson) => { + if (isArray(type)) { + switch (type[0]) { + case "symbol": + return symbolRule.untransform(json, type, superJson); + case "class": + return classRule.untransform(json, type, superJson); + case "custom": + return customRule.untransform(json, type, 
superJson); + case "typed-array": + return typedArrayRule.untransform(json, type, superJson); + default: + throw new Error("Unknown transformation: " + type); + } + } else { + const transformation = simpleRulesByAnnotation[type]; + if (!transformation) { + throw new Error("Unknown transformation: " + type); + } + return transformation.untransform(json, superJson); + } +}; + +// ../../node_modules/superjson/dist/accessDeep.js +var getNthKey = (value, n) => { + if (n > value.size) + throw new Error("index out of bounds"); + const keys = value.keys(); + while (n > 0) { + keys.next(); + n--; + } + return keys.next().value; +}; +function validatePath(path) { + if (includes(path, "__proto__")) { + throw new Error("__proto__ is not allowed as a property"); + } + if (includes(path, "prototype")) { + throw new Error("prototype is not allowed as a property"); + } + if (includes(path, "constructor")) { + throw new Error("constructor is not allowed as a property"); + } +} +var getDeep = (object, path) => { + validatePath(path); + for (let i = 0; i < path.length; i++) { + const key = path[i]; + if (isSet(object)) { + object = getNthKey(object, +key); + } else if (isMap(object)) { + const row = +key; + const type = +path[++i] === 0 ? 
"key" : "value"; + const keyOfRow = getNthKey(object, row); + switch (type) { + case "key": + object = keyOfRow; + break; + case "value": + object = object.get(keyOfRow); + break; + } + } else { + object = object[key]; + } + } + return object; +}; +var setDeep = (object, path, mapper) => { + validatePath(path); + if (path.length === 0) { + return mapper(object); + } + let parent = object; + for (let i = 0; i < path.length - 1; i++) { + const key = path[i]; + if (isArray(parent)) { + const index = +key; + parent = parent[index]; + } else if (isPlainObject(parent)) { + parent = parent[key]; + } else if (isSet(parent)) { + const row = +key; + parent = getNthKey(parent, row); + } else if (isMap(parent)) { + const isEnd = i === path.length - 2; + if (isEnd) { + break; + } + const row = +key; + const type = +path[++i] === 0 ? "key" : "value"; + const keyOfRow = getNthKey(parent, row); + switch (type) { + case "key": + parent = keyOfRow; + break; + case "value": + parent = parent.get(keyOfRow); + break; + } + } + } + const lastKey = path[path.length - 1]; + if (isArray(parent)) { + parent[+lastKey] = mapper(parent[+lastKey]); + } else if (isPlainObject(parent)) { + parent[lastKey] = mapper(parent[lastKey]); + } + if (isSet(parent)) { + const oldValue = getNthKey(parent, +lastKey); + const newValue = mapper(oldValue); + if (oldValue !== newValue) { + parent.delete(oldValue); + parent.add(newValue); + } + } + if (isMap(parent)) { + const row = +path[path.length - 2]; + const keyToRow = getNthKey(parent, row); + const type = +lastKey === 0 ? 
"key" : "value"; + switch (type) { + case "key": { + const newKey = mapper(keyToRow); + parent.set(newKey, parent.get(keyToRow)); + if (newKey !== keyToRow) { + parent.delete(keyToRow); + } + break; + } + case "value": { + parent.set(keyToRow, mapper(parent.get(keyToRow))); + break; + } + } + } + return object; +}; + +// ../../node_modules/superjson/dist/plainer.js +function traverse(tree, walker2, origin = []) { + if (!tree) { + return; + } + if (!isArray(tree)) { + forEach(tree, (subtree, key) => traverse(subtree, walker2, [...origin, ...parsePath(key)])); + return; + } + const [nodeValue, children] = tree; + if (children) { + forEach(children, (child, key) => { + traverse(child, walker2, [...origin, ...parsePath(key)]); + }); + } + walker2(nodeValue, origin); +} +function applyValueAnnotations(plain, annotations, superJson) { + traverse(annotations, (type, path) => { + plain = setDeep(plain, path, (v) => untransformValue(v, type, superJson)); + }); + return plain; +} +function applyReferentialEqualityAnnotations(plain, annotations) { + function apply(identicalPaths, path) { + const object = getDeep(plain, parsePath(path)); + identicalPaths.map(parsePath).forEach((identicalObjectPath) => { + plain = setDeep(plain, identicalObjectPath, () => object); + }); + } + if (isArray(annotations)) { + const [root, other] = annotations; + root.forEach((identicalPath) => { + plain = setDeep(plain, parsePath(identicalPath), () => plain); + }); + if (other) { + forEach(other, apply); + } + } else { + forEach(annotations, apply); + } + return plain; +} +var isDeep = (object, superJson) => isPlainObject(object) || isArray(object) || isMap(object) || isSet(object) || isInstanceOfRegisteredClass(object, superJson); +function addIdentity(object, path, identities) { + const existingSet = identities.get(object); + if (existingSet) { + existingSet.push(path); + } else { + identities.set(object, [path]); + } +} +function generateReferentialEqualityAnnotations(identitites, dedupe) { + 
const result = {}; + let rootEqualityPaths = void 0; + identitites.forEach((paths) => { + if (paths.length <= 1) { + return; + } + if (!dedupe) { + paths = paths.map((path) => path.map(String)).sort((a, b) => a.length - b.length); + } + const [representativePath, ...identicalPaths] = paths; + if (representativePath.length === 0) { + rootEqualityPaths = identicalPaths.map(stringifyPath); + } else { + result[stringifyPath(representativePath)] = identicalPaths.map(stringifyPath); + } + }); + if (rootEqualityPaths) { + if (isEmptyObject(result)) { + return [rootEqualityPaths]; + } else { + return [rootEqualityPaths, result]; + } + } else { + return isEmptyObject(result) ? void 0 : result; + } +} +var walker = (object, identities, superJson, dedupe, path = [], objectsInThisPath = [], seenObjects = /* @__PURE__ */ new Map()) => { + const primitive = isPrimitive(object); + if (!primitive) { + addIdentity(object, path, identities); + const seen = seenObjects.get(object); + if (seen) { + return dedupe ? { + transformedValue: null + } : seen; + } + } + if (!isDeep(object, superJson)) { + const transformed2 = transformValue(object, superJson); + const result2 = transformed2 ? { + transformedValue: transformed2.value, + annotations: [transformed2.type] + } : { + transformedValue: object + }; + if (!primitive) { + seenObjects.set(object, result2); + } + return result2; + } + if (includes(objectsInThisPath, object)) { + return { + transformedValue: null + }; + } + const transformationResult = transformValue(object, superJson); + const transformed = transformationResult?.value ?? object; + const transformedValue = isArray(transformed) ? [] : {}; + const innerAnnotations = {}; + forEach(transformed, (value, index) => { + if (index === "__proto__" || index === "constructor" || index === "prototype") { + throw new Error(`Detected property ${index}. 
This is a prototype pollution risk, please remove it from your object.`); + } + const recursiveResult = walker(value, identities, superJson, dedupe, [...path, index], [...objectsInThisPath, object], seenObjects); + transformedValue[index] = recursiveResult.transformedValue; + if (isArray(recursiveResult.annotations)) { + innerAnnotations[index] = recursiveResult.annotations; + } else if (isPlainObject(recursiveResult.annotations)) { + forEach(recursiveResult.annotations, (tree, key) => { + innerAnnotations[escapeKey(index) + "." + key] = tree; + }); + } + }); + const result = isEmptyObject(innerAnnotations) ? { + transformedValue, + annotations: !!transformationResult ? [transformationResult.type] : void 0 + } : { + transformedValue, + annotations: !!transformationResult ? [transformationResult.type, innerAnnotations] : innerAnnotations + }; + if (!primitive) { + seenObjects.set(object, result); + } + return result; +}; + +// ../../node_modules/is-what/dist/index.js +function getType2(payload) { + return Object.prototype.toString.call(payload).slice(8, -1); +} +function isArray2(payload) { + return getType2(payload) === "Array"; +} +function isPlainObject2(payload) { + if (getType2(payload) !== "Object") + return false; + const prototype = Object.getPrototypeOf(payload); + return !!prototype && prototype.constructor === Object && prototype === Object.prototype; +} +function isNull2(payload) { + return getType2(payload) === "Null"; +} +function isOneOf(a, b, c, d, e) { + return (value) => a(value) || b(value) || !!c && c(value) || !!d && d(value) || !!e && e(value); +} +function isUndefined2(payload) { + return getType2(payload) === "Undefined"; +} +var isNullOrUndefined = isOneOf(isNull2, isUndefined2); + +// ../../node_modules/copy-anything/dist/index.js +function assignProp(carry, key, newVal, originalObject, includeNonenumerable) { + const propType = {}.propertyIsEnumerable.call(originalObject, key) ? 
"enumerable" : "nonenumerable"; + if (propType === "enumerable") + carry[key] = newVal; + if (includeNonenumerable && propType === "nonenumerable") { + Object.defineProperty(carry, key, { + value: newVal, + enumerable: false, + writable: true, + configurable: true + }); + } +} +function copy(target, options = {}) { + if (isArray2(target)) { + return target.map((item) => copy(item, options)); + } + if (!isPlainObject2(target)) { + return target; + } + const props = Object.getOwnPropertyNames(target); + const symbols = Object.getOwnPropertySymbols(target); + return [...props, ...symbols].reduce((carry, key) => { + if (isArray2(options.props) && !options.props.includes(key)) { + return carry; + } + const val = target[key]; + const newVal = copy(val, options); + assignProp(carry, key, newVal, target, options.nonenumerable); + return carry; + }, {}); +} + +// ../../node_modules/superjson/dist/index.js +var SuperJSON = class { + /** + * @param dedupeReferentialEqualities If true, SuperJSON will make sure only one instance of referentially equal objects are serialized and the rest are replaced with `null`. + */ + constructor({ dedupe = false } = {}) { + this.classRegistry = new ClassRegistry(); + this.symbolRegistry = new Registry((s) => s.description ?? 
""); + this.customTransformerRegistry = new CustomTransformerRegistry(); + this.allowedErrorProps = []; + this.dedupe = dedupe; + } + serialize(object) { + const identities = /* @__PURE__ */ new Map(); + const output = walker(object, identities, this, this.dedupe); + const res = { + json: output.transformedValue + }; + if (output.annotations) { + res.meta = { + ...res.meta, + values: output.annotations + }; + } + const equalityAnnotations = generateReferentialEqualityAnnotations(identities, this.dedupe); + if (equalityAnnotations) { + res.meta = { + ...res.meta, + referentialEqualities: equalityAnnotations + }; + } + return res; + } + deserialize(payload) { + const { json, meta } = payload; + let result = copy(json); + if (meta?.values) { + result = applyValueAnnotations(result, meta.values, this); + } + if (meta?.referentialEqualities) { + result = applyReferentialEqualityAnnotations(result, meta.referentialEqualities); + } + return result; + } + stringify(object) { + return JSON.stringify(this.serialize(object)); + } + parse(string) { + return this.deserialize(JSON.parse(string)); + } + registerClass(v, options) { + this.classRegistry.register(v, options); + } + registerSymbol(v, identifier) { + this.symbolRegistry.register(v, identifier); + } + registerCustom(transformer, name) { + this.customTransformerRegistry.register({ + name, + ...transformer + }); + } + allowErrorProps(...props) { + this.allowedErrorProps.push(...props); + } +}; +SuperJSON.defaultInstance = new SuperJSON(); +SuperJSON.serialize = SuperJSON.defaultInstance.serialize.bind(SuperJSON.defaultInstance); +SuperJSON.deserialize = SuperJSON.defaultInstance.deserialize.bind(SuperJSON.defaultInstance); +SuperJSON.stringify = SuperJSON.defaultInstance.stringify.bind(SuperJSON.defaultInstance); +SuperJSON.parse = SuperJSON.defaultInstance.parse.bind(SuperJSON.defaultInstance); +SuperJSON.registerClass = SuperJSON.defaultInstance.registerClass.bind(SuperJSON.defaultInstance); +SuperJSON.registerSymbol = 
SuperJSON.defaultInstance.registerSymbol.bind(SuperJSON.defaultInstance); +SuperJSON.registerCustom = SuperJSON.defaultInstance.registerCustom.bind(SuperJSON.defaultInstance); +SuperJSON.allowErrorProps = SuperJSON.defaultInstance.allowErrorProps.bind(SuperJSON.defaultInstance); +var serialize = SuperJSON.serialize; +var deserialize = SuperJSON.deserialize; +var stringify = SuperJSON.stringify; +var parse = SuperJSON.parse; +var registerClass = SuperJSON.registerClass; +var registerCustom = SuperJSON.registerCustom; +var registerSymbol = SuperJSON.registerSymbol; +var allowErrorProps = SuperJSON.allowErrorProps; + +// src/superjson-payload-converter.ts +var import_encoding = require("@temporalio/common/lib/encoding"); +var SuperjsonPayloadConverter = class { + // Use 'json/plain' so that Payloads are displayed in the UI + encodingType = "json/plain"; + toPayload(value) { + if (value === void 0) return void 0; + let sjson = ""; + try { + sjson = SuperJSON.stringify(value); + } catch (e) { + throw new UnsupportedSuperjsonTypeError( + `Can't run SUPERJSON.stringify on this value: ${value}. Either convert it (or its properties) to SUPERJSON-serializable values (see https://github.com/flightcontrolhq/superjson#readme ), or create a custom data converter. SJSON.stringify error message: ${errorMessage( + e + )}`, + e + ); + } + return { + metadata: { + [import_common.METADATA_ENCODING_KEY]: (0, import_encoding.encode)("json/plain"), + // Include an additional metadata field to indicate that this is an SuperJSON payload + format: (0, import_encoding.encode)("extended") + }, + data: (0, import_encoding.encode)(sjson) + }; + } + fromPayload(content) { + try { + if (!content.data) { + throw new UnsupportedSuperjsonTypeError( + `Can't run SUPERJSON.parse on this value: ${content.data}. Either convert it (or its properties) to SUPERJSON-serializable values (see https://github.com/flightcontrolhq/superjson#readme ), or create a custom data converter. 
No data found in payload.` + ); + } + return SuperJSON.parse((0, import_encoding.decode)(content.data)); + } catch (e) { + throw new UnsupportedSuperjsonTypeError( + `Can't run SUPERJSON.parse on this value: ${content.data}. Either convert it (or its properties) to SUPERJSON-serializable values (see https://github.com/flightcontrolhq/superjson#readme ), or create a custom data converter. SJSON.parse error message: ${errorMessage( + e + )}`, + e + ); + } + } +}; +var UnsupportedSuperjsonTypeError = class extends import_common.PayloadConverterError { + constructor(message, cause) { + super(message ?? void 0); + this.cause = cause; + } + name = "UnsupportedJsonTypeError"; +}; +function errorMessage(error) { + if (typeof error === "string") { + return error; + } + if (error instanceof Error) { + return error.message; + } + return void 0; +} + +// src/payload-converter.ts +var payloadConverter = new import_common2.CompositePayloadConverter( + new import_common2.UndefinedPayloadConverter(), + new SuperjsonPayloadConverter() +); +// Annotate the CommonJS export names for ESM import in node: +0 && (module.exports = { + payloadConverter +}); diff --git a/packages/temporal/package.json b/packages/temporal/package.json index ce6dd2db2..8b5925e5e 100644 --- a/packages/temporal/package.json +++ b/packages/temporal/package.json @@ -15,17 +15,13 @@ "./client": { "types": "./src/client.ts", "default": "./src/client.ts" - }, - "./superjson-payload-converter": { - "types": "./src/superjson-payload-converter.ts", - "default": "./src/superjson-payload-converter.ts" } }, "scripts": { "lint": "tsc", "server": "temporal server start-dev --db-filename ./var/temporal.db", "build": "esbuild --bundle --outfile=build/payload-converter.cjs --target=esnext --platform=node --external:@temporalio/common --external:@bufbuild/protobuf src/payload-converter.ts" - }, + }, "dependencies": { "@temporalio/client": "^1.10.1", "@temporalio/common": "^1.11.1", diff --git a/packages/temporal/src/client.ts 
b/packages/temporal/src/client.ts index 298224e21..87bd3f35d 100644 --- a/packages/temporal/src/client.ts +++ b/packages/temporal/src/client.ts @@ -1,5 +1,5 @@ import { Client, Connection } from '@temporalio/client' -import { dataConverter } from './payload-converter' +import { payloadConverter } from './payload-converter' import { createRequire } from 'node:module' const require = createRequire(import.meta.url) import debug from 'debug' @@ -37,7 +37,9 @@ export async function getTemporalClient(): Promise { client = new Client({ connection, namespace: process.env.TEMPORAL_NAMESPACE ?? 'default', - dataConverter: dataConverter, + dataConverter: { + payloadConverterPath: require.resolve('../build/payload-converter.cjs'), + }, }) } return client diff --git a/packages/temporal/tsconfig.json b/packages/temporal/tsconfig.json new file mode 100644 index 000000000..688134c0a --- /dev/null +++ b/packages/temporal/tsconfig.json @@ -0,0 +1,13 @@ +{ + "extends": "../../tsconfig.base.json", + "compilerOptions": { + "noEmit": true, + "composite": true, + "baseUrl": ".", + "paths": { + "@my/temporal": ["../temporal"], + "@my/temporal/*": ["../temporal/*"] + } + }, + "include": ["src/**/*.ts", "../../globals.d.ts", "../../environment.d.ts"] +} diff --git a/packages/workflows/.gitignore b/packages/workflows/.gitignore index 1548a47d0..3c6e5b4df 100644 --- a/packages/workflows/.gitignore +++ b/packages/workflows/.gitignore @@ -175,3 +175,5 @@ dist .DS_Store lib + +workflow-bundle.js \ No newline at end of file diff --git a/packages/workflows/README.md b/packages/workflows/README.md index bba8b12cb..a203ec5ce 100644 --- a/packages/workflows/README.md +++ b/packages/workflows/README.md @@ -1,5 +1,21 @@ # Temporal Workflows +## How to develop Workflow logic + +Workflow logic is constrained by deterministic execution requirements. Therefore, each language is limited to the use of certain idiomatic techniques. 
However, each Temporal SDK provides a set of APIs that can be used inside your Workflow to interact with external (to the Workflow) application code. + +In the Temporal TypeScript SDK, Workflows run in a deterministic sandboxed environment. The code is bundled on Worker creation using Webpack, and can import any package as long as it does not reference Node.js or DOM APIs. + +> [!NOTE] +> If you must use a library that references a Node.js or DOM API and you are certain that those APIs are not used at runtime, add that module to the ignoreModules list. +> The Workflow sandbox can run only deterministic code, so side effects and access to external state must be done through Activities because Activity outputs are recorded in the Event History and can read deterministically by the Workflow. + +This limitation also means that Workflow code cannot directly import the [Activity Definition](https://docs.temporal.io/activities#activity-definition). [Activity Types](https://docs.temporal.io/activities#activity-type) can be imported, so they can be invoked in a type-safe manner. + +To make the Workflow runtime deterministic, functions like `Math.random()`, `Date`, and `setTimeout()` are replaced by deterministic versions. + +[FinalizationRegistry](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/FinalizationRegistry) and [WeakRef](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/WeakRef) are removed because v8's garbage collector is not deterministic. + - **Workflows require one file**: you can organize Workflow code however you like, but each Worker needs to reference a single file that exports all the Workflows it handles (so you have to handle name conflicts instead of us) - **Activities are top level**: - Inside the Temporal Worker, Activities are registered at the same level Workflows are. 
diff --git a/packages/workflows/package.json b/packages/workflows/package.json index af90b30c5..f344f91c0 100644 --- a/packages/workflows/package.json +++ b/packages/workflows/package.json @@ -4,8 +4,9 @@ "package.json", "src" ], + "type": "module", "exports": { - "./activities": { + "./all-activities": { "types": "./src/all-activities.ts", "default": "./src/all-activities.ts" }, @@ -17,15 +18,15 @@ "types": "./src/all-workflows.ts", "default": "./src/all-workflows.ts" }, - "/": { - "types": "./src/all-workflows.ts", - "default": "./src/all-workflows.ts" + "./workflow-bundle": { + "types": "./workflow-bundle.d.ts", + "default": "./workflow-bundle.js" } }, - "type": "module", "scripts": { "lint": "tsc", - "test": "jest" + "test": "jest", + "bundle": "node --loader ts-node/esm --experimental-specifier-resolution=node src/scripts/build-workflow-bundle.ts" }, "devDependencies": { "@jest/globals": "^29.7.0", diff --git a/packages/workflows/src/distribution-workflow/activities.ts b/packages/workflows/src/distribution-workflow/activities.ts index 93b4a3df3..d977f60f0 100644 --- a/packages/workflows/src/distribution-workflow/activities.ts +++ b/packages/workflows/src/distribution-workflow/activities.ts @@ -22,302 +22,302 @@ const inBatches = (array: T[], batchSize = Math.max(8, cpuCount - 1)) => { export function createDistributionActivities(env: Record) { bootstrap(env) -} - -async function fetchAllOpenDistributionsActivity() { - const { data: distributions, error } = await fetchAllOpenDistributions() - if (error) { - if (error.code === 'PGRST116') { - log.info('fetchAllOpenDistributionsActivity', { error }) - return null - } - throw ApplicationFailure.nonRetryable('Error fetching distributions.', error.code, error) - } - log.info('fetchAllOpenDistributionsActivity', { distributions }) - return distributions -} - -async function fetchDistributionActivity(distributionId: string) { - const { data: distribution, error } = await fetchDistribution(distributionId) - if (error) 
{ - if (error.code === 'PGRST116') { - log.info('fetchDistributionActivity', { distributionId, error }) - return null - } - throw ApplicationFailure.nonRetryable('Error fetching distribution.', error.code, error) - } - log.info('fetchDistributionActivity', { distribution }) - return distribution -} - -/** - * Calculates distribution shares for a single distribution. - */ -async function calculateDistributionSharesActivity( - distribution: Tables<'distributions'> & { - distribution_verification_values: Tables<'distribution_verification_values'>[] - } -): Promise { - log.info('calculateDistributionSharesActivity', { distribution }) - // verify tranche is not created when in production - if (await isMerkleDropActive(distribution)) { - throw ApplicationFailure.nonRetryable( - 'Tranche is active. Cannot calculate distribution shares.' - ) - } - - log.info('Calculating distribution shares') - - const { - data: verifications, - error: verificationsError, - count, - } = await fetchAllVerifications(distribution.id) - if (verificationsError) { - throw verificationsError - } - - if (verifications === null || verifications.length === 0) { - log.warn('No verifications found. 
Skipping distribution.') - return - } + return { + async fetchAllOpenDistributionsActivity() { + const { data: distributions, error } = await fetchAllOpenDistributions() + if (error) { + if (error.code === 'PGRST116') { + log.info('fetchAllOpenDistributionsActivity', { error }) + return null + } + throw ApplicationFailure.nonRetryable('Error fetching distributions.', error.code, error) + } + log.info('fetchAllOpenDistributionsActivity', { distributions }) + return distributions + }, + async fetchDistributionActivity(distributionId: string) { + const { data: distribution, error } = await fetchDistribution(distributionId) + if (error) { + if (error.code === 'PGRST116') { + log.info('fetchDistributionActivity', { distributionId, error }) + return null + } + throw ApplicationFailure.nonRetryable('Error fetching distribution.', error.code, error) + } + log.info('fetchDistributionActivity', { distribution }) + return distribution + }, + /** + * Calculates distribution shares for a single distribution. + */ + async calculateDistributionSharesActivity( + distribution: Tables<'distributions'> & { + distribution_verification_values: Tables<'distribution_verification_values'>[] + } + ): Promise { + log.info('calculateDistributionSharesActivity', { distribution }) + // verify tranche is not created when in production + if (await isMerkleDropActive(distribution)) { + throw ApplicationFailure.nonRetryable( + 'Tranche is active. Cannot calculate distribution shares.' 
+ ) + } - if (count !== verifications.length) { - throw new Error('Verifications count does not match expected count') - } + log.info('Calculating distribution shares') - log.info(`Found ${verifications.length} verifications.`) + const { + data: verifications, + error: verificationsError, + count, + } = await fetchAllVerifications(distribution.id) - const verificationValues = distribution.distribution_verification_values.reduce( - (acc, verification) => { - acc[verification.type] = { - fixedValue: BigInt(verification.fixed_value), - bipsValue: BigInt(verification.bips_value), + if (verificationsError) { + throw verificationsError } - return acc - }, - {} as Record< - Database['public']['Enums']['verification_type'], - { fixedValue?: bigint; bipsValue?: bigint } - > - ) - const verificationsByUserId = verifications.reduce( - (acc, verification) => { - acc[verification.user_id] = acc[verification.user_id] || [] - acc[verification.user_id]?.push(verification) - return acc - }, - {} as Record - ) - log.info(`Found ${Object.keys(verificationsByUserId).length} users with verifications.`) + if (verifications === null || verifications.length === 0) { + log.warn('No verifications found. 
Skipping distribution.') + return + } - const { data: hodlerAddresses, error: hodlerAddressesError } = await fetchAllHodlers( - distribution.id - ) + if (count !== verifications.length) { + throw new Error('Verifications count does not match expected count') + } - if (hodlerAddressesError) { - throw hodlerAddressesError - } + log.info(`Found ${verifications.length} verifications.`) + + const verificationValues = distribution.distribution_verification_values.reduce( + (acc, verification) => { + acc[verification.type] = { + fixedValue: BigInt(verification.fixed_value), + bipsValue: BigInt(verification.bips_value), + } + return acc + }, + {} as Record< + Database['public']['Enums']['verification_type'], + { fixedValue?: bigint; bipsValue?: bigint } + > + ) + const verificationsByUserId = verifications.reduce( + (acc, verification) => { + acc[verification.user_id] = acc[verification.user_id] || [] + acc[verification.user_id]?.push(verification) + return acc + }, + {} as Record + ) + + log.info(`Found ${Object.keys(verificationsByUserId).length} users with verifications.`) + + const { data: hodlerAddresses, error: hodlerAddressesError } = await fetchAllHodlers( + distribution.id + ) + + if (hodlerAddressesError) { + throw hodlerAddressesError + } - if (hodlerAddresses === null || hodlerAddresses.length === 0) { - throw new Error('No hodler addresses found') - } + if (hodlerAddresses === null || hodlerAddresses.length === 0) { + throw new Error('No hodler addresses found') + } - const hodlerAddressesByUserId = hodlerAddresses.reduce( - (acc, address) => { - acc[address.user_id] = address - return acc - }, - {} as Record - ) - const hodlerUserIdByAddress = hodlerAddresses.reduce( - (acc, address) => { - acc[address.address] = address.user_id - return acc - }, - {} as Record - ) + const hodlerAddressesByUserId = hodlerAddresses.reduce( + (acc, address) => { + acc[address.user_id] = address + return acc + }, + {} as Record + ) + const hodlerUserIdByAddress = 
hodlerAddresses.reduce( + (acc, address) => { + acc[address.address] = address.user_id + return acc + }, + {} as Record + ) + + log.info(`Found ${hodlerAddresses.length} addresses.`) + + // lookup balances of all hodler addresses in qualification period + const batches = inBatches(hodlerAddresses).flatMap(async (addresses) => { + return await Promise.all( + fetchAllBalances({ + addresses, + distribution, + }) + ) + }) - log.info(`Found ${hodlerAddresses.length} addresses.`) + let minBalanceAddresses: { user_id: string; address: `0x${string}`; balance: string }[] = [] + for await (const batch of batches) { + minBalanceAddresses = minBalanceAddresses.concat(...batch) + } - // lookup balances of all hodler addresses in qualification period - const batches = inBatches(hodlerAddresses).flatMap(async (addresses) => { - return await Promise.all( - fetchAllBalances({ - addresses, - distribution, + log.info(`Found ${minBalanceAddresses.length} balances.`) + // log.debug({ balances }) + + // Filter out hodler with not enough send token balance + minBalanceAddresses = minBalanceAddresses.filter( + ({ balance }) => BigInt(balance) >= BigInt(distribution.hodler_min_balance) + ) + + log.info( + `Found ${minBalanceAddresses.length} balances after filtering hodler_min_balance of ${distribution.hodler_min_balance}` + ) + // log.debug({ balances }) + + // Calculate hodler pool share weights + const distAmt = BigInt(distribution.amount) + const hodlerPoolBips = BigInt(distribution.hodler_pool_bips) + const fixedPoolBips = BigInt(distribution.fixed_pool_bips) + const bonusPoolBips = BigInt(distribution.bonus_pool_bips) + const hodlerPoolAvailableAmount = calculatePercentageWithBips(distAmt, hodlerPoolBips) + const minBalanceByAddress: Record = minBalanceAddresses.reduce( + (acc, balance) => { + acc[balance.address] = BigInt(balance.balance) + return acc + }, + {} as Record + ) + const { totalWeight, weightPerSend, poolWeights, weightedShares } = calculateWeights( + 
minBalanceAddresses, + hodlerPoolAvailableAmount + ) + + log.info(`Calculated ${Object.keys(poolWeights).length} weights.`, { + totalWeight, + hodlerPoolAvailableAmount, + weightPerSend, }) - ) - }) + // log.debug({ poolWeights }) - let minBalanceAddresses: { user_id: string; address: `0x${string}`; balance: string }[] = [] - for await (const batch of batches) { - minBalanceAddresses = minBalanceAddresses.concat(...batch) - } + if (totalWeight === 0n) { + log.warn('Total weight is 0. Skipping distribution.') + return + } - log.info(`Found ${minBalanceAddresses.length} balances.`) - // log.debug({ balances }) + const fixedPoolAvailableAmount = calculatePercentageWithBips(distAmt, fixedPoolBips) + let fixedPoolAllocatedAmount = 0n + const fixedPoolAmountsByAddress: Record = {} + const bonusPoolBipsByAddress: Record = {} + const maxBonusPoolBips = (bonusPoolBips * PERC_DENOM) / hodlerPoolBips // 3500*10000/6500 = 5384.615384615385% 1.53X - // Filter out hodler with not enough send token balance - minBalanceAddresses = minBalanceAddresses.filter( - ({ balance }) => BigInt(balance) >= BigInt(distribution.hodler_min_balance) - ) + for (const [userId, verifications] of Object.entries(verificationsByUserId)) { + const hodler = hodlerAddressesByUserId[userId] + if (!hodler || !hodler.address) { + continue + } + const { address } = hodler + if (!minBalanceByAddress[address]) { + continue + } + for (const verification of verifications) { + const { fixedValue, bipsValue } = verificationValues[verification.type] + if (fixedValue && fixedPoolAllocatedAmount + fixedValue <= fixedPoolAvailableAmount) { + if (fixedPoolAmountsByAddress[address] === undefined) { + fixedPoolAmountsByAddress[address] = 0n + } + fixedPoolAmountsByAddress[address] += fixedValue + fixedPoolAllocatedAmount += fixedValue + } + if (bipsValue) { + bonusPoolBipsByAddress[address] = (bonusPoolBipsByAddress[address] || 0n) as bigint + bonusPoolBipsByAddress[address] += bipsValue + bonusPoolBipsByAddress[address] 
= + (bonusPoolBipsByAddress[address] as bigint) > maxBonusPoolBips + ? maxBonusPoolBips + : (bonusPoolBipsByAddress[address] as bigint) // cap at max bonus pool bips + } + } + } - log.info( - `Found ${minBalanceAddresses.length} balances after filtering hodler_min_balance of ${distribution.hodler_min_balance}` - ) - // log.debug({ balances }) - - // Calculate hodler pool share weights - const distAmt = BigInt(distribution.amount) - const hodlerPoolBips = BigInt(distribution.hodler_pool_bips) - const fixedPoolBips = BigInt(distribution.fixed_pool_bips) - const bonusPoolBips = BigInt(distribution.bonus_pool_bips) - const hodlerPoolAvailableAmount = calculatePercentageWithBips(distAmt, hodlerPoolBips) - const minBalanceByAddress: Record = minBalanceAddresses.reduce( - (acc, balance) => { - acc[balance.address] = BigInt(balance.balance) - return acc - }, - {} as Record - ) - const { totalWeight, weightPerSend, poolWeights, weightedShares } = calculateWeights( - minBalanceAddresses, - hodlerPoolAvailableAmount - ) + const hodlerShares = Object.values(weightedShares) + let totalAmount = 0n + let totalHodlerPoolAmount = 0n + let totalBonusPoolAmount = 0n + let totalFixedPoolAmount = 0n - log.info(`Calculated ${Object.keys(poolWeights).length} weights.`, { - totalWeight, - hodlerPoolAvailableAmount, - weightPerSend, - }) - // log.debug({ poolWeights }) + log.info('Calculated fixed & bonus pool amounts.', { + maxBonusPoolBips, + }) - if (totalWeight === 0n) { - log.warn('Total weight is 0. 
Skipping distribution.') - return - } + const shares = hodlerShares + .map((share) => { + const userId = hodlerUserIdByAddress[share.address] + const bonusBips = bonusPoolBipsByAddress[share.address] || 0n + const hodlerPoolAmount = share.amount + const bonusPoolAmount = calculatePercentageWithBips(hodlerPoolAmount, bonusBips) + const fixedPoolAmount = fixedPoolAmountsByAddress[share.address] || 0n + const amount = hodlerPoolAmount + bonusPoolAmount + fixedPoolAmount + totalAmount += amount + totalHodlerPoolAmount += hodlerPoolAmount + totalBonusPoolAmount += bonusPoolAmount + totalFixedPoolAmount += fixedPoolAmount + + if (!userId) { + log.debug('Hodler not found for address. Skipping share.', { share }) + return null + } + + // log.debug( + // { + // address: share.address, + // balance: balancesByAddress[share.address], + // amount: amount, + // bonusBips, + // hodlerPoolAmount, + // bonusPoolAmount, + // fixedPoolAmount, + // }, + // 'Calculated share.' + // ) + + // @ts-expect-error supabase-js does not support bigint + return { + address: share.address, + distribution_id: distribution.id, + user_id: userId, + amount: amount.toString(), + bonus_pool_amount: bonusPoolAmount.toString(), + fixed_pool_amount: fixedPoolAmount.toString(), + hodler_pool_amount: hodlerPoolAmount.toString(), + } as Tables<'distribution_shares'> + }) + .filter(Boolean) as Tables<'distribution_shares'>[] + + log.info('Distribution totals', { + totalAmount, + totalHodlerPoolAmount, + hodlerPoolAvailableAmount, + totalBonusPoolAmount, + totalFixedPoolAmount, + fixedPoolAllocatedAmount, + fixedPoolAvailableAmount, + maxBonusPoolBips, + name: distribution.name, + shares: shares.length, + }) + log.info(`Calculated ${shares.length} shares.`) - const fixedPoolAvailableAmount = calculatePercentageWithBips(distAmt, fixedPoolBips) - let fixedPoolAllocatedAmount = 0n - const fixedPoolAmountsByAddress: Record = {} - const bonusPoolBipsByAddress: Record = {} - const maxBonusPoolBips = (bonusPoolBips 
* PERC_DENOM) / hodlerPoolBips // 3500*10000/6500 = 5384.615384615385% 1.53X - - for (const [userId, verifications] of Object.entries(verificationsByUserId)) { - const hodler = hodlerAddressesByUserId[userId] - if (!hodler || !hodler.address) { - continue - } - const { address } = hodler - if (!minBalanceByAddress[address]) { - continue - } - for (const verification of verifications) { - const { fixedValue, bipsValue } = verificationValues[verification.type] - if (fixedValue && fixedPoolAllocatedAmount + fixedValue <= fixedPoolAvailableAmount) { - if (fixedPoolAmountsByAddress[address] === undefined) { - fixedPoolAmountsByAddress[address] = 0n - } - fixedPoolAmountsByAddress[address] += fixedValue - fixedPoolAllocatedAmount += fixedValue + if (totalFixedPoolAmount > fixedPoolAvailableAmount) { + log.warn( + 'Fixed pool amount is greater than available amount. This is not a problem, but it means the fixed pool is exhausted.' + ) } - if (bipsValue) { - bonusPoolBipsByAddress[address] = (bonusPoolBipsByAddress[address] || 0n) as bigint - bonusPoolBipsByAddress[address] += bipsValue - bonusPoolBipsByAddress[address] = - (bonusPoolBipsByAddress[address] as bigint) > maxBonusPoolBips - ? 
maxBonusPoolBips - : (bonusPoolBipsByAddress[address] as bigint) // cap at max bonus pool bips - } - } - } - const hodlerShares = Object.values(weightedShares) - let totalAmount = 0n - let totalHodlerPoolAmount = 0n - let totalBonusPoolAmount = 0n - let totalFixedPoolAmount = 0n - - log.info('Calculated fixed & bonus pool amounts.', { - maxBonusPoolBips, - }) - - const shares = hodlerShares - .map((share) => { - const userId = hodlerUserIdByAddress[share.address] - const bonusBips = bonusPoolBipsByAddress[share.address] || 0n - const hodlerPoolAmount = share.amount - const bonusPoolAmount = calculatePercentageWithBips(hodlerPoolAmount, bonusBips) - const fixedPoolAmount = fixedPoolAmountsByAddress[share.address] || 0n - const amount = hodlerPoolAmount + bonusPoolAmount + fixedPoolAmount - totalAmount += amount - totalHodlerPoolAmount += hodlerPoolAmount - totalBonusPoolAmount += bonusPoolAmount - totalFixedPoolAmount += fixedPoolAmount - - if (!userId) { - log.debug('Hodler not found for address. Skipping share.', { share }) - return null + // ensure share amounts do not exceed the total distribution amount, ideally this should be done in the database + const totalShareAmounts = shares.reduce((acc, share) => acc + BigInt(share.amount), 0n) + if (totalShareAmounts > distAmt) { + throw new Error('Share amounts exceed total distribution amount') } - // log.debug( - // { - // address: share.address, - // balance: balancesByAddress[share.address], - // amount: amount, - // bonusBips, - // hodlerPoolAmount, - // bonusPoolAmount, - // fixedPoolAmount, - // }, - // 'Calculated share.' 
- // ) - - // @ts-expect-error supabase-js does not support bigint - return { - address: share.address, - distribution_id: distribution.id, - user_id: userId, - amount: amount.toString(), - bonus_pool_amount: bonusPoolAmount.toString(), - fixed_pool_amount: fixedPoolAmount.toString(), - hodler_pool_amount: hodlerPoolAmount.toString(), - } as Tables<'distribution_shares'> - }) - .filter(Boolean) as Tables<'distribution_shares'>[] - - log.info('Distribution totals', { - totalAmount, - totalHodlerPoolAmount, - hodlerPoolAvailableAmount, - totalBonusPoolAmount, - totalFixedPoolAmount, - fixedPoolAllocatedAmount, - fixedPoolAvailableAmount, - maxBonusPoolBips, - name: distribution.name, - shares: shares.length, - }) - log.info(`Calculated ${shares.length} shares.`) - - if (totalFixedPoolAmount > fixedPoolAvailableAmount) { - log.warn( - 'Fixed pool amount is greater than available amount. This is not a problem, but it means the fixed pool is exhausted.' - ) - } - - // ensure share amounts do not exceed the total distribution amount, ideally this should be done in the database - const totalShareAmounts = shares.reduce((acc, share) => acc + BigInt(share.amount), 0n) - if (totalShareAmounts > distAmt) { - throw new Error('Share amounts exceed total distribution amount') - } - - const { error } = await createDistributionShares(distribution.id, shares) - if (error) { - log.error('Error saving shares.', { error: error.message, code: error.code }) - throw error + const { error } = await createDistributionShares(distribution.id, shares) + if (error) { + log.error('Error saving shares.', { error: error.message, code: error.code }) + throw error + } + }, } } diff --git a/packages/workflows/src/scripts/build-workflow-bundle.ts b/packages/workflows/src/scripts/build-workflow-bundle.ts new file mode 100644 index 000000000..511062d61 --- /dev/null +++ b/packages/workflows/src/scripts/build-workflow-bundle.ts @@ -0,0 +1,21 @@ +import { bundleWorkflowCode } from '@temporalio/worker' 
+import { writeFile } from 'node:fs/promises' +import path, { dirname } from 'node:path' +import { createRequire } from 'node:module' +const require = createRequire(import.meta.url) +import { fileURLToPath } from 'node:url' + +export const __filename = fileURLToPath(import.meta.url) +export const __dirname = dirname(__filename) + +async function bundle() { + const { code } = await bundleWorkflowCode({ + workflowsPath: require.resolve('../all-workflows.ts'), + }) + const codePath = path.join(__dirname, '../../workflow-bundle.js') + + await writeFile(codePath, code) + console.log(`Bundle written to ${codePath}`) +} + +await bundle() diff --git a/packages/workflows/src/transfer-workflow/activities.ts b/packages/workflows/src/transfer-workflow/activities.ts index 67581eb37..f03d997e9 100644 --- a/packages/workflows/src/transfer-workflow/activities.ts +++ b/packages/workflows/src/transfer-workflow/activities.ts @@ -3,75 +3,47 @@ import { isTransferIndexed } from './supabase' import { simulateUserOperation, sendUserOperation, waitForTransactionReceipt } from './wagmi' import type { UserOperation } from 'permissionless' import { bootstrap } from '@my/workflows/utils' +import superjson from 'superjson' export const createTransferActivities = (env: Record) => { bootstrap(env) return { - simulateUserOpActivity, - sendUserOpActivity, - waitForTransactionReceiptActivity, - isTransferIndexedActivity, - } -} -async function simulateUserOpActivity(userOp: UserOperation<'v0.7'>) { - if (!userOp.signature) { - throw ApplicationFailure.nonRetryable('UserOp signature is required') - } - try { - await simulateUserOperation(userOp) - } catch (error) { - throw ApplicationFailure.nonRetryable('Error simulating user operation', error.code, error) - } -} - -async function sendUserOpActivity(userOp: UserOperation<'v0.7'>) { - const creationTime = Date.now() - - try { - const hash = await sendUserOperation(userOp) - log.info('UserOperation sent', { - hash, - sendTime: Date.now(), - userOp: 
JSON.stringify(userOp, null, 2), - }) - return hash - } catch (error) { - const errorMessage = - error instanceof Error ? `${error.name}: ${error.message}` : 'Unknown error occurred' + async simulateUserOpActivity(userOp: UserOperation<'v0.7'>) { + if (!userOp.signature) { + throw ApplicationFailure.nonRetryable('UserOp signature is required') + } + try { + await simulateUserOperation(userOp) + } catch (error) { + throw ApplicationFailure.nonRetryable('Error simulating user operation', error.code, error) + } + }, + async sendUserOpActivity(userOp: UserOperation<'v0.7'>) { + try { + const hash = await sendUserOperation(userOp) + log.info('sendUserOperationActivity', { hash, userOp: superjson.stringify(userOp) }) + return hash + } catch (error) { + throw ApplicationFailure.nonRetryable('Error sending user operation', error.code, error) + } + }, - log.error('Error in sendUserOpActivity', { - error: errorMessage, - creationTime, - sendTime: Date.now(), - userOp: JSON.stringify(userOp, null, 2), - }) - - throw ApplicationFailure.nonRetryable(errorMessage) - } -} - -async function waitForTransactionReceiptActivity(hash: `0x${string}`) { - if (!hash) { - throw ApplicationFailure.nonRetryable('Invalid hash: hash is undefined') - } - try { - const receipt = await waitForTransactionReceipt(hash) - if (!receipt.success) - throw ApplicationFailure.nonRetryable('Tx failed', receipt.sender, receipt.userOpHash) - log.info('waitForTransactionReceiptActivity', { receipt }) - return receipt - } catch (error) { - const errorMessage = error instanceof Error ? 
error.message : String(error) - log.error('Error in waitForTransactionReceiptActivity', { hash, error: errorMessage }) - throw ApplicationFailure.nonRetryable('Error waiting for tx receipt', errorMessage) - } -} -async function isTransferIndexedActivity(hash: `0x${string}`) { - const isIndexed = await isTransferIndexed(hash) - log.info('isTransferIndexedActivity', { isIndexed }) - if (!isIndexed) { - throw ApplicationFailure.retryable('Transfer not yet indexed in db') + async waitForTransactionReceiptActivity(hash: `0x${string}`) { + try { + const receipt = await waitForTransactionReceipt(hash) + if (!receipt.success) + throw ApplicationFailure.nonRetryable('Tx failed', receipt.sender, receipt.userOpHash) + log.info('waitForTransactionReceiptActivity', { receipt: superjson.stringify(receipt) }) + return receipt + } catch (error) { + throw ApplicationFailure.nonRetryable('Error waiting for tx receipt', error.code, error) + } + }, + async isTransferIndexedActivity(hash: `0x${string}`) { + const isIndexed = await isTransferIndexed(hash) + log.info('isTransferIndexedActivity', { isIndexed }) + return isIndexed + }, } - return isIndexed } diff --git a/packages/workflows/src/transfer-workflow/supabase.ts b/packages/workflows/src/transfer-workflow/supabase.ts index 94c0352e0..f2aeab645 100644 --- a/packages/workflows/src/transfer-workflow/supabase.ts +++ b/packages/workflows/src/transfer-workflow/supabase.ts @@ -3,11 +3,10 @@ import { hexToBytea } from 'app/utils/hexToBytea' import { supabaseAdmin } from 'app/utils/supabase/admin' export async function isTransferIndexed(hash: `0x${string}`) { - const { data, error } = await supabaseAdmin + const { count, error, status, statusText } = await supabaseAdmin .from('send_account_transfers') .select('*', { count: 'exact', head: true }) .eq('tx_hash', hexToBytea(hash)) - .single() log.info('isTransferIndexed', { count, error, status, statusText }) if (error) { @@ -18,8 +17,12 @@ export async function isTransferIndexed(hash: 
`0x${string}`) { throw ApplicationFailure.nonRetryable( 'Error reading transfer from send_account_transfers column.', error.code, - error + { + ...error, + status, + statusText, + } ) } - return data !== null + return count !== null && count > 0 } diff --git a/packages/workflows/src/transfer-workflow/wagmi.ts b/packages/workflows/src/transfer-workflow/wagmi.ts index eefab60ac..5433d982c 100644 --- a/packages/workflows/src/transfer-workflow/wagmi.ts +++ b/packages/workflows/src/transfer-workflow/wagmi.ts @@ -1,7 +1,9 @@ -import { log, ApplicationFailure } from '@temporalio/activity' import type { UserOperation } from 'permissionless' import { baseMainnetBundlerClient, baseMainnetClient, entryPointAddress } from '@my/wagmi' import type { Hex } from 'viem' +import superjson from 'superjson' + +import { log } from '@temporalio/activity' /** * default user op with preset gas values that work will probably need to move this to the database. @@ -40,7 +42,7 @@ export async function simulateUserOperation(userOp: UserOperation<'v0.7'>) { } export async function sendUserOperation(userOp: UserOperation<'v0.7'>) { - log.info('Sending UserOperation', { userOp: JSON.stringify(userOp, null, 2) }) + log.info('Sending UserOperation', { userOp: superjson.stringify(userOp) }) try { const hash = await baseMainnetBundlerClient.sendUserOperation({ userOperation: userOp, @@ -50,7 +52,7 @@ export async function sendUserOperation(userOp: UserOperation<'v0.7'>) { } catch (error) { log.error('Error in sendUserOperation', { error: error instanceof Error ? 
error.message : String(error), - userOp: JSON.stringify(userOp, null, 2), + userOp: superjson.stringify(userOp), }) throw error } diff --git a/packages/workflows/src/transfer-workflow/workflow.ts b/packages/workflows/src/transfer-workflow/workflow.ts index e1b423434..0f55be92f 100644 --- a/packages/workflows/src/transfer-workflow/workflow.ts +++ b/packages/workflows/src/transfer-workflow/workflow.ts @@ -16,41 +16,43 @@ const { startToCloseTimeout: '45 seconds', }) -type simulating = { status: 'simulating'; data: { userOp: UserOperation<'v0.7'> } } -type sending = { status: 'sending'; data: { userOp: UserOperation<'v0.7'> } } -type waiting = { status: 'waiting'; data: { hash: string; userOp: UserOperation<'v0.7'> } } -type indexing = { +type BaseState = { userOp: UserOperation<'v0.7'> } + +type Simulating = { status: 'simulating' } & BaseState +type Sending = { status: 'sending' } & BaseState +type Waiting = { status: 'waiting'; hash: string } & BaseState +type Indexing = { status: 'indexing' - data: { receipt: GetUserOperationReceiptReturnType; userOp: UserOperation<'v0.7'> } -} -type confirmed = { + receipt: GetUserOperationReceiptReturnType +} & BaseState +type Confirmed = { status: 'confirmed' receipt: GetUserOperationReceiptReturnType | boolean } & BaseState -export type transferState = simulating | sending | waiting | indexing | confirmed +export type transferState = Simulating | Sending | Waiting | Indexing | Confirmed export const getTransferStateQuery = defineQuery('getTransferState') export async function TransferWorkflow(userOp: UserOperation<'v0.7'>) { - setHandler(getTransferStateQuery, () => ({ status: 'simulating', data: { userOp } })) - log('SendTransferWorkflow started with userOp:', JSON.stringify(parsedUserOp, null, 2)) + setHandler(getTransferStateQuery, () => ({ status: 'simulating', userOp })) + log('SendTransferWorkflow started with userOp:', superjson.stringify(userOp)) await simulateUserOpActivity(userOp) log('Simulation completed') - 
setHandler(getTransferStateQuery, () => ({ status: 'sending', data: { userOp } })) + setHandler(getTransferStateQuery, () => ({ status: 'sending', userOp })) log('Sending UserOperation') const hash = await sendUserOpActivity(userOp) if (!hash) throw ApplicationFailure.nonRetryable('No hash returned from sendUserOperation') log('UserOperation sent, hash:', hash) - setHandler(getTransferStateQuery, () => ({ status: 'waiting', data: { userOp, hash } })) + setHandler(getTransferStateQuery, () => ({ status: 'waiting', userOp, hash })) const receipt = await waitForTransactionReceiptActivity(hash) if (!receipt) throw ApplicationFailure.nonRetryable('No receipt returned from waitForTransactionReceipt') log('Receipt received:', superjson.stringify(receipt)) - setHandler(getTransferStateQuery, () => ({ status: 'indexing', data: { userOp, receipt } })) - const transfer = await isTransferIndexedActivity(receipt.userOpHash) + setHandler(getTransferStateQuery, () => ({ status: 'indexing', userOp, receipt })) + const transfer = await isTransferIndexedActivity(receipt.receipt.transactionHash) if (!transfer) throw ApplicationFailure.retryable('Transfer not yet indexed in db') log('Transfer indexed:', superjson.stringify(transfer)) - setHandler(getTransferStateQuery, () => ({ status: 'confirmed', data: { userOp, receipt } })) + setHandler(getTransferStateQuery, () => ({ status: 'confirmed', userOp, receipt })) return transfer } diff --git a/packages/workflows/src/utils/bootstrap.ts b/packages/workflows/src/utils/bootstrap.ts new file mode 100644 index 000000000..f5c21b2bf --- /dev/null +++ b/packages/workflows/src/utils/bootstrap.ts @@ -0,0 +1,30 @@ +const requiredEnvVars = [ + 'NEXT_PUBLIC_BASE_CHAIN_ID', + 'NEXT_PUBLIC_BASE_RPC_URL', + 'NEXT_PUBLIC_BUNDLER_RPC_URL', + 'NEXT_PUBLIC_SUPABASE_URL', + 'SUPABASE_DB_URL', + 'SUPABASE_JWT_SECRET', + 'SUPABASE_SERVICE_ROLE', +] as const + +/** + * Bootstraps the workflow by setting up the environment variables that many of our clients 
depend on. + * This is due to Temporal's deterministic execution requirements. + * + * In the Temporal TypeScript SDK, Workflows run in a deterministic sandboxed environment. + * The code is bundled on Worker creation using Webpack, and can import any package as long as it does not reference Node.js or DOM APIs. + * + * @link https://docs.temporal.io/develop/typescript/core-application#workflow-logic-requirements + */ +export const bootstrap = (env: Record) => { + const varsSet: string[] = [] + for (const envVar of requiredEnvVars) { + if (!env[envVar]) { + throw new Error(`Missing required environment variable: ${envVar}`) + } + varsSet.push(envVar) + globalThis.process.env[envVar] = env[envVar] + } + console.log('Bootstrapped environment variables:', varsSet) +} diff --git a/packages/workflows/src/utils/index.ts b/packages/workflows/src/utils/index.ts new file mode 100644 index 000000000..642ebc387 --- /dev/null +++ b/packages/workflows/src/utils/index.ts @@ -0,0 +1 @@ +export * from './bootstrap' diff --git a/packages/workflows/tsconfig.json b/packages/workflows/tsconfig.json index 074ea6248..d60ea4bd6 100644 --- a/packages/workflows/tsconfig.json +++ b/packages/workflows/tsconfig.json @@ -10,19 +10,20 @@ "app/*": ["../app/*"], "@my/wagmi": ["../wagmi/src"], "@my/wagmi/*": ["../wagmi/src/*"], - "@my/api/*": ["../api/src/*"], - "@my/workflows": ["./packages/workflows/src/all-workflows.ts"] + "@my/workflows": ["./src/all-workflows.ts"], + "@my/workflows/*": ["./src/*"], + "@my/temporal": ["../temporal/src"], + "@my/temporal/*": ["../temporal/src/*"] } }, "include": [ "./src", + "../temporal/src", "./jest.config.ts", "../../supabase", "../app", "../wagmi/src", - "../api/src", "../../globals.d.ts", "../../environment.d.ts" - ], - "references": [] + ] } diff --git a/tilt/apps.Tiltfile b/tilt/apps.Tiltfile index 6ef49b0c5..50eb8d121 100644 --- a/tilt/apps.Tiltfile +++ b/tilt/apps.Tiltfile @@ -117,6 +117,7 @@ local_resource( "wagmi:generate", "temporal:build", 
"temporal", + "workflows:bundle", ], serve_cmd = "yarn workspace workers start", deps = ts_files( diff --git a/tilt/deps.Tiltfile b/tilt/deps.Tiltfile index 42d24a05f..655fbec46 100644 --- a/tilt/deps.Tiltfile +++ b/tilt/deps.Tiltfile @@ -258,6 +258,16 @@ local_resource( deps = ui_files, ) +local_resource( + name="workflows:bundle", + allow_parallel = True, + cmd = "yarn workspace @my/workflows bundle", + labels = labels, + resource_deps = [ + "yarn:install", + ], +) + local_resource( name = "shovel:generate-config", allow_parallel = True, diff --git a/tsconfig.base.json b/tsconfig.base.json index 79af3e529..808e75519 100644 --- a/tsconfig.base.json +++ b/tsconfig.base.json @@ -13,6 +13,7 @@ "@my/wagmi": ["./packages/wagmi/src"], "@my/wagmi/*": ["./packages/wagmi/src/*"], "@my/workflows/*": ["./packages/workflows/src/*"], + "@my/temporal/*": ["./packages/temporal/src/*"], "app/*": ["packages/app/*"] }, "importHelpers": true, diff --git a/tsconfig.json b/tsconfig.json index bc6b3ff1f..9e1979ddd 100644 --- a/tsconfig.json +++ b/tsconfig.json @@ -11,6 +11,7 @@ "references": [ { "path": "apps/expo" }, { "path": "apps/next" }, + { "path": "apps/workers" }, { "path": "packages/app" }, { "path": "packages/ui" }, { "path": "packages/api" }, @@ -21,6 +22,7 @@ { "path": "packages/daimo-expo-passkeys" }, { "path": "packages/wagmi" }, { "path": "packages/workflows" }, + { "path": "packages/temporal" }, { "path": "supabase" } ] } diff --git a/yarn.lock b/yarn.lock index 471416289..6a0a0deed 100644 --- a/yarn.lock +++ b/yarn.lock @@ -3184,6 +3184,13 @@ __metadata: languageName: node linkType: hard +"@esbuild/aix-ppc64@npm:0.23.1": + version: 0.23.1 + resolution: "@esbuild/aix-ppc64@npm:0.23.1" + conditions: os=aix & cpu=ppc64 + languageName: node + linkType: hard + "@esbuild/android-arm64@npm:0.17.18": version: 0.17.18 resolution: "@esbuild/android-arm64@npm:0.17.18" @@ -3212,6 +3219,13 @@ __metadata: languageName: node linkType: hard +"@esbuild/android-arm64@npm:0.23.1": + 
version: 0.23.1 + resolution: "@esbuild/android-arm64@npm:0.23.1" + conditions: os=android & cpu=arm64 + languageName: node + linkType: hard + "@esbuild/android-arm@npm:0.17.18": version: 0.17.18 resolution: "@esbuild/android-arm@npm:0.17.18" @@ -3240,6 +3254,13 @@ __metadata: languageName: node linkType: hard +"@esbuild/android-arm@npm:0.23.1": + version: 0.23.1 + resolution: "@esbuild/android-arm@npm:0.23.1" + conditions: os=android & cpu=arm + languageName: node + linkType: hard + "@esbuild/android-x64@npm:0.17.18": version: 0.17.18 resolution: "@esbuild/android-x64@npm:0.17.18" @@ -3268,6 +3289,13 @@ __metadata: languageName: node linkType: hard +"@esbuild/android-x64@npm:0.23.1": + version: 0.23.1 + resolution: "@esbuild/android-x64@npm:0.23.1" + conditions: os=android & cpu=x64 + languageName: node + linkType: hard + "@esbuild/darwin-arm64@npm:0.17.18": version: 0.17.18 resolution: "@esbuild/darwin-arm64@npm:0.17.18" @@ -3296,6 +3324,13 @@ __metadata: languageName: node linkType: hard +"@esbuild/darwin-arm64@npm:0.23.1": + version: 0.23.1 + resolution: "@esbuild/darwin-arm64@npm:0.23.1" + conditions: os=darwin & cpu=arm64 + languageName: node + linkType: hard + "@esbuild/darwin-x64@npm:0.17.18": version: 0.17.18 resolution: "@esbuild/darwin-x64@npm:0.17.18" @@ -3324,6 +3359,13 @@ __metadata: languageName: node linkType: hard +"@esbuild/darwin-x64@npm:0.23.1": + version: 0.23.1 + resolution: "@esbuild/darwin-x64@npm:0.23.1" + conditions: os=darwin & cpu=x64 + languageName: node + linkType: hard + "@esbuild/freebsd-arm64@npm:0.17.18": version: 0.17.18 resolution: "@esbuild/freebsd-arm64@npm:0.17.18" @@ -3352,6 +3394,13 @@ __metadata: languageName: node linkType: hard +"@esbuild/freebsd-arm64@npm:0.23.1": + version: 0.23.1 + resolution: "@esbuild/freebsd-arm64@npm:0.23.1" + conditions: os=freebsd & cpu=arm64 + languageName: node + linkType: hard + "@esbuild/freebsd-x64@npm:0.17.18": version: 0.17.18 resolution: "@esbuild/freebsd-x64@npm:0.17.18" @@ -3380,6 
+3429,13 @@ __metadata: languageName: node linkType: hard +"@esbuild/freebsd-x64@npm:0.23.1": + version: 0.23.1 + resolution: "@esbuild/freebsd-x64@npm:0.23.1" + conditions: os=freebsd & cpu=x64 + languageName: node + linkType: hard + "@esbuild/linux-arm64@npm:0.17.18": version: 0.17.18 resolution: "@esbuild/linux-arm64@npm:0.17.18" @@ -3408,6 +3464,13 @@ __metadata: languageName: node linkType: hard +"@esbuild/linux-arm64@npm:0.23.1": + version: 0.23.1 + resolution: "@esbuild/linux-arm64@npm:0.23.1" + conditions: os=linux & cpu=arm64 + languageName: node + linkType: hard + "@esbuild/linux-arm@npm:0.17.18": version: 0.17.18 resolution: "@esbuild/linux-arm@npm:0.17.18" @@ -3436,6 +3499,13 @@ __metadata: languageName: node linkType: hard +"@esbuild/linux-arm@npm:0.23.1": + version: 0.23.1 + resolution: "@esbuild/linux-arm@npm:0.23.1" + conditions: os=linux & cpu=arm + languageName: node + linkType: hard + "@esbuild/linux-ia32@npm:0.17.18": version: 0.17.18 resolution: "@esbuild/linux-ia32@npm:0.17.18" @@ -3464,6 +3534,13 @@ __metadata: languageName: node linkType: hard +"@esbuild/linux-ia32@npm:0.23.1": + version: 0.23.1 + resolution: "@esbuild/linux-ia32@npm:0.23.1" + conditions: os=linux & cpu=ia32 + languageName: node + linkType: hard + "@esbuild/linux-loong64@npm:0.17.18": version: 0.17.18 resolution: "@esbuild/linux-loong64@npm:0.17.18" @@ -3492,6 +3569,13 @@ __metadata: languageName: node linkType: hard +"@esbuild/linux-loong64@npm:0.23.1": + version: 0.23.1 + resolution: "@esbuild/linux-loong64@npm:0.23.1" + conditions: os=linux & cpu=loong64 + languageName: node + linkType: hard + "@esbuild/linux-mips64el@npm:0.17.18": version: 0.17.18 resolution: "@esbuild/linux-mips64el@npm:0.17.18" @@ -3520,6 +3604,13 @@ __metadata: languageName: node linkType: hard +"@esbuild/linux-mips64el@npm:0.23.1": + version: 0.23.1 + resolution: "@esbuild/linux-mips64el@npm:0.23.1" + conditions: os=linux & cpu=mips64el + languageName: node + linkType: hard + 
"@esbuild/linux-ppc64@npm:0.17.18": version: 0.17.18 resolution: "@esbuild/linux-ppc64@npm:0.17.18" @@ -3548,6 +3639,13 @@ __metadata: languageName: node linkType: hard +"@esbuild/linux-ppc64@npm:0.23.1": + version: 0.23.1 + resolution: "@esbuild/linux-ppc64@npm:0.23.1" + conditions: os=linux & cpu=ppc64 + languageName: node + linkType: hard + "@esbuild/linux-riscv64@npm:0.17.18": version: 0.17.18 resolution: "@esbuild/linux-riscv64@npm:0.17.18" @@ -3576,6 +3674,13 @@ __metadata: languageName: node linkType: hard +"@esbuild/linux-riscv64@npm:0.23.1": + version: 0.23.1 + resolution: "@esbuild/linux-riscv64@npm:0.23.1" + conditions: os=linux & cpu=riscv64 + languageName: node + linkType: hard + "@esbuild/linux-s390x@npm:0.17.18": version: 0.17.18 resolution: "@esbuild/linux-s390x@npm:0.17.18" @@ -3604,6 +3709,13 @@ __metadata: languageName: node linkType: hard +"@esbuild/linux-s390x@npm:0.23.1": + version: 0.23.1 + resolution: "@esbuild/linux-s390x@npm:0.23.1" + conditions: os=linux & cpu=s390x + languageName: node + linkType: hard + "@esbuild/linux-x64@npm:0.17.18": version: 0.17.18 resolution: "@esbuild/linux-x64@npm:0.17.18" @@ -3632,6 +3744,13 @@ __metadata: languageName: node linkType: hard +"@esbuild/linux-x64@npm:0.23.1": + version: 0.23.1 + resolution: "@esbuild/linux-x64@npm:0.23.1" + conditions: os=linux & cpu=x64 + languageName: node + linkType: hard + "@esbuild/netbsd-x64@npm:0.17.18": version: 0.17.18 resolution: "@esbuild/netbsd-x64@npm:0.17.18" @@ -3660,6 +3779,20 @@ __metadata: languageName: node linkType: hard +"@esbuild/netbsd-x64@npm:0.23.1": + version: 0.23.1 + resolution: "@esbuild/netbsd-x64@npm:0.23.1" + conditions: os=netbsd & cpu=x64 + languageName: node + linkType: hard + +"@esbuild/openbsd-arm64@npm:0.23.1": + version: 0.23.1 + resolution: "@esbuild/openbsd-arm64@npm:0.23.1" + conditions: os=openbsd & cpu=arm64 + languageName: node + linkType: hard + "@esbuild/openbsd-x64@npm:0.17.18": version: 0.17.18 resolution: 
"@esbuild/openbsd-x64@npm:0.17.18" @@ -3688,6 +3821,13 @@ __metadata: languageName: node linkType: hard +"@esbuild/openbsd-x64@npm:0.23.1": + version: 0.23.1 + resolution: "@esbuild/openbsd-x64@npm:0.23.1" + conditions: os=openbsd & cpu=x64 + languageName: node + linkType: hard + "@esbuild/sunos-x64@npm:0.17.18": version: 0.17.18 resolution: "@esbuild/sunos-x64@npm:0.17.18" @@ -3716,6 +3856,13 @@ __metadata: languageName: node linkType: hard +"@esbuild/sunos-x64@npm:0.23.1": + version: 0.23.1 + resolution: "@esbuild/sunos-x64@npm:0.23.1" + conditions: os=sunos & cpu=x64 + languageName: node + linkType: hard + "@esbuild/win32-arm64@npm:0.17.18": version: 0.17.18 resolution: "@esbuild/win32-arm64@npm:0.17.18" @@ -3744,6 +3891,13 @@ __metadata: languageName: node linkType: hard +"@esbuild/win32-arm64@npm:0.23.1": + version: 0.23.1 + resolution: "@esbuild/win32-arm64@npm:0.23.1" + conditions: os=win32 & cpu=arm64 + languageName: node + linkType: hard + "@esbuild/win32-ia32@npm:0.17.18": version: 0.17.18 resolution: "@esbuild/win32-ia32@npm:0.17.18" @@ -3772,6 +3926,13 @@ __metadata: languageName: node linkType: hard +"@esbuild/win32-ia32@npm:0.23.1": + version: 0.23.1 + resolution: "@esbuild/win32-ia32@npm:0.23.1" + conditions: os=win32 & cpu=ia32 + languageName: node + linkType: hard + "@esbuild/win32-x64@npm:0.17.18": version: 0.17.18 resolution: "@esbuild/win32-x64@npm:0.17.18" @@ -3800,6 +3961,13 @@ __metadata: languageName: node linkType: hard +"@esbuild/win32-x64@npm:0.23.1": + version: 0.23.1 + resolution: "@esbuild/win32-x64@npm:0.23.1" + conditions: os=win32 & cpu=x64 + languageName: node + linkType: hard + "@eslint-community/eslint-utils@npm:^4.2.0, @eslint-community/eslint-utils@npm:^4.4.0": version: 4.4.0 resolution: "@eslint-community/eslint-utils@npm:4.4.0" @@ -5020,6 +5188,16 @@ __metadata: languageName: node linkType: hard +"@grpc/grpc-js@npm:^1.10.7": + version: 1.12.5 + resolution: "@grpc/grpc-js@npm:1.12.5" + dependencies: + "@grpc/proto-loader": 
"npm:^0.7.13" + "@js-sdsl/ordered-map": "npm:^4.4.2" + checksum: 10/4f8ead236dcab4d94e15e62d65ad2d93732d37f5cc52ffafe67ae00f69eae4a4c97d6d34a1b9eac9f30206468f2d15302ea6649afcba1d38929afa9d1e7c12d5 + languageName: node + linkType: hard + "@grpc/proto-loader@npm:^0.7.13": version: 0.7.13 resolution: "@grpc/proto-loader@npm:0.7.13" @@ -6342,8 +6520,8 @@ __metadata: jsonwebtoken: "npm:^9.0.2" ms: "npm:^2.1.3" p-queue: "npm:^8.0.1" + superjson: "npm:^2.2.1" permissionless: "npm:^0.1.14" - superjson: "npm:^1.13.1" viem: "npm:^2.19.0" zod: "npm:^3.23.8" languageName: unknown @@ -6433,6 +6611,21 @@ __metadata: languageName: unknown linkType: soft +"@my/temporal@workspace:*, @my/temporal@workspace:packages/temporal": + version: 0.0.0-use.local + resolution: "@my/temporal@workspace:packages/temporal" + dependencies: + "@temporalio/client": "npm:^1.10.1" + "@temporalio/common": "npm:^1.11.1" + esbuild: "npm:^0.23.1" + superjson: "npm:^2.2.1" + temporal: "npm:^0.7.1" + typescript: "npm:^5.5.3" + peerDependencies: + typescript: ^5.5.3 + languageName: unknown + linkType: soft + "@my/ui@workspace:*, @my/ui@workspace:packages/ui": version: 0.0.0-use.local resolution: "@my/ui@workspace:packages/ui" @@ -11161,6 +11354,20 @@ __metadata: languageName: node linkType: hard +"@temporalio/client@npm:^1.10.1": + version: 1.11.5 + resolution: "@temporalio/client@npm:1.11.5" + dependencies: + "@grpc/grpc-js": "npm:^1.10.7" + "@temporalio/common": "npm:1.11.5" + "@temporalio/proto": "npm:1.11.5" + abort-controller: "npm:^3.0.0" + long: "npm:^5.2.3" + uuid: "npm:^9.0.1" + checksum: 10/d77c300dac950e080fb3662be02a2af1fa88c22ebbc374275d85977f2ec5e5fd547c0ca88f6dee5b6264506436823485898097c6fe08c086e239d8bd4e116913 + languageName: node + linkType: hard + "@temporalio/common@npm:1.10.1": version: 1.10.1 resolution: "@temporalio/common@npm:1.10.1" @@ -11173,6 +11380,18 @@ __metadata: languageName: node linkType: hard +"@temporalio/common@npm:1.11.5, @temporalio/common@npm:^1.11.1": + version: 1.11.5 
+ resolution: "@temporalio/common@npm:1.11.5" + dependencies: + "@temporalio/proto": "npm:1.11.5" + long: "npm:^5.2.3" + ms: "npm:^3.0.0-canary.1" + proto3-json-serializer: "npm:^2.0.0" + checksum: 10/31e90f0fc9520d1ab19cda99e771a8a02dc5531a1320bc963bfb3c46fbca64a1837945188d36624d79d34960790bfe2831f1b862ad805eda02ba270834046c56 + languageName: node + linkType: hard + "@temporalio/core-bridge@npm:1.10.1": version: 1.10.1 resolution: "@temporalio/core-bridge@npm:1.10.1" @@ -11212,6 +11431,16 @@ __metadata: languageName: node linkType: hard +"@temporalio/proto@npm:1.11.5": + version: 1.11.5 + resolution: "@temporalio/proto@npm:1.11.5" + dependencies: + long: "npm:^5.2.3" + protobufjs: "npm:^7.2.5" + checksum: 10/2be4fedc06e0d04e8726e5bab18a055ac3d6197cffd3801ae3b3c3e60e188c05706eefa2fc31c0196e2cfdedf593e23c881c40655e3696cf7bac5f809e67f1e8 + languageName: node + linkType: hard + "@temporalio/testing@npm:^1.10.1": version: 1.10.1 resolution: "@temporalio/testing@npm:1.10.1" @@ -14463,6 +14692,7 @@ __metadata: react-test-renderer: "npm:^18.3.1" react-use-precision-timer: "npm:^3.5.5" solito: "npm:^4.0.1" + superjson: "npm:^2.2.1" superjson: "npm:^1.13.1" type-fest: "npm:^4.32.0" typescript: "npm:^5.5.3" @@ -19368,6 +19598,89 @@ __metadata: languageName: node linkType: hard +"esbuild@npm:^0.23.1": + version: 0.23.1 + resolution: "esbuild@npm:0.23.1" + dependencies: + "@esbuild/aix-ppc64": "npm:0.23.1" + "@esbuild/android-arm": "npm:0.23.1" + "@esbuild/android-arm64": "npm:0.23.1" + "@esbuild/android-x64": "npm:0.23.1" + "@esbuild/darwin-arm64": "npm:0.23.1" + "@esbuild/darwin-x64": "npm:0.23.1" + "@esbuild/freebsd-arm64": "npm:0.23.1" + "@esbuild/freebsd-x64": "npm:0.23.1" + "@esbuild/linux-arm": "npm:0.23.1" + "@esbuild/linux-arm64": "npm:0.23.1" + "@esbuild/linux-ia32": "npm:0.23.1" + "@esbuild/linux-loong64": "npm:0.23.1" + "@esbuild/linux-mips64el": "npm:0.23.1" + "@esbuild/linux-ppc64": "npm:0.23.1" + "@esbuild/linux-riscv64": "npm:0.23.1" + "@esbuild/linux-s390x": 
"npm:0.23.1" + "@esbuild/linux-x64": "npm:0.23.1" + "@esbuild/netbsd-x64": "npm:0.23.1" + "@esbuild/openbsd-arm64": "npm:0.23.1" + "@esbuild/openbsd-x64": "npm:0.23.1" + "@esbuild/sunos-x64": "npm:0.23.1" + "@esbuild/win32-arm64": "npm:0.23.1" + "@esbuild/win32-ia32": "npm:0.23.1" + "@esbuild/win32-x64": "npm:0.23.1" + dependenciesMeta: + "@esbuild/aix-ppc64": + optional: true + "@esbuild/android-arm": + optional: true + "@esbuild/android-arm64": + optional: true + "@esbuild/android-x64": + optional: true + "@esbuild/darwin-arm64": + optional: true + "@esbuild/darwin-x64": + optional: true + "@esbuild/freebsd-arm64": + optional: true + "@esbuild/freebsd-x64": + optional: true + "@esbuild/linux-arm": + optional: true + "@esbuild/linux-arm64": + optional: true + "@esbuild/linux-ia32": + optional: true + "@esbuild/linux-loong64": + optional: true + "@esbuild/linux-mips64el": + optional: true + "@esbuild/linux-ppc64": + optional: true + "@esbuild/linux-riscv64": + optional: true + "@esbuild/linux-s390x": + optional: true + "@esbuild/linux-x64": + optional: true + "@esbuild/netbsd-x64": + optional: true + "@esbuild/openbsd-arm64": + optional: true + "@esbuild/openbsd-x64": + optional: true + "@esbuild/sunos-x64": + optional: true + "@esbuild/win32-arm64": + optional: true + "@esbuild/win32-ia32": + optional: true + "@esbuild/win32-x64": + optional: true + bin: + esbuild: bin/esbuild + checksum: 10/f55fbd0bfb0f86ce67a6d2c6f6780729d536c330999ecb9f5a38d578fb9fda820acbbc67d6d1d377eed8fed50fc38f14ff9cb014f86dafab94269a7fb2177018 + languageName: node + linkType: hard + "escalade@npm:^3.1.1": version: 3.1.1 resolution: "escalade@npm:3.1.1" @@ -31681,7 +31994,6 @@ __metadata: eslint: "npm:^8.46.0" lefthook: "npm:^1.5.5" node-gyp: "npm:^9.3.1" - snaplet: "npm:^0.42.1" turbo: "npm:^2.1.2" typescript: "npm:^5.5.3" zx: "npm:^8.1.2" @@ -33056,12 +33368,12 @@ __metadata: languageName: node linkType: hard -"superjson@npm:^1.13.1": - version: 1.13.3 - resolution: "superjson@npm:1.13.3" 
+"superjson@npm:^2.2.1": + version: 2.2.2 + resolution: "superjson@npm:2.2.2" dependencies: copy-anything: "npm:^3.0.2" - checksum: 10/71a186c513a9821e58264c0563cd1b3cf07d3b5ba53a09cc5c1a604d8ffeacac976a6ba1b5d5b3c71b6ab5a1941dfba5a15e3f106ad3ef22fe8d5eee3e2be052 + checksum: 10/6fdc709db4f69d586a18379948e0ade8268c851c791701fea960e29cea12672d7561b4ca89c4049c2e787eb1cec08a51df51d357aa6852078bc0d71d7e17b401 languageName: node linkType: hard @@ -33417,6 +33729,13 @@ __metadata: languageName: node linkType: hard +"temporal@npm:^0.7.1": + version: 0.7.1 + resolution: "temporal@npm:0.7.1" + checksum: 10/ec1b5403229b553577aac55345ea2e6b1445db8628c362ce7ca635e999fce6c3dbbec8f27df5df9341a78ad07e775946730406837d7dd454af7f07596e3c284f + languageName: node + linkType: hard + "tempy@npm:0.3.0": version: 0.3.0 resolution: "tempy@npm:0.3.0" @@ -36036,6 +36355,7 @@ __metadata: "@my/workflows": "workspace:*" "@temporalio/worker": "npm:^1.10.1" "@types/bun": "npm:^1.1.6" + dotenv-cli: "npm:^7.3.0" ts-node: "npm:^10.9.2" typescript: "npm:^5.5.3" peerDependencies: From 43ead98557b9fe54d1bc861eb42e77227d351ca7 Mon Sep 17 00:00:00 2001 From: youngkidwarrior Date: Wed, 15 Jan 2025 14:16:54 -0800 Subject: [PATCH 12/58] workflows build command --- apps/workers/src/worker.ts | 2 +- biome.json | 3 ++- package.json | 1 + packages/workflows/package.json | 5 ++++- tilt/apps.Tiltfile | 2 +- tilt/deps.Tiltfile | 4 ++-- yarn.lock | 1 + 7 files changed, 12 insertions(+), 6 deletions(-) diff --git a/apps/workers/src/worker.ts b/apps/workers/src/worker.ts index 6a9fa3351..3aa6172f7 100644 --- a/apps/workers/src/worker.ts +++ b/apps/workers/src/worker.ts @@ -1,4 +1,4 @@ -import { Worker, NativeConnection, bundleWorkflowCode } from '@temporalio/worker' +import { Worker, NativeConnection } from '@temporalio/worker' import { createTransferActivities } from '@my/workflows/all-activities' import fs from 'node:fs/promises' import { createRequire } from 'node:module' diff --git a/biome.json b/biome.json index 
cc7f7359d..463609fb8 100644 --- a/biome.json +++ b/biome.json @@ -25,7 +25,8 @@ "./supabase/.temp/**", "./packages/contracts/var/*.json", "**/tsconfig.json", - "**/*.tsconfig.json" + "**/*.tsconfig.json", + "./packages/workflows/workflow-bundle.js" ] }, "organizeImports": { diff --git a/package.json b/package.json index 71b32a100..617acc91b 100644 --- a/package.json +++ b/package.json @@ -35,6 +35,7 @@ "snaplet": "yarn workspace @my/snaplet", "workers": "yarn workspace workers", "shovel": "yarn workspace @my/shovel", + "workflows": "yarn workspace @my/workflows", "clean": "yarn workspaces foreach --all -pi run clean" }, "resolutions": { diff --git a/packages/workflows/package.json b/packages/workflows/package.json index f344f91c0..f02fa3bc8 100644 --- a/packages/workflows/package.json +++ b/packages/workflows/package.json @@ -26,13 +26,16 @@ "scripts": { "lint": "tsc", "test": "jest", - "bundle": "node --loader ts-node/esm --experimental-specifier-resolution=node src/scripts/build-workflow-bundle.ts" + "build": "yarn bundle", + "bundle": "yarn with-env node --import 'data:text/javascript,import { register } from \"node:module\"; import { pathToFileURL } from \"node:url\"; register(\"ts-node/esm\", pathToFileURL(\"./\"));' src/scripts/build-workflow-bundle.ts", + "with-env": "dotenv -e ../../.env -c --" }, "devDependencies": { "@jest/globals": "^29.7.0", "@temporalio/nyc-test-coverage": "^1.10.1", "@temporalio/testing": "^1.10.1", "@types/source-map-support": "^0", + "dotenv-cli": "^7.3.0", "jest": "^29.7.0", "nyc": "^17.0.0", "source-map-support": "^0.5.21", diff --git a/tilt/apps.Tiltfile b/tilt/apps.Tiltfile index 50eb8d121..2db398e8c 100644 --- a/tilt/apps.Tiltfile +++ b/tilt/apps.Tiltfile @@ -117,7 +117,7 @@ local_resource( "wagmi:generate", "temporal:build", "temporal", - "workflows:bundle", + "workflows:build", ], serve_cmd = "yarn workspace workers start", deps = ts_files( diff --git a/tilt/deps.Tiltfile b/tilt/deps.Tiltfile index 655fbec46..3382ce987 100644 
--- a/tilt/deps.Tiltfile +++ b/tilt/deps.Tiltfile @@ -259,9 +259,9 @@ local_resource( ) local_resource( - name="workflows:bundle", + name="workflows:build", allow_parallel = True, - cmd = "yarn workspace @my/workflows bundle", + cmd = "yarn workspace @my/workflows build", labels = labels, resource_deps = [ "yarn:install", diff --git a/yarn.lock b/yarn.lock index 6a0a0deed..6ab851096 100644 --- a/yarn.lock +++ b/yarn.lock @@ -6687,6 +6687,7 @@ __metadata: "@temporalio/workflow": "npm:^1.10.1" "@types/source-map-support": "npm:^0" app: "workspace:*" + dotenv-cli: "npm:^7.3.0" jest: "npm:^29.7.0" nyc: "npm:^17.0.0" source-map-support: "npm:^0.5.21" From 30fe644903896aa036fb7422fd35004cc4eef848 Mon Sep 17 00:00:00 2001 From: youngkidwarrior Date: Wed, 15 Jan 2025 15:22:05 -0800 Subject: [PATCH 13/58] fix client imports --- apps/next/next.config.js | 14 + .../send/__snapshots__/screen.test.tsx.snap | 1477 +++++++++++++++++ packages/app/features/send/confirm/screen.tsx | 20 +- packages/temporal/src/client.ts | 5 +- yarn.lock | 3 +- 5 files changed, 1498 insertions(+), 21 deletions(-) create mode 100644 packages/app/features/send/__snapshots__/screen.test.tsx.snap diff --git a/apps/next/next.config.js b/apps/next/next.config.js index 2155bfe45..89749536e 100644 --- a/apps/next/next.config.js +++ b/apps/next/next.config.js @@ -38,6 +38,20 @@ const plugins = [ }, excludeReactNativeWebExports: ['Switch', 'ProgressBar', 'Picker', 'CheckBox', 'Touchable'], }), + (nextConfig) => { + return { + webpack: (webpackConfig, options) => { + // Add Temporal to externals when building for server + if (options.isServer) { + webpackConfig.externals = [...(webpackConfig.externals || []), '@temporalio/client'] + } + if (typeof nextConfig.webpack === 'function') { + return nextConfig.webpack(webpackConfig, options) + } + return webpackConfig + }, + } + }, (nextConfig) => { return { webpack: (webpackConfig, options) => { diff --git 
a/packages/app/features/send/__snapshots__/screen.test.tsx.snap b/packages/app/features/send/__snapshots__/screen.test.tsx.snap new file mode 100644 index 000000000..be4fdde90 --- /dev/null +++ b/packages/app/features/send/__snapshots__/screen.test.tsx.snap @@ -0,0 +1,1477 @@ +// Jest Snapshot v1, https://goo.gl/fbAQLP + +exports[`SendScreen should render /send check when no send account: render 1`] = ` + + + + + TO + + + + edit + + + + + + + + + + + + + + + + + + + test + + + /test + + + + + + No send account + + + + /test + + + + has no send account! Ask them to create one or write a /send Check. + + + + + Write /send Check + + + +`; + +exports[`SendScreen should render with search when on /send and no recipient in params: render 1`] = ` + + + + + + + + + + + + + + + } + id=":r0:" + name="query" + onBlur={[Function]} + onChangeText={[Function]} + onFocus={[Function]} + placeholder="Sendtag, Phone, Send ID, Address" + placeholderTextColor="#081619" + readOnly={false} + style={ + { + "backgroundColor": "#111f22", + "borderBottomColor": "#343434", + "borderBottomLeftRadius": 9, + "borderBottomRightRadius": 9, + "borderBottomWidth": 1, + "borderLeftColor": "#343434", + "borderLeftWidth": 1, + "borderRightColor": "#343434", + "borderRightWidth": 1, + "borderStyle": "solid", + "borderTopColor": "#343434", + "borderTopLeftRadius": 9, + "borderTopRightRadius": 9, + "borderTopWidth": 1, + "color": "#FFFFFF", + "fontFamily": "System", + "fontSize": 19.2, + "fontStyle": "normal", + "fontWeight": "bold", + "height": 44, + "minWidth": 0, + "paddingLeft": 46, + "paddingRight": 40, + "position": "relative", + } + } + value="test" + /> + + + + + + + + +`; + +exports[`SendScreen should render with search when on /send and no recipient in params: search 1`] = ` + + + + + + + + + + + + + + + } + id=":r0:" + name="query" + onBlur={[Function]} + onChangeText={[Function]} + onFocus={[Function]} + placeholder="Sendtag, Phone, Send ID, Address" + placeholderTextColor="#081619" + 
readOnly={false} + style={ + { + "backgroundColor": "#111f22", + "borderBottomColor": "#343434", + "borderBottomLeftRadius": 9, + "borderBottomRightRadius": 9, + "borderBottomWidth": 1, + "borderLeftColor": "#343434", + "borderLeftWidth": 1, + "borderRightColor": "#343434", + "borderRightWidth": 1, + "borderStyle": "solid", + "borderTopColor": "#343434", + "borderTopLeftRadius": 9, + "borderTopRightRadius": 9, + "borderTopWidth": 1, + "color": "#FFFFFF", + "fontFamily": "System", + "fontSize": 19.2, + "fontStyle": "normal", + "fontWeight": "bold", + "height": 44, + "minWidth": 0, + "paddingLeft": 46, + "paddingRight": 40, + "position": "relative", + } + } + value="testtest" + /> + + + + + + + + + + + + tag + + + + + + + + + + + + + + + + + ?? + + + + + + + + + test + + + + /test + + + + + + + + + + +`; diff --git a/packages/app/features/send/confirm/screen.tsx b/packages/app/features/send/confirm/screen.tsx index abf2785d3..fea852769 100644 --- a/packages/app/features/send/confirm/screen.tsx +++ b/packages/app/features/send/confirm/screen.tsx @@ -41,9 +41,7 @@ import { useTokenPrices } from 'app/utils/useTokenPrices' const log = debug('app:features:send:confirm:screen') import { api } from 'app/utils/api' -import { getUserOperationHash } from 'permissionless' import { signUserOp } from 'app/utils/signUserOp' -import { byteaToBase64 } from 'app/utils/byteaToBase64' import { usePendingTransfers } from 'app/features/home/utils/usePendingTransfers' export function SendConfirmScreen() { @@ -189,7 +187,6 @@ export function SendConfirm() { assert(nonce !== undefined, 'Nonce is not available') throwIf(feesPerGasError) assert(!!feesPerGas, 'Fees per gas is not available') - assert(!!profile?.address, 'Could not resolve recipients send account') assert(selectedCoin?.balance >= BigInt(amount ?? 
'0'), 'Insufficient balance') const sender = sendAccount?.address as `0x${string}` @@ -204,19 +201,12 @@ export function SendConfirm() { console.log('feesPerGas', feesPerGas) console.log('userOp', _userOp) const chainId = baseMainnetClient.chain.id - const entryPoint = entryPointAddress[chainId] - const userOpHash = getUserOperationHash({ - userOperation: userOp, - entryPoint, - chainId, - }) + const signature = await signUserOp({ - userOpHash, - allowedCredentials: - webauthnCreds?.map((c) => ({ - id: byteaToBase64(c.raw_credential_id), - userHandle: c.name, - })) ?? [], + userOp, + chainId, + webauthnCreds, + entryPoint: entryPointAddress[chainId], }) userOp.signature = signature diff --git a/packages/temporal/src/client.ts b/packages/temporal/src/client.ts index 87bd3f35d..6ec6ca80f 100644 --- a/packages/temporal/src/client.ts +++ b/packages/temporal/src/client.ts @@ -1,7 +1,4 @@ import { Client, Connection } from '@temporalio/client' -import { payloadConverter } from './payload-converter' -import { createRequire } from 'node:module' -const require = createRequire(import.meta.url) import debug from 'debug' import fs from 'node:fs/promises' const { NODE_ENV = 'development' } = process.env @@ -38,7 +35,7 @@ export async function getTemporalClient(): Promise { connection, namespace: process.env.TEMPORAL_NAMESPACE ?? 
'default', dataConverter: { - payloadConverterPath: require.resolve('../build/payload-converter.cjs'), + payloadConverterPath: new URL('../build/payload-converter.cjs', import.meta.url).pathname, }, }) } diff --git a/yarn.lock b/yarn.lock index 6ab851096..091a11be5 100644 --- a/yarn.lock +++ b/yarn.lock @@ -6520,8 +6520,8 @@ __metadata: jsonwebtoken: "npm:^9.0.2" ms: "npm:^2.1.3" p-queue: "npm:^8.0.1" - superjson: "npm:^2.2.1" permissionless: "npm:^0.1.14" + superjson: "npm:^2.2.1" viem: "npm:^2.19.0" zod: "npm:^3.23.8" languageName: unknown @@ -14694,7 +14694,6 @@ __metadata: react-use-precision-timer: "npm:^3.5.5" solito: "npm:^4.0.1" superjson: "npm:^2.2.1" - superjson: "npm:^1.13.1" type-fest: "npm:^4.32.0" typescript: "npm:^5.5.3" viem: "npm:^2.19.0" From 30d0f40342268aa78335e0dbe61aeab0f884003d Mon Sep 17 00:00:00 2001 From: youngkidwarrior Date: Wed, 29 Jan 2025 08:10:00 -0800 Subject: [PATCH 14/58] save temporal transfer workflows in supabase --- apps/workers/src/client.ts | 28 +- packages/api/src/routers/_app.ts | 4 +- packages/api/src/routers/temporal.ts | 41 +++ packages/api/src/routers/transfer.ts | 108 ------- .../app/features/home/TokenActivityRow.tsx | 94 +----- .../utils/__mocks__/useTokenActivityFeed.ts | 15 +- .../home/utils/usePendingTransfers.ts | 22 -- .../home/utils/useTokenActivityFeed.ts | 65 ++-- packages/app/features/send/confirm/screen.tsx | 32 +- packages/app/utils/decodeTransferUserOp.ts | 4 +- packages/snaplet/.snaplet/dataModel.json | 147 ++++++++- .../src/transfer-workflow/activities.ts | 150 ++++++++- .../src/transfer-workflow/supabase.ts | 107 +++++-- .../workflows/src/transfer-workflow/wagmi.ts | 51 +--- .../src/transfer-workflow/workflow.ts | 52 +--- supabase/config.toml | 4 +- supabase/database-generated.types.ts | 79 +++++ ...035940_create_temporal_transfers_table.sql | 288 ++++++++++++++++++ supabase/package.json | 4 +- 19 files changed, 855 insertions(+), 440 deletions(-) create mode 100644 
packages/api/src/routers/temporal.ts delete mode 100644 packages/api/src/routers/transfer.ts delete mode 100644 packages/app/features/home/utils/usePendingTransfers.ts create mode 100644 supabase/migrations/20250205035940_create_temporal_transfers_table.sql diff --git a/apps/workers/src/client.ts b/apps/workers/src/client.ts index 7b7be2fed..2c1b927af 100644 --- a/apps/workers/src/client.ts +++ b/apps/workers/src/client.ts @@ -1,6 +1,5 @@ import { Connection, Client } from '@temporalio/client' import { TransferWorkflow } from '@my/workflows/all-workflows' -import type { UserOperation } from 'permissionless' // async function runDistributionWorkflow() { // const connection = await Connection.connect() // Connect to localhost with default ConnectionOptions. @@ -25,7 +24,7 @@ import type { UserOperation } from 'permissionless' // return result // } -export async function runTransferWorkflow(userOp: UserOperation<'v0.7'>) { +export async function runTransferWorkflow(userId: string, userOpHash: `0x${string}`) { const connection = await Connection.connect() const client = new Client({ connection, @@ -33,8 +32,8 @@ export async function runTransferWorkflow(userOp: UserOperation<'v0.7'>) { const handle = await client.workflow.start(TransferWorkflow, { taskQueue: 'monorepo', - workflowId: `transfers-workflow-${userOp.sender}-${userOp.nonce.toString()}`, // TODO: remember to replace this with a meaningful business ID - args: [userOp], + workflowId: `transfers-workflow-${userId}-${userOpHash}`, // TODO: remember to replace this with a meaningful business ID + args: [userOpHash], }) console.log('Started handle', handle.workflowId) // optional: wait for client result @@ -48,24 +47,3 @@ export async function runTransferWorkflow(userOp: UserOperation<'v0.7'>) { // console.error(err) // process.exit(1) // }) - -runTransferWorkflow({ - callData: - 
'0x34fcd5be000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000020000000000000000000000000833589fcd6edb6e08f4c7c32d4f71b54bda02913000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000600000000000000000000000000000000000000000000000000000000000000044a9059cbb000000000000000000000000713ddc85a615beaec95333736d80c406732f6d7600000000000000000000000000000000000000000000000000000000000f424000000000000000000000000000000000000000000000000000000000', - callGasLimit: 100000n, - maxFeePerGas: 1000000110n, - maxPriorityFeePerGas: 1000000000n, - nonce: 1n, - paymaster: '0x592e1224D203Be4214B15e205F6081FbbaCFcD2D', - paymasterData: '0x', - paymasterPostOpGasLimit: 100000n, - paymasterVerificationGasLimit: 150000n, - preVerificationGas: 70000n, - sender: '0x713ddC85a615BEaec95333736D80C406732f6d76', - signature: - 
'0x01000066ce986500000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000000c000000000000000000000000000000000000000000000000000000000000001200000000000000000000000000000000000000000000000000000000000000017000000000000000000000000000000000000000000000000000000000000000193e778a488b82629b608dabe2a0979742f065662e670ca4b3e365162bff5457e6fd8931f1d72ab0ba388a92725cf7dba903799639c4cffb45bc232ef9dcb1da2000000000000000000000000000000000000000000000000000000000000002549960de5880e8c687434170f6476605b8fe4aeb9a28632c7995cf3ba831d97631d00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000008f7b2274797065223a22776562617574686e2e676574222c226368616c6c656e6765223a22415141415a7336595a66796251683649754d4d6a4e774f35657171626f3930573058594f644b714d33345742314e4c35484c4250222c226f726967696e223a22687474703a2f2f6c6f63616c686f73743a33303030222c2263726f73734f726967696e223a66616c73657d0000000000000000000000000000000000', - verificationGasLimit: 550000n, -}).catch((err) => { - console.error(err) - process.exit(1) -}) diff --git a/packages/api/src/routers/_app.ts b/packages/api/src/routers/_app.ts index 311fe3629..83720c1d9 100644 --- a/packages/api/src/routers/_app.ts +++ b/packages/api/src/routers/_app.ts @@ -6,7 +6,7 @@ import { distributionRouter } from './distribution' import { tagRouter } from './tag' import { secretShopRouter } from './secretShop' import { sendAccountRouter } from './sendAccount' -import { transferRouter } from './transfer' +import { temporalRouter } from './temporal' import { accountRecoveryRouter } from './account-recovery/router' import { referralsRouter } from './referrals' @@ -19,7 +19,7 @@ export const appRouter = createTRPCRouter({ secretShop: secretShopRouter, sendAccount: sendAccountRouter, referrals: referralsRouter, - transfer: transferRouter, + temporal: temporalRouter, }) export type AppRouter = typeof appRouter diff 
--git a/packages/api/src/routers/temporal.ts b/packages/api/src/routers/temporal.ts new file mode 100644 index 000000000..dcf9f9fde --- /dev/null +++ b/packages/api/src/routers/temporal.ts @@ -0,0 +1,41 @@ +import { TRPCError } from '@trpc/server' +import debug from 'debug' +import { z } from 'zod' +import { createTRPCRouter, protectedProcedure } from '../trpc' +import { getTemporalClient } from '@my/temporal/client' +import { TransferWorkflow } from '@my/workflows/all-workflows' + +const log = debug('api:temporal') + +export const temporalRouter = createTRPCRouter({ + transfer: protectedProcedure + .input( + z.object({ + userOpHash: z.custom<`0x${string}`>(), + }) + ) + .mutation( + async ({ + input: { userOpHash }, + ctx: { + session: { user }, + }, + }) => { + try { + const client = await getTemporalClient() + const { workflowId } = await client.workflow.start(TransferWorkflow, { + taskQueue: 'monorepo', + workflowId: `transfer-workflow-${user.id}-${userOpHash}`, + args: [userOpHash], + }) + log(`Workflow Created: ${workflowId}`) + return workflowId + } catch (error) { + throw new TRPCError({ + code: 'INTERNAL_SERVER_ERROR', + message: error instanceof Error ? 
error.message : 'Unknown error', + }) + } + } + ), +}) diff --git a/packages/api/src/routers/transfer.ts b/packages/api/src/routers/transfer.ts deleted file mode 100644 index 5b3b79230..000000000 --- a/packages/api/src/routers/transfer.ts +++ /dev/null @@ -1,108 +0,0 @@ -import { TRPCError } from '@trpc/server' -import debug from 'debug' -import { z } from 'zod' -import { createTRPCRouter, protectedProcedure } from '../trpc' -import { getTemporalClient } from '@my/temporal/client' -import type { UserOperation } from 'permissionless' -import { TransferWorkflow, type transferState } from '@my/workflows/all-workflows' -import type { allCoins } from 'app/data/coins' - -const log = debug('api:transfer') - -export const transferRouter = createTRPCRouter({ - withUserOp: protectedProcedure - .input( - z.object({ - userOp: z.custom>(), - token: z.custom(), //@ todo: might be safer to decode the token from the userOp, to ensure we don't apply the wrong token - }) - ) - .mutation(async ({ input: { token, userOp } }) => { - const { sender, nonce } = userOp - try { - const client = await getTemporalClient() - const handle = await client.workflow.start(TransferWorkflow, { - taskQueue: 'monorepo', - workflowId: `transfer-workflow-${token}-${sender}-${nonce}`, - args: [userOp], - }) - log(`Workflow Created: ${handle.workflowId}`) - return handle.workflowId - } catch (error) { - throw new TRPCError({ - code: 'INTERNAL_SERVER_ERROR', - message: error instanceof Error ? error.message : 'Unknown error', - }) - } - }), - getState: protectedProcedure.input(z.string()).query(async ({ input: workflowId }) => { - try { - const client = await getTemporalClient() - const handle = client.workflow.getHandle(workflowId) - const state = await handle.query('getTransferState') - return state - } catch (error) { - throw new TRPCError({ - code: 'INTERNAL_SERVER_ERROR', - message: error instanceof Error ? 
error.message : 'Unknown error', - }) - } - }), - getPending: protectedProcedure - .input( - z.object({ - token: z.custom(), - sender: z.string(), - }) - ) - .query(async ({ input: { token, sender } }) => { - try { - const states: transferState[] = [] - const client = await getTemporalClient() - const workflows = client.workflow.list({ - query: `ExecutionStatus = "Running" AND WorkflowId BETWEEN "transfer-workflow-${token}-${sender}-" AND "transfer-workflow-${token}-${sender}-~"`, - }) - for await (const workflow of workflows) { - const handle = client.workflow.getHandle(workflow.workflowId) - console.log('handle: ', handle) - - const state = await handle.query('getTransferState') - states.push(state) - } - return states - } catch (error) { - throw new TRPCError({ - code: 'INTERNAL_SERVER_ERROR', - message: error instanceof Error ? error.message : 'Unknown error', - }) - } - }), - getFailed: protectedProcedure - .input( - z.object({ - token: z.custom(), - sender: z.string(), - }) - ) - .query(async ({ input: { token, sender } }) => { - try { - const states: transferState[] = [] - const client = await getTemporalClient() - const workflows = client.workflow.list({ - query: `ExecutionStatus = "Failed" AND WorkflowId BETWEEN "transfer-workflow-${token}-${sender}-" AND "transfer-workflow-${token}-${sender}-~"`, - }) - for await (const workflow of workflows) { - const handle = client.workflow.getHandle(workflow.workflowId) - const state = await handle.query('getTransferState') - states.push(state) - } - return states - } catch (error) { - throw new TRPCError({ - code: 'INTERNAL_SERVER_ERROR', - message: error instanceof Error ? 
error.message : 'Unknown error', - cause: error, - }) - } - }), -}) diff --git a/packages/app/features/home/TokenActivityRow.tsx b/packages/app/features/home/TokenActivityRow.tsx index bf229715c..2be625064 100644 --- a/packages/app/features/home/TokenActivityRow.tsx +++ b/packages/app/features/home/TokenActivityRow.tsx @@ -1,4 +1,4 @@ -import { Avatar, LinkableAvatar, Spinner, Paragraph, Text, XStack, YStack, Stack } from '@my/ui' +import { Paragraph, Text, XStack, YStack } from '@my/ui' import { amountFromActivity, eventNameFromActivity, subtextFromActivity } from 'app/utils/activity' import { isSendAccountReceiveEvent, @@ -8,14 +8,9 @@ import { import { ActivityAvatar } from '../activity/ActivityAvatar' import { CommentsTime } from 'app/utils/dateHelper' import { Link } from 'solito/link' -import type { CoinWithBalance } from 'app/data/coins' + import { useUser } from 'app/utils/useUser' import { useHoverStyles } from 'app/utils/useHoverStyles' -import type { transferState } from '@my/workflows' -import { sendAccountAbi, erc20Abi } from '@my/wagmi' -import { decodeFunctionData, formatUnits } from 'viem' -import { useProfileLookup } from 'app/utils/useProfileLookup' -import formatAmount from 'app/utils/formatAmount' export function TokenActivityRow({ activity, @@ -114,88 +109,3 @@ export function TokenActivityRow({ ) } - -export function PendingTransferActivityRow({ - coin, - state, -}: { coin: CoinWithBalance; state: transferState }) { - const { userOp } = state - const { args } = decodeFunctionData({ abi: sendAccountAbi, data: userOp.callData }) - - const decodedTokenTransfer = - args?.[0]?.[0].data !== '0x' - ? decodeFunctionData({ abi: erc20Abi, data: args?.[0]?.[0].data }) - : undefined - - const amount = decodedTokenTransfer - ? formatUnits(decodedTokenTransfer.args[1] as bigint, coin.decimals) - : formatAmount(formatUnits(args?.[0]?.[0].value, 18), 5, 5) - - const to = decodedTokenTransfer ? 
decodedTokenTransfer.args[0] : args?.[0]?.[0].dest - - const { data: profile } = useProfileLookup('address', to) - - return ( - - - - - - - - - - - - - - Sending... - - - {`${amount} ${coin.symbol}`} - - - - - {profile?.name ?? profile?.tag ?? profile?.sendid} - - - - - - - ) -} diff --git a/packages/app/features/home/utils/__mocks__/useTokenActivityFeed.ts b/packages/app/features/home/utils/__mocks__/useTokenActivityFeed.ts index a0d33a661..8fe6970ee 100644 --- a/packages/app/features/home/utils/__mocks__/useTokenActivityFeed.ts +++ b/packages/app/features/home/utils/__mocks__/useTokenActivityFeed.ts @@ -13,18 +13,11 @@ const mockUseTokenActivityFeed = jest.fn(({ token }) => { const pages = tokenTransfersByLogAddr[logAddress] if (!pages) throw new Error('No pages found') return { - pendingTransfers: { - data: [], //@todo maybe writes some mock data for temporal? - isLoading: false, - error: null, - }, - activityFeed: { - data: { - pages: [tokenTransfersByLogAddr[logAddress]], - }, - isLoading: false, - error: null, + data: { + pages: [tokenTransfersByLogAddr[logAddress]], }, + isLoading: false, + error: null, } }) export const useTokenActivityFeed = mockUseTokenActivityFeed diff --git a/packages/app/features/home/utils/usePendingTransfers.ts b/packages/app/features/home/utils/usePendingTransfers.ts deleted file mode 100644 index 09a21d4c7..000000000 --- a/packages/app/features/home/utils/usePendingTransfers.ts +++ /dev/null @@ -1,22 +0,0 @@ -import type { Address } from 'viem' -import type { allCoins } from 'app/data/coins' -import { api } from 'app/utils/api' - -/** - * Fetch Pending transfers by token and send account address - */ -export function usePendingTransfers(params: { - address: Address - token: allCoins[number]['token'] - refetchInterval?: number - enabled?: boolean -}) { - const { address, token, refetchInterval, enabled } = params - return api.transfer.getPending.useQuery( - { token, sender: address }, - { - refetchInterval, - enabled, - } - ) -} 
diff --git a/packages/app/features/home/utils/useTokenActivityFeed.ts b/packages/app/features/home/utils/useTokenActivityFeed.ts index 33b7d385c..87eeb10a7 100644 --- a/packages/app/features/home/utils/useTokenActivityFeed.ts +++ b/packages/app/features/home/utils/useTokenActivityFeed.ts @@ -1,18 +1,20 @@ +import type { PgBytea } from '@my/supabase/database.types' import { sendTokenV0LockboxAddress, tokenPaymasterAddress } from '@my/wagmi' -import { useInfiniteQuery } from '@tanstack/react-query' +import type { PostgrestError } from '@supabase/postgrest-js' +import { + useInfiniteQuery, + type InfiniteData, + type UseInfiniteQueryResult, +} from '@tanstack/react-query' import { pgAddrCondValues } from 'app/utils/pgAddrCondValues' import { squish } from 'app/utils/strings' import { useSupabase } from 'app/utils/supabase/useSupabase' import { throwIf } from 'app/utils/throwIf' import { EventArraySchema, Events, type Activity } from 'app/utils/zod/activity' -import { usePendingTransfers } from './usePendingTransfers' -import type { Address } from 'viem' -import type { allCoins } from 'app/data/coins' +import type { ZodError } from 'zod' /** - * Returns two hooks - * 1. useTokenActivityFeed - Infinite query to fetch ERC-20 token activity feed. - * 2. usePendingTransfers - Returns a list from temporal of pending transfers for the given address and token + * Infinite query to fetch ERC-20 token activity feed. * * @note does not support ETH transfers. 
Need to add another shovel integration to handle ETH receives, and another one for ETH sends * @@ -20,12 +22,11 @@ import type { allCoins } from 'app/data/coins' */ export function useTokenActivityFeed(params: { pageSize?: number - address: Address - token: allCoins[number]['token'] + address?: PgBytea refetchInterval?: number enabled?: boolean -}) { - const { pageSize = 10, token, address, refetchInterval = 30_000, enabled = true } = params +}): UseInfiniteQueryResult, PostgrestError | ZodError> { + const { pageSize = 10, address, refetchInterval = 30_000, enabled = true } = params const supabase = useSupabase() async function fetchTokenActivityFeed({ pageParam }: { pageParam: number }): Promise { @@ -65,29 +66,21 @@ export function useTokenActivityFeed(params: { return EventArraySchema.parse(data) } - return { - pendingTransfers: usePendingTransfers({ - address: address, - token, - refetchInterval, - enabled, - }), - activityFeed: useInfiniteQuery({ - queryKey: ['token_activity_feed', token], - initialPageParam: 0, - getNextPageParam: (lastPage, _allPages, lastPageParam) => { - if (lastPage !== null && lastPage.length < pageSize) return undefined - return lastPageParam + 1 - }, - getPreviousPageParam: (_firstPage, _allPages, firstPageParam) => { - if (firstPageParam <= 1) { - return undefined - } - return firstPageParam - 1 - }, - queryFn: fetchTokenActivityFeed, - refetchInterval, - enabled, - }), - } + return useInfiniteQuery({ + queryKey: ['token_activity_feed', address], + initialPageParam: 0, + getNextPageParam: (lastPage, _allPages, lastPageParam) => { + if (lastPage !== null && lastPage.length < pageSize) return undefined + return lastPageParam + 1 + }, + getPreviousPageParam: (_firstPage, _allPages, firstPageParam) => { + if (firstPageParam <= 1) { + return undefined + } + return firstPageParam - 1 + }, + queryFn: fetchTokenActivityFeed, + refetchInterval, + enabled, + }) } diff --git a/packages/app/features/send/confirm/screen.tsx 
b/packages/app/features/send/confirm/screen.tsx index fea852769..73901f611 100644 --- a/packages/app/features/send/confirm/screen.tsx +++ b/packages/app/features/send/confirm/screen.tsx @@ -30,7 +30,7 @@ import { useGenerateTransferUserOp } from 'app/utils/useUserOpTransferMutation' import { useAccountNonce } from 'app/utils/userop' import { useEffect, useRef, useState } from 'react' import { useRouter } from 'solito/router' -import { formatUnits, isAddress, zeroAddress } from 'viem' +import { formatUnits, isAddress } from 'viem' import { useEstimateFeesPerGas } from 'wagmi' import { useCoin } from 'app/provider/coins' import { useCoinFromSendTokenParam } from 'app/utils/useCoinFromTokenParam' @@ -42,7 +42,7 @@ import { useTokenPrices } from 'app/utils/useTokenPrices' const log = debug('app:features:send:confirm:screen') import { api } from 'app/utils/api' import { signUserOp } from 'app/utils/signUserOp' -import { usePendingTransfers } from 'app/features/home/utils/usePendingTransfers' +import { getUserOperationHash } from 'permissionless/utils' export function SendConfirmScreen() { const [queryParams] = useSendScreenParams() @@ -77,11 +77,7 @@ export function SendConfirm() { const { data: sendAccount, isLoading: isSendAccountLoading } = useSendAccount() const { coin: selectedCoin, tokensQuery, ethQuery } = useCoinFromSendTokenParam() - const { mutateAsync: transfer } = api.transfer.withUserOp.useMutation() - const { data: pendingTransfers, isLoading: isPendingTransfersLoading } = usePendingTransfers({ - address: sendAccount?.address ?? zeroAddress, - token: sendToken, - }) + const { mutateAsync: transfer } = api.temporal.transfer.useMutation() const [workflowId, setWorkflowId] = useState() @@ -121,8 +117,7 @@ export function SendConfirm() { to: profile?.address ?? recipient, token: sendToken === 'eth' ? undefined : sendToken, amount: BigInt(queryParams.amount ?? '0'), - nonce: - nonce && pendingTransfers !== undefined ? 
nonce + BigInt(pendingTransfers.length) : nonce, + nonce, }) const { @@ -201,16 +196,23 @@ export function SendConfirm() { console.log('feesPerGas', feesPerGas) console.log('userOp', _userOp) const chainId = baseMainnetClient.chain.id + const entryPoint = entryPointAddress[chainId] const signature = await signUserOp({ userOp, chainId, webauthnCreds, - entryPoint: entryPointAddress[chainId], + entryPoint, }) userOp.signature = signature - const workflowId = await transfer({ userOp, token: sendToken }) + const userOpHash = getUserOperationHash({ + userOperation: userOp, + entryPoint, + chainId, + }) + + const workflowId = await transfer({ userOpHash }) setWorkflowId(workflowId) if (selectedCoin?.token === 'eth') { await ethQuery.refetch() @@ -230,13 +232,7 @@ export function SendConfirm() { } }, []) - useEffect(() => { - if (submitButtonRef.current) { - submitButtonRef.current.focus() - } - }, []) - - if (nonceIsLoading || isProfileLoading || isSendAccountLoading || isPendingTransfersLoading) + if (nonceIsLoading || isProfileLoading || isSendAccountLoading) return return ( diff --git a/packages/app/utils/decodeTransferUserOp.ts b/packages/app/utils/decodeTransferUserOp.ts index 2856c654e..83035043a 100644 --- a/packages/app/utils/decodeTransferUserOp.ts +++ b/packages/app/utils/decodeTransferUserOp.ts @@ -1,7 +1,7 @@ import { decodeFunctionData } from 'viem' import { sendAccountAbi, erc20Abi } from '@my/wagmi' import type { UserOperation } from 'permissionless' -import type { coinsDict } from 'app/data/coins' +import type { allCoinsDict } from 'app/data/coins' export function decodeTransferUserOp({ userOp }: { userOp: UserOperation<'v0.7'> }) { const { args } = decodeFunctionData({ abi: sendAccountAbi, data: userOp.callData }) @@ -18,6 +18,6 @@ export function decodeTransferUserOp({ userOp }: { userOp: UserOperation<'v0.7'> const to = ( decodedTokenTransfer ? decodedTokenTransfer.args[0] : args?.[0]?.[0].dest ) as `0x${string}` - const token = (decodedTokenTransfer ? 
args?.[0]?.[0].dest : 'eth') as keyof coinsDict + const token = (decodedTokenTransfer ? args?.[0]?.[0].dest : 'eth') as keyof allCoinsDict return { from: userOp.sender, to, token, amount } } diff --git a/packages/snaplet/.snaplet/dataModel.json b/packages/snaplet/.snaplet/dataModel.json index 2da03e9a0..96292af88 100644 --- a/packages/snaplet/.snaplet/dataModel.json +++ b/packages/snaplet/.snaplet/dataModel.json @@ -5910,7 +5910,7 @@ } ] }, - "send_account_transfers": { + "public_send_account_transfers": { "id": "public.send_account_transfers", "schemaName": "public", "tableName": "send_account_transfers", @@ -6152,6 +6152,131 @@ } ] }, + "temporal_send_account_transfers": { + "id": "temporal.send_account_transfers", + "schemaName": "temporal", + "tableName": "send_account_transfers", + "fields": [ + { + "id": "temporal.send_account_transfers.id", + "name": "id", + "columnName": "id", + "type": "int4", + "isRequired": true, + "kind": "scalar", + "isList": false, + "isGenerated": false, + "sequence": { + "identifier": "\"temporal\".\"send_account_transfers_id_seq\"", + "increment": 1, + "start": 1 + }, + "hasDefaultValue": true, + "isId": true, + "maxLength": null + }, + { + "id": "temporal.send_account_transfers.workflow_id", + "name": "workflow_id", + "columnName": "workflow_id", + "type": "text", + "isRequired": true, + "kind": "scalar", + "isList": false, + "isGenerated": false, + "sequence": false, + "hasDefaultValue": false, + "isId": false, + "maxLength": null + }, + { + "id": "temporal.send_account_transfers.user_id", + "name": "user_id", + "columnName": "user_id", + "type": "uuid", + "isRequired": true, + "kind": "scalar", + "isList": false, + "isGenerated": false, + "sequence": false, + "hasDefaultValue": false, + "isId": false, + "maxLength": null + }, + { + "id": "temporal.send_account_transfers.status", + "name": "status", + "columnName": "status", + "type": "transfer_status", + "isRequired": true, + "kind": "scalar", + "isList": false, + "isGenerated": 
false, + "sequence": false, + "hasDefaultValue": false, + "isId": false, + "maxLength": null + }, + { + "id": "temporal.send_account_transfers.data", + "name": "data", + "columnName": "data", + "type": "jsonb", + "isRequired": true, + "kind": "scalar", + "isList": false, + "isGenerated": false, + "sequence": false, + "hasDefaultValue": false, + "isId": false, + "maxLength": null + }, + { + "id": "temporal.send_account_transfers.created_at", + "name": "created_at", + "columnName": "created_at", + "type": "timestamptz", + "isRequired": false, + "kind": "scalar", + "isList": false, + "isGenerated": false, + "sequence": false, + "hasDefaultValue": true, + "isId": false, + "maxLength": null + }, + { + "id": "temporal.send_account_transfers.updated_at", + "name": "updated_at", + "columnName": "updated_at", + "type": "timestamptz", + "isRequired": false, + "kind": "scalar", + "isList": false, + "isGenerated": false, + "sequence": false, + "hasDefaultValue": true, + "isId": false, + "maxLength": null + } + ], + "uniqueConstraints": [ + { + "name": "send_account_transfers_pkey", + "fields": [ + "id" + ], + "nullNotDistinct": false + }, + { + "name": "temporal_send_account_transfers_workflow_id_idx", + "fields": [ + "workflow_id" + ], + "nullNotDistinct": false + } + ] + }, "send_accounts": { "id": "public.send_accounts", "schemaName": "public", @@ -9604,6 +9729,26 @@ "name": "individual" } ] + }, + "transfer_status": { + "schemaName": "temporal", + "values": [ + { + "name": "confirmed" + }, + { + "name": "failed" + }, + { + "name": "indexed" + }, + { + "name": "initialized" + }, + { + "name": "sent" + } + ] } } } \ No newline at end of file diff --git a/packages/workflows/src/transfer-workflow/activities.ts b/packages/workflows/src/transfer-workflow/activities.ts index f03d997e9..b3375ac8d 100644 --- a/packages/workflows/src/transfer-workflow/activities.ts +++ b/packages/workflows/src/transfer-workflow/activities.ts @@ -1,15 +1,43 @@ import { log, ApplicationFailure } from 
'@temporalio/activity' -import { isTransferIndexed } from './supabase' -import { simulateUserOperation, sendUserOperation, waitForTransactionReceipt } from './wagmi' +import { + isTokenTransferIndexed, + isEthTransferIndexed, + insertTemporalTokenSendAccountTransfer, + updateTemporalSendAccountTransfer, + insertTemporalEthSendAccountTransfer, +} from './supabase' +import { + simulateUserOperation, + sendUserOperation, + waitForTransactionReceipt, + getUserOperationByHash, +} from './wagmi' import type { UserOperation } from 'permissionless' import { bootstrap } from '@my/workflows/utils' -import superjson from 'superjson' +import { decodeTransferUserOp } from 'app/utils/decodeTransferUserOp' +import { hexToBytea } from 'app/utils/hexToBytea' +import type { allCoinsDict } from 'app/data/coins' export const createTransferActivities = (env: Record) => { bootstrap(env) return { - async simulateUserOpActivity(userOp: UserOperation<'v0.7'>) { + async initializeTransferActivity(workflowId: string, userOpHash: `0x${string}`) { + const userOpData = await getUserOperationByHash(userOpHash) + if (!userOpData) { + throw ApplicationFailure.nonRetryable('User Operation hash is not a valid user op') + } + + const userOp = userOpData.userOperation + + const { from, to, token, amount } = decodeTransferUserOp({ userOp }) + if (!from || !to || !amount || !token) { + throw ApplicationFailure.nonRetryable('User Operation is not a valid transfer') + } + if (amount <= 0n) { + throw ApplicationFailure.nonRetryable('User Operation has amount <= 0') + } + if (!userOp.signature) { throw ApplicationFailure.nonRetryable('UserOp signature is required') } @@ -18,30 +46,122 @@ export const createTransferActivities = (env: Record } catch (error) { throw ApplicationFailure.nonRetryable('Error simulating user operation', error.code, error) } + + // Convert hex addresses to bytea for database + const fromBytea = hexToBytea(from) + const toBytea = hexToBytea(to) + const { error } = + token === 'eth' + 
? await insertTemporalEthSendAccountTransfer({ + workflow_id: workflowId, + user_op_hash: userOpHash, + status: 'initialized', + sender: fromBytea, + value: amount, + log_addr: toBytea, + }) + : await insertTemporalTokenSendAccountTransfer({ + workflow_id: workflowId, + user_op_hash: userOpHash, + status: 'initialized', + f: fromBytea, + t: toBytea, + v: amount, + log_addr: hexToBytea(token), + }) + + if (error) { + throw ApplicationFailure.retryable( + 'Error inserting transfer into temporal_send_account_transfers', + error.code, + { + error, + workflowId, + } + ) + } + + return { userOp, from, to, amount, token } }, async sendUserOpActivity(userOp: UserOperation<'v0.7'>) { try { const hash = await sendUserOperation(userOp) - log.info('sendUserOperationActivity', { hash, userOp: superjson.stringify(userOp) }) + log.info('UserOperation sent successfully', { hash }) return hash } catch (error) { - throw ApplicationFailure.nonRetryable('Error sending user operation', error.code, error) + throw ApplicationFailure.retryable('Error sending user operation', error.code, error) } }, - - async waitForTransactionReceiptActivity(hash: `0x${string}`) { + async updateTemporalTransferSentStatusActivity(workflowId: string) { + const { error } = await updateTemporalSendAccountTransfer({ + workflow_id: workflowId, + status: 'sent', + }) + if (error) { + throw ApplicationFailure.retryable( + 'Error updating entry in temporal_send_account_transfers with sent status', + error.code, + { + error, + workflowId, + } + ) + } + return + }, + async waitForTransactionReceiptActivity(workflowId: string, hash: `0x${string}`) { try { - const receipt = await waitForTransactionReceipt(hash) - if (!receipt.success) - throw ApplicationFailure.nonRetryable('Tx failed', receipt.sender, receipt.userOpHash) - log.info('waitForTransactionReceiptActivity', { receipt: superjson.stringify(receipt) }) + const res = await waitForTransactionReceipt(hash) + if (!res) { + throw ApplicationFailure.retryable('No 
receipt returned from waitForTransactionReceipt') + } + if (!res.success) { + throw ApplicationFailure.nonRetryable('Tx failed', res.sender, res.userOpHash) + } + log.info('waitForTransactionReceiptActivity', { tx_hash: res.receipt.transactionHash }) + const { receipt } = res + await updateTemporalSendAccountTransfer({ + workflow_id: workflowId, + status: 'confirmed', + data: { + tx_hash: receipt.transactionHash, + block_num: receipt.blockNumber.toString(), + tx_idx: receipt.transactionIndex.toString(), + // log_idx: logs[0].logIndex.toString(), -- Need to look into how to get this + }, + }) return receipt } catch (error) { - throw ApplicationFailure.nonRetryable('Error waiting for tx receipt', error.code, error) + throw ApplicationFailure.retryable('Error waiting for tx receipt', error.code, error) } }, - async isTransferIndexedActivity(hash: `0x${string}`) { - const isIndexed = await isTransferIndexed(hash) + async isTransferIndexedActivity( + workflowId: string, + tx_hash: `0x${string}`, + token: keyof allCoinsDict + ) { + const isIndexed = + token === 'eth' + ? 
await isEthTransferIndexed(tx_hash) + : await isTokenTransferIndexed(tx_hash) + + if (!isIndexed) { + throw ApplicationFailure.retryable('Transfer not indexed in db') + } + const { error } = await updateTemporalSendAccountTransfer({ + workflow_id: workflowId, + status: 'indexed', + }) + if (error) { + throw ApplicationFailure.retryable( + 'Error updating entry in temporal_send_account_transfers with indexed status', + error.code, + { + error, + workflowId, + } + ) + } log.info('isTransferIndexedActivity', { isIndexed }) return isIndexed }, diff --git a/packages/workflows/src/transfer-workflow/supabase.ts b/packages/workflows/src/transfer-workflow/supabase.ts index f2aeab645..09381419a 100644 --- a/packages/workflows/src/transfer-workflow/supabase.ts +++ b/packages/workflows/src/transfer-workflow/supabase.ts @@ -1,28 +1,95 @@ -import { log, ApplicationFailure } from '@temporalio/activity' +import type { Database, Json } from '@my/supabase/database-generated.types' +import { log } from '@temporalio/activity' import { hexToBytea } from 'app/utils/hexToBytea' import { supabaseAdmin } from 'app/utils/supabase/admin' -export async function isTransferIndexed(hash: `0x${string}`) { +export async function insertTemporalTokenSendAccountTransfer({ + workflow_id, + user_op_hash, + status, + f, + t, + v, + log_addr, +}: { + workflow_id: string + user_op_hash: string + status: Database['public']['Enums']['temporal_transfer_status'] + f: `\\x${string}` + t: `\\x${string}` + v: bigint + log_addr: `\\x${string}` +}) { + return await supabaseAdmin.rpc('insert_temporal_token_send_account_transfer', { + workflow_id, + user_op_hash, + status, + f, + t, + v: v.toString(), + log_addr, + }) +} + +export async function insertTemporalEthSendAccountTransfer({ + workflow_id, + user_op_hash, + status, + sender, + log_addr, + value, +}: { + workflow_id: string + user_op_hash: string + status: Database['public']['Enums']['temporal_transfer_status'] + sender: `\\x${string}` + log_addr: 
`\\x${string}` + value: bigint +}) { + return await supabaseAdmin.rpc('insert_temporal_eth_send_account_transfer', { + workflow_id, + user_op_hash, + status, + sender, + log_addr, + value: value.toString(), + }) +} + +export async function updateTemporalSendAccountTransfer({ + workflow_id, + status, + data, +}: { + workflow_id: string + status: Database['public']['Enums']['temporal_transfer_status'] + data?: Json +}) { + return await supabaseAdmin.rpc('update_temporal_send_account_transfer', { + workflow_id, + status, + data, + }) +} + +export async function isTokenTransferIndexed(tx_hash: `0x${string}`) { const { count, error, status, statusText } = await supabaseAdmin .from('send_account_transfers') .select('*', { count: 'exact', head: true }) - .eq('tx_hash', hexToBytea(hash)) - - log.info('isTransferIndexed', { count, error, status, statusText }) - if (error) { - if (error.code === 'PGRST116') { - log.info('isTransferIndexedActivity', { error }) - return null - } - throw ApplicationFailure.nonRetryable( - 'Error reading transfer from send_account_transfers column.', - error.code, - { - ...error, - status, - statusText, - } - ) - } + .eq('tx_hash', hexToBytea(tx_hash)) + + log.info('isTokenTransferIndexed', { count, error, status, statusText }) + + return count !== null && count > 0 +} + +export async function isEthTransferIndexed(tx_hash: `0x${string}`) { + const { count, error, status, statusText } = await supabaseAdmin + .from('send_account_receives') + .select('*', { count: 'exact', head: true }) + .eq('tx_hash', hexToBytea(tx_hash)) + + log.info('isEthTransferIndexed', { count, error, status, statusText }) + return count !== null && count > 0 } diff --git a/packages/workflows/src/transfer-workflow/wagmi.ts b/packages/workflows/src/transfer-workflow/wagmi.ts index 5433d982c..06511dfed 100644 --- a/packages/workflows/src/transfer-workflow/wagmi.ts +++ b/packages/workflows/src/transfer-workflow/wagmi.ts @@ -1,36 +1,9 @@ import type { UserOperation } from 
'permissionless' import { baseMainnetBundlerClient, baseMainnetClient, entryPointAddress } from '@my/wagmi' import type { Hex } from 'viem' -import superjson from 'superjson' -import { log } from '@temporalio/activity' - -/** - * default user op with preset gas values that work will probably need to move this to the database. - * Paymaster post-op gas limit could be set dynamically based on the status of the paymaster if the price cache is - * outdated, otherwise, a lower post op gas limit around only 50K is needed. In case of needing to update cached price, - * the post op uses around 75K gas. - * - * - [example no update price](https://www.tdly.co/shared/simulation/a0122fae-a88c-47cd-901c-02de87901b45) - * - [Failed due to OOG](https://www.tdly.co/shared/simulation/c259922c-8248-4b43-b340-6ebbfc69bcea) - */ -export const defaultUserOp: Pick< - UserOperation<'v0.7'>, - | 'callGasLimit' - | 'verificationGasLimit' - | 'preVerificationGas' - | 'maxFeePerGas' - | 'maxPriorityFeePerGas' - | 'paymasterVerificationGasLimit' - | 'paymasterPostOpGasLimit' -> = { - callGasLimit: 100000n, - verificationGasLimit: 550000n, - preVerificationGas: 70000n, - maxFeePerGas: 10000000n, - maxPriorityFeePerGas: 10000000n, - paymasterVerificationGasLimit: 150000n, - paymasterPostOpGasLimit: 100000n, +export async function getUserOperationByHash(hash: `0x${string}`) { + return await baseMainnetBundlerClient.getUserOperationByHash({ hash }) } export async function simulateUserOperation(userOp: UserOperation<'v0.7'>) { @@ -42,23 +15,11 @@ export async function simulateUserOperation(userOp: UserOperation<'v0.7'>) { } export async function sendUserOperation(userOp: UserOperation<'v0.7'>) { - log.info('Sending UserOperation', { userOp: superjson.stringify(userOp) }) - try { - const hash = await baseMainnetBundlerClient.sendUserOperation({ - userOperation: userOp, - }) - log.info('UserOperation sent successfully', { hash }) - return hash - } catch (error) { - log.error('Error in 
sendUserOperation', { - error: error instanceof Error ? error.message : String(error), - userOp: superjson.stringify(userOp), - }) - throw error - } + return await baseMainnetBundlerClient.sendUserOperation({ + userOperation: userOp, + }) } export async function waitForTransactionReceipt(hash: `0x${string}`) { - const receipt = await baseMainnetBundlerClient.waitForUserOperationReceipt({ hash }) - return receipt + return await baseMainnetBundlerClient.waitForUserOperationReceipt({ hash }) } diff --git a/packages/workflows/src/transfer-workflow/workflow.ts b/packages/workflows/src/transfer-workflow/workflow.ts index 0f55be92f..ff65cb584 100644 --- a/packages/workflows/src/transfer-workflow/workflow.ts +++ b/packages/workflows/src/transfer-workflow/workflow.ts @@ -1,14 +1,14 @@ -import { proxyActivities, ApplicationFailure, defineQuery, setHandler } from '@temporalio/workflow' +import { proxyActivities, workflowInfo } from '@temporalio/workflow' import type { createTransferActivities } from './activities' -import type { UserOperation, GetUserOperationReceiptReturnType } from 'permissionless' + import debug from 'debug' -import superjson from 'superjson' const log = debug('workflows:transfer') const { - simulateUserOpActivity, + initializeTransferActivity, sendUserOpActivity, + updateTemporalTransferSentStatusActivity, waitForTransactionReceiptActivity, isTransferIndexedActivity, } = proxyActivities>({ @@ -16,43 +16,17 @@ const { startToCloseTimeout: '45 seconds', }) -type BaseState = { userOp: UserOperation<'v0.7'> } - -type Simulating = { status: 'simulating' } & BaseState -type Sending = { status: 'sending' } & BaseState -type Waiting = { status: 'waiting'; hash: string } & BaseState -type Indexing = { - status: 'indexing' - receipt: GetUserOperationReceiptReturnType -} & BaseState -type Confirmed = { - status: 'confirmed' - receipt: GetUserOperationReceiptReturnType | boolean -} & BaseState - -export type transferState = Simulating | Sending | Waiting | Indexing | 
Confirmed - -export const getTransferStateQuery = defineQuery('getTransferState') - -export async function TransferWorkflow(userOp: UserOperation<'v0.7'>) { - setHandler(getTransferStateQuery, () => ({ status: 'simulating', userOp })) - log('SendTransferWorkflow started with userOp:', superjson.stringify(userOp)) - await simulateUserOpActivity(userOp) - log('Simulation completed') - setHandler(getTransferStateQuery, () => ({ status: 'sending', userOp })) +export async function TransferWorkflow(userOpHash: `0x${string}`) { + const workflowId = workflowInfo().workflowId + log('SendTransferWorkflow started with hash:', userOpHash) + const { userOp, token } = await initializeTransferActivity(workflowId, userOpHash) log('Sending UserOperation') const hash = await sendUserOpActivity(userOp) - if (!hash) throw ApplicationFailure.nonRetryable('No hash returned from sendUserOperation') log('UserOperation sent, hash:', hash) - setHandler(getTransferStateQuery, () => ({ status: 'waiting', userOp, hash })) - const receipt = await waitForTransactionReceiptActivity(hash) - if (!receipt) - throw ApplicationFailure.nonRetryable('No receipt returned from waitForTransactionReceipt') - log('Receipt received:', superjson.stringify(receipt)) - setHandler(getTransferStateQuery, () => ({ status: 'indexing', userOp, receipt })) - const transfer = await isTransferIndexedActivity(receipt.receipt.transactionHash) - if (!transfer) throw ApplicationFailure.retryable('Transfer not yet indexed in db') - log('Transfer indexed:', superjson.stringify(transfer)) - setHandler(getTransferStateQuery, () => ({ status: 'confirmed', userOp, receipt })) + await updateTemporalTransferSentStatusActivity(workflowId) + const receipt = await waitForTransactionReceiptActivity(workflowId, hash) + log('Receipt received:', { tx_hash: receipt.transactionHash, user_op_hash: userOpHash }) + const transfer = await isTransferIndexedActivity(workflowId, receipt.transactionHash, token) + log('Transfer indexed') return 
transfer } diff --git a/supabase/config.toml b/supabase/config.toml index aa9107d52..579d7e2e6 100644 --- a/supabase/config.toml +++ b/supabase/config.toml @@ -5,8 +5,8 @@ project_id = "send" [api] enabled = true port = 54321 -schemas = ["public", "storage", "graphql_public"] -extra_search_path = ["public", "extensions"] +schemas = ["public", "storage", "graphql_public", "temporal"] +extra_search_path = ["public", "extensions", "temporal"] max_rows = 100 [db] diff --git a/supabase/database-generated.types.ts b/supabase/database-generated.types.ts index 6f631055a..8b58badd0 100644 --- a/supabase/database-generated.types.ts +++ b/supabase/database-generated.types.ts @@ -1462,6 +1462,85 @@ export type Database = { } } } + temporal: { + Tables: { + send_account_transfers: { + Row: { + created_at: string | null + data: Json + id: number + status: Database["temporal"]["Enums"]["transfer_status"] + updated_at: string | null + user_id: string + workflow_id: string + } + Insert: { + created_at?: string | null + data: Json + id?: number + status: Database["temporal"]["Enums"]["transfer_status"] + updated_at?: string | null + user_id: string + workflow_id: string + } + Update: { + created_at?: string | null + data?: Json + id?: number + status?: Database["temporal"]["Enums"]["transfer_status"] + updated_at?: string | null + user_id?: string + workflow_id?: string + } + Relationships: [] + } + } + Views: { + [_ in never]: never + } + Functions: { + insert_temporal_eth_send_account_transfer: { + Args: { + workflow_id: string + status: Database["temporal"]["Enums"]["transfer_status"] + sender: string + log_addr: string + value: string + } + Returns: undefined + } + insert_temporal_token_send_account_transfer: { + Args: { + workflow_id: string + status: Database["temporal"]["Enums"]["transfer_status"] + f: string + t: string + v: string + log_addr: string + } + Returns: undefined + } + update_temporal_send_account_transfer: { + Args: { + workflow_id: string + status: 
Database["temporal"]["Enums"]["transfer_status"] + data?: Json + } + Returns: undefined + } + } + Enums: { + transfer_status: + | "initialized" + | "sent" + | "confirmed" + | "indexed" + | "failed" + } + CompositeTypes: { + [_ in never]: never + } + } } export type Tables< diff --git a/supabase/migrations/20250205035940_create_temporal_transfers_table.sql b/supabase/migrations/20250205035940_create_temporal_transfers_table.sql new file mode 100644 index 000000000..d5cf54c47 --- /dev/null +++ b/supabase/migrations/20250205035940_create_temporal_transfers_table.sql @@ -0,0 +1,288 @@ +SET check_function_bodies = OFF; + +-- Create temporal schema +CREATE SCHEMA IF NOT EXISTS temporal; + +-- Grant permissions for temporal schema +GRANT USAGE ON SCHEMA temporal TO authenticated, service_role; + +-- Grant execute on functions to service_role +GRANT EXECUTE ON ALL FUNCTIONS IN SCHEMA temporal TO service_role; +ALTER DEFAULT PRIVILEGES IN SCHEMA temporal + GRANT EXECUTE ON FUNCTIONS TO service_role; + +CREATE TYPE temporal.transfer_status AS ENUM( + 'initialized', + 'sent', + 'confirmed', + 'indexed', + 'failed' +); + +CREATE TABLE temporal.send_account_transfers( + id serial primary key, + workflow_id text NOT NULL, + user_id uuid NOT NULL, + status temporal.transfer_status NOT NULL, + data jsonb NOT NULL, + created_at timestamp with time zone DEFAULT NOW(), + updated_at timestamp with time zone DEFAULT NOW() +); + +alter table "temporal"."send_account_transfers" + enable row level security; + +create policy "users can see their own temporal transfers" +on "temporal"."send_account_transfers" as permissive +for select to authenticated +using ( + user_id = auth.uid() +); + + +CREATE INDEX temporal_send_account_transfers_user_id_idx ON temporal.send_account_transfers(user_id); +CREATE INDEX temporal_send_account_transfers_status_idx ON temporal.send_account_transfers(status); +CREATE UNIQUE INDEX temporal_send_account_transfers_workflow_id_idx ON 
temporal.send_account_transfers(workflow_id); + +CREATE OR REPLACE FUNCTION temporal.insert_temporal_token_send_account_transfer( + workflow_id text, + status temporal.transfer_status, + f bytea, + t bytea, + v text, + log_addr bytea +) +RETURNS void +LANGUAGE plpgsql +SECURITY DEFINER +AS $$ +DECLARE + f_user_id uuid; +BEGIN + SELECT user_id INTO f_user_id + FROM send_accounts + WHERE address = concat('0x', encode(f, 'hex'))::citext; + + INSERT INTO temporal.send_account_transfers( + workflow_id, + user_id, + status, + data + ) + VALUES ( + workflow_id, + f_user_id, + status, + json_build_object( + 'f', f, + 't', t, + 'v', v, + 'log_addr', log_addr + ) + ); +END; +$$; + +CREATE OR REPLACE FUNCTION temporal.insert_temporal_eth_send_account_transfer( + workflow_id text, + status temporal.transfer_status, + sender bytea, + log_addr bytea, + value text +) +RETURNS void +LANGUAGE plpgsql +SECURITY DEFINER +AS $$ +DECLARE + sender_user_id uuid; +BEGIN + SELECT user_id INTO sender_user_id + FROM send_accounts + WHERE address = concat('0x', encode(sender, 'hex'))::citext; + + INSERT INTO temporal.send_account_transfers( + workflow_id, + user_id, + status, + data + ) + VALUES ( + workflow_id, + sender_user_id, + status, + json_build_object( + 'log_addr', log_addr, + 'sender', sender, + 'value', value + ) + ); +END; +$$; + +CREATE OR REPLACE FUNCTION temporal.update_temporal_send_account_transfer( + workflow_id text, + status temporal.transfer_status, + data jsonb DEFAULT NULL +) +RETURNS void +LANGUAGE plpgsql +SECURITY DEFINER +AS $$ +DECLARE + _data jsonb; +BEGIN + -- Only construct _data if input data is not null + IF data IS NOT NULL THEN + _data := json_build_object( + 'user_op_hash', (data->>'user_op_hash')::bytea, + 'tx_hash', (data->>'tx_hash')::bytea, + 'block_num', data->>'block_num', + 'tx_idx', data->>'tx_idx', + 'log_idx', data->>'log_idx' + ); + ELSE + _data := '{}'::jsonb; + END IF; + + UPDATE temporal.send_account_transfers + SET + status = 
update_temporal_send_account_transfer.status, + data = CASE + WHEN _data = '{}'::jsonb THEN temporal.send_account_transfers.data + ELSE temporal.send_account_transfers.data || _data + END, + updated_at = NOW() + WHERE + temporal.send_account_transfers.workflow_id = update_temporal_send_account_transfer.workflow_id; +END; +$$; + +-- Token transfer triggers +CREATE OR REPLACE FUNCTION temporal.temporal_token_send_account_transfers_trigger_insert_activity() + RETURNS TRIGGER + LANGUAGE plpgsql + SECURITY DEFINER + AS $$ +DECLARE + _f_user_id uuid; + _t_user_id uuid; +BEGIN + SELECT user_id INTO _f_user_id + FROM send_accounts + WHERE address = concat('0x', encode((NEW.data->>'f')::bytea, 'hex'))::citext; + + SELECT user_id INTO _t_user_id + FROM send_accounts + WHERE address = concat('0x', encode((NEW.data->>'t')::bytea, 'hex'))::citext; + + INSERT INTO activity( + event_name, + event_id, + from_user_id, + to_user_id, + data, + created_at + ) + VALUES ( + 'temporal_send_account_transfer', + NEW.workflow_id, + _f_user_id, + _t_user_id, + json_build_object( + 'status', NEW.status, + 'user_op_hash', (NEW.data->>'user_op_hash')::bytea, + 'log_addr', (NEW.data->>'log_addr')::bytea, + 'f', (NEW.data->>'f')::bytea, + 't', (NEW.data->>'t')::bytea, + 'v', NEW.data->>'v', + 'tx_hash', (NEW.data->>'tx_hash')::bytea, + 'block_num', NEW.data->>'block_num', + 'tx_idx', NEW.data->>'tx_idx', + 'log_idx', NEW.data->>'log_idx' + ), + NEW.created_at + ); + RETURN NEW; +END; +$$; + +-- ETH transfer triggers +CREATE OR REPLACE FUNCTION temporal.temporal_eth_send_account_transfers_trigger_insert_activity() + RETURNS TRIGGER + LANGUAGE plpgsql + SECURITY DEFINER + AS $$ +DECLARE + _from_user_id uuid; + _to_user_id uuid; +BEGIN + SELECT user_id INTO _from_user_id + FROM send_accounts + WHERE address = concat('0x', encode((NEW.data->>'sender')::bytea, 'hex'))::citext; + + SELECT user_id INTO _to_user_id + FROM send_accounts + WHERE address = concat('0x', encode((NEW.data->>'log_addr')::bytea, 
'hex'))::citext; + + INSERT INTO activity( + event_name, + event_id, + from_user_id, + to_user_id, + data, + created_at + ) + VALUES ( + 'temporal_send_account_transfer', + NEW.workflow_id, + _from_user_id, + _to_user_id, + json_build_object( + 'status', NEW.status, + 'user_op_hash', (NEW.data->>'user_op_hash')::bytea, + 'log_addr', (NEW.data->>'log_addr')::bytea, + 'sender', (NEW.data->>'sender')::bytea, + 'value', NEW.data->>'value', + 'tx_hash', (NEW.data->>'tx_hash')::bytea, + 'block_num', NEW.data->>'block_num', + 'tx_idx', NEW.data->>'tx_idx', + 'log_idx', NEW.data->>'log_idx' + ), + NEW.created_at + ); + RETURN NEW; +END; +$$; + +-- Create triggers with conditions +CREATE TRIGGER temporal_token_send_account_transfers_trigger_insert_activity + AFTER INSERT ON temporal.send_account_transfers + FOR EACH ROW + WHEN (NEW.data ? 'f') + EXECUTE FUNCTION temporal.temporal_token_send_account_transfers_trigger_insert_activity(); + +CREATE TRIGGER temporal_eth_send_account_transfers_trigger_insert_activity + AFTER INSERT ON temporal.send_account_transfers + FOR EACH ROW + WHEN (NEW.data ? 
'sender') + EXECUTE FUNCTION temporal.temporal_eth_send_account_transfers_trigger_insert_activity(); + +CREATE OR REPLACE FUNCTION temporal.temporal_send_account_transfers_trigger_update_activity() + RETURNS TRIGGER + LANGUAGE plpgsql + SECURITY DEFINER + AS $$ +BEGIN + UPDATE activity + SET data = NEW.data + WHERE event_name = 'temporal_send_account_transfer' + AND event_id = NEW.workflow_id; + RETURN NEW; +END; +$$; + +CREATE TRIGGER temporal_send_account_transfers_trigger_update_activity + AFTER UPDATE ON temporal.send_account_transfers + FOR EACH ROW + EXECUTE FUNCTION temporal.temporal_send_account_transfers_trigger_update_activity(); \ No newline at end of file diff --git a/supabase/package.json b/supabase/package.json index 310e69975..ed4280935 100644 --- a/supabase/package.json +++ b/supabase/package.json @@ -20,9 +20,9 @@ "_with-env": "dotenv -e ../.env -c -- ", "_link-project": "npx supabase link --project-ref $NEXT_PUBLIC_SUPABASE_PROJECT_ID --workdir ../", "link-project": "yarn _with-env yarn _link-project", - "_generate:remote": "npx supabase gen types typescript --project-id $NEXT_PUBLIC_SUPABASE_PROJECT_ID --schema public > ./database-generated.types.ts", + "_generate:remote": "npx supabase gen types typescript --project-id $NEXT_PUBLIC_SUPABASE_PROJECT_ID --schema public,temporal > ./database-generated.types.ts", "generate:remote": "yarn _with-env yarn _generate:remote", - "generate": "npx supabase gen types typescript --local --schema public > ./database-generated.types.ts", + "generate": "npx supabase gen types typescript --local --schema public,temporal > ./database-generated.types.ts", "g": "yarn generate", "migration:diff": "npx supabase db diff -f ", "start": "npx supabase start", From 1ab630d336e82a9da01daaaff023e2539b4b6124 Mon Sep 17 00:00:00 2001 From: youngkidwarrior Date: Sun, 9 Feb 2025 11:34:46 -0800 Subject: [PATCH 15/58] Test temporal transfers table --- supabase/tests/temporal_transfers_test.sql | 202 +++++++++++++++++++++ 1 file 
changed, 202 insertions(+) create mode 100644 supabase/tests/temporal_transfers_test.sql diff --git a/supabase/tests/temporal_transfers_test.sql b/supabase/tests/temporal_transfers_test.sql new file mode 100644 index 000000000..3ae768257 --- /dev/null +++ b/supabase/tests/temporal_transfers_test.sql @@ -0,0 +1,202 @@ +BEGIN; +SELECT plan(6); + +-- Create the necessary extensions +CREATE EXTENSION "basejump-supabase_test_helpers"; + +-- Create test users +SELECT tests.create_supabase_user('test_user_from'); +SELECT tests.create_supabase_user('test_user_to'); + +-- Setup test accounts +INSERT INTO send_accounts (user_id, address, chain_id, init_code) +VALUES ( + tests.get_supabase_uid('test_user_from'), + '0x1234567890ABCDEF1234567890ABCDEF12345678', + 1, + '\\x00112233445566778899AABBCCDDEEFF' +), +( + tests.get_supabase_uid('test_user_to'), + '0xB0B7D5E8A4B6D534B3F608E9D27871F85A4E98DA', + 1, + '\\x00112233445566778899AABBCCDDEEFF' +); + +-- Test 1: Test token transfer insertion +SELECT temporal.insert_temporal_token_send_account_transfer( + 'test-workflow-1'::text, + 'initialized'::temporal.transfer_status, + '\x1234567890ABCDEF1234567890ABCDEF12345678'::bytea, + '\xB0B7D5E8A4B6D534B3F608E9D27871F85A4E98DA'::bytea, + '100'::text, + '\xf39Fd6e51aad88F6F4ce6aB8827279cffFb92266'::bytea +); + +SELECT results_eq( + $$ + SELECT + workflow_id, + status, + (data->>'f')::bytea, + (data->>'t')::bytea, + data->>'v', + (data->>'log_addr')::bytea + FROM temporal.send_account_transfers + WHERE workflow_id = 'test-workflow-1' + $$, + $$ + VALUES ( + 'test-workflow-1'::text, + 'initialized'::temporal.transfer_status, + '\x1234567890ABCDEF1234567890ABCDEF12345678'::bytea, + '\xB0B7D5E8A4B6D534B3F608E9D27871F85A4E98DA'::bytea, + '100'::text, + '\xf39Fd6e51aad88F6F4ce6aB8827279cffFb92266'::bytea + ) + $$, + 'Test token transfer insertion' +); + +-- Test 2: Test ETH transfer insertion +SELECT temporal.insert_temporal_eth_send_account_transfer( + 'test-workflow-2'::text, + 
'initialized'::temporal.transfer_status, + '\x1234567890ABCDEF1234567890ABCDEF12345678'::bytea, + '\xf39Fd6e51aad88F6F4ce6aB8827279cffFb92266'::bytea, + '1000000000000000000'::text +); + +SELECT results_eq( + $$ + SELECT + workflow_id, + status, + (data->>'sender')::bytea, + (data->>'log_addr')::bytea, + data->>'value' + FROM temporal.send_account_transfers + WHERE workflow_id = 'test-workflow-2' + $$, + $$ + VALUES ( + 'test-workflow-2'::text, + 'initialized'::temporal.transfer_status, + '\x1234567890ABCDEF1234567890ABCDEF12345678'::bytea, + '\xf39Fd6e51aad88F6F4ce6aB8827279cffFb92266'::bytea, + '1000000000000000000'::text + ) + $$, + 'Test ETH transfer insertion' +); + +-- Test 3: Test update function +SELECT temporal.update_temporal_send_account_transfer( + 'test-workflow-1'::text, + 'sent'::temporal.transfer_status, + json_build_object( + 'user_op_hash', '\x1234'::bytea, + 'tx_hash', '\x5678'::bytea, + 'block_num', '123', + 'tx_idx', '1', + 'log_idx', '0' + )::jsonb +); + +SELECT results_eq( + $$ + SELECT + status, + (data->>'user_op_hash')::bytea, + (data->>'tx_hash')::bytea, + data->>'block_num', + data->>'tx_idx', + data->>'log_idx' + FROM temporal.send_account_transfers + WHERE workflow_id = 'test-workflow-1' + $$, + $$ + VALUES ( + 'sent'::temporal.transfer_status, + '\x1234'::bytea, + '\x5678'::bytea, + '123'::text, + '1'::text, + '0'::text + ) + $$, + 'Test transfer update' +); + +-- Test 4: Test activity insertion trigger for token transfer +SELECT results_eq( + $$ + SELECT + event_name, + from_user_id, + to_user_id, + (data->>'f')::bytea, + (data->>'t')::bytea, + data->>'v' + FROM activity + WHERE event_id = 'test-workflow-1' + $$, + $$ + VALUES ( + 'temporal_send_account_transfer'::text, + tests.get_supabase_uid('test_user_from'), + tests.get_supabase_uid('test_user_to'), + '\x1234567890ABCDEF1234567890ABCDEF12345678'::bytea, + '\xB0B7D5E8A4B6D534B3F608E9D27871F85A4E98DA'::bytea, + '100'::text + ) + $$, + 'Test activity insertion for token transfer' 
+); + +-- Test 5: Test activity insertion trigger for ETH transfer +SELECT results_eq( + $$ + SELECT + event_name, + from_user_id, + to_user_id, + (data->>'sender')::bytea, + data->>'value' + FROM activity + WHERE event_id = 'test-workflow-2' + $$, + $$ + VALUES ( + 'temporal_send_account_transfer'::text, + tests.get_supabase_uid('test_user_from'), + NULL::uuid, + '\x1234567890ABCDEF1234567890ABCDEF12345678'::bytea, + '1000000000000000000'::text + ) + $$, + 'Test activity insertion for ETH transfer' +); + +-- Test 6: Test activity update trigger +SELECT results_eq( + $$ + SELECT + (data->>'user_op_hash')::bytea, + (data->>'tx_hash')::bytea, + data->>'block_num' + FROM activity + WHERE event_id = 'test-workflow-1' + $$, + $$ + VALUES ( + '\x1234'::bytea, + '\x5678'::bytea, + '123'::text + ) + $$, + 'Test activity update' +); + +SELECT * FROM finish(); +ROLLBACK; \ No newline at end of file From 6742bd3bb85c5380303d028a0e2f1123ef631184 Mon Sep 17 00:00:00 2001 From: youngkidwarrior Date: Sun, 9 Feb 2025 11:35:54 -0800 Subject: [PATCH 16/58] Integrate temporal supabase table into workflow --- .../src/transfer-workflow/activities.ts | 32 ++++++------------- .../src/transfer-workflow/supabase.ts | 22 +++++-------- .../workflows/src/transfer-workflow/wagmi.ts | 4 --- .../src/transfer-workflow/workflow.ts | 11 ++++--- 4 files changed, 24 insertions(+), 45 deletions(-) diff --git a/packages/workflows/src/transfer-workflow/activities.ts b/packages/workflows/src/transfer-workflow/activities.ts index b3375ac8d..1b017f689 100644 --- a/packages/workflows/src/transfer-workflow/activities.ts +++ b/packages/workflows/src/transfer-workflow/activities.ts @@ -6,12 +6,7 @@ import { updateTemporalSendAccountTransfer, insertTemporalEthSendAccountTransfer, } from './supabase' -import { - simulateUserOperation, - sendUserOperation, - waitForTransactionReceipt, - getUserOperationByHash, -} from './wagmi' +import { simulateUserOperation, sendUserOperation, waitForTransactionReceipt } from 
'./wagmi' import type { UserOperation } from 'permissionless' import { bootstrap } from '@my/workflows/utils' import { decodeTransferUserOp } from 'app/utils/decodeTransferUserOp' @@ -22,14 +17,7 @@ export const createTransferActivities = (env: Record bootstrap(env) return { - async initializeTransferActivity(workflowId: string, userOpHash: `0x${string}`) { - const userOpData = await getUserOperationByHash(userOpHash) - if (!userOpData) { - throw ApplicationFailure.nonRetryable('User Operation hash is not a valid user op') - } - - const userOp = userOpData.userOperation - + async initializeTransferActivity(workflowId: string, userOp: UserOperation<'v0.7'>) { const { from, to, token, amount } = decodeTransferUserOp({ userOp }) if (!from || !to || !amount || !token) { throw ApplicationFailure.nonRetryable('User Operation is not a valid transfer') @@ -54,7 +42,6 @@ export const createTransferActivities = (env: Record token === 'eth' ? await insertTemporalEthSendAccountTransfer({ workflow_id: workflowId, - user_op_hash: userOpHash, status: 'initialized', sender: fromBytea, value: amount, @@ -62,7 +49,6 @@ export const createTransferActivities = (env: Record }) : await insertTemporalTokenSendAccountTransfer({ workflow_id: workflowId, - user_op_hash: userOpHash, status: 'initialized', f: fromBytea, t: toBytea, @@ -71,8 +57,8 @@ export const createTransferActivities = (env: Record }) if (error) { - throw ApplicationFailure.retryable( - 'Error inserting transfer into temporal_send_account_transfers', + throw ApplicationFailure.nonRetryable( + 'Error inserting transfer into temporal.send_account_transfers', error.code, { error, @@ -81,7 +67,7 @@ export const createTransferActivities = (env: Record ) } - return { userOp, from, to, amount, token } + return { from, to, amount, token } }, async sendUserOpActivity(userOp: UserOperation<'v0.7'>) { try { @@ -89,13 +75,15 @@ export const createTransferActivities = (env: Record log.info('UserOperation sent successfully', { hash }) 
return hash } catch (error) { - throw ApplicationFailure.retryable('Error sending user operation', error.code, error) + log.error('Error sending user operation', { error }) + throw ApplicationFailure.retryable('Error sending user operation', error.code) } }, - async updateTemporalTransferSentStatusActivity(workflowId: string) { + async updateTemporalTransferSentStatusActivity(workflowId: string, hash: `0x${string}`) { const { error } = await updateTemporalSendAccountTransfer({ workflow_id: workflowId, status: 'sent', + data: { user_op_hash: hash }, }) if (error) { throw ApplicationFailure.retryable( @@ -107,7 +95,7 @@ export const createTransferActivities = (env: Record } ) } - return + return null }, async waitForTransactionReceiptActivity(workflowId: string, hash: `0x${string}`) { try { diff --git a/packages/workflows/src/transfer-workflow/supabase.ts b/packages/workflows/src/transfer-workflow/supabase.ts index 09381419a..7e1664161 100644 --- a/packages/workflows/src/transfer-workflow/supabase.ts +++ b/packages/workflows/src/transfer-workflow/supabase.ts @@ -1,11 +1,10 @@ -import type { Database, Json } from '@my/supabase/database-generated.types' +import type { Database } from '@my/supabase/database-generated.types' import { log } from '@temporalio/activity' import { hexToBytea } from 'app/utils/hexToBytea' import { supabaseAdmin } from 'app/utils/supabase/admin' export async function insertTemporalTokenSendAccountTransfer({ workflow_id, - user_op_hash, status, f, t, @@ -13,16 +12,14 @@ export async function insertTemporalTokenSendAccountTransfer({ log_addr, }: { workflow_id: string - user_op_hash: string - status: Database['public']['Enums']['temporal_transfer_status'] + status: Database['temporal']['Enums']['transfer_status'] f: `\\x${string}` t: `\\x${string}` v: bigint log_addr: `\\x${string}` }) { - return await supabaseAdmin.rpc('insert_temporal_token_send_account_transfer', { + return await 
supabaseAdmin.schema('temporal').rpc('insert_temporal_token_send_account_transfer', { workflow_id, - user_op_hash, status, f, t, @@ -33,22 +30,19 @@ export async function insertTemporalTokenSendAccountTransfer({ export async function insertTemporalEthSendAccountTransfer({ workflow_id, - user_op_hash, status, sender, log_addr, value, }: { workflow_id: string - user_op_hash: string - status: Database['public']['Enums']['temporal_transfer_status'] + status: Database['temporal']['Enums']['transfer_status'] sender: `\\x${string}` log_addr: `\\x${string}` value: bigint }) { - return await supabaseAdmin.rpc('insert_temporal_eth_send_account_transfer', { + return await supabaseAdmin.schema('temporal').rpc('insert_temporal_eth_send_account_transfer', { workflow_id, - user_op_hash, status, sender, log_addr, @@ -62,10 +56,10 @@ export async function updateTemporalSendAccountTransfer({ data, }: { workflow_id: string - status: Database['public']['Enums']['temporal_transfer_status'] - data?: Json + status: Database['temporal']['Enums']['transfer_status'] + data?: Database['temporal']['Tables']['send_account_transfers']['Row']['data'] }) { - return await supabaseAdmin.rpc('update_temporal_send_account_transfer', { + return await supabaseAdmin.schema('temporal').rpc('update_temporal_send_account_transfer', { workflow_id, status, data, diff --git a/packages/workflows/src/transfer-workflow/wagmi.ts b/packages/workflows/src/transfer-workflow/wagmi.ts index 06511dfed..e508a1e5a 100644 --- a/packages/workflows/src/transfer-workflow/wagmi.ts +++ b/packages/workflows/src/transfer-workflow/wagmi.ts @@ -2,10 +2,6 @@ import type { UserOperation } from 'permissionless' import { baseMainnetBundlerClient, baseMainnetClient, entryPointAddress } from '@my/wagmi' import type { Hex } from 'viem' -export async function getUserOperationByHash(hash: `0x${string}`) { - return await baseMainnetBundlerClient.getUserOperationByHash({ hash }) -} - export async function simulateUserOperation(userOp: 
UserOperation<'v0.7'>) { return await baseMainnetClient.call({ account: entryPointAddress[baseMainnetClient.chain.id], diff --git a/packages/workflows/src/transfer-workflow/workflow.ts b/packages/workflows/src/transfer-workflow/workflow.ts index ff65cb584..ff7cf0e95 100644 --- a/packages/workflows/src/transfer-workflow/workflow.ts +++ b/packages/workflows/src/transfer-workflow/workflow.ts @@ -1,5 +1,6 @@ import { proxyActivities, workflowInfo } from '@temporalio/workflow' import type { createTransferActivities } from './activities' +import type { UserOperation } from 'permissionless' import debug from 'debug' @@ -16,16 +17,16 @@ const { startToCloseTimeout: '45 seconds', }) -export async function TransferWorkflow(userOpHash: `0x${string}`) { +export async function TransferWorkflow(userOp: UserOperation<'v0.7'>) { const workflowId = workflowInfo().workflowId - log('SendTransferWorkflow started with hash:', userOpHash) - const { userOp, token } = await initializeTransferActivity(workflowId, userOpHash) + log('SendTransferWorkflow started with userOp:', workflowId) + const { token } = await initializeTransferActivity(workflowId, userOp) log('Sending UserOperation') const hash = await sendUserOpActivity(userOp) log('UserOperation sent, hash:', hash) - await updateTemporalTransferSentStatusActivity(workflowId) + await updateTemporalTransferSentStatusActivity(workflowId, hash) const receipt = await waitForTransactionReceiptActivity(workflowId, hash) - log('Receipt received:', { tx_hash: receipt.transactionHash, user_op_hash: userOpHash }) + log('Receipt received:', { tx_hash: receipt.transactionHash }) const transfer = await isTransferIndexedActivity(workflowId, receipt.transactionHash, token) log('Transfer indexed') return transfer From ca0cf2f5ef2701114a47b2ddce94a52e6a1e8d78 Mon Sep 17 00:00:00 2001 From: youngkidwarrior Date: Sun, 9 Feb 2025 11:38:04 -0800 Subject: [PATCH 17/58] userId and UserOpHash for workflow id --- packages/api/src/routers/temporal.ts | 16 
+++++++++++++--- packages/app/features/send/confirm/screen.tsx | 9 +-------- 2 files changed, 14 insertions(+), 11 deletions(-) diff --git a/packages/api/src/routers/temporal.ts b/packages/api/src/routers/temporal.ts index dcf9f9fde..4d17f34ab 100644 --- a/packages/api/src/routers/temporal.ts +++ b/packages/api/src/routers/temporal.ts @@ -3,7 +3,10 @@ import debug from 'debug' import { z } from 'zod' import { createTRPCRouter, protectedProcedure } from '../trpc' import { getTemporalClient } from '@my/temporal/client' +import type { UserOperation } from 'permissionless' import { TransferWorkflow } from '@my/workflows/all-workflows' +import { baseMainnetClient, entryPointAddress } from '@my/wagmi' +import { getUserOperationHash } from 'permissionless/utils' const log = debug('api:temporal') @@ -11,22 +14,29 @@ export const temporalRouter = createTRPCRouter({ transfer: protectedProcedure .input( z.object({ - userOpHash: z.custom<`0x${string}`>(), + userOp: z.custom>(), }) ) .mutation( async ({ - input: { userOpHash }, + input: { userOp }, ctx: { session: { user }, }, }) => { try { const client = await getTemporalClient() + const chainId = baseMainnetClient.chain.id + const entryPoint = entryPointAddress[chainId] + const userOpHash = getUserOperationHash({ + userOperation: userOp, + entryPoint, + chainId, + }) const { workflowId } = await client.workflow.start(TransferWorkflow, { taskQueue: 'monorepo', workflowId: `transfer-workflow-${user.id}-${userOpHash}`, - args: [userOpHash], + args: [userOp], }) log(`Workflow Created: ${workflowId}`) return workflowId diff --git a/packages/app/features/send/confirm/screen.tsx b/packages/app/features/send/confirm/screen.tsx index 73901f611..3bae667bc 100644 --- a/packages/app/features/send/confirm/screen.tsx +++ b/packages/app/features/send/confirm/screen.tsx @@ -42,7 +42,6 @@ import { useTokenPrices } from 'app/utils/useTokenPrices' const log = debug('app:features:send:confirm:screen') import { api } from 'app/utils/api' import { 
signUserOp } from 'app/utils/signUserOp' -import { getUserOperationHash } from 'permissionless/utils' export function SendConfirmScreen() { const [queryParams] = useSendScreenParams() @@ -206,13 +205,7 @@ export function SendConfirm() { }) userOp.signature = signature - const userOpHash = getUserOperationHash({ - userOperation: userOp, - entryPoint, - chainId, - }) - - const workflowId = await transfer({ userOpHash }) + const workflowId = await transfer({ userOp }) setWorkflowId(workflowId) if (selectedCoin?.token === 'eth') { await ethQuery.refetch() From fc1aa43295619cb30473ab3624045734877c9998 Mon Sep 17 00:00:00 2001 From: youngkidwarrior Date: Sun, 9 Feb 2025 15:35:19 -0800 Subject: [PATCH 18/58] delete temporal activity workflow --- .../src/transfer-workflow/activities.ts | 19 ++++- .../src/transfer-workflow/supabase.ts | 4 ++ .../src/transfer-workflow/workflow.ts | 2 + supabase/database-generated.types.ts | 6 ++ ...035940_create_temporal_transfers_table.sql | 71 +++++++++++-------- supabase/tests/temporal_transfers_test.sql | 27 ++++--- 6 files changed, 86 insertions(+), 43 deletions(-) diff --git a/packages/workflows/src/transfer-workflow/activities.ts b/packages/workflows/src/transfer-workflow/activities.ts index 1b017f689..9a368c7f0 100644 --- a/packages/workflows/src/transfer-workflow/activities.ts +++ b/packages/workflows/src/transfer-workflow/activities.ts @@ -5,6 +5,7 @@ import { insertTemporalTokenSendAccountTransfer, updateTemporalSendAccountTransfer, insertTemporalEthSendAccountTransfer, + deleteTemporalTransferFromActivityTable, } from './supabase' import { simulateUserOperation, sendUserOperation, waitForTransactionReceipt } from './wagmi' import type { UserOperation } from 'permissionless' @@ -76,7 +77,7 @@ export const createTransferActivities = (env: Record return hash } catch (error) { log.error('Error sending user operation', { error }) - throw ApplicationFailure.retryable('Error sending user operation', error.code) + throw 
ApplicationFailure.nonRetryable('Error sending user operation', error.code) } }, async updateTemporalTransferSentStatusActivity(workflowId: string, hash: `0x${string}`) { @@ -108,14 +109,13 @@ export const createTransferActivities = (env: Record } log.info('waitForTransactionReceiptActivity', { tx_hash: res.receipt.transactionHash }) const { receipt } = res + await updateTemporalSendAccountTransfer({ workflow_id: workflowId, status: 'confirmed', data: { tx_hash: receipt.transactionHash, block_num: receipt.blockNumber.toString(), - tx_idx: receipt.transactionIndex.toString(), - // log_idx: logs[0].logIndex.toString(), -- Need to look into how to get this }, }) return receipt @@ -153,5 +153,18 @@ export const createTransferActivities = (env: Record log.info('isTransferIndexedActivity', { isIndexed }) return isIndexed }, + async deleteTemporalTransferActivity(workflowId: string) { + const { error } = await deleteTemporalTransferFromActivityTable(workflowId) + if (error) { + throw ApplicationFailure.retryable( + 'Error deleting temporal_tranfer entry in activity', + error.code, + { + error, + workflowId, + } + ) + } + }, } } diff --git a/packages/workflows/src/transfer-workflow/supabase.ts b/packages/workflows/src/transfer-workflow/supabase.ts index 7e1664161..435c166a7 100644 --- a/packages/workflows/src/transfer-workflow/supabase.ts +++ b/packages/workflows/src/transfer-workflow/supabase.ts @@ -66,6 +66,10 @@ export async function updateTemporalSendAccountTransfer({ }) } +export async function deleteTemporalTransferFromActivityTable(workflow_id: string) { + return await supabaseAdmin.from('activity').delete().eq('event_id', workflow_id) +} + export async function isTokenTransferIndexed(tx_hash: `0x${string}`) { const { count, error, status, statusText } = await supabaseAdmin .from('send_account_transfers') diff --git a/packages/workflows/src/transfer-workflow/workflow.ts b/packages/workflows/src/transfer-workflow/workflow.ts index ff7cf0e95..8dc2e037d 100644 --- 
a/packages/workflows/src/transfer-workflow/workflow.ts +++ b/packages/workflows/src/transfer-workflow/workflow.ts @@ -12,6 +12,7 @@ const { updateTemporalTransferSentStatusActivity, waitForTransactionReceiptActivity, isTransferIndexedActivity, + deleteTemporalTransferActivity, } = proxyActivities>({ // TODO: make this configurable startToCloseTimeout: '45 seconds', @@ -29,5 +30,6 @@ export async function TransferWorkflow(userOp: UserOperation<'v0.7'>) { log('Receipt received:', { tx_hash: receipt.transactionHash }) const transfer = await isTransferIndexedActivity(workflowId, receipt.transactionHash, token) log('Transfer indexed') + await deleteTemporalTransferActivity(workflowId) return transfer } diff --git a/supabase/database-generated.types.ts b/supabase/database-generated.types.ts index 8b58badd0..a57f1dcab 100644 --- a/supabase/database-generated.types.ts +++ b/supabase/database-generated.types.ts @@ -1499,6 +1499,12 @@ export type Database = { [_ in never]: never } Functions: { + delete_temporal_transfer_activity: { + Args: { + workflow_id: string + } + Returns: undefined + } insert_temporal_eth_send_account_transfer: { Args: { workflow_id: string diff --git a/supabase/migrations/20250205035940_create_temporal_transfers_table.sql b/supabase/migrations/20250205035940_create_temporal_transfers_table.sql index d5cf54c47..850a60e20 100644 --- a/supabase/migrations/20250205035940_create_temporal_transfers_table.sql +++ b/supabase/migrations/20250205035940_create_temporal_transfers_table.sql @@ -25,8 +25,8 @@ CREATE TABLE temporal.send_account_transfers( user_id uuid NOT NULL, status temporal.transfer_status NOT NULL, data jsonb NOT NULL, - created_at timestamp with time zone DEFAULT NOW(), - updated_at timestamp with time zone DEFAULT NOW() + created_at timestamptz DEFAULT (NOW() AT TIME ZONE 'UTC'), + updated_at timestamptz DEFAULT (NOW() AT TIME ZONE 'UTC') ); alter table "temporal"."send_account_transfers" @@ -76,7 +76,7 @@ BEGIN json_build_object( 'f', f, 't', 
t, - 'v', v, + 'v', v::text, 'log_addr', log_addr ) ); @@ -114,7 +114,7 @@ BEGIN json_build_object( 'log_addr', log_addr, 'sender', sender, - 'value', value + 'value', value::text ) ); END; @@ -135,11 +135,10 @@ BEGIN -- Only construct _data if input data is not null IF data IS NOT NULL THEN _data := json_build_object( - 'user_op_hash', (data->>'user_op_hash')::bytea, - 'tx_hash', (data->>'tx_hash')::bytea, - 'block_num', data->>'block_num', - 'tx_idx', data->>'tx_idx', - 'log_idx', data->>'log_idx' + 'user_op_hash', (data->>'user_op_hash'), + 'tx_hash', (data->>'tx_hash'), + 'block_num', data->>'block_num'::text, + 'tx_idx', data->>'tx_idx'::text ); ELSE _data := '{}'::jsonb; @@ -152,7 +151,7 @@ BEGIN WHEN _data = '{}'::jsonb THEN temporal.send_account_transfers.data ELSE temporal.send_account_transfers.data || _data END, - updated_at = NOW() + updated_at = (NOW() AT TIME ZONE 'UTC') WHERE temporal.send_account_transfers.workflow_id = update_temporal_send_account_transfer.workflow_id; END; @@ -185,21 +184,20 @@ BEGIN created_at ) VALUES ( - 'temporal_send_account_transfer', + 'temporal_send_account_transfers', NEW.workflow_id, _f_user_id, _t_user_id, json_build_object( 'status', NEW.status, - 'user_op_hash', (NEW.data->>'user_op_hash')::bytea, - 'log_addr', (NEW.data->>'log_addr')::bytea, - 'f', (NEW.data->>'f')::bytea, - 't', (NEW.data->>'t')::bytea, - 'v', NEW.data->>'v', - 'tx_hash', (NEW.data->>'tx_hash')::bytea, - 'block_num', NEW.data->>'block_num', - 'tx_idx', NEW.data->>'tx_idx', - 'log_idx', NEW.data->>'log_idx' + 'user_op_hash', (NEW.data->>'user_op_hash'), + 'log_addr', (NEW.data->>'log_addr'), + 'f', (NEW.data->>'f'), + 't', (NEW.data->>'t'), + 'v', NEW.data->>'v'::text, + 'tx_hash', (NEW.data->>'tx_hash'), + 'block_num', NEW.data->>'block_num'::text, + 'tx_idx', NEW.data->>'tx_idx'::text ), NEW.created_at ); @@ -234,20 +232,19 @@ BEGIN created_at ) VALUES ( - 'temporal_send_account_transfer', + 'temporal_send_account_transfers', NEW.workflow_id, 
_from_user_id, _to_user_id, json_build_object( 'status', NEW.status, - 'user_op_hash', (NEW.data->>'user_op_hash')::bytea, - 'log_addr', (NEW.data->>'log_addr')::bytea, - 'sender', (NEW.data->>'sender')::bytea, - 'value', NEW.data->>'value', - 'tx_hash', (NEW.data->>'tx_hash')::bytea, - 'block_num', NEW.data->>'block_num', - 'tx_idx', NEW.data->>'tx_idx', - 'log_idx', NEW.data->>'log_idx' + 'user_op_hash', (NEW.data->>'user_op_hash'), + 'log_addr', (NEW.data->>'log_addr'), + 'sender', (NEW.data->>'sender'), + 'value', NEW.data->>'value'::text, + 'tx_hash', (NEW.data->>'tx_hash'), + 'block_num', NEW.data->>'block_num'::text, + 'tx_idx', NEW.data->>'tx_idx'::text ), NEW.created_at ); @@ -276,7 +273,7 @@ CREATE OR REPLACE FUNCTION temporal.temporal_send_account_transfers_trigger_upda BEGIN UPDATE activity SET data = NEW.data - WHERE event_name = 'temporal_send_account_transfer' + WHERE event_name = 'temporal_send_account_transfers' AND event_id = NEW.workflow_id; RETURN NEW; END; @@ -285,4 +282,16 @@ $$; CREATE TRIGGER temporal_send_account_transfers_trigger_update_activity AFTER UPDATE ON temporal.send_account_transfers FOR EACH ROW - EXECUTE FUNCTION temporal.temporal_send_account_transfers_trigger_update_activity(); \ No newline at end of file + EXECUTE FUNCTION temporal.temporal_send_account_transfers_trigger_update_activity(); + +CREATE OR REPLACE FUNCTION temporal.delete_temporal_transfer_activity(workflow_id text) +RETURNS void +LANGUAGE plpgsql +SECURITY DEFINER +AS $$ +BEGIN + DELETE FROM activity + WHERE event_name = 'temporal_send_account_transfers' + AND event_id = workflow_id; +END; +$$; \ No newline at end of file diff --git a/supabase/tests/temporal_transfers_test.sql b/supabase/tests/temporal_transfers_test.sql index 3ae768257..3a19f10fb 100644 --- a/supabase/tests/temporal_transfers_test.sql +++ b/supabase/tests/temporal_transfers_test.sql @@ -1,5 +1,5 @@ BEGIN; -SELECT plan(6); +SELECT plan(7); -- Create the necessary extensions CREATE EXTENSION 
"basejump-supabase_test_helpers"; @@ -98,8 +98,7 @@ SELECT temporal.update_temporal_send_account_transfer( 'user_op_hash', '\x1234'::bytea, 'tx_hash', '\x5678'::bytea, 'block_num', '123', - 'tx_idx', '1', - 'log_idx', '0' + 'tx_idx', '1' )::jsonb ); @@ -110,8 +109,7 @@ SELECT results_eq( (data->>'user_op_hash')::bytea, (data->>'tx_hash')::bytea, data->>'block_num', - data->>'tx_idx', - data->>'log_idx' + data->>'tx_idx' FROM temporal.send_account_transfers WHERE workflow_id = 'test-workflow-1' $$, @@ -121,8 +119,7 @@ SELECT results_eq( '\x1234'::bytea, '\x5678'::bytea, '123'::text, - '1'::text, - '0'::text + '1'::text ) $$, 'Test transfer update' @@ -143,7 +140,7 @@ SELECT results_eq( $$, $$ VALUES ( - 'temporal_send_account_transfer'::text, + 'temporal_send_account_transfers'::text, tests.get_supabase_uid('test_user_from'), tests.get_supabase_uid('test_user_to'), '\x1234567890ABCDEF1234567890ABCDEF12345678'::bytea, @@ -168,7 +165,7 @@ SELECT results_eq( $$, $$ VALUES ( - 'temporal_send_account_transfer'::text, + 'temporal_send_account_transfers'::text, tests.get_supabase_uid('test_user_from'), NULL::uuid, '\x1234567890ABCDEF1234567890ABCDEF12345678'::bytea, @@ -198,5 +195,17 @@ SELECT results_eq( 'Test activity update' ); +SELECT temporal.delete_temporal_transfer_activity('test-workflow-1'); + +SELECT is_empty( + $$ + SELECT * + FROM activity + WHERE event_name = 'temporal_send_account_transfers' + AND event_id = 'test-workflow-1' + $$, + 'Test temporal transfer activity was deleted' +); + SELECT * FROM finish(); ROLLBACK; \ No newline at end of file From 49f49fa1b0179b01448c2db481ca23a974445c2f Mon Sep 17 00:00:00 2001 From: youngkidwarrior Date: Sun, 9 Feb 2025 15:35:51 -0800 Subject: [PATCH 19/58] show temporal transfers in token activity feed --- .../activity/utils/useActivityFeed.ts | 1 + .../home/utils/useTokenActivityFeed.ts | 23 ++-- packages/app/features/send/confirm/screen.tsx | 22 ++-- packages/app/utils/activity.ts | 53 +++++++++ 
.../activity/TemporalTransfersEventSchema.ts | 102 ++++++++++++++++++ packages/app/utils/zod/activity/events.ts | 4 + packages/app/utils/zod/activity/index.ts | 2 + 7 files changed, 188 insertions(+), 19 deletions(-) create mode 100644 packages/app/utils/zod/activity/TemporalTransfersEventSchema.ts diff --git a/packages/app/features/activity/utils/useActivityFeed.ts b/packages/app/features/activity/utils/useActivityFeed.ts index 07a0c9c30..b03f8401f 100644 --- a/packages/app/features/activity/utils/useActivityFeed.ts +++ b/packages/app/features/activity/utils/useActivityFeed.ts @@ -53,6 +53,7 @@ export function useActivityFeed({ .order('created_at', { ascending: false }) .range(from, to) const { data, error } = await request + console.log('data: ', data) throwIf(error) return EventArraySchema.parse(data) } diff --git a/packages/app/features/home/utils/useTokenActivityFeed.ts b/packages/app/features/home/utils/useTokenActivityFeed.ts index 87eeb10a7..900be2a05 100644 --- a/packages/app/features/home/utils/useTokenActivityFeed.ts +++ b/packages/app/features/home/utils/useTokenActivityFeed.ts @@ -34,10 +34,16 @@ export function useTokenActivityFeed(params: { const to = (pageParam + 1) * pageSize - 1 let query = supabase.from('activity_feed').select('*') + // First, handle event_name conditions if (address) { - query = query.eq('event_name', Events.SendAccountTransfers).eq('data->>log_addr', address) + query = query + .in('event_name', [Events.SendAccountTransfers, Events.TemporalSendAccountTransfers]) + .eq('data->>log_addr', address) } else { - query = query.eq('event_name', Events.SendAccountReceive) + query = query.in('event_name', [ + Events.SendAccountReceive, + Events.TemporalSendAccountTransfers, + ]) } const paymasterAddresses = Object.values(tokenPaymasterAddress) @@ -53,15 +59,16 @@ export function useTokenActivityFeed(params: { .or('from_user.not.is.null, to_user.not.is.null') // only show activities with a send app user .or( squish(` - data->t.is.null, - 
data->f.is.null, - and( - data->>t.not.in.(${toTransferIgnoreValues}), - data->>f.not.in.(${fromTransferIgnoreValues}) - )`) + data->t.is.null, + data->f.is.null, + and( + data->>t.not.in.(${toTransferIgnoreValues}), + data->>f.not.in.(${fromTransferIgnoreValues}) + )`) ) .order('created_at', { ascending: false }) .range(from, to) + throwIf(error) return EventArraySchema.parse(data) } diff --git a/packages/app/features/send/confirm/screen.tsx b/packages/app/features/send/confirm/screen.tsx index 3bae667bc..928c05af8 100644 --- a/packages/app/features/send/confirm/screen.tsx +++ b/packages/app/features/send/confirm/screen.tsx @@ -78,14 +78,6 @@ export function SendConfirm() { const { mutateAsync: transfer } = api.temporal.transfer.useMutation() - const [workflowId, setWorkflowId] = useState() - - useEffect(() => { - if (workflowId) { - router.replace({ pathname: '/', query: { token: sendToken } }) - } - }, [workflowId, router, sendToken]) - const queryClient = useQueryClient() const isUSDCSelected = selectedCoin?.label === 'USDC' const { coin: usdc } = useCoin('USDC') @@ -205,13 +197,21 @@ export function SendConfirm() { }) userOp.signature = signature - const workflowId = await transfer({ userOp }) - setWorkflowId(workflowId) + await transfer({ userOp }) + + // Invalidate token activity feed before navigation + await queryClient.invalidateQueries({ + queryKey: ['token_activity_feed', selectedCoin?.token], + }) + if (selectedCoin?.token === 'eth') { await ethQuery.refetch() } else { await tokensQuery.refetch() } + + // Navigate after invalidating + router.replace({ pathname: '/', query: { token: sendToken } }) } catch (e) { console.error(e) setError(e) @@ -367,7 +367,7 @@ export function SendConfirm() { onPress={onSubmit} br={'$4'} disabledStyle={{ opacity: 0.7, cursor: 'not-allowed', pointerEvents: 'none' }} - disabled={!canSubmit || !!workflowId} + disabled={!canSubmit} gap={4} py={'$5'} width={'100%'} diff --git a/packages/app/utils/activity.ts 
b/packages/app/utils/activity.ts index e5cda26ff..bea4d4db5 100644 --- a/packages/app/utils/activity.ts +++ b/packages/app/utils/activity.ts @@ -17,6 +17,12 @@ import { import { isSendAccountReceiveEvent } from './zod/activity/SendAccountReceiveEventSchema' import { isSendTokenUpgradeEvent } from './zod/activity/SendAccountTransfersEventSchema' import { sendCoin, sendV0Coin } from 'app/data/coins' +import { + isTemporalEthTransfers, + isTemporalTokenTransfers, + isTemporalTransfersEvent, + temporalEventNameFromStatus, +} from './zod/activity/TemporalTransfersEventSchema' const wagmiAddresWithLabel = (addresses: `0x${string}`[], label: string) => Object.values(addresses).map((a) => [a, label]) @@ -38,6 +44,7 @@ const labelAddress = (address: `0x${string}`): string => * if sent, the counterpart is the user who received the token. * If the activity is a tag receipt, the actor is the user who created the tag. * If the activity is a referral, the actor is the user who referred the user. + * If the activity is a temporal transfer, the actor is the user who sent the token. */ export function counterpart(activity: Activity): Activity['from_user'] | Activity['to_user'] { const { from_user, to_user } = activity @@ -60,6 +67,9 @@ export function counterpart(activity: Activity): Activity['from_user'] | Activit return from_user } } + if (isTemporalTransfersEvent(activity)) { + return to_user + } return null // not a send or receive event } @@ -67,7 +77,35 @@ export function counterpart(activity: Activity): Activity['from_user'] | Activit * Returns the amount of the activity if there is one. 
*/ export function amountFromActivity(activity: Activity): string { + console.log('activity: ', activity) switch (true) { + case isTemporalTransfersEvent(activity): { + const { data } = activity + + if (isTemporalTokenTransfers(activity)) { + if (data.coin) { + const amount = formatAmount( + formatUnits(data.v, data.coin.decimals), + 5, + data.coin.formatDecimals + ) + return `${amount} ${data.coin.symbol}` + } + return formatAmount(`${data.v}`, 5, 0) + } + if (isTemporalEthTransfers(activity)) { + if (data.coin) { + const amount = formatAmount( + formatUnits(data.value, data.coin.decimals), + 5, + data.coin.formatDecimals + ) + return `${amount} ${data.coin.symbol}` + } + return formatAmount(`${data.value}`, 5, 0) + } + return '' + } case isSendAccountTransfersEvent(activity): { const { v, coin } = activity.data if (coin) { @@ -148,6 +186,8 @@ export function eventNameFromActivity(activity: Activity) { const isETHReceive = isSendAccountReceiveEvent(activity) const isTransferOrReceive = isERC20Transfer || isETHReceive switch (true) { + case isTemporalTransfersEvent(activity): + return temporalEventNameFromStatus(data.status) case isERC20Transfer && isAddressEqual(data.f, sendtagCheckoutAddress[baseMainnet.id]): return 'Referral Reward' case isSendTokenUpgradeEvent(activity): @@ -166,6 +206,7 @@ export function eventNameFromActivity(activity: Activity) { return 'Referral' case isReferralsEvent(activity) && !!to_user?.id: return 'Referred By' + default: return event_name // catch-all i_am_rick_james -> I Am Rick James .split('_') @@ -184,8 +225,11 @@ export function phraseFromActivity(activity: Activity) { const isERC20Transfer = isSendAccountTransfersEvent(activity) const isETHReceive = isSendAccountReceiveEvent(activity) const isTransferOrReceive = isERC20Transfer || isETHReceive + const isTemporalTransfers = isTemporalTransfersEvent(activity) switch (true) { + case isTemporalTransfers: + return 'Pending' case isERC20Transfer && isAddressEqual(data.f, 
sendtagCheckoutAddress[baseMainnet.id]): return 'Earned referral reward' case isSendTokenUpgradeEvent(activity): @@ -220,6 +264,7 @@ export function subtextFromActivity(activity: Activity): string | null { const { from_user, to_user, data } = activity const isERC20Transfer = isSendAccountTransfersEvent(activity) const isETHReceive = isSendAccountReceiveEvent(activity) + if (isTagReceiptsEvent(activity) || isTagReceiptUSDCEvent(activity)) { return activity.data.tags.map((t) => `/${t}`).join(', ') } @@ -261,6 +306,14 @@ export function subtextFromActivity(activity: Activity): string | null { if (isETHReceive && to_user?.id) { return labelAddress(data.log_addr) } + if (isTemporalTransfersEvent(activity)) { + if (isTemporalTokenTransfers(activity)) { + return labelAddress(activity.data.t) + } + if (isTemporalEthTransfers(activity)) { + return labelAddress(activity.data.sender) + } + } return null } diff --git a/packages/app/utils/zod/activity/TemporalTransfersEventSchema.ts b/packages/app/utils/zod/activity/TemporalTransfersEventSchema.ts new file mode 100644 index 000000000..dbc106027 --- /dev/null +++ b/packages/app/utils/zod/activity/TemporalTransfersEventSchema.ts @@ -0,0 +1,102 @@ +import { z } from 'zod' +import { decimalStrToBigInt } from '../bigint' +import { byteaToHexEthAddress, byteaToHexTxHash } from '../bytea' +import { BaseEventSchema } from './BaseEventSchema' +import { CoinSchema, knownCoins } from 'app/data/coins' +import { isAddressEqual } from 'viem' +import { Events } from './events' +import type { Database } from '@my/supabase/database-generated.types' +/** + * Base temporal transfers data + */ +const BaseTemporalTransfersDataSchema = z.object({ + status: z.enum([ + 'initialized', + 'sent', + 'confirmed', + 'indexed', + 'failed', + ] as const satisfies readonly Database['temporal']['Enums']['transfer_status'][]), + user_op_hash: byteaToHexTxHash.optional(), + tx_hash: byteaToHexTxHash.optional(), + block_num: decimalStrToBigInt.optional(), + 
tx_idx: decimalStrToBigInt.optional(), + log_addr: byteaToHexEthAddress, +}) + +/** + * Token temporal transfers data + */ +export const TokenTemporalTransfersDataSchema = BaseTemporalTransfersDataSchema.extend({ + f: byteaToHexEthAddress, + t: byteaToHexEthAddress, + v: decimalStrToBigInt, +}) + .extend({ + coin: CoinSchema.optional(), + }) + .transform((t) => ({ + ...t, + coin: knownCoins.find((c) => c.token !== 'eth' && isAddressEqual(c.token, t.log_addr)), + })) + +/** + * ETH temporal transfers data + */ +export const EthTemporalTransfersDataSchema = BaseTemporalTransfersDataSchema.extend({ + sender: byteaToHexEthAddress, + value: decimalStrToBigInt, +}) + .extend({ + coin: CoinSchema.optional(), + }) + .transform((t) => ({ + ...t, + coin: knownCoins.find((c) => c.token === 'eth'), + })) + +export const TemporalTransfersEventSchema = BaseEventSchema.extend({ + event_name: z.literal(Events.TemporalSendAccountTransfers), + data: z.union([TokenTemporalTransfersDataSchema, EthTemporalTransfersDataSchema]), +}) + +export type TemporalTransfersEvent = z.infer + +export const isTemporalTransfersEvent = (event: { + event_name: string +}): event is TemporalTransfersEvent => event.event_name === Events.TemporalSendAccountTransfers + +export const isTemporalTokenTransfers = (event: { + data?: unknown + event_name: string +}): event is TemporalTransfersEvent & { + data: z.infer +} => { + return isTemporalTransfersEvent(event) && event.data.coin?.token !== 'eth' +} + +export const isTemporalEthTransfers = (event: { + data?: unknown + event_name: string +}): event is TemporalTransfersEvent & { data: z.infer } => { + return isTemporalTransfersEvent(event) && event.data.coin?.token === 'eth' +} + +export const temporalEventNameFromStatus = ( + status: Database['temporal']['Enums']['transfer_status'] +) => { + switch (status) { + case 'initialized': + return 'Sending' + case 'sent': + return 'Confirming' + case 'confirmed': + return '' + case 'indexed': + return 'Sent' + case 
'failed': + return 'Failed' + default: + return '' + } +} diff --git a/packages/app/utils/zod/activity/events.ts b/packages/app/utils/zod/activity/events.ts index 3024f4797..b2f58dcf0 100644 --- a/packages/app/utils/zod/activity/events.ts +++ b/packages/app/utils/zod/activity/events.ts @@ -23,4 +23,8 @@ export enum Events { * Send account receives ETH */ SendAccountReceive = 'send_account_receives', + /** + * Temporal send account transfers + */ + TemporalSendAccountTransfers = 'temporal_send_account_transfers', } diff --git a/packages/app/utils/zod/activity/index.ts b/packages/app/utils/zod/activity/index.ts index 5d7e2f661..276b8ed49 100644 --- a/packages/app/utils/zod/activity/index.ts +++ b/packages/app/utils/zod/activity/index.ts @@ -5,6 +5,7 @@ import { SendAccountTransfersEventSchema } from './SendAccountTransfersEventSche import { TagReceiptsEventSchema } from './TagReceiptsEventSchema' import { TagReceiptUSDCEventSchema } from './TagReceiptUSDCEventSchema' import { SendAccountReceiveEventSchema } from './SendAccountReceiveEventSchema' +import { TemporalTransfersEventSchema } from './TemporalTransfersEventSchema' export type { BaseEvent } from './BaseEventSchema' export { ReferralsEventSchema, isReferralsEvent } from './ReferralsEventSchema' @@ -28,6 +29,7 @@ export const EventSchema = z TagReceiptUSDCEventSchema, ReferralsEventSchema, SendAccountReceiveEventSchema, + TemporalTransfersEventSchema, ]) .or(BaseEventSchema) .catch((ctx) => { From 66bd46072279987d947091012666a066ac8d852b Mon Sep 17 00:00:00 2001 From: youngkidwarrior Date: Sun, 9 Feb 2025 17:05:45 -0800 Subject: [PATCH 20/58] Update runTransferWorkflow script --- apps/workers/src/client.ts | 14 ++++++++++++-- 1 file changed, 12 insertions(+), 2 deletions(-) diff --git a/apps/workers/src/client.ts b/apps/workers/src/client.ts index 2c1b927af..3f7159cbc 100644 --- a/apps/workers/src/client.ts +++ b/apps/workers/src/client.ts @@ -1,5 +1,8 @@ import { Connection, Client } from '@temporalio/client' 
import { TransferWorkflow } from '@my/workflows/all-workflows' +import type { UserOperation } from 'permissionless' +import { baseMainnetClient, entryPointAddress } from '@my/wagmi' +import { getUserOperationHash } from 'permissionless/utils' // async function runDistributionWorkflow() { // const connection = await Connection.connect() // Connect to localhost with default ConnectionOptions. @@ -24,16 +27,23 @@ import { TransferWorkflow } from '@my/workflows/all-workflows' // return result // } -export async function runTransferWorkflow(userId: string, userOpHash: `0x${string}`) { +export async function runTransferWorkflow(userId: string, userOp: UserOperation<'v0.7'>) { const connection = await Connection.connect() const client = new Client({ connection, }) + const chainId = baseMainnetClient.chain.id + const entryPoint = entryPointAddress[chainId] + const userOpHash = getUserOperationHash({ + userOperation: userOp, + entryPoint, + chainId, + }) const handle = await client.workflow.start(TransferWorkflow, { taskQueue: 'monorepo', workflowId: `transfers-workflow-${userId}-${userOpHash}`, // TODO: remember to replace this with a meaningful business ID - args: [userOpHash], + args: [userOp], }) console.log('Started handle', handle.workflowId) // optional: wait for client result From 07b9b81d1dbb24383b9f7c9b0cd43700c20e35aa Mon Sep 17 00:00:00 2001 From: youngkidwarrior Date: Wed, 12 Feb 2025 13:53:17 -0800 Subject: [PATCH 21/58] Remove status index add created_at index --- .../20250205035940_create_temporal_transfers_table.sql | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/supabase/migrations/20250205035940_create_temporal_transfers_table.sql b/supabase/migrations/20250205035940_create_temporal_transfers_table.sql index 850a60e20..6f0295f66 100644 --- a/supabase/migrations/20250205035940_create_temporal_transfers_table.sql +++ b/supabase/migrations/20250205035940_create_temporal_transfers_table.sql @@ -41,7 +41,7 @@ using ( CREATE INDEX 
temporal_send_account_transfers_user_id_idx ON temporal.send_account_transfers(user_id); -CREATE INDEX temporal_send_account_transfers_status_idx ON temporal.send_account_transfers(status); +CREATE INDEX temporal_send_account_transfers_created_at_idx ON temporal.send_account_transfers(created_at); CREATE UNIQUE INDEX temporal_send_account_transfers_workflow_id_idx ON temporal.send_account_transfers(workflow_id); CREATE OR REPLACE FUNCTION temporal.insert_temporal_token_send_account_transfer( From 14f454e748fc2c7b11464c54783c002c81b604c3 Mon Sep 17 00:00:00 2001 From: youngkidwarrior Date: Wed, 12 Feb 2025 17:29:00 -0800 Subject: [PATCH 22/58] Add workflow failure logic --- .../activity/TemporalTransfersEventSchema.ts | 19 +- .../src/transfer-workflow/activities.ts | 165 +++++++++--------- .../src/transfer-workflow/supabase.ts | 12 +- .../workflows/src/transfer-workflow/wagmi.ts | 7 +- .../src/transfer-workflow/workflow.ts | 97 ++++++++-- supabase/database-generated.types.ts | 1 + ...035940_create_temporal_transfers_table.sql | 95 +++++----- 7 files changed, 248 insertions(+), 148 deletions(-) diff --git a/packages/app/utils/zod/activity/TemporalTransfersEventSchema.ts b/packages/app/utils/zod/activity/TemporalTransfersEventSchema.ts index dbc106027..554145cd0 100644 --- a/packages/app/utils/zod/activity/TemporalTransfersEventSchema.ts +++ b/packages/app/utils/zod/activity/TemporalTransfersEventSchema.ts @@ -6,17 +6,22 @@ import { CoinSchema, knownCoins } from 'app/data/coins' import { isAddressEqual } from 'viem' import { Events } from './events' import type { Database } from '@my/supabase/database-generated.types' + +/** Temporal transfers status */ +export const temporalTransferStatus = z.enum([ + 'initialized', + 'sent', + 'confirmed', + 'indexed', + 'failed', + 'cancelled', +] as const satisfies readonly Database['temporal']['Enums']['transfer_status'][]) + /** * Base temporal transfers data */ const BaseTemporalTransfersDataSchema = z.object({ - status: 
z.enum([ - 'initialized', - 'sent', - 'confirmed', - 'indexed', - 'failed', - ] as const satisfies readonly Database['temporal']['Enums']['transfer_status'][]), + status: temporalTransferStatus, user_op_hash: byteaToHexTxHash.optional(), tx_hash: byteaToHexTxHash.optional(), block_num: decimalStrToBigInt.optional(), diff --git a/packages/workflows/src/transfer-workflow/activities.ts b/packages/workflows/src/transfer-workflow/activities.ts index 9a368c7f0..2eb8516a9 100644 --- a/packages/workflows/src/transfer-workflow/activities.ts +++ b/packages/workflows/src/transfer-workflow/activities.ts @@ -12,7 +12,8 @@ import type { UserOperation } from 'permissionless' import { bootstrap } from '@my/workflows/utils' import { decodeTransferUserOp } from 'app/utils/decodeTransferUserOp' import { hexToBytea } from 'app/utils/hexToBytea' -import type { allCoinsDict } from 'app/data/coins' +import type { Json, Database, PgBytea } from '@my/supabase/database.types' +import superjson from 'superjson' export const createTransferActivities = (env: Record) => { bootstrap(env) @@ -26,39 +27,54 @@ export const createTransferActivities = (env: Record if (amount <= 0n) { throw ApplicationFailure.nonRetryable('User Operation has amount <= 0') } - if (!userOp.signature) { throw ApplicationFailure.nonRetryable('UserOp signature is required') } + + await simulateUserOperation(userOp).catch((error) => { + throw ApplicationFailure.nonRetryable('Error simulating user operation', error.code, error) + }) + + let fromBytea: PgBytea + let toBytea: PgBytea + let tokenBytea: PgBytea | null + try { - await simulateUserOperation(userOp) + fromBytea = hexToBytea(from) + toBytea = hexToBytea(to) + tokenBytea = token === 'eth' ? 
null : hexToBytea(token) } catch (error) { - throw ApplicationFailure.nonRetryable('Error simulating user operation', error.code, error) + throw ApplicationFailure.nonRetryable('Invalid hex address format') } - // Convert hex addresses to bytea for database - const fromBytea = hexToBytea(from) - const toBytea = hexToBytea(to) - const { error } = - token === 'eth' - ? await insertTemporalEthSendAccountTransfer({ - workflow_id: workflowId, - status: 'initialized', - sender: fromBytea, - value: amount, - log_addr: toBytea, - }) - : await insertTemporalTokenSendAccountTransfer({ - workflow_id: workflowId, - status: 'initialized', - f: fromBytea, - t: toBytea, - v: amount, - log_addr: hexToBytea(token), - }) + return { from: fromBytea, to: toBytea, amount, token: tokenBytea } + }, + async insertTemporalSendAccountTransfer( + workflowId: string, + from: PgBytea, + to: PgBytea, + amount: bigint, + token: PgBytea | null + ) { + const { error } = token + ? await insertTemporalTokenSendAccountTransfer({ + workflow_id: workflowId, + status: 'initialized', + f: from, + t: to, + v: amount, + log_addr: token, + }) + : await insertTemporalEthSendAccountTransfer({ + workflow_id: workflowId, + status: 'initialized', + sender: from, + value: amount, + log_addr: to, + }) if (error) { - throw ApplicationFailure.nonRetryable( + throw ApplicationFailure.retryable( 'Error inserting transfer into temporal.send_account_transfers', error.code, { @@ -67,8 +83,6 @@ export const createTransferActivities = (env: Record } ) } - - return { from, to, amount, token } }, async sendUserOpActivity(userOp: UserOperation<'v0.7'>) { try { @@ -77,72 +91,62 @@ export const createTransferActivities = (env: Record return hash } catch (error) { log.error('Error sending user operation', { error }) - throw ApplicationFailure.nonRetryable('Error sending user operation', error.code) + throw error } }, - async updateTemporalTransferSentStatusActivity(workflowId: string, hash: `0x${string}`) { - const { error } = 
await updateTemporalSendAccountTransfer({ - workflow_id: workflowId, - status: 'sent', - data: { user_op_hash: hash }, - }) - if (error) { - throw ApplicationFailure.retryable( - 'Error updating entry in temporal_send_account_transfers with sent status', - error.code, - { - error, - workflowId, - } - ) - } - return null - }, - async waitForTransactionReceiptActivity(workflowId: string, hash: `0x${string}`) { + async waitForTransactionReceiptActivity(hash: `0x${string}`) { try { - const res = await waitForTransactionReceipt(hash) - if (!res) { + const bundlerReceipt = await waitForTransactionReceipt(hash) + if (!bundlerReceipt) { throw ApplicationFailure.retryable('No receipt returned from waitForTransactionReceipt') } - if (!res.success) { - throw ApplicationFailure.nonRetryable('Tx failed', res.sender, res.userOpHash) - } - log.info('waitForTransactionReceiptActivity', { tx_hash: res.receipt.transactionHash }) - const { receipt } = res - - await updateTemporalSendAccountTransfer({ - workflow_id: workflowId, - status: 'confirmed', - data: { - tx_hash: receipt.transactionHash, - block_num: receipt.blockNumber.toString(), - }, + log.info('waitForTransactionReceiptActivity', { + bundlerReceipt: superjson.stringify(bundlerReceipt), }) - return receipt + if (!bundlerReceipt.success) { + throw new Error('Transaction failed') + } + return bundlerReceipt.receipt } catch (error) { - throw ApplicationFailure.retryable('Error waiting for tx receipt', error.code, error) + log.error('Error waiting for transaction receipt', { error }) + throw error } }, - async isTransferIndexedActivity( - workflowId: string, - tx_hash: `0x${string}`, - token: keyof allCoinsDict - ) { - const isIndexed = - token === 'eth' - ? await isEthTransferIndexed(tx_hash) - : await isTokenTransferIndexed(tx_hash) + + async isTransferIndexedActivity(tx_hash: `0x${string}`, token: PgBytea | null) { + const isIndexed = token + ? 
await isTokenTransferIndexed(tx_hash) + : await isEthTransferIndexed(tx_hash) if (!isIndexed) { throw ApplicationFailure.retryable('Transfer not indexed in db') } + log.info('isTransferIndexedActivity', { isIndexed }) + return isIndexed + }, + async updateTemporalTransferActivity({ + workflowId, + status, + data, + failureError, + }: { + workflowId: string + status: Database['temporal']['Enums']['transfer_status'] + data?: Json + failureError?: { + message?: string | null + type?: string | null + details: unknown[] + } + }) { const { error } = await updateTemporalSendAccountTransfer({ workflow_id: workflowId, - status: 'indexed', + status, + data, }) if (error) { throw ApplicationFailure.retryable( - 'Error updating entry in temporal_send_account_transfers with indexed status', + `Error updating entry in temporal_send_account_transfers with ${status} status`, error.code, { error, @@ -150,14 +154,19 @@ export const createTransferActivities = (env: Record } ) } - log.info('isTransferIndexedActivity', { isIndexed }) - return isIndexed + if (status === 'failed') { + throw ApplicationFailure.nonRetryable( + failureError?.message ?? null, + failureError?.type ?? null, + ...(failureError?.details ?? 
[]) + ) + } }, async deleteTemporalTransferActivity(workflowId: string) { const { error } = await deleteTemporalTransferFromActivityTable(workflowId) if (error) { throw ApplicationFailure.retryable( - 'Error deleting temporal_tranfer entry in activity', + 'Error deleting temporal_transfer entry in activity table', error.code, { error, diff --git a/packages/workflows/src/transfer-workflow/supabase.ts b/packages/workflows/src/transfer-workflow/supabase.ts index 435c166a7..bd6db0edd 100644 --- a/packages/workflows/src/transfer-workflow/supabase.ts +++ b/packages/workflows/src/transfer-workflow/supabase.ts @@ -1,4 +1,4 @@ -import type { Database } from '@my/supabase/database-generated.types' +import type { PgBytea, Database } from '@my/supabase/database.types' import { log } from '@temporalio/activity' import { hexToBytea } from 'app/utils/hexToBytea' import { supabaseAdmin } from 'app/utils/supabase/admin' @@ -13,10 +13,10 @@ export async function insertTemporalTokenSendAccountTransfer({ }: { workflow_id: string status: Database['temporal']['Enums']['transfer_status'] - f: `\\x${string}` - t: `\\x${string}` + f: PgBytea + t: PgBytea v: bigint - log_addr: `\\x${string}` + log_addr: PgBytea }) { return await supabaseAdmin.schema('temporal').rpc('insert_temporal_token_send_account_transfer', { workflow_id, @@ -37,8 +37,8 @@ export async function insertTemporalEthSendAccountTransfer({ }: { workflow_id: string status: Database['temporal']['Enums']['transfer_status'] - sender: `\\x${string}` - log_addr: `\\x${string}` + sender: PgBytea + log_addr: PgBytea value: bigint }) { return await supabaseAdmin.schema('temporal').rpc('insert_temporal_eth_send_account_transfer', { diff --git a/packages/workflows/src/transfer-workflow/wagmi.ts b/packages/workflows/src/transfer-workflow/wagmi.ts index e508a1e5a..1ba62a75e 100644 --- a/packages/workflows/src/transfer-workflow/wagmi.ts +++ b/packages/workflows/src/transfer-workflow/wagmi.ts @@ -1,6 +1,5 @@ -import type { UserOperation } 
from 'permissionless' +import type { UserOperation, GetUserOperationReceiptReturnType } from 'permissionless' import { baseMainnetBundlerClient, baseMainnetClient, entryPointAddress } from '@my/wagmi' -import type { Hex } from 'viem' export async function simulateUserOperation(userOp: UserOperation<'v0.7'>) { return await baseMainnetClient.call({ @@ -16,6 +15,8 @@ export async function sendUserOperation(userOp: UserOperation<'v0.7'>) { }) } -export async function waitForTransactionReceipt(hash: `0x${string}`) { +export async function waitForTransactionReceipt( + hash: `0x${string}` +): Promise { return await baseMainnetBundlerClient.waitForUserOperationReceipt({ hash }) } diff --git a/packages/workflows/src/transfer-workflow/workflow.ts b/packages/workflows/src/transfer-workflow/workflow.ts index 8dc2e037d..c50eea681 100644 --- a/packages/workflows/src/transfer-workflow/workflow.ts +++ b/packages/workflows/src/transfer-workflow/workflow.ts @@ -1,6 +1,7 @@ -import { proxyActivities, workflowInfo } from '@temporalio/workflow' +import { CancelledFailure, proxyActivities, workflowInfo } from '@temporalio/workflow' import type { createTransferActivities } from './activities' import type { UserOperation } from 'permissionless' +import superjson from 'superjson' import debug from 'debug' @@ -8,28 +9,94 @@ const log = debug('workflows:transfer') const { initializeTransferActivity, + insertTemporalSendAccountTransfer, sendUserOpActivity, - updateTemporalTransferSentStatusActivity, + updateTemporalTransferActivity, waitForTransactionReceiptActivity, isTransferIndexedActivity, deleteTemporalTransferActivity, } = proxyActivities>({ // TODO: make this configurable - startToCloseTimeout: '45 seconds', + startToCloseTimeout: '10 minutes', }) export async function TransferWorkflow(userOp: UserOperation<'v0.7'>) { const workflowId = workflowInfo().workflowId - log('SendTransferWorkflow started with userOp:', workflowId) - const { token } = await 
initializeTransferActivity(workflowId, userOp) - log('Sending UserOperation') - const hash = await sendUserOpActivity(userOp) - log('UserOperation sent, hash:', hash) - await updateTemporalTransferSentStatusActivity(workflowId, hash) - const receipt = await waitForTransactionReceiptActivity(workflowId, hash) - log('Receipt received:', { tx_hash: receipt.transactionHash }) - const transfer = await isTransferIndexedActivity(workflowId, receipt.transactionHash, token) - log('Transfer indexed') - await deleteTemporalTransferActivity(workflowId) - return transfer + try { + log('SendTransferWorkflow Initializing with userOp:', workflowId) + const { token, from, to, amount } = await initializeTransferActivity(workflowId, userOp) + + log('Inserting temporal transfer into temporal.send_account_transfers', workflowId) + await insertTemporalSendAccountTransfer(workflowId, from, to, amount, token) + + log('Sending UserOperation', superjson.stringify(userOp)) + const hash = await sendUserOpActivity(userOp).catch(async (error) => { + log('sendUserOpActivity failed', { error }) + await updateTemporalTransferActivity({ + workflowId, + status: 'failed', + failureError: { + message: error.message, + type: error.code, + details: error.details, + }, + }) + throw null + }) + + log('UserOperation sent, hash:', hash) + await updateTemporalTransferActivity({ + workflowId, + status: 'sent', + data: { user_op_hash: hash }, + }) + + const receipt = await waitForTransactionReceiptActivity(hash).catch(async (error) => { + log('waitForTransactionReceiptActivity failed', { error }) + await updateTemporalTransferActivity({ + workflowId, + status: 'failed', + failureError: { + message: error.message, + type: error.code, + details: error.details, + }, + }) + throw null + }) + log('Receipt received:', { tx_hash: receipt.transactionHash }) + + await updateTemporalTransferActivity({ + workflowId, + status: 'confirmed', + data: { + tx_hash: receipt.transactionHash, + block_num: 
receipt.blockNumber.toString(), + }, + }) + + await isTransferIndexedActivity(receipt.transactionHash, token) + await updateTemporalTransferActivity({ + workflowId, + status: 'indexed', + }) + + log('Transfer indexed') + await deleteTemporalTransferActivity(workflowId) + return workflowId + } catch (error) { + // Handle workflow cancellation + if (error instanceof CancelledFailure) { + await updateTemporalTransferActivity({ + workflowId, + status: 'cancelled', + failureError: { + message: 'Workflow Manually Terminated', + type: 'CANCELLED', + details: [], + }, + }) + } + throw error + } } diff --git a/supabase/database-generated.types.ts b/supabase/database-generated.types.ts index a57f1dcab..7fa5b89c2 100644 --- a/supabase/database-generated.types.ts +++ b/supabase/database-generated.types.ts @@ -1542,6 +1542,7 @@ export type Database = { | "confirmed" | "indexed" | "failed" + | "cancelled" } CompositeTypes: { [_ in never]: never diff --git a/supabase/migrations/20250205035940_create_temporal_transfers_table.sql b/supabase/migrations/20250205035940_create_temporal_transfers_table.sql index 6f0295f66..798356779 100644 --- a/supabase/migrations/20250205035940_create_temporal_transfers_table.sql +++ b/supabase/migrations/20250205035940_create_temporal_transfers_table.sql @@ -16,7 +16,8 @@ CREATE TYPE temporal.transfer_status AS ENUM( 'sent', 'confirmed', 'indexed', - 'failed' + 'failed', + 'cancelled' ); CREATE TABLE temporal.send_account_transfers( @@ -57,12 +58,21 @@ LANGUAGE plpgsql SECURITY DEFINER AS $$ DECLARE - f_user_id uuid; + _user_id uuid; + _data jsonb; BEGIN - SELECT user_id INTO f_user_id + SELECT user_id INTO _user_id FROM send_accounts WHERE address = concat('0x', encode(f, 'hex'))::citext; + -- cast v to text to avoid losing precision when converting to json when sending to clients + _data := json_build_object( + 'f', f, + 't', t, + 'v', v::text, + 'log_addr', log_addr + ); + INSERT INTO temporal.send_account_transfers( workflow_id, user_id, @@ 
-71,14 +81,9 @@ BEGIN ) VALUES ( workflow_id, - f_user_id, + _user_id, status, - json_build_object( - 'f', f, - 't', t, - 'v', v::text, - 'log_addr', log_addr - ) + _data ); END; $$; @@ -95,12 +100,20 @@ LANGUAGE plpgsql SECURITY DEFINER AS $$ DECLARE - sender_user_id uuid; + _user_id uuid; + _data jsonb; BEGIN - SELECT user_id INTO sender_user_id + SELECT user_id INTO _user_id FROM send_accounts WHERE address = concat('0x', encode(sender, 'hex'))::citext; + -- cast v to text to avoid losing precision when converting to json when sending to clients + _data := json_build_object( + 'log_addr', log_addr, + 'sender', sender, + 'value', value::text + ); + INSERT INTO temporal.send_account_transfers( workflow_id, user_id, @@ -109,13 +122,9 @@ BEGIN ) VALUES ( workflow_id, - sender_user_id, + _user_id, status, - json_build_object( - 'log_addr', log_addr, - 'sender', sender, - 'value', value::text - ) + _data ); END; $$; @@ -166,6 +175,7 @@ CREATE OR REPLACE FUNCTION temporal.temporal_token_send_account_transfers_trigge DECLARE _f_user_id uuid; _t_user_id uuid; + _data jsonb; BEGIN SELECT user_id INTO _f_user_id FROM send_accounts @@ -175,6 +185,19 @@ BEGIN FROM send_accounts WHERE address = concat('0x', encode((NEW.data->>'t')::bytea, 'hex'))::citext; + -- cast v to text to avoid losing precision when converting to json when sending to clients + _data := json_build_object( + 'status', NEW.status, + 'user_op_hash', (NEW.data->>'user_op_hash'), + 'log_addr', (NEW.data->>'log_addr'), + 'f', (NEW.data->>'f'), + 't', (NEW.data->>'t'), + 'v', NEW.data->>'v'::text, + 'tx_hash', (NEW.data->>'tx_hash'), + 'block_num', NEW.data->>'block_num'::text, + 'tx_idx', NEW.data->>'tx_idx'::text + ); + INSERT INTO activity( event_name, event_id, @@ -188,17 +211,7 @@ BEGIN NEW.workflow_id, _f_user_id, _t_user_id, - json_build_object( - 'status', NEW.status, - 'user_op_hash', (NEW.data->>'user_op_hash'), - 'log_addr', (NEW.data->>'log_addr'), - 'f', (NEW.data->>'f'), - 't', (NEW.data->>'t'), - 
'v', NEW.data->>'v'::text, - 'tx_hash', (NEW.data->>'tx_hash'), - 'block_num', NEW.data->>'block_num'::text, - 'tx_idx', NEW.data->>'tx_idx'::text - ), + _data, NEW.created_at ); RETURN NEW; @@ -214,6 +227,7 @@ CREATE OR REPLACE FUNCTION temporal.temporal_eth_send_account_transfers_trigger_ DECLARE _from_user_id uuid; _to_user_id uuid; + _data jsonb; BEGIN SELECT user_id INTO _from_user_id FROM send_accounts @@ -223,6 +237,18 @@ BEGIN FROM send_accounts WHERE address = concat('0x', encode((NEW.data->>'log_addr')::bytea, 'hex'))::citext; + -- cast v to text to avoid losing precision when converting to json when sending to clients + _data := json_build_object( + 'status', NEW.status, + 'user_op_hash', (NEW.data->>'user_op_hash'), + 'log_addr', (NEW.data->>'log_addr'), + 'sender', (NEW.data->>'sender'), + 'value', NEW.data->>'value'::text, + 'tx_hash', (NEW.data->>'tx_hash'), + 'block_num', NEW.data->>'block_num'::text, + 'tx_idx', NEW.data->>'tx_idx'::text + ); + INSERT INTO activity( event_name, event_id, @@ -236,16 +262,7 @@ BEGIN NEW.workflow_id, _from_user_id, _to_user_id, - json_build_object( - 'status', NEW.status, - 'user_op_hash', (NEW.data->>'user_op_hash'), - 'log_addr', (NEW.data->>'log_addr'), - 'sender', (NEW.data->>'sender'), - 'value', NEW.data->>'value'::text, - 'tx_hash', (NEW.data->>'tx_hash'), - 'block_num', NEW.data->>'block_num'::text, - 'tx_idx', NEW.data->>'tx_idx'::text - ), + _data, NEW.created_at ); RETURN NEW; From 2ca084546ac4afd1599cf5e67efb0d2eb2491803 Mon Sep 17 00:00:00 2001 From: youngkidwarrior Date: Wed, 12 Feb 2025 20:24:20 -0800 Subject: [PATCH 23/58] Cast status to string and better error handling --- packages/snaplet/.snaplet/dataModel.json | 3 + .../src/transfer-workflow/activities.ts | 117 ++++++++++-------- .../src/transfer-workflow/supabase.ts | 7 +- .../src/transfer-workflow/workflow.ts | 114 ++++++++--------- ...035940_create_temporal_transfers_table.sql | 10 +- 5 files changed, 130 insertions(+), 121 deletions(-) diff 
--git a/packages/snaplet/.snaplet/dataModel.json b/packages/snaplet/.snaplet/dataModel.json index 96292af88..f38c2060e 100644 --- a/packages/snaplet/.snaplet/dataModel.json +++ b/packages/snaplet/.snaplet/dataModel.json @@ -9733,6 +9733,9 @@ "transfer_status": { "schemaName": "temporal", "values": [ + { + "name": "cancelled" + }, { "name": "confirmed" }, diff --git a/packages/workflows/src/transfer-workflow/activities.ts b/packages/workflows/src/transfer-workflow/activities.ts index 2eb8516a9..c1ddd0d54 100644 --- a/packages/workflows/src/transfer-workflow/activities.ts +++ b/packages/workflows/src/transfer-workflow/activities.ts @@ -15,11 +15,48 @@ import { hexToBytea } from 'app/utils/hexToBytea' import type { Json, Database, PgBytea } from '@my/supabase/database.types' import superjson from 'superjson' -export const createTransferActivities = (env: Record) => { +type TransferActivities = { + initializeTransferActivity: (userOp: UserOperation<'v0.7'>) => Promise<{ + from: PgBytea + to: PgBytea + amount: bigint + token: PgBytea | null + }> + insertTemporalSendAccountTransfer: ( + workflowId: string, + from: PgBytea, + to: PgBytea, + amount: bigint, + token: PgBytea | null + ) => Promise + sendUserOpActivity: ( + userOp: UserOperation<'v0.7'> + ) => Promise<{ hash: `0x${string}`; hashBytea: PgBytea }> + waitForTransactionReceiptActivity: (hash: `0x${string}`) => Promise<{ + transactionHash: `0x${string}` + blockNumber: bigint + }> + isTransferIndexedActivity: (tx_hash: `0x${string}`, token: PgBytea | null) => Promise + updateTemporalTransferActivity: (params: { + workflowId: string + status: Database['temporal']['Enums']['transfer_status'] + data?: Json + failureError?: { + message?: string | null + type?: string | null + details?: unknown[] + } + }) => Promise + deleteTemporalTransferActivity: (workflowId: string) => Promise<{ event_id: string }> +} + +export const createTransferActivities = ( + env: Record +): TransferActivities => { bootstrap(env) return { - 
async initializeTransferActivity(workflowId: string, userOp: UserOperation<'v0.7'>) { + async initializeTransferActivity(userOp) { const { from, to, token, amount } = decodeTransferUserOp({ userOp }) if (!from || !to || !amount || !token) { throw ApplicationFailure.nonRetryable('User Operation is not a valid transfer') @@ -49,13 +86,7 @@ export const createTransferActivities = (env: Record return { from: fromBytea, to: toBytea, amount, token: tokenBytea } }, - async insertTemporalSendAccountTransfer( - workflowId: string, - from: PgBytea, - to: PgBytea, - amount: bigint, - token: PgBytea | null - ) { + async insertTemporalSendAccountTransfer(workflowId, from, to, amount, token) { const { error } = token ? await insertTemporalTokenSendAccountTransfer({ workflow_id: workflowId, @@ -74,6 +105,12 @@ export const createTransferActivities = (env: Record }) if (error) { + if (error.code === '23505') { + throw ApplicationFailure.nonRetryable( + 'Duplicate entry for temporal.send_account_transfers', + error.code + ) + } throw ApplicationFailure.retryable( 'Error inserting transfer into temporal.send_account_transfers', error.code, @@ -85,35 +122,24 @@ export const createTransferActivities = (env: Record } }, async sendUserOpActivity(userOp: UserOperation<'v0.7'>) { - try { - const hash = await sendUserOperation(userOp) - log.info('UserOperation sent successfully', { hash }) - return hash - } catch (error) { - log.error('Error sending user operation', { error }) - throw error - } + const hash = await sendUserOperation(userOp) + const hashBytea = hexToBytea(hash) + return { hash, hashBytea } }, - async waitForTransactionReceiptActivity(hash: `0x${string}`) { - try { - const bundlerReceipt = await waitForTransactionReceipt(hash) - if (!bundlerReceipt) { - throw ApplicationFailure.retryable('No receipt returned from waitForTransactionReceipt') - } - log.info('waitForTransactionReceiptActivity', { - bundlerReceipt: superjson.stringify(bundlerReceipt), - }) - if 
(!bundlerReceipt.success) { - throw new Error('Transaction failed') - } - return bundlerReceipt.receipt - } catch (error) { - log.error('Error waiting for transaction receipt', { error }) - throw error + async waitForTransactionReceiptActivity(hash) { + const bundlerReceipt = await waitForTransactionReceipt(hash) + if (!bundlerReceipt) { + throw ApplicationFailure.retryable('No receipt returned from waitForTransactionReceipt') } + log.info('waitForTransactionReceiptActivity', { + bundlerReceipt: superjson.stringify(bundlerReceipt), + }) + if (!bundlerReceipt.success) { + throw new Error('Transaction failed') + } + return bundlerReceipt.receipt }, - - async isTransferIndexedActivity(tx_hash: `0x${string}`, token: PgBytea | null) { + async isTransferIndexedActivity(tx_hash, token) { const isIndexed = token ? await isTokenTransferIndexed(tx_hash) : await isEthTransferIndexed(tx_hash) @@ -124,21 +150,7 @@ export const createTransferActivities = (env: Record log.info('isTransferIndexedActivity', { isIndexed }) return isIndexed }, - async updateTemporalTransferActivity({ - workflowId, - status, - data, - failureError, - }: { - workflowId: string - status: Database['temporal']['Enums']['transfer_status'] - data?: Json - failureError?: { - message?: string | null - type?: string | null - details: unknown[] - } - }) { + async updateTemporalTransferActivity({ workflowId, status, data, failureError }) { const { error } = await updateTemporalSendAccountTransfer({ workflow_id: workflowId, status, @@ -162,8 +174,8 @@ export const createTransferActivities = (env: Record ) } }, - async deleteTemporalTransferActivity(workflowId: string) { - const { error } = await deleteTemporalTransferFromActivityTable(workflowId) + async deleteTemporalTransferActivity(workflowId) { + const { data, error } = await deleteTemporalTransferFromActivityTable(workflowId) if (error) { throw ApplicationFailure.retryable( 'Error deleting temporal_transfer entry in activity table', @@ -174,6 +186,7 @@ 
export const createTransferActivities = (env: Record } ) } + return data }, } } diff --git a/packages/workflows/src/transfer-workflow/supabase.ts b/packages/workflows/src/transfer-workflow/supabase.ts index bd6db0edd..e37e9706d 100644 --- a/packages/workflows/src/transfer-workflow/supabase.ts +++ b/packages/workflows/src/transfer-workflow/supabase.ts @@ -67,7 +67,12 @@ export async function updateTemporalSendAccountTransfer({ } export async function deleteTemporalTransferFromActivityTable(workflow_id: string) { - return await supabaseAdmin.from('activity').delete().eq('event_id', workflow_id) + return await supabaseAdmin + .from('activity') + .delete() + .eq('event_id', workflow_id) + .select('event_id') + .single() } export async function isTokenTransferIndexed(tx_hash: `0x${string}`) { diff --git a/packages/workflows/src/transfer-workflow/workflow.ts b/packages/workflows/src/transfer-workflow/workflow.ts index c50eea681..a69cf4820 100644 --- a/packages/workflows/src/transfer-workflow/workflow.ts +++ b/packages/workflows/src/transfer-workflow/workflow.ts @@ -1,4 +1,4 @@ -import { CancelledFailure, proxyActivities, workflowInfo } from '@temporalio/workflow' +import { ApplicationFailure, proxyActivities, workflowInfo } from '@temporalio/workflow' import type { createTransferActivities } from './activities' import type { UserOperation } from 'permissionless' import superjson from 'superjson' @@ -22,81 +22,65 @@ const { export async function TransferWorkflow(userOp: UserOperation<'v0.7'>) { const workflowId = workflowInfo().workflowId - try { - log('SendTransferWorkflow Initializing with userOp:', workflowId) - const { token, from, to, amount } = await initializeTransferActivity(workflowId, userOp) + log('SendTransferWorkflow Initializing with userOp:', workflowId) + const { token, from, to, amount } = await initializeTransferActivity(userOp) - log('Inserting temporal transfer into temporal.send_account_transfers', workflowId) - await 
insertTemporalSendAccountTransfer(workflowId, from, to, amount, token) + log('Inserting temporal transfer into temporal.send_account_transfers', workflowId) + await insertTemporalSendAccountTransfer(workflowId, from, to, amount, token) - log('Sending UserOperation', superjson.stringify(userOp)) - const hash = await sendUserOpActivity(userOp).catch(async (error) => { - log('sendUserOpActivity failed', { error }) - await updateTemporalTransferActivity({ - workflowId, - status: 'failed', - failureError: { - message: error.message, - type: error.code, - details: error.details, - }, - }) - throw null - }) - - log('UserOperation sent, hash:', hash) + log('Sending UserOperation', superjson.stringify(userOp)) + const { hash, hashBytea } = await sendUserOpActivity(userOp).catch(async (error) => { + log('sendUserOpActivity failed', { error }) + // Ensure cleanup happens before throwing + await deleteTemporalTransferActivity(workflowId) await updateTemporalTransferActivity({ workflowId, - status: 'sent', - data: { user_op_hash: hash }, + status: 'failed', + failureError: { + message: error.message, + type: error.code, + }, }) + throw ApplicationFailure.nonRetryable('Error sending user operation', error.code, error) + }) - const receipt = await waitForTransactionReceiptActivity(hash).catch(async (error) => { - log('waitForTransactionReceiptActivity failed', { error }) - await updateTemporalTransferActivity({ - workflowId, - status: 'failed', - failureError: { - message: error.message, - type: error.code, - details: error.details, - }, - }) - throw null - }) - log('Receipt received:', { tx_hash: receipt.transactionHash }) + log('UserOperation sent, hash:', hash) + await updateTemporalTransferActivity({ + workflowId, + status: 'sent', + data: { user_op_hash: hashBytea }, + }) + const receipt = await waitForTransactionReceiptActivity(hash).catch(async (error) => { + log('waitForTransactionReceiptActivity failed', { error }) await updateTemporalTransferActivity({ workflowId, - 
status: 'confirmed', - data: { - tx_hash: receipt.transactionHash, - block_num: receipt.blockNumber.toString(), + status: 'failed', + failureError: { + message: error.message, + type: error.code, }, }) + throw error + }) + log('Receipt received:', { tx_hash: receipt.transactionHash }) - await isTransferIndexedActivity(receipt.transactionHash, token) - await updateTemporalTransferActivity({ - workflowId, - status: 'indexed', - }) + await updateTemporalTransferActivity({ + workflowId, + status: 'confirmed', + data: { + tx_hash: receipt.transactionHash, + block_num: receipt.blockNumber.toString(), + }, + }) - log('Transfer indexed') - await deleteTemporalTransferActivity(workflowId) - return workflowId - } catch (error) { - // Handle workflow cancellation - if (error instanceof CancelledFailure) { - await updateTemporalTransferActivity({ - workflowId, - status: 'cancelled', - failureError: { - message: 'Workflow Manually Terminated', - type: 'CANCELLED', - details: [], - }, - }) - } - throw error - } + await isTransferIndexedActivity(receipt.transactionHash, token) + await updateTemporalTransferActivity({ + workflowId, + status: 'indexed', + }) + + log('Transfer indexed') + await deleteTemporalTransferActivity(workflowId) + return workflowId } diff --git a/supabase/migrations/20250205035940_create_temporal_transfers_table.sql b/supabase/migrations/20250205035940_create_temporal_transfers_table.sql index 798356779..7498c53ae 100644 --- a/supabase/migrations/20250205035940_create_temporal_transfers_table.sql +++ b/supabase/migrations/20250205035940_create_temporal_transfers_table.sql @@ -187,7 +187,7 @@ BEGIN -- cast v to text to avoid losing precision when converting to json when sending to clients _data := json_build_object( - 'status', NEW.status, + 'status', NEW.status::text, 'user_op_hash', (NEW.data->>'user_op_hash'), 'log_addr', (NEW.data->>'log_addr'), 'f', (NEW.data->>'f'), @@ -239,7 +239,7 @@ BEGIN -- cast v to text to avoid losing precision when converting to 
json when sending to clients _data := json_build_object( - 'status', NEW.status, + 'status', NEW.status::text, 'user_op_hash', (NEW.data->>'user_op_hash'), 'log_addr', (NEW.data->>'log_addr'), 'sender', (NEW.data->>'sender'), @@ -287,9 +287,13 @@ CREATE OR REPLACE FUNCTION temporal.temporal_send_account_transfers_trigger_upda LANGUAGE plpgsql SECURITY DEFINER AS $$ +DECLARE + _data jsonb; BEGIN + _data := NEW.data || json_build_object('status', NEW.status::text)::jsonb; + UPDATE activity - SET data = NEW.data + SET data = _data WHERE event_name = 'temporal_send_account_transfers' AND event_id = NEW.workflow_id; RETURN NEW; From 1663d282fe44a9e057bce64e520861f133ac50ec Mon Sep 17 00:00:00 2001 From: youngkidwarrior Date: Wed, 12 Feb 2025 20:24:58 -0800 Subject: [PATCH 24/58] Show updates in activity feed --- .../activity/utils/useActivityFeed.ts | 1 - packages/app/features/home/TokenActivity.tsx | 18 ++++- packages/app/features/send/confirm/screen.tsx | 29 ++++---- packages/app/utils/activity.ts | 69 ++++++++----------- .../activity/TemporalTransfersEventSchema.ts | 36 ++++++---- 5 files changed, 85 insertions(+), 68 deletions(-) diff --git a/packages/app/features/activity/utils/useActivityFeed.ts b/packages/app/features/activity/utils/useActivityFeed.ts index b03f8401f..07a0c9c30 100644 --- a/packages/app/features/activity/utils/useActivityFeed.ts +++ b/packages/app/features/activity/utils/useActivityFeed.ts @@ -53,7 +53,6 @@ export function useActivityFeed({ .order('created_at', { ascending: false }) .range(from, to) const { data, error } = await request - console.log('data: ', data) throwIf(error) return EventArraySchema.parse(data) } diff --git a/packages/app/features/home/TokenActivity.tsx b/packages/app/features/home/TokenActivity.tsx index 2918fdc24..b3dab64b1 100644 --- a/packages/app/features/home/TokenActivity.tsx +++ b/packages/app/features/home/TokenActivity.tsx @@ -4,7 +4,7 @@ import { hexToBytea } from 'app/utils/hexToBytea' import { useEffect, 
useState } from 'react' import { useTokenActivityFeed } from './utils/useTokenActivityFeed' import { TokenActivityRow } from './TokenActivityRow' -import type { Activity } from 'app/utils/zod/activity' +import { Events, type Activity } from 'app/utils/zod/activity' import { ActivityDetails } from '../activity/ActivityDetails' import type { InfiniteData, UseInfiniteQueryResult } from '@tanstack/react-query' import type { ZodError } from 'zod' @@ -16,6 +16,7 @@ import { useScrollDirection } from 'app/provider/scroll' export const TokenActivity = ({ coin }: { coin: CoinWithBalance }) => { const [selectedActivity, setSelectedActivity] = useState(null) + const [refetchInterval, setRefetchInterval] = useState(30_000) const handleActivityPress = (activity: Activity) => { setSelectedActivity(activity) @@ -26,11 +27,26 @@ export const TokenActivity = ({ coin }: { coin: CoinWithBalance }) => { const tokenActivityFeedQuery = useTokenActivityFeed({ pageSize: 10, address: coin.token === 'eth' ? undefined : hexToBytea(coin.token), + refetchInterval, }) const { data, isLoading, error } = tokenActivityFeedQuery + const { pages } = data ?? {} + useEffect(() => { + if (!pages || !pages[0]) return + + pages[0].find( + (a) => + a.event_name === Events.TemporalSendAccountTransfers && + !['cancelled', 'failed'].includes(a.data.status) + ) + ? 
setRefetchInterval(1000) + : setRefetchInterval(30_000) + }, [pages]) + console.log('pages: ', pages) + if (isLoading) return return ( <> diff --git a/packages/app/features/send/confirm/screen.tsx b/packages/app/features/send/confirm/screen.tsx index 928c05af8..2fe40dd05 100644 --- a/packages/app/features/send/confirm/screen.tsx +++ b/packages/app/features/send/confirm/screen.tsx @@ -70,15 +70,19 @@ export function SendConfirmScreen() { export function SendConfirm() { const submitButtonRef = useRef(null) + const queryClient = useQueryClient() const router = useRouter() const [queryParams] = useSendScreenParams() const { sendToken, recipient, idType, amount } = queryParams const { data: sendAccount, isLoading: isSendAccountLoading } = useSendAccount() const { coin: selectedCoin, tokensQuery, ethQuery } = useCoinFromSendTokenParam() - const { mutateAsync: transfer } = api.temporal.transfer.useMutation() + const { + mutateAsync: transfer, + isPending: isTransferPending, + isSuccess: isTransferInitialized, + } = api.temporal.transfer.useMutation() - const queryClient = useQueryClient() const isUSDCSelected = selectedCoin?.label === 'USDC' const { coin: usdc } = useCoin('USDC') const { data: prices, isLoading: isPricesLoading } = useTokenPrices() @@ -197,21 +201,16 @@ export function SendConfirm() { }) userOp.signature = signature - await transfer({ userOp }) - - // Invalidate token activity feed before navigation - await queryClient.invalidateQueries({ - queryKey: ['token_activity_feed', selectedCoin?.token], - }) + const workflowId = await transfer({ userOp }) if (selectedCoin?.token === 'eth') { await ethQuery.refetch() } else { await tokensQuery.refetch() } - - // Navigate after invalidating - router.replace({ pathname: '/', query: { token: sendToken } }) + if (workflowId) { + router.replace({ pathname: '/', query: { token: sendToken } }) + } } catch (e) { console.error(e) setError(e) @@ -225,7 +224,13 @@ export function SendConfirm() { } }, []) - if (nonceIsLoading 
|| isProfileLoading || isSendAccountLoading) + if ( + nonceIsLoading || + isProfileLoading || + isSendAccountLoading || + isTransferPending || + isTransferInitialized + ) return return ( diff --git a/packages/app/utils/activity.ts b/packages/app/utils/activity.ts index bea4d4db5..3cf8c79a0 100644 --- a/packages/app/utils/activity.ts +++ b/packages/app/utils/activity.ts @@ -18,9 +18,8 @@ import { isSendAccountReceiveEvent } from './zod/activity/SendAccountReceiveEven import { isSendTokenUpgradeEvent } from './zod/activity/SendAccountTransfersEventSchema' import { sendCoin, sendV0Coin } from 'app/data/coins' import { - isTemporalEthTransfers, - isTemporalTokenTransfers, - isTemporalTransfersEvent, + isTemporalEthTransfersEvent, + isTemporalTokenTransfersEvent, temporalEventNameFromStatus, } from './zod/activity/TemporalTransfersEventSchema' @@ -67,7 +66,7 @@ export function counterpart(activity: Activity): Activity['from_user'] | Activit return from_user } } - if (isTemporalTransfersEvent(activity)) { + if (isTemporalEthTransfersEvent(activity) || isTemporalTokenTransfersEvent(activity)) { return to_user } return null // not a send or receive event @@ -77,34 +76,23 @@ export function counterpart(activity: Activity): Activity['from_user'] | Activit * Returns the amount of the activity if there is one. 
*/ export function amountFromActivity(activity: Activity): string { - console.log('activity: ', activity) switch (true) { - case isTemporalTransfersEvent(activity): { - const { data } = activity - - if (isTemporalTokenTransfers(activity)) { - if (data.coin) { - const amount = formatAmount( - formatUnits(data.v, data.coin.decimals), - 5, - data.coin.formatDecimals - ) - return `${amount} ${data.coin.symbol}` - } - return formatAmount(`${data.v}`, 5, 0) + case isTemporalTokenTransfersEvent(activity): { + console.log('activity: ', activity) + const { v, coin } = activity.data + if (coin) { + const amount = formatAmount(formatUnits(v, coin.decimals), 5, coin.formatDecimals) + return `${amount} ${coin.symbol}` } - if (isTemporalEthTransfers(activity)) { - if (data.coin) { - const amount = formatAmount( - formatUnits(data.value, data.coin.decimals), - 5, - data.coin.formatDecimals - ) - return `${amount} ${data.coin.symbol}` - } - return formatAmount(`${data.value}`, 5, 0) + return formatAmount(`${v}`, 5, 0) + } + case isTemporalEthTransfersEvent(activity): { + const { value, coin } = activity.data + if (coin) { + const amount = formatAmount(formatUnits(value, coin.decimals), 5, coin.formatDecimals) + return `${amount} ${coin.symbol}` } - return '' + return formatAmount(`${value}`, 5, 0) } case isSendAccountTransfersEvent(activity): { const { v, coin } = activity.data @@ -185,8 +173,10 @@ export function eventNameFromActivity(activity: Activity) { const isERC20Transfer = isSendAccountTransfersEvent(activity) const isETHReceive = isSendAccountReceiveEvent(activity) const isTransferOrReceive = isERC20Transfer || isETHReceive + const isTemporalTransfer = + isTemporalEthTransfersEvent(activity) || isTemporalTokenTransfersEvent(activity) switch (true) { - case isTemporalTransfersEvent(activity): + case isTemporalTransfer: return temporalEventNameFromStatus(data.status) case isERC20Transfer && isAddressEqual(data.f, sendtagCheckoutAddress[baseMainnet.id]): return 'Referral 
Reward' @@ -225,11 +215,12 @@ export function phraseFromActivity(activity: Activity) { const isERC20Transfer = isSendAccountTransfersEvent(activity) const isETHReceive = isSendAccountReceiveEvent(activity) const isTransferOrReceive = isERC20Transfer || isETHReceive - const isTemporalTransfers = isTemporalTransfersEvent(activity) + const isTemporalTransfer = + isTemporalEthTransfersEvent(activity) || isTemporalTokenTransfersEvent(activity) switch (true) { - case isTemporalTransfers: - return 'Pending' + case isTemporalTransfer: + return temporalEventNameFromStatus(data.status) case isERC20Transfer && isAddressEqual(data.f, sendtagCheckoutAddress[baseMainnet.id]): return 'Earned referral reward' case isSendTokenUpgradeEvent(activity): @@ -306,13 +297,11 @@ export function subtextFromActivity(activity: Activity): string | null { if (isETHReceive && to_user?.id) { return labelAddress(data.log_addr) } - if (isTemporalTransfersEvent(activity)) { - if (isTemporalTokenTransfers(activity)) { - return labelAddress(activity.data.t) - } - if (isTemporalEthTransfers(activity)) { - return labelAddress(activity.data.sender) - } + if (isTemporalTokenTransfersEvent(activity)) { + return labelAddress(activity.data.t) + } + if (isTemporalEthTransfersEvent(activity)) { + return labelAddress(activity.data.log_addr) } return null } diff --git a/packages/app/utils/zod/activity/TemporalTransfersEventSchema.ts b/packages/app/utils/zod/activity/TemporalTransfersEventSchema.ts index 554145cd0..f52e01ab6 100644 --- a/packages/app/utils/zod/activity/TemporalTransfersEventSchema.ts +++ b/packages/app/utils/zod/activity/TemporalTransfersEventSchema.ts @@ -22,10 +22,10 @@ export const temporalTransferStatus = z.enum([ */ const BaseTemporalTransfersDataSchema = z.object({ status: temporalTransferStatus, - user_op_hash: byteaToHexTxHash.optional(), - tx_hash: byteaToHexTxHash.optional(), - block_num: decimalStrToBigInt.optional(), - tx_idx: decimalStrToBigInt.optional(), + user_op_hash: 
byteaToHexTxHash.nullable(), + tx_hash: byteaToHexTxHash.nullable(), + block_num: decimalStrToBigInt.nullable(), + tx_idx: decimalStrToBigInt.nullable(), log_addr: byteaToHexEthAddress, }) @@ -60,31 +60,39 @@ export const EthTemporalTransfersDataSchema = BaseTemporalTransfersDataSchema.ex coin: knownCoins.find((c) => c.token === 'eth'), })) +const TemporalTranfersDataSchema = z.union([ + TokenTemporalTransfersDataSchema, + EthTemporalTransfersDataSchema, +]) + export const TemporalTransfersEventSchema = BaseEventSchema.extend({ event_name: z.literal(Events.TemporalSendAccountTransfers), - data: z.union([TokenTemporalTransfersDataSchema, EthTemporalTransfersDataSchema]), + data: TemporalTranfersDataSchema, }) export type TemporalTransfersEvent = z.infer -export const isTemporalTransfersEvent = (event: { +const isTemporalTransfersEvent = (event: { event_name: string }): event is TemporalTransfersEvent => event.event_name === Events.TemporalSendAccountTransfers -export const isTemporalTokenTransfers = (event: { +export const isTemporalTokenTransfersEvent = (event: { data?: unknown event_name: string }): event is TemporalTransfersEvent & { data: z.infer } => { - return isTemporalTransfersEvent(event) && event.data.coin?.token !== 'eth' + return ( + isTemporalTransfersEvent(event) && + Boolean(knownCoins.find((c) => c.token === event.data.log_addr)) + ) } -export const isTemporalEthTransfers = (event: { +export const isTemporalEthTransfersEvent = (event: { data?: unknown event_name: string }): event is TemporalTransfersEvent & { data: z.infer } => { - return isTemporalTransfersEvent(event) && event.data.coin?.token === 'eth' + return isTemporalTransfersEvent(event) && !knownCoins.some((c) => c.token === event.data.log_addr) } export const temporalEventNameFromStatus = ( @@ -92,16 +100,16 @@ export const temporalEventNameFromStatus = ( ) => { switch (status) { case 'initialized': - return 'Sending' + return 'Sending...' 
case 'sent': - return 'Confirming' + return 'Confirming...' case 'confirmed': - return '' case 'indexed': return 'Sent' case 'failed': + case 'cancelled': return 'Failed' default: - return '' + return 'Initializing...' } } From ba7a26421b123e08e447580946bb97b46bd77d66 Mon Sep 17 00:00:00 2001 From: youngkidwarrior Date: Wed, 12 Feb 2025 21:26:52 -0800 Subject: [PATCH 25/58] Delete from temporal table if RPC fails to post nonce --- .../src/transfer-workflow/activities.ts | 63 ++++++++++++++----- .../src/transfer-workflow/supabase.ts | 8 +++ .../src/transfer-workflow/workflow.ts | 29 +-------- supabase/database-generated.types.ts | 6 ++ ...035940_create_temporal_transfers_table.sql | 35 ++++++++++- 5 files changed, 97 insertions(+), 44 deletions(-) diff --git a/packages/workflows/src/transfer-workflow/activities.ts b/packages/workflows/src/transfer-workflow/activities.ts index c1ddd0d54..9458c720c 100644 --- a/packages/workflows/src/transfer-workflow/activities.ts +++ b/packages/workflows/src/transfer-workflow/activities.ts @@ -6,6 +6,7 @@ import { updateTemporalSendAccountTransfer, insertTemporalEthSendAccountTransfer, deleteTemporalTransferFromActivityTable, + deleteTemporalTransfer, } from './supabase' import { simulateUserOperation, sendUserOperation, waitForTransactionReceipt } from './wagmi' import type { UserOperation } from 'permissionless' @@ -30,9 +31,13 @@ type TransferActivities = { token: PgBytea | null ) => Promise sendUserOpActivity: ( + workflowId: string, userOp: UserOperation<'v0.7'> ) => Promise<{ hash: `0x${string}`; hashBytea: PgBytea }> - waitForTransactionReceiptActivity: (hash: `0x${string}`) => Promise<{ + waitForTransactionReceiptActivity: ( + workflowId: string, + hash: `0x${string}` + ) => Promise<{ transactionHash: `0x${string}` blockNumber: bigint }> @@ -121,23 +126,49 @@ export const createTransferActivities = ( ) } }, - async sendUserOpActivity(userOp: UserOperation<'v0.7'>) { - const hash = await sendUserOperation(userOp) - const 
hashBytea = hexToBytea(hash) - return { hash, hashBytea } - }, - async waitForTransactionReceiptActivity(hash) { - const bundlerReceipt = await waitForTransactionReceipt(hash) - if (!bundlerReceipt) { - throw ApplicationFailure.retryable('No receipt returned from waitForTransactionReceipt') + async sendUserOpActivity(workflowId, userOp) { + try { + const hash = await sendUserOperation(userOp) + const hashBytea = hexToBytea(hash) + return { hash, hashBytea } + } catch (error) { + log.error('sendUserOpActivity failed', { error }) + const { error: deleteError } = await deleteTemporalTransfer(workflowId) + if (deleteError) { + throw ApplicationFailure.retryable( + 'Error deleting transfer from temporal.send_account_transfers', + deleteError.code, + { + deleteError, + workflowId, + } + ) + } + + throw ApplicationFailure.nonRetryable('Error sending user operation', error.code, error) } - log.info('waitForTransactionReceiptActivity', { - bundlerReceipt: superjson.stringify(bundlerReceipt), - }) - if (!bundlerReceipt.success) { - throw new Error('Transaction failed') + }, + async waitForTransactionReceiptActivity(workflowId, hash) { + try { + const bundlerReceipt = await waitForTransactionReceipt(hash) + if (!bundlerReceipt) { + throw ApplicationFailure.retryable('No receipt returned from waitForTransactionReceipt') + } + log.info('waitForTransactionReceiptActivity', { + bundlerReceipt: superjson.stringify(bundlerReceipt), + }) + if (!bundlerReceipt.success) { + throw new Error('Transaction failed') + } + return bundlerReceipt.receipt + } catch (error) { + log.error('waitForTransactionReceiptActivity failed', { error }) + const { error: updateError } = await updateTemporalSendAccountTransfer({ + workflow_id: workflowId, + status: 'failed', + }) + throw ApplicationFailure.nonRetryable(updateError?.message) } - return bundlerReceipt.receipt }, async isTransferIndexedActivity(tx_hash, token) { const isIndexed = token diff --git 
a/packages/workflows/src/transfer-workflow/supabase.ts b/packages/workflows/src/transfer-workflow/supabase.ts index e37e9706d..4def19815 100644 --- a/packages/workflows/src/transfer-workflow/supabase.ts +++ b/packages/workflows/src/transfer-workflow/supabase.ts @@ -66,6 +66,14 @@ export async function updateTemporalSendAccountTransfer({ }) } +export async function deleteTemporalTransfer(workflow_id: string) { + return await supabaseAdmin + .schema('temporal') + .rpc('delete_temporal_transfer', { workflow_id }) + .select('workflow_id') + .single() +} + export async function deleteTemporalTransferFromActivityTable(workflow_id: string) { return await supabaseAdmin .from('activity') diff --git a/packages/workflows/src/transfer-workflow/workflow.ts b/packages/workflows/src/transfer-workflow/workflow.ts index a69cf4820..3f0624bcd 100644 --- a/packages/workflows/src/transfer-workflow/workflow.ts +++ b/packages/workflows/src/transfer-workflow/workflow.ts @@ -29,21 +29,7 @@ export async function TransferWorkflow(userOp: UserOperation<'v0.7'>) { await insertTemporalSendAccountTransfer(workflowId, from, to, amount, token) log('Sending UserOperation', superjson.stringify(userOp)) - const { hash, hashBytea } = await sendUserOpActivity(userOp).catch(async (error) => { - log('sendUserOpActivity failed', { error }) - // Ensure cleanup happens before throwing - await deleteTemporalTransferActivity(workflowId) - await updateTemporalTransferActivity({ - workflowId, - status: 'failed', - failureError: { - message: error.message, - type: error.code, - }, - }) - throw ApplicationFailure.nonRetryable('Error sending user operation', error.code, error) - }) - + const { hash, hashBytea } = await sendUserOpActivity(workflowId, userOp) log('UserOperation sent, hash:', hash) await updateTemporalTransferActivity({ workflowId, @@ -51,18 +37,7 @@ export async function TransferWorkflow(userOp: UserOperation<'v0.7'>) { data: { user_op_hash: hashBytea }, }) - const receipt = await 
waitForTransactionReceiptActivity(hash).catch(async (error) => { - log('waitForTransactionReceiptActivity failed', { error }) - await updateTemporalTransferActivity({ - workflowId, - status: 'failed', - failureError: { - message: error.message, - type: error.code, - }, - }) - throw error - }) + const receipt = await waitForTransactionReceiptActivity(workflowId, hash) log('Receipt received:', { tx_hash: receipt.transactionHash }) await updateTemporalTransferActivity({ diff --git a/supabase/database-generated.types.ts b/supabase/database-generated.types.ts index 7fa5b89c2..a1adcbd71 100644 --- a/supabase/database-generated.types.ts +++ b/supabase/database-generated.types.ts @@ -1499,6 +1499,12 @@ export type Database = { [_ in never]: never } Functions: { + delete_temporal_transfer: { + Args: { + workflow_id: string + } + Returns: undefined + } delete_temporal_transfer_activity: { Args: { workflow_id: string diff --git a/supabase/migrations/20250205035940_create_temporal_transfers_table.sql b/supabase/migrations/20250205035940_create_temporal_transfers_table.sql index 7498c53ae..8eab93733 100644 --- a/supabase/migrations/20250205035940_create_temporal_transfers_table.sql +++ b/supabase/migrations/20250205035940_create_temporal_transfers_table.sql @@ -315,4 +315,37 @@ BEGIN WHERE event_name = 'temporal_send_account_transfers' AND event_id = workflow_id; END; -$$; \ No newline at end of file +$$; + +CREATE OR REPLACE FUNCTION temporal.delete_temporal_transfer(workflow_id text) +RETURNS void +LANGUAGE plpgsql +SECURITY DEFINER +AS $$ +BEGIN + DELETE FROM temporal.send_account_transfers + WHERE workflow_id = workflow_id + AND EXISTS ( + SELECT 1 FROM temporal.send_account_transfers + WHERE workflow_id = workflow_id + ); +END; +$$; + +CREATE OR REPLACE FUNCTION temporal.temporal_send_account_transfers_trigger_delete_activity() + RETURNS TRIGGER + LANGUAGE plpgsql + SECURITY DEFINER + AS $$ +BEGIN + DELETE FROM activity + WHERE event_name = 
'temporal_send_account_transfers' + AND event_id = OLD.workflow_id; + RETURN OLD; +END; +$$; + +CREATE TRIGGER temporal_send_account_transfers_trigger_delete_activity + BEFORE DELETE ON temporal.send_account_transfers + FOR EACH ROW + EXECUTE FUNCTION temporal.temporal_send_account_transfers_trigger_delete_activity(); \ No newline at end of file From bb7fab90151d73129cf49e0d2518f596620b796b Mon Sep 17 00:00:00 2001 From: youngkidwarrior Date: Wed, 12 Feb 2025 21:27:36 -0800 Subject: [PATCH 26/58] Only show temporal transfers for senders --- .../features/home/utils/useTokenActivityFeed.ts | 16 ++++++++++------ 1 file changed, 10 insertions(+), 6 deletions(-) diff --git a/packages/app/features/home/utils/useTokenActivityFeed.ts b/packages/app/features/home/utils/useTokenActivityFeed.ts index 900be2a05..c3b092b00 100644 --- a/packages/app/features/home/utils/useTokenActivityFeed.ts +++ b/packages/app/features/home/utils/useTokenActivityFeed.ts @@ -6,7 +6,9 @@ import { type InfiniteData, type UseInfiniteQueryResult, } from '@tanstack/react-query' +import { hexToBytea } from 'app/utils/hexToBytea' import { pgAddrCondValues } from 'app/utils/pgAddrCondValues' +import { useSendAccount } from 'app/utils/send-accounts' import { squish } from 'app/utils/strings' import { useSupabase } from 'app/utils/supabase/useSupabase' import { throwIf } from 'app/utils/throwIf' @@ -28,22 +30,24 @@ export function useTokenActivityFeed(params: { }): UseInfiniteQueryResult, PostgrestError | ZodError> { const { pageSize = 10, address, refetchInterval = 30_000, enabled = true } = params const supabase = useSupabase() + const { data: sendAccount } = useSendAccount() + const senderBytea = sendAccount?.address ? 
hexToBytea(sendAccount.address) : null async function fetchTokenActivityFeed({ pageParam }: { pageParam: number }): Promise { const from = pageParam * pageSize const to = (pageParam + 1) * pageSize - 1 let query = supabase.from('activity_feed').select('*') - // First, handle event_name conditions if (address) { query = query - .in('event_name', [Events.SendAccountTransfers, Events.TemporalSendAccountTransfers]) .eq('data->>log_addr', address) + .or( + `event_name.eq.${Events.SendAccountTransfers},and(event_name.eq.${Events.TemporalSendAccountTransfers},data->>f.eq.${senderBytea})` + ) } else { - query = query.in('event_name', [ - Events.SendAccountReceive, - Events.TemporalSendAccountTransfers, - ]) + query = query.or( + `event_name.eq.${Events.SendAccountReceive},and(event_name.eq.${Events.TemporalSendAccountTransfers},data->>sender.eq.${senderBytea})` + ) } const paymasterAddresses = Object.values(tokenPaymasterAddress) From dda570299b5de953f17c4e5b5e9d0a310a5b7bda Mon Sep 17 00:00:00 2001 From: youngkidwarrior Date: Wed, 12 Feb 2025 23:01:46 -0800 Subject: [PATCH 27/58] Remove console logs --- packages/app/features/home/TokenActivity.tsx | 1 - packages/app/utils/activity.ts | 1 - 2 files changed, 2 deletions(-) diff --git a/packages/app/features/home/TokenActivity.tsx b/packages/app/features/home/TokenActivity.tsx index b3dab64b1..0ad02c794 100644 --- a/packages/app/features/home/TokenActivity.tsx +++ b/packages/app/features/home/TokenActivity.tsx @@ -45,7 +45,6 @@ export const TokenActivity = ({ coin }: { coin: CoinWithBalance }) => { ? 
setRefetchInterval(1000) : setRefetchInterval(30_000) }, [pages]) - console.log('pages: ', pages) if (isLoading) return return ( diff --git a/packages/app/utils/activity.ts b/packages/app/utils/activity.ts index 3cf8c79a0..b5e3882da 100644 --- a/packages/app/utils/activity.ts +++ b/packages/app/utils/activity.ts @@ -78,7 +78,6 @@ export function counterpart(activity: Activity): Activity['from_user'] | Activit export function amountFromActivity(activity: Activity): string { switch (true) { case isTemporalTokenTransfersEvent(activity): { - console.log('activity: ', activity) const { v, coin } = activity.data if (coin) { const amount = formatAmount(formatUnits(v, coin.decimals), 5, coin.formatDecimals) From a37a89be17eca7927bc88de047a3a8bd8e9f2e24 Mon Sep 17 00:00:00 2001 From: youngkidwarrior Date: Wed, 12 Feb 2025 23:02:25 -0800 Subject: [PATCH 28/58] remove delete function and use postgres --- .../activity/TemporalTransfersEventSchema.ts | 3 --- packages/snaplet/.snaplet/dataModel.json | 3 --- .../src/transfer-workflow/activities.ts | 15 --------------- .../src/transfer-workflow/supabase.ts | 4 +++- .../src/transfer-workflow/workflow.ts | 18 ++++-------------- supabase/database-generated.types.ts | 7 ------- 6 files changed, 7 insertions(+), 43 deletions(-) diff --git a/packages/app/utils/zod/activity/TemporalTransfersEventSchema.ts b/packages/app/utils/zod/activity/TemporalTransfersEventSchema.ts index f52e01ab6..8e740ad96 100644 --- a/packages/app/utils/zod/activity/TemporalTransfersEventSchema.ts +++ b/packages/app/utils/zod/activity/TemporalTransfersEventSchema.ts @@ -12,7 +12,6 @@ export const temporalTransferStatus = z.enum([ 'initialized', 'sent', 'confirmed', - 'indexed', 'failed', 'cancelled', ] as const satisfies readonly Database['temporal']['Enums']['transfer_status'][]) @@ -25,7 +24,6 @@ const BaseTemporalTransfersDataSchema = z.object({ user_op_hash: byteaToHexTxHash.nullable(), tx_hash: byteaToHexTxHash.nullable(), block_num: 
decimalStrToBigInt.nullable(), - tx_idx: decimalStrToBigInt.nullable(), log_addr: byteaToHexEthAddress, }) @@ -104,7 +102,6 @@ export const temporalEventNameFromStatus = ( case 'sent': return 'Confirming...' case 'confirmed': - case 'indexed': return 'Sent' case 'failed': case 'cancelled': diff --git a/packages/snaplet/.snaplet/dataModel.json b/packages/snaplet/.snaplet/dataModel.json index f38c2060e..d4ed1a03d 100644 --- a/packages/snaplet/.snaplet/dataModel.json +++ b/packages/snaplet/.snaplet/dataModel.json @@ -9742,9 +9742,6 @@ { "name": "failed" }, - { - "name": "indexed" - }, { "name": "initialized" }, diff --git a/packages/workflows/src/transfer-workflow/activities.ts b/packages/workflows/src/transfer-workflow/activities.ts index 9458c720c..79f826ec9 100644 --- a/packages/workflows/src/transfer-workflow/activities.ts +++ b/packages/workflows/src/transfer-workflow/activities.ts @@ -52,7 +52,6 @@ type TransferActivities = { details?: unknown[] } }) => Promise - deleteTemporalTransferActivity: (workflowId: string) => Promise<{ event_id: string }> } export const createTransferActivities = ( @@ -205,19 +204,5 @@ export const createTransferActivities = ( ) } }, - async deleteTemporalTransferActivity(workflowId) { - const { data, error } = await deleteTemporalTransferFromActivityTable(workflowId) - if (error) { - throw ApplicationFailure.retryable( - 'Error deleting temporal_transfer entry in activity table', - error.code, - { - error, - workflowId, - } - ) - } - return data - }, } } diff --git a/packages/workflows/src/transfer-workflow/supabase.ts b/packages/workflows/src/transfer-workflow/supabase.ts index 4def19815..8f16477f8 100644 --- a/packages/workflows/src/transfer-workflow/supabase.ts +++ b/packages/workflows/src/transfer-workflow/supabase.ts @@ -69,7 +69,9 @@ export async function updateTemporalSendAccountTransfer({ export async function deleteTemporalTransfer(workflow_id: string) { return await supabaseAdmin .schema('temporal') - 
.rpc('delete_temporal_transfer', { workflow_id }) + .from('send_account_transfers') + .delete() + .eq('workflow_id', workflow_id) .select('workflow_id') .single() } diff --git a/packages/workflows/src/transfer-workflow/workflow.ts b/packages/workflows/src/transfer-workflow/workflow.ts index 3f0624bcd..125bdec0f 100644 --- a/packages/workflows/src/transfer-workflow/workflow.ts +++ b/packages/workflows/src/transfer-workflow/workflow.ts @@ -1,9 +1,9 @@ -import { ApplicationFailure, proxyActivities, workflowInfo } from '@temporalio/workflow' +import { proxyActivities, workflowInfo } from '@temporalio/workflow' import type { createTransferActivities } from './activities' import type { UserOperation } from 'permissionless' import superjson from 'superjson' - import debug from 'debug' +import { hexToBytea } from 'app/utils/hexToBytea' const log = debug('workflows:transfer') @@ -14,7 +14,6 @@ const { updateTemporalTransferActivity, waitForTransactionReceiptActivity, isTransferIndexedActivity, - deleteTemporalTransferActivity, } = proxyActivities>({ // TODO: make this configurable startToCloseTimeout: '10 minutes', @@ -44,18 +43,9 @@ export async function TransferWorkflow(userOp: UserOperation<'v0.7'>) { workflowId, status: 'confirmed', data: { - tx_hash: receipt.transactionHash, + tx_hash: hexToBytea(receipt.transactionHash), block_num: receipt.blockNumber.toString(), }, }) - - await isTransferIndexedActivity(receipt.transactionHash, token) - await updateTemporalTransferActivity({ - workflowId, - status: 'indexed', - }) - - log('Transfer indexed') - await deleteTemporalTransferActivity(workflowId) - return workflowId + return hash } diff --git a/supabase/database-generated.types.ts b/supabase/database-generated.types.ts index a1adcbd71..4a29254b3 100644 --- a/supabase/database-generated.types.ts +++ b/supabase/database-generated.types.ts @@ -1505,12 +1505,6 @@ export type Database = { } Returns: undefined } - delete_temporal_transfer_activity: { - Args: { - workflow_id: 
string - } - Returns: undefined - } insert_temporal_eth_send_account_transfer: { Args: { workflow_id: string @@ -1546,7 +1540,6 @@ export type Database = { | "initialized" | "sent" | "confirmed" - | "indexed" | "failed" | "cancelled" } From 98c86089dbdf8df05f01886baf3d5d3085bc2c32 Mon Sep 17 00:00:00 2001 From: youngkidwarrior Date: Wed, 12 Feb 2025 23:02:54 -0800 Subject: [PATCH 29/58] Update trigger function to delete temporal activity --- ...035940_create_temporal_transfers_table.sql | 107 +++++++++++------- 1 file changed, 67 insertions(+), 40 deletions(-) diff --git a/supabase/migrations/20250205035940_create_temporal_transfers_table.sql b/supabase/migrations/20250205035940_create_temporal_transfers_table.sql index 8eab93733..a62baf6b2 100644 --- a/supabase/migrations/20250205035940_create_temporal_transfers_table.sql +++ b/supabase/migrations/20250205035940_create_temporal_transfers_table.sql @@ -15,7 +15,6 @@ CREATE TYPE temporal.transfer_status AS ENUM( 'initialized', 'sent', 'confirmed', - 'indexed', 'failed', 'cancelled' ); @@ -146,8 +145,7 @@ BEGIN _data := json_build_object( 'user_op_hash', (data->>'user_op_hash'), 'tx_hash', (data->>'tx_hash'), - 'block_num', data->>'block_num'::text, - 'tx_idx', data->>'tx_idx'::text + 'block_num', data->>'block_num'::text ); ELSE _data := '{}'::jsonb; @@ -194,8 +192,7 @@ BEGIN 't', (NEW.data->>'t'), 'v', NEW.data->>'v'::text, 'tx_hash', (NEW.data->>'tx_hash'), - 'block_num', NEW.data->>'block_num'::text, - 'tx_idx', NEW.data->>'tx_idx'::text + 'block_num', NEW.data->>'block_num'::text ); INSERT INTO activity( @@ -245,8 +242,7 @@ BEGIN 'sender', (NEW.data->>'sender'), 'value', NEW.data->>'value'::text, 'tx_hash', (NEW.data->>'tx_hash'), - 'block_num', NEW.data->>'block_num'::text, - 'tx_idx', NEW.data->>'tx_idx'::text + 'block_num', NEW.data->>'block_num'::text ); INSERT INTO activity( @@ -290,12 +286,19 @@ CREATE OR REPLACE FUNCTION temporal.temporal_send_account_transfers_trigger_upda DECLARE _data jsonb; BEGIN - 
_data := NEW.data || json_build_object('status', NEW.status::text)::jsonb; + IF EXISTS ( + SELECT 1 FROM activity + WHERE event_name = 'temporal_send_account_transfers' + AND event_id = NEW.workflow_id + ) THEN + _data := NEW.data || json_build_object('status', NEW.status::text)::jsonb; + + UPDATE activity + SET data = _data + WHERE event_name = 'temporal_send_account_transfers' + AND event_id = NEW.workflow_id; + END IF; - UPDATE activity - SET data = _data - WHERE event_name = 'temporal_send_account_transfers' - AND event_id = NEW.workflow_id; RETURN NEW; END; $$; @@ -305,33 +308,6 @@ CREATE TRIGGER temporal_send_account_transfers_trigger_update_activity FOR EACH ROW EXECUTE FUNCTION temporal.temporal_send_account_transfers_trigger_update_activity(); -CREATE OR REPLACE FUNCTION temporal.delete_temporal_transfer_activity(workflow_id text) -RETURNS void -LANGUAGE plpgsql -SECURITY DEFINER -AS $$ -BEGIN - DELETE FROM activity - WHERE event_name = 'temporal_send_account_transfers' - AND event_id = workflow_id; -END; -$$; - -CREATE OR REPLACE FUNCTION temporal.delete_temporal_transfer(workflow_id text) -RETURNS void -LANGUAGE plpgsql -SECURITY DEFINER -AS $$ -BEGIN - DELETE FROM temporal.send_account_transfers - WHERE workflow_id = workflow_id - AND EXISTS ( - SELECT 1 FROM temporal.send_account_transfers - WHERE workflow_id = workflow_id - ); -END; -$$; - CREATE OR REPLACE FUNCTION temporal.temporal_send_account_transfers_trigger_delete_activity() RETURNS TRIGGER LANGUAGE plpgsql @@ -348,4 +324,55 @@ $$; CREATE TRIGGER temporal_send_account_transfers_trigger_delete_activity BEFORE DELETE ON temporal.send_account_transfers FOR EACH ROW - EXECUTE FUNCTION temporal.temporal_send_account_transfers_trigger_delete_activity(); \ No newline at end of file + EXECUTE FUNCTION temporal.temporal_send_account_transfers_trigger_delete_activity(); + + +-- When a send_account_transfer activity is inserted, delete any temporal_send_account_transfers +-- with the same tx_hash from 
activity table. +-- This prevents duplicate activities once a transfer is completed. +create or replace function send_account_transfers_trigger_insert_activity() returns trigger +language plpgsql +security definer as +$$ +declare + _f_user_id uuid; + _t_user_id uuid; + _data jsonb; +begin + -- Delete any temporal transfers with matching tx_hash + DELETE FROM activity a + WHERE event_name = 'temporal_send_account_transfers' + AND a.data->>'tx_hash' = NEW.tx_hash + + -- select send app info for from address + select user_id into _f_user_id from send_accounts where address = concat('0x', encode(NEW.f, 'hex'))::citext; + select user_id into _t_user_id from send_accounts where address = concat('0x', encode(NEW.t, 'hex'))::citext; + + -- cast v to text to avoid losing precision when converting to json when sending to clients + _data := json_build_object( + 'log_addr', NEW.log_addr, + 'f', NEW.f, + 't', NEW.t, + 'v', NEW.v::text, + 'tx_hash', NEW.tx_hash, + 'block_num', NEW.block_num::text, + 'tx_idx', NEW.tx_idx::text, + 'log_idx', NEW.log_idx::text + ); + + insert into activity (event_name, event_id, from_user_id, to_user_id, data, created_at) + values ('send_account_transfers', + NEW.event_id, + _f_user_id, + _t_user_id, + _data, + to_timestamp(NEW.block_time) at time zone 'UTC') + on conflict (event_name, event_id) do update set + from_user_id = _f_user_id, + to_user_id = _t_user_id, + data = _data, + created_at = to_timestamp(NEW.block_time) at time zone 'UTC'; + + return NEW; +end; +$$; \ No newline at end of file From 3aa6b9970951db4c2e60230cbfee3c6200096928 Mon Sep 17 00:00:00 2001 From: youngkidwarrior Date: Thu, 13 Feb 2025 13:26:14 -0800 Subject: [PATCH 30/58] Fix typo --- environment.d.ts | 17 +---------------- 1 file changed, 1 insertion(+), 16 deletions(-) diff --git a/environment.d.ts b/environment.d.ts index 81c708568..5ddc3a874 100644 --- a/environment.d.ts +++ b/environment.d.ts @@ -21,7 +21,7 @@ declare global { */ BUNDLER_RPC_URL: string /** - * The 
URL of the ERC 4337 Account Abstraction Bundler RPC endpoint + * The URL of the Base network RPC endpoint */ NEXT_PUBLIC_BASE_RPC_URL: string NEXT_PUBLIC_BUNDLER_RPC_URL: string @@ -46,21 +46,6 @@ declare global { * Cloudflare Turnstile site key */ NEXT_PUBLIC_TURNSTILE_SITE_KEY: string - - /** - * Coinbase Developer Portal App ID - */ - NEXT_PUBLIC_CDP_APP_ID: string - - /** - * Onchain Kit API Key - */ - NEXT_PUBLIC_ONCHAIN_KIT_API_KEY: string - - /** - * Onramp Allowlist (comma separated list of user ids that can see the debit card option) - */ - NEXT_PUBLIC_ONRAMP_ALLOWLIST: string } } /** From b1a5ae14e4f0162bf74dee36b205fd0c0705cdfe Mon Sep 17 00:00:00 2001 From: youngkidwarrior Date: Thu, 13 Feb 2025 14:19:44 -0800 Subject: [PATCH 31/58] use block_time to remove old temporal activities --- supabase/database-generated.types.ts | 6 ----- ...035940_create_temporal_transfers_table.sql | 8 ++++--- supabase/tests/temporal_transfers_test.sql | 22 ++++++++++--------- 3 files changed, 17 insertions(+), 19 deletions(-) diff --git a/supabase/database-generated.types.ts b/supabase/database-generated.types.ts index 4a29254b3..0951f90e1 100644 --- a/supabase/database-generated.types.ts +++ b/supabase/database-generated.types.ts @@ -1499,12 +1499,6 @@ export type Database = { [_ in never]: never } Functions: { - delete_temporal_transfer: { - Args: { - workflow_id: string - } - Returns: undefined - } insert_temporal_eth_send_account_transfer: { Args: { workflow_id: string diff --git a/supabase/migrations/20250205035940_create_temporal_transfers_table.sql b/supabase/migrations/20250205035940_create_temporal_transfers_table.sql index a62baf6b2..95e26ab0e 100644 --- a/supabase/migrations/20250205035940_create_temporal_transfers_table.sql +++ b/supabase/migrations/20250205035940_create_temporal_transfers_table.sql @@ -4,7 +4,8 @@ SET check_function_bodies = OFF; CREATE SCHEMA IF NOT EXISTS temporal; -- Grant permissions for temporal schema -GRANT USAGE ON SCHEMA temporal TO 
authenticated, service_role; +GRANT USAGE ON SCHEMA temporal TO authenticated; +GRANT USAGE ON SCHEMA temporal TO service_role; -- Grant execute on functions to service_role GRANT EXECUTE ON ALL FUNCTIONS IN SCHEMA temporal TO service_role; @@ -29,6 +30,8 @@ CREATE TABLE temporal.send_account_transfers( updated_at timestamptz DEFAULT (NOW() AT TIME ZONE 'UTC') ); +GRANT ALL ON TABLE temporal.send_account_transfers TO service_role; + alter table "temporal"."send_account_transfers" enable row level security; @@ -342,8 +345,7 @@ begin -- Delete any temporal transfers with matching tx_hash DELETE FROM activity a WHERE event_name = 'temporal_send_account_transfers' - AND a.data->>'tx_hash' = NEW.tx_hash - + AND extract(epoch from a.created_at)::numeric < NEW.block_time; -- select send app info for from address select user_id into _f_user_id from send_accounts where address = concat('0x', encode(NEW.f, 'hex'))::citext; select user_id into _t_user_id from send_accounts where address = concat('0x', encode(NEW.t, 'hex'))::citext; diff --git a/supabase/tests/temporal_transfers_test.sql b/supabase/tests/temporal_transfers_test.sql index 3a19f10fb..2c7947d7e 100644 --- a/supabase/tests/temporal_transfers_test.sql +++ b/supabase/tests/temporal_transfers_test.sql @@ -195,17 +195,19 @@ SELECT results_eq( 'Test activity update' ); -SELECT temporal.delete_temporal_transfer_activity('test-workflow-1'); +-- @TODO update this to test send_account_transfer insert -SELECT is_empty( - $$ - SELECT * - FROM activity - WHERE event_name = 'temporal_send_account_transfers' - AND event_id = 'test-workflow-1' - $$, - 'Test temporal transfer activity was deleted' -); +-- SELECT temporal.delete_temporal_transfer_activity('test-workflow-1'); + +-- SELECT is_empty( +-- $$ +-- SELECT * +-- FROM activity +-- WHERE event_name = 'temporal_send_account_transfers' +-- AND event_id = 'test-workflow-1' +-- $$, +-- 'Test temporal transfer activity was deleted' +-- ); SELECT * FROM finish(); ROLLBACK; \ No 
newline at end of file From 1a273da8e53ffd7aac309fa3fea82eea646d9b86 Mon Sep 17 00:00:00 2001 From: youngkidwarrior Date: Thu, 6 Mar 2025 18:02:12 -0800 Subject: [PATCH 32/58] Put workflow bundle under dist directory --- apps/next/styles/globals.css | 1 - biome.json | 3 +-- packages/workflows/.gitignore | 4 +--- packages/workflows/package.json | 3 +-- .../src/scripts/build-workflow-bundle.ts | 2 +- .../src/transfer-workflow/activities.ts | 12 ---------- .../src/transfer-workflow/supabase.ts | 24 ------------------- .../src/transfer-workflow/workflow.ts | 1 - 8 files changed, 4 insertions(+), 46 deletions(-) diff --git a/apps/next/styles/globals.css b/apps/next/styles/globals.css index d94d3dc87..5d49443c2 100644 --- a/apps/next/styles/globals.css +++ b/apps/next/styles/globals.css @@ -5,7 +5,6 @@ :root { height: 100%; background-color: transparent; - overflow: hidden; overscroll-behavior: none; } diff --git a/biome.json b/biome.json index 463609fb8..cc7f7359d 100644 --- a/biome.json +++ b/biome.json @@ -25,8 +25,7 @@ "./supabase/.temp/**", "./packages/contracts/var/*.json", "**/tsconfig.json", - "**/*.tsconfig.json", - "./packages/workflows/workflow-bundle.js" + "**/*.tsconfig.json" ] }, "organizeImports": { diff --git a/packages/workflows/.gitignore b/packages/workflows/.gitignore index 3c6e5b4df..9abb4b996 100644 --- a/packages/workflows/.gitignore +++ b/packages/workflows/.gitignore @@ -174,6 +174,4 @@ dist # Finder (MacOS) folder config .DS_Store -lib - -workflow-bundle.js \ No newline at end of file +lib \ No newline at end of file diff --git a/packages/workflows/package.json b/packages/workflows/package.json index f02fa3bc8..5b706dc92 100644 --- a/packages/workflows/package.json +++ b/packages/workflows/package.json @@ -19,8 +19,7 @@ "default": "./src/all-workflows.ts" }, "./workflow-bundle": { - "types": "./workflow-bundle.d.ts", - "default": "./workflow-bundle.js" + "default": "./dist/workflow-bundle.js" } }, "scripts": { diff --git 
a/packages/workflows/src/scripts/build-workflow-bundle.ts b/packages/workflows/src/scripts/build-workflow-bundle.ts index 511062d61..6bec5e9be 100644 --- a/packages/workflows/src/scripts/build-workflow-bundle.ts +++ b/packages/workflows/src/scripts/build-workflow-bundle.ts @@ -12,7 +12,7 @@ async function bundle() { const { code } = await bundleWorkflowCode({ workflowsPath: require.resolve('../all-workflows.ts'), }) - const codePath = path.join(__dirname, '../../workflow-bundle.js') + const codePath = path.join(__dirname, '../../dist/workflow-bundle.js') await writeFile(codePath, code) console.log(`Bundle written to ${codePath}`) diff --git a/packages/workflows/src/transfer-workflow/activities.ts b/packages/workflows/src/transfer-workflow/activities.ts index 79f826ec9..485418c45 100644 --- a/packages/workflows/src/transfer-workflow/activities.ts +++ b/packages/workflows/src/transfer-workflow/activities.ts @@ -41,7 +41,6 @@ type TransferActivities = { transactionHash: `0x${string}` blockNumber: bigint }> - isTransferIndexedActivity: (tx_hash: `0x${string}`, token: PgBytea | null) => Promise updateTemporalTransferActivity: (params: { workflowId: string status: Database['temporal']['Enums']['transfer_status'] @@ -169,17 +168,6 @@ export const createTransferActivities = ( throw ApplicationFailure.nonRetryable(updateError?.message) } }, - async isTransferIndexedActivity(tx_hash, token) { - const isIndexed = token - ? 
await isTokenTransferIndexed(tx_hash) - : await isEthTransferIndexed(tx_hash) - - if (!isIndexed) { - throw ApplicationFailure.retryable('Transfer not indexed in db') - } - log.info('isTransferIndexedActivity', { isIndexed }) - return isIndexed - }, async updateTemporalTransferActivity({ workflowId, status, data, failureError }) { const { error } = await updateTemporalSendAccountTransfer({ workflow_id: workflowId, diff --git a/packages/workflows/src/transfer-workflow/supabase.ts b/packages/workflows/src/transfer-workflow/supabase.ts index 8f16477f8..731a5a144 100644 --- a/packages/workflows/src/transfer-workflow/supabase.ts +++ b/packages/workflows/src/transfer-workflow/supabase.ts @@ -1,6 +1,4 @@ import type { PgBytea, Database } from '@my/supabase/database.types' -import { log } from '@temporalio/activity' -import { hexToBytea } from 'app/utils/hexToBytea' import { supabaseAdmin } from 'app/utils/supabase/admin' export async function insertTemporalTokenSendAccountTransfer({ @@ -84,25 +82,3 @@ export async function deleteTemporalTransferFromActivityTable(workflow_id: strin .select('event_id') .single() } - -export async function isTokenTransferIndexed(tx_hash: `0x${string}`) { - const { count, error, status, statusText } = await supabaseAdmin - .from('send_account_transfers') - .select('*', { count: 'exact', head: true }) - .eq('tx_hash', hexToBytea(tx_hash)) - - log.info('isTokenTransferIndexed', { count, error, status, statusText }) - - return count !== null && count > 0 -} - -export async function isEthTransferIndexed(tx_hash: `0x${string}`) { - const { count, error, status, statusText } = await supabaseAdmin - .from('send_account_receives') - .select('*', { count: 'exact', head: true }) - .eq('tx_hash', hexToBytea(tx_hash)) - - log.info('isEthTransferIndexed', { count, error, status, statusText }) - - return count !== null && count > 0 -} diff --git a/packages/workflows/src/transfer-workflow/workflow.ts b/packages/workflows/src/transfer-workflow/workflow.ts 
index 125bdec0f..d0bea39de 100644 --- a/packages/workflows/src/transfer-workflow/workflow.ts +++ b/packages/workflows/src/transfer-workflow/workflow.ts @@ -13,7 +13,6 @@ const { sendUserOpActivity, updateTemporalTransferActivity, waitForTransactionReceiptActivity, - isTransferIndexedActivity, } = proxyActivities>({ // TODO: make this configurable startToCloseTimeout: '10 minutes', From b72f904161f3b06682b3d40db1fee677221b2b04 Mon Sep 17 00:00:00 2001 From: youngkidwarrior Date: Thu, 6 Mar 2025 18:03:22 -0800 Subject: [PATCH 33/58] Use block time to delete indexed transfers --- ...035940_create_temporal_transfers_table.sql | 57 ++++++------------- 1 file changed, 17 insertions(+), 40 deletions(-) diff --git a/supabase/migrations/20250205035940_create_temporal_transfers_table.sql b/supabase/migrations/20250205035940_create_temporal_transfers_table.sql index 95e26ab0e..2538126a0 100644 --- a/supabase/migrations/20250205035940_create_temporal_transfers_table.sql +++ b/supabase/migrations/20250205035940_create_temporal_transfers_table.sql @@ -330,51 +330,28 @@ CREATE TRIGGER temporal_send_account_transfers_trigger_delete_activity EXECUTE FUNCTION temporal.temporal_send_account_transfers_trigger_delete_activity(); --- When a send_account_transfer activity is inserted, delete any temporal_send_account_transfers --- with the same tx_hash from activity table. +-- When a send_account_transfer is inserted, delete older temporal_send_account_transfers +-- We know they are indexed if its inserting newer blocks. -- This prevents duplicate activities once a transfer is completed. 
-create or replace function send_account_transfers_trigger_insert_activity() returns trigger +-- keep failed so we can show it to the user, we can garbage collect later +create or replace function send_account_transfers_delete_temporal_activity() returns trigger language plpgsql security definer as $$ -declare - _f_user_id uuid; - _t_user_id uuid; - _data jsonb; -begin - -- Delete any temporal transfers with matching tx_hash - DELETE FROM activity a - WHERE event_name = 'temporal_send_account_transfers' - AND extract(epoch from a.created_at)::numeric < NEW.block_time; - -- select send app info for from address - select user_id into _f_user_id from send_accounts where address = concat('0x', encode(NEW.f, 'hex'))::citext; - select user_id into _t_user_id from send_accounts where address = concat('0x', encode(NEW.t, 'hex'))::citext; - -- cast v to text to avoid losing precision when converting to json when sending to clients - _data := json_build_object( - 'log_addr', NEW.log_addr, - 'f', NEW.f, - 't', NEW.t, - 'v', NEW.v::text, - 'tx_hash', NEW.tx_hash, - 'block_num', NEW.block_num::text, - 'tx_idx', NEW.tx_idx::text, - 'log_idx', NEW.log_idx::text +begin + delete from activity a + where event_name = 'temporal_send_account_transfers' and event_id in ( + select sat.workflow_id from temporal.send_account_transfers sat + where extract(epoch from sat.created_at)::numeric < NEW.block_time + and sat.status != 'failed' ); - - insert into activity (event_name, event_id, from_user_id, to_user_id, data, created_at) - values ('send_account_transfers', - NEW.event_id, - _f_user_id, - _t_user_id, - _data, - to_timestamp(NEW.block_time) at time zone 'UTC') - on conflict (event_name, event_id) do update set - from_user_id = _f_user_id, - to_user_id = _t_user_id, - data = _data, - created_at = to_timestamp(NEW.block_time) at time zone 'UTC'; - return NEW; end; -$$; \ No newline at end of file +end; +$$; + +CREATE TRIGGER send_account_transfers_trigger_delete_temporal_activity + 
BEFORE INSERT ON send_account_transfers + FOR EACH ROW + EXECUTE FUNCTION send_account_transfers_delete_temporal_activity(); From 5a9a8e832f7c527ce781bb52817d1f13ecc02843 Mon Sep 17 00:00:00 2001 From: youngkidwarrior Date: Sun, 9 Mar 2025 21:50:38 -0700 Subject: [PATCH 34/58] Update migration version --- ...le.sql => 20250307021828_create_temporal_transfers_table.sql} | 1 - 1 file changed, 1 deletion(-) rename supabase/migrations/{20250205035940_create_temporal_transfers_table.sql => 20250307021828_create_temporal_transfers_table.sql} (99%) diff --git a/supabase/migrations/20250205035940_create_temporal_transfers_table.sql b/supabase/migrations/20250307021828_create_temporal_transfers_table.sql similarity index 99% rename from supabase/migrations/20250205035940_create_temporal_transfers_table.sql rename to supabase/migrations/20250307021828_create_temporal_transfers_table.sql index 2538126a0..409f2dbfe 100644 --- a/supabase/migrations/20250205035940_create_temporal_transfers_table.sql +++ b/supabase/migrations/20250307021828_create_temporal_transfers_table.sql @@ -348,7 +348,6 @@ begin ); return NEW; end; -end; $$; CREATE TRIGGER send_account_transfers_trigger_delete_temporal_activity From 100e98be89d8a5832f71ae431e146ea7eac39b9f Mon Sep 17 00:00:00 2001 From: youngkidwarrior Date: Sun, 9 Mar 2025 21:51:21 -0700 Subject: [PATCH 35/58] Update temporal_transfers test --- .../workflows/src/transfer-workflow/activities.ts | 3 --- supabase/tests/temporal_transfers_test.sql | 11 ++++------- 2 files changed, 4 insertions(+), 10 deletions(-) diff --git a/packages/workflows/src/transfer-workflow/activities.ts b/packages/workflows/src/transfer-workflow/activities.ts index 485418c45..1414e55d1 100644 --- a/packages/workflows/src/transfer-workflow/activities.ts +++ b/packages/workflows/src/transfer-workflow/activities.ts @@ -1,11 +1,8 @@ import { log, ApplicationFailure } from '@temporalio/activity' import { - isTokenTransferIndexed, - isEthTransferIndexed, 
insertTemporalTokenSendAccountTransfer, updateTemporalSendAccountTransfer, insertTemporalEthSendAccountTransfer, - deleteTemporalTransferFromActivityTable, deleteTemporalTransfer, } from './supabase' import { simulateUserOperation, sendUserOperation, waitForTransactionReceipt } from './wagmi' diff --git a/supabase/tests/temporal_transfers_test.sql b/supabase/tests/temporal_transfers_test.sql index 2c7947d7e..3b3d6bc1b 100644 --- a/supabase/tests/temporal_transfers_test.sql +++ b/supabase/tests/temporal_transfers_test.sql @@ -1,5 +1,5 @@ BEGIN; -SELECT plan(7); +SELECT plan(6); -- Create the necessary extensions CREATE EXTENSION "basejump-supabase_test_helpers"; @@ -97,8 +97,7 @@ SELECT temporal.update_temporal_send_account_transfer( json_build_object( 'user_op_hash', '\x1234'::bytea, 'tx_hash', '\x5678'::bytea, - 'block_num', '123', - 'tx_idx', '1' + 'block_num', '123' )::jsonb ); @@ -108,8 +107,7 @@ SELECT results_eq( status, (data->>'user_op_hash')::bytea, (data->>'tx_hash')::bytea, - data->>'block_num', - data->>'tx_idx' + data->>'block_num' FROM temporal.send_account_transfers WHERE workflow_id = 'test-workflow-1' $$, @@ -118,8 +116,7 @@ SELECT results_eq( 'sent'::temporal.transfer_status, '\x1234'::bytea, '\x5678'::bytea, - '123'::text, - '1'::text + '123'::text ) $$, 'Test transfer update' From 21a89363e66e36778cb4e2d98f7a9c6e4965cf91 Mon Sep 17 00:00:00 2001 From: youngkidwarrior Date: Sun, 9 Mar 2025 22:13:59 -0700 Subject: [PATCH 36/58] remove generated payload-converter file --- packages/temporal/build/payload-converter.cjs | 882 ------------------ 1 file changed, 882 deletions(-) delete mode 100644 packages/temporal/build/payload-converter.cjs diff --git a/packages/temporal/build/payload-converter.cjs b/packages/temporal/build/payload-converter.cjs deleted file mode 100644 index f8bad8dfe..000000000 --- a/packages/temporal/build/payload-converter.cjs +++ /dev/null @@ -1,882 +0,0 @@ -"use strict"; -var __defProp = Object.defineProperty; -var 
__getOwnPropDesc = Object.getOwnPropertyDescriptor; -var __getOwnPropNames = Object.getOwnPropertyNames; -var __hasOwnProp = Object.prototype.hasOwnProperty; -var __export = (target, all) => { - for (var name in all) - __defProp(target, name, { get: all[name], enumerable: true }); -}; -var __copyProps = (to, from, except, desc) => { - if (from && typeof from === "object" || typeof from === "function") { - for (let key of __getOwnPropNames(from)) - if (!__hasOwnProp.call(to, key) && key !== except) - __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); - } - return to; -}; -var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); - -// src/payload-converter.ts -var payload_converter_exports = {}; -__export(payload_converter_exports, { - payloadConverter: () => payloadConverter -}); -module.exports = __toCommonJS(payload_converter_exports); -var import_common2 = require("@temporalio/common"); - -// src/superjson-payload-converter.ts -var import_common = require("@temporalio/common"); - -// ../../node_modules/superjson/dist/double-indexed-kv.js -var DoubleIndexedKV = class { - constructor() { - this.keyToValue = /* @__PURE__ */ new Map(); - this.valueToKey = /* @__PURE__ */ new Map(); - } - set(key, value) { - this.keyToValue.set(key, value); - this.valueToKey.set(value, key); - } - getByKey(key) { - return this.keyToValue.get(key); - } - getByValue(value) { - return this.valueToKey.get(value); - } - clear() { - this.keyToValue.clear(); - this.valueToKey.clear(); - } -}; - -// ../../node_modules/superjson/dist/registry.js -var Registry = class { - constructor(generateIdentifier) { - this.generateIdentifier = generateIdentifier; - this.kv = new DoubleIndexedKV(); - } - register(value, identifier) { - if (this.kv.getByValue(value)) { - return; - } - if (!identifier) { - identifier = this.generateIdentifier(value); - } - this.kv.set(identifier, value); - } - clear() { - 
this.kv.clear(); - } - getIdentifier(value) { - return this.kv.getByValue(value); - } - getValue(identifier) { - return this.kv.getByKey(identifier); - } -}; - -// ../../node_modules/superjson/dist/class-registry.js -var ClassRegistry = class extends Registry { - constructor() { - super((c) => c.name); - this.classToAllowedProps = /* @__PURE__ */ new Map(); - } - register(value, options) { - if (typeof options === "object") { - if (options.allowProps) { - this.classToAllowedProps.set(value, options.allowProps); - } - super.register(value, options.identifier); - } else { - super.register(value, options); - } - } - getAllowedProps(value) { - return this.classToAllowedProps.get(value); - } -}; - -// ../../node_modules/superjson/dist/util.js -function valuesOfObj(record) { - if ("values" in Object) { - return Object.values(record); - } - const values = []; - for (const key in record) { - if (record.hasOwnProperty(key)) { - values.push(record[key]); - } - } - return values; -} -function find(record, predicate) { - const values = valuesOfObj(record); - if ("find" in values) { - return values.find(predicate); - } - const valuesNotNever = values; - for (let i = 0; i < valuesNotNever.length; i++) { - const value = valuesNotNever[i]; - if (predicate(value)) { - return value; - } - } - return void 0; -} -function forEach(record, run) { - Object.entries(record).forEach(([key, value]) => run(value, key)); -} -function includes(arr, value) { - return arr.indexOf(value) !== -1; -} -function findArr(record, predicate) { - for (let i = 0; i < record.length; i++) { - const value = record[i]; - if (predicate(value)) { - return value; - } - } - return void 0; -} - -// ../../node_modules/superjson/dist/custom-transformer-registry.js -var CustomTransformerRegistry = class { - constructor() { - this.transfomers = {}; - } - register(transformer) { - this.transfomers[transformer.name] = transformer; - } - findApplicable(v) { - return find(this.transfomers, (transformer) => 
transformer.isApplicable(v)); - } - findByName(name) { - return this.transfomers[name]; - } -}; - -// ../../node_modules/superjson/dist/is.js -var getType = (payload) => Object.prototype.toString.call(payload).slice(8, -1); -var isUndefined = (payload) => typeof payload === "undefined"; -var isNull = (payload) => payload === null; -var isPlainObject = (payload) => { - if (typeof payload !== "object" || payload === null) - return false; - if (payload === Object.prototype) - return false; - if (Object.getPrototypeOf(payload) === null) - return true; - return Object.getPrototypeOf(payload) === Object.prototype; -}; -var isEmptyObject = (payload) => isPlainObject(payload) && Object.keys(payload).length === 0; -var isArray = (payload) => Array.isArray(payload); -var isString = (payload) => typeof payload === "string"; -var isNumber = (payload) => typeof payload === "number" && !isNaN(payload); -var isBoolean = (payload) => typeof payload === "boolean"; -var isRegExp = (payload) => payload instanceof RegExp; -var isMap = (payload) => payload instanceof Map; -var isSet = (payload) => payload instanceof Set; -var isSymbol = (payload) => getType(payload) === "Symbol"; -var isDate = (payload) => payload instanceof Date && !isNaN(payload.valueOf()); -var isError = (payload) => payload instanceof Error; -var isNaNValue = (payload) => typeof payload === "number" && isNaN(payload); -var isPrimitive = (payload) => isBoolean(payload) || isNull(payload) || isUndefined(payload) || isNumber(payload) || isString(payload) || isSymbol(payload); -var isBigint = (payload) => typeof payload === "bigint"; -var isInfinite = (payload) => payload === Infinity || payload === -Infinity; -var isTypedArray = (payload) => ArrayBuffer.isView(payload) && !(payload instanceof DataView); -var isURL = (payload) => payload instanceof URL; - -// ../../node_modules/superjson/dist/pathstringifier.js -var escapeKey = (key) => key.replace(/\./g, "\\."); -var stringifyPath = (path) => 
path.map(String).map(escapeKey).join("."); -var parsePath = (string) => { - const result = []; - let segment = ""; - for (let i = 0; i < string.length; i++) { - let char = string.charAt(i); - const isEscapedDot = char === "\\" && string.charAt(i + 1) === "."; - if (isEscapedDot) { - segment += "."; - i++; - continue; - } - const isEndOfSegment = char === "."; - if (isEndOfSegment) { - result.push(segment); - segment = ""; - continue; - } - segment += char; - } - const lastSegment = segment; - result.push(lastSegment); - return result; -}; - -// ../../node_modules/superjson/dist/transformer.js -function simpleTransformation(isApplicable, annotation, transform, untransform) { - return { - isApplicable, - annotation, - transform, - untransform - }; -} -var simpleRules = [ - simpleTransformation(isUndefined, "undefined", () => null, () => void 0), - simpleTransformation(isBigint, "bigint", (v) => v.toString(), (v) => { - if (typeof BigInt !== "undefined") { - return BigInt(v); - } - console.error("Please add a BigInt polyfill."); - return v; - }), - simpleTransformation(isDate, "Date", (v) => v.toISOString(), (v) => new Date(v)), - simpleTransformation(isError, "Error", (v, superJson) => { - const baseError = { - name: v.name, - message: v.message - }; - superJson.allowedErrorProps.forEach((prop) => { - baseError[prop] = v[prop]; - }); - return baseError; - }, (v, superJson) => { - const e = new Error(v.message); - e.name = v.name; - e.stack = v.stack; - superJson.allowedErrorProps.forEach((prop) => { - e[prop] = v[prop]; - }); - return e; - }), - simpleTransformation(isRegExp, "regexp", (v) => "" + v, (regex) => { - const body = regex.slice(1, regex.lastIndexOf("/")); - const flags = regex.slice(regex.lastIndexOf("/") + 1); - return new RegExp(body, flags); - }), - simpleTransformation( - isSet, - "set", - // (sets only exist in es6+) - // eslint-disable-next-line es5/no-es6-methods - (v) => [...v.values()], - (v) => new Set(v) - ), - simpleTransformation(isMap, 
"map", (v) => [...v.entries()], (v) => new Map(v)), - simpleTransformation((v) => isNaNValue(v) || isInfinite(v), "number", (v) => { - if (isNaNValue(v)) { - return "NaN"; - } - if (v > 0) { - return "Infinity"; - } else { - return "-Infinity"; - } - }, Number), - simpleTransformation((v) => v === 0 && 1 / v === -Infinity, "number", () => { - return "-0"; - }, Number), - simpleTransformation(isURL, "URL", (v) => v.toString(), (v) => new URL(v)) -]; -function compositeTransformation(isApplicable, annotation, transform, untransform) { - return { - isApplicable, - annotation, - transform, - untransform - }; -} -var symbolRule = compositeTransformation((s, superJson) => { - if (isSymbol(s)) { - const isRegistered = !!superJson.symbolRegistry.getIdentifier(s); - return isRegistered; - } - return false; -}, (s, superJson) => { - const identifier = superJson.symbolRegistry.getIdentifier(s); - return ["symbol", identifier]; -}, (v) => v.description, (_, a, superJson) => { - const value = superJson.symbolRegistry.getValue(a[1]); - if (!value) { - throw new Error("Trying to deserialize unknown symbol"); - } - return value; -}); -var constructorToName = [ - Int8Array, - Uint8Array, - Int16Array, - Uint16Array, - Int32Array, - Uint32Array, - Float32Array, - Float64Array, - Uint8ClampedArray -].reduce((obj, ctor) => { - obj[ctor.name] = ctor; - return obj; -}, {}); -var typedArrayRule = compositeTransformation(isTypedArray, (v) => ["typed-array", v.constructor.name], (v) => [...v], (v, a) => { - const ctor = constructorToName[a[1]]; - if (!ctor) { - throw new Error("Trying to deserialize unknown typed array"); - } - return new ctor(v); -}); -function isInstanceOfRegisteredClass(potentialClass, superJson) { - if (potentialClass?.constructor) { - const isRegistered = !!superJson.classRegistry.getIdentifier(potentialClass.constructor); - return isRegistered; - } - return false; -} -var classRule = compositeTransformation(isInstanceOfRegisteredClass, (clazz, superJson) => { - const 
identifier = superJson.classRegistry.getIdentifier(clazz.constructor); - return ["class", identifier]; -}, (clazz, superJson) => { - const allowedProps = superJson.classRegistry.getAllowedProps(clazz.constructor); - if (!allowedProps) { - return { ...clazz }; - } - const result = {}; - allowedProps.forEach((prop) => { - result[prop] = clazz[prop]; - }); - return result; -}, (v, a, superJson) => { - const clazz = superJson.classRegistry.getValue(a[1]); - if (!clazz) { - throw new Error(`Trying to deserialize unknown class '${a[1]}' - check https://github.com/blitz-js/superjson/issues/116#issuecomment-773996564`); - } - return Object.assign(Object.create(clazz.prototype), v); -}); -var customRule = compositeTransformation((value, superJson) => { - return !!superJson.customTransformerRegistry.findApplicable(value); -}, (value, superJson) => { - const transformer = superJson.customTransformerRegistry.findApplicable(value); - return ["custom", transformer.name]; -}, (value, superJson) => { - const transformer = superJson.customTransformerRegistry.findApplicable(value); - return transformer.serialize(value); -}, (v, a, superJson) => { - const transformer = superJson.customTransformerRegistry.findByName(a[1]); - if (!transformer) { - throw new Error("Trying to deserialize unknown custom value"); - } - return transformer.deserialize(v); -}); -var compositeRules = [classRule, symbolRule, customRule, typedArrayRule]; -var transformValue = (value, superJson) => { - const applicableCompositeRule = findArr(compositeRules, (rule) => rule.isApplicable(value, superJson)); - if (applicableCompositeRule) { - return { - value: applicableCompositeRule.transform(value, superJson), - type: applicableCompositeRule.annotation(value, superJson) - }; - } - const applicableSimpleRule = findArr(simpleRules, (rule) => rule.isApplicable(value, superJson)); - if (applicableSimpleRule) { - return { - value: applicableSimpleRule.transform(value, superJson), - type: applicableSimpleRule.annotation 
- }; - } - return void 0; -}; -var simpleRulesByAnnotation = {}; -simpleRules.forEach((rule) => { - simpleRulesByAnnotation[rule.annotation] = rule; -}); -var untransformValue = (json, type, superJson) => { - if (isArray(type)) { - switch (type[0]) { - case "symbol": - return symbolRule.untransform(json, type, superJson); - case "class": - return classRule.untransform(json, type, superJson); - case "custom": - return customRule.untransform(json, type, superJson); - case "typed-array": - return typedArrayRule.untransform(json, type, superJson); - default: - throw new Error("Unknown transformation: " + type); - } - } else { - const transformation = simpleRulesByAnnotation[type]; - if (!transformation) { - throw new Error("Unknown transformation: " + type); - } - return transformation.untransform(json, superJson); - } -}; - -// ../../node_modules/superjson/dist/accessDeep.js -var getNthKey = (value, n) => { - if (n > value.size) - throw new Error("index out of bounds"); - const keys = value.keys(); - while (n > 0) { - keys.next(); - n--; - } - return keys.next().value; -}; -function validatePath(path) { - if (includes(path, "__proto__")) { - throw new Error("__proto__ is not allowed as a property"); - } - if (includes(path, "prototype")) { - throw new Error("prototype is not allowed as a property"); - } - if (includes(path, "constructor")) { - throw new Error("constructor is not allowed as a property"); - } -} -var getDeep = (object, path) => { - validatePath(path); - for (let i = 0; i < path.length; i++) { - const key = path[i]; - if (isSet(object)) { - object = getNthKey(object, +key); - } else if (isMap(object)) { - const row = +key; - const type = +path[++i] === 0 ? 
"key" : "value"; - const keyOfRow = getNthKey(object, row); - switch (type) { - case "key": - object = keyOfRow; - break; - case "value": - object = object.get(keyOfRow); - break; - } - } else { - object = object[key]; - } - } - return object; -}; -var setDeep = (object, path, mapper) => { - validatePath(path); - if (path.length === 0) { - return mapper(object); - } - let parent = object; - for (let i = 0; i < path.length - 1; i++) { - const key = path[i]; - if (isArray(parent)) { - const index = +key; - parent = parent[index]; - } else if (isPlainObject(parent)) { - parent = parent[key]; - } else if (isSet(parent)) { - const row = +key; - parent = getNthKey(parent, row); - } else if (isMap(parent)) { - const isEnd = i === path.length - 2; - if (isEnd) { - break; - } - const row = +key; - const type = +path[++i] === 0 ? "key" : "value"; - const keyOfRow = getNthKey(parent, row); - switch (type) { - case "key": - parent = keyOfRow; - break; - case "value": - parent = parent.get(keyOfRow); - break; - } - } - } - const lastKey = path[path.length - 1]; - if (isArray(parent)) { - parent[+lastKey] = mapper(parent[+lastKey]); - } else if (isPlainObject(parent)) { - parent[lastKey] = mapper(parent[lastKey]); - } - if (isSet(parent)) { - const oldValue = getNthKey(parent, +lastKey); - const newValue = mapper(oldValue); - if (oldValue !== newValue) { - parent.delete(oldValue); - parent.add(newValue); - } - } - if (isMap(parent)) { - const row = +path[path.length - 2]; - const keyToRow = getNthKey(parent, row); - const type = +lastKey === 0 ? 
"key" : "value"; - switch (type) { - case "key": { - const newKey = mapper(keyToRow); - parent.set(newKey, parent.get(keyToRow)); - if (newKey !== keyToRow) { - parent.delete(keyToRow); - } - break; - } - case "value": { - parent.set(keyToRow, mapper(parent.get(keyToRow))); - break; - } - } - } - return object; -}; - -// ../../node_modules/superjson/dist/plainer.js -function traverse(tree, walker2, origin = []) { - if (!tree) { - return; - } - if (!isArray(tree)) { - forEach(tree, (subtree, key) => traverse(subtree, walker2, [...origin, ...parsePath(key)])); - return; - } - const [nodeValue, children] = tree; - if (children) { - forEach(children, (child, key) => { - traverse(child, walker2, [...origin, ...parsePath(key)]); - }); - } - walker2(nodeValue, origin); -} -function applyValueAnnotations(plain, annotations, superJson) { - traverse(annotations, (type, path) => { - plain = setDeep(plain, path, (v) => untransformValue(v, type, superJson)); - }); - return plain; -} -function applyReferentialEqualityAnnotations(plain, annotations) { - function apply(identicalPaths, path) { - const object = getDeep(plain, parsePath(path)); - identicalPaths.map(parsePath).forEach((identicalObjectPath) => { - plain = setDeep(plain, identicalObjectPath, () => object); - }); - } - if (isArray(annotations)) { - const [root, other] = annotations; - root.forEach((identicalPath) => { - plain = setDeep(plain, parsePath(identicalPath), () => plain); - }); - if (other) { - forEach(other, apply); - } - } else { - forEach(annotations, apply); - } - return plain; -} -var isDeep = (object, superJson) => isPlainObject(object) || isArray(object) || isMap(object) || isSet(object) || isInstanceOfRegisteredClass(object, superJson); -function addIdentity(object, path, identities) { - const existingSet = identities.get(object); - if (existingSet) { - existingSet.push(path); - } else { - identities.set(object, [path]); - } -} -function generateReferentialEqualityAnnotations(identitites, dedupe) { - 
const result = {}; - let rootEqualityPaths = void 0; - identitites.forEach((paths) => { - if (paths.length <= 1) { - return; - } - if (!dedupe) { - paths = paths.map((path) => path.map(String)).sort((a, b) => a.length - b.length); - } - const [representativePath, ...identicalPaths] = paths; - if (representativePath.length === 0) { - rootEqualityPaths = identicalPaths.map(stringifyPath); - } else { - result[stringifyPath(representativePath)] = identicalPaths.map(stringifyPath); - } - }); - if (rootEqualityPaths) { - if (isEmptyObject(result)) { - return [rootEqualityPaths]; - } else { - return [rootEqualityPaths, result]; - } - } else { - return isEmptyObject(result) ? void 0 : result; - } -} -var walker = (object, identities, superJson, dedupe, path = [], objectsInThisPath = [], seenObjects = /* @__PURE__ */ new Map()) => { - const primitive = isPrimitive(object); - if (!primitive) { - addIdentity(object, path, identities); - const seen = seenObjects.get(object); - if (seen) { - return dedupe ? { - transformedValue: null - } : seen; - } - } - if (!isDeep(object, superJson)) { - const transformed2 = transformValue(object, superJson); - const result2 = transformed2 ? { - transformedValue: transformed2.value, - annotations: [transformed2.type] - } : { - transformedValue: object - }; - if (!primitive) { - seenObjects.set(object, result2); - } - return result2; - } - if (includes(objectsInThisPath, object)) { - return { - transformedValue: null - }; - } - const transformationResult = transformValue(object, superJson); - const transformed = transformationResult?.value ?? object; - const transformedValue = isArray(transformed) ? [] : {}; - const innerAnnotations = {}; - forEach(transformed, (value, index) => { - if (index === "__proto__" || index === "constructor" || index === "prototype") { - throw new Error(`Detected property ${index}. 
This is a prototype pollution risk, please remove it from your object.`); - } - const recursiveResult = walker(value, identities, superJson, dedupe, [...path, index], [...objectsInThisPath, object], seenObjects); - transformedValue[index] = recursiveResult.transformedValue; - if (isArray(recursiveResult.annotations)) { - innerAnnotations[index] = recursiveResult.annotations; - } else if (isPlainObject(recursiveResult.annotations)) { - forEach(recursiveResult.annotations, (tree, key) => { - innerAnnotations[escapeKey(index) + "." + key] = tree; - }); - } - }); - const result = isEmptyObject(innerAnnotations) ? { - transformedValue, - annotations: !!transformationResult ? [transformationResult.type] : void 0 - } : { - transformedValue, - annotations: !!transformationResult ? [transformationResult.type, innerAnnotations] : innerAnnotations - }; - if (!primitive) { - seenObjects.set(object, result); - } - return result; -}; - -// ../../node_modules/is-what/dist/index.js -function getType2(payload) { - return Object.prototype.toString.call(payload).slice(8, -1); -} -function isArray2(payload) { - return getType2(payload) === "Array"; -} -function isPlainObject2(payload) { - if (getType2(payload) !== "Object") - return false; - const prototype = Object.getPrototypeOf(payload); - return !!prototype && prototype.constructor === Object && prototype === Object.prototype; -} -function isNull2(payload) { - return getType2(payload) === "Null"; -} -function isOneOf(a, b, c, d, e) { - return (value) => a(value) || b(value) || !!c && c(value) || !!d && d(value) || !!e && e(value); -} -function isUndefined2(payload) { - return getType2(payload) === "Undefined"; -} -var isNullOrUndefined = isOneOf(isNull2, isUndefined2); - -// ../../node_modules/copy-anything/dist/index.js -function assignProp(carry, key, newVal, originalObject, includeNonenumerable) { - const propType = {}.propertyIsEnumerable.call(originalObject, key) ? 
"enumerable" : "nonenumerable"; - if (propType === "enumerable") - carry[key] = newVal; - if (includeNonenumerable && propType === "nonenumerable") { - Object.defineProperty(carry, key, { - value: newVal, - enumerable: false, - writable: true, - configurable: true - }); - } -} -function copy(target, options = {}) { - if (isArray2(target)) { - return target.map((item) => copy(item, options)); - } - if (!isPlainObject2(target)) { - return target; - } - const props = Object.getOwnPropertyNames(target); - const symbols = Object.getOwnPropertySymbols(target); - return [...props, ...symbols].reduce((carry, key) => { - if (isArray2(options.props) && !options.props.includes(key)) { - return carry; - } - const val = target[key]; - const newVal = copy(val, options); - assignProp(carry, key, newVal, target, options.nonenumerable); - return carry; - }, {}); -} - -// ../../node_modules/superjson/dist/index.js -var SuperJSON = class { - /** - * @param dedupeReferentialEqualities If true, SuperJSON will make sure only one instance of referentially equal objects are serialized and the rest are replaced with `null`. - */ - constructor({ dedupe = false } = {}) { - this.classRegistry = new ClassRegistry(); - this.symbolRegistry = new Registry((s) => s.description ?? 
""); - this.customTransformerRegistry = new CustomTransformerRegistry(); - this.allowedErrorProps = []; - this.dedupe = dedupe; - } - serialize(object) { - const identities = /* @__PURE__ */ new Map(); - const output = walker(object, identities, this, this.dedupe); - const res = { - json: output.transformedValue - }; - if (output.annotations) { - res.meta = { - ...res.meta, - values: output.annotations - }; - } - const equalityAnnotations = generateReferentialEqualityAnnotations(identities, this.dedupe); - if (equalityAnnotations) { - res.meta = { - ...res.meta, - referentialEqualities: equalityAnnotations - }; - } - return res; - } - deserialize(payload) { - const { json, meta } = payload; - let result = copy(json); - if (meta?.values) { - result = applyValueAnnotations(result, meta.values, this); - } - if (meta?.referentialEqualities) { - result = applyReferentialEqualityAnnotations(result, meta.referentialEqualities); - } - return result; - } - stringify(object) { - return JSON.stringify(this.serialize(object)); - } - parse(string) { - return this.deserialize(JSON.parse(string)); - } - registerClass(v, options) { - this.classRegistry.register(v, options); - } - registerSymbol(v, identifier) { - this.symbolRegistry.register(v, identifier); - } - registerCustom(transformer, name) { - this.customTransformerRegistry.register({ - name, - ...transformer - }); - } - allowErrorProps(...props) { - this.allowedErrorProps.push(...props); - } -}; -SuperJSON.defaultInstance = new SuperJSON(); -SuperJSON.serialize = SuperJSON.defaultInstance.serialize.bind(SuperJSON.defaultInstance); -SuperJSON.deserialize = SuperJSON.defaultInstance.deserialize.bind(SuperJSON.defaultInstance); -SuperJSON.stringify = SuperJSON.defaultInstance.stringify.bind(SuperJSON.defaultInstance); -SuperJSON.parse = SuperJSON.defaultInstance.parse.bind(SuperJSON.defaultInstance); -SuperJSON.registerClass = SuperJSON.defaultInstance.registerClass.bind(SuperJSON.defaultInstance); -SuperJSON.registerSymbol = 
SuperJSON.defaultInstance.registerSymbol.bind(SuperJSON.defaultInstance); -SuperJSON.registerCustom = SuperJSON.defaultInstance.registerCustom.bind(SuperJSON.defaultInstance); -SuperJSON.allowErrorProps = SuperJSON.defaultInstance.allowErrorProps.bind(SuperJSON.defaultInstance); -var serialize = SuperJSON.serialize; -var deserialize = SuperJSON.deserialize; -var stringify = SuperJSON.stringify; -var parse = SuperJSON.parse; -var registerClass = SuperJSON.registerClass; -var registerCustom = SuperJSON.registerCustom; -var registerSymbol = SuperJSON.registerSymbol; -var allowErrorProps = SuperJSON.allowErrorProps; - -// src/superjson-payload-converter.ts -var import_encoding = require("@temporalio/common/lib/encoding"); -var SuperjsonPayloadConverter = class { - // Use 'json/plain' so that Payloads are displayed in the UI - encodingType = "json/plain"; - toPayload(value) { - if (value === void 0) return void 0; - let sjson = ""; - try { - sjson = SuperJSON.stringify(value); - } catch (e) { - throw new UnsupportedSuperjsonTypeError( - `Can't run SUPERJSON.stringify on this value: ${value}. Either convert it (or its properties) to SUPERJSON-serializable values (see https://github.com/flightcontrolhq/superjson#readme ), or create a custom data converter. SJSON.stringify error message: ${errorMessage( - e - )}`, - e - ); - } - return { - metadata: { - [import_common.METADATA_ENCODING_KEY]: (0, import_encoding.encode)("json/plain"), - // Include an additional metadata field to indicate that this is an SuperJSON payload - format: (0, import_encoding.encode)("extended") - }, - data: (0, import_encoding.encode)(sjson) - }; - } - fromPayload(content) { - try { - if (!content.data) { - throw new UnsupportedSuperjsonTypeError( - `Can't run SUPERJSON.parse on this value: ${content.data}. Either convert it (or its properties) to SUPERJSON-serializable values (see https://github.com/flightcontrolhq/superjson#readme ), or create a custom data converter. 
No data found in payload.` - ); - } - return SuperJSON.parse((0, import_encoding.decode)(content.data)); - } catch (e) { - throw new UnsupportedSuperjsonTypeError( - `Can't run SUPERJSON.parse on this value: ${content.data}. Either convert it (or its properties) to SUPERJSON-serializable values (see https://github.com/flightcontrolhq/superjson#readme ), or create a custom data converter. SJSON.parse error message: ${errorMessage( - e - )}`, - e - ); - } - } -}; -var UnsupportedSuperjsonTypeError = class extends import_common.PayloadConverterError { - constructor(message, cause) { - super(message ?? void 0); - this.cause = cause; - } - name = "UnsupportedJsonTypeError"; -}; -function errorMessage(error) { - if (typeof error === "string") { - return error; - } - if (error instanceof Error) { - return error.message; - } - return void 0; -} - -// src/payload-converter.ts -var payloadConverter = new import_common2.CompositePayloadConverter( - new import_common2.UndefinedPayloadConverter(), - new SuperjsonPayloadConverter() -); -// Annotate the CommonJS export names for ESM import in node: -0 && (module.exports = { - payloadConverter -}); From e5a8fb4c0f5801b4c65ff182628f607adaf46fb6 Mon Sep 17 00:00:00 2001 From: youngkidwarrior Date: Sun, 9 Mar 2025 22:16:40 -0700 Subject: [PATCH 37/58] convert console.log to debug logs --- packages/app/features/send/confirm/screen.tsx | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/packages/app/features/send/confirm/screen.tsx b/packages/app/features/send/confirm/screen.tsx index 2fe40dd05..7b7f0b337 100644 --- a/packages/app/features/send/confirm/screen.tsx +++ b/packages/app/features/send/confirm/screen.tsx @@ -187,9 +187,9 @@ export function SendConfirm() { maxPriorityFeePerGas: feesPerGas.maxPriorityFeePerGas, } - console.log('gasEstimate', usdcFees) - console.log('feesPerGas', feesPerGas) - console.log('userOp', _userOp) + log('gasEstimate', usdcFees) + log('feesPerGas', feesPerGas) + log('userOp', 
_userOp) const chainId = baseMainnetClient.chain.id const entryPoint = entryPointAddress[chainId] From de39d25c5db129945af72ee3838f4de84a3d1e15 Mon Sep 17 00:00:00 2001 From: youngkidwarrior Date: Sun, 9 Mar 2025 22:17:06 -0700 Subject: [PATCH 38/58] Remove token balance refetches --- packages/app/features/send/confirm/screen.tsx | 5 ----- 1 file changed, 5 deletions(-) diff --git a/packages/app/features/send/confirm/screen.tsx b/packages/app/features/send/confirm/screen.tsx index 7b7f0b337..5563c5b29 100644 --- a/packages/app/features/send/confirm/screen.tsx +++ b/packages/app/features/send/confirm/screen.tsx @@ -203,11 +203,6 @@ export function SendConfirm() { const workflowId = await transfer({ userOp }) - if (selectedCoin?.token === 'eth') { - await ethQuery.refetch() - } else { - await tokensQuery.refetch() - } if (workflowId) { router.replace({ pathname: '/', query: { token: sendToken } }) } From fb456b3b3268134c51f00c0c3190c45f248743ec Mon Sep 17 00:00:00 2001 From: youngkidwarrior Date: Mon, 10 Mar 2025 08:15:22 -0700 Subject: [PATCH 39/58] Add activity RLS policy and filter temporal in activity feed --- ...021828_create_temporal_transfers_table.sql | 30 +++++++++++++++++++ 1 file changed, 30 insertions(+) diff --git a/supabase/migrations/20250307021828_create_temporal_transfers_table.sql b/supabase/migrations/20250307021828_create_temporal_transfers_table.sql index 409f2dbfe..4245c32dd 100644 --- a/supabase/migrations/20250307021828_create_temporal_transfers_table.sql +++ b/supabase/migrations/20250307021828_create_temporal_transfers_table.sql @@ -354,3 +354,33 @@ CREATE TRIGGER send_account_transfers_trigger_delete_temporal_activity BEFORE INSERT ON send_account_transfers FOR EACH ROW EXECUTE FUNCTION send_account_transfers_delete_temporal_activity(); +-- Add temporal filter (a.to_user_id = ( select auth.uid() ) and a.event_name not like 'temporal_%') +create or replace view activity_feed with (security_barrier = on) as +select a.created_at as created_at, 
+ a.event_name as event_name, + case when a.from_user_id = from_p.id then (case when a.from_user_id = ( select auth.uid() ) + then ( select auth.uid() ) end, + from_p.name, + from_p.avatar_url, + from_p.send_id, + ( select array_agg(name) + from tags + where user_id = from_p.id and status = 'confirmed' ) + )::activity_feed_user end as from_user, + case when a.to_user_id = to_p.id then (case when a.to_user_id = ( select auth.uid() ) + then ( select auth.uid() ) end, + to_p.name, + to_p.avatar_url, + to_p.send_id, + ( select array_agg(name) + from tags + where user_id = to_p.id and status = 'confirmed' ) + )::activity_feed_user end as to_user, + a.data as data +from activity a + left join profiles from_p on a.from_user_id = from_p.id + left join profiles to_p on a.to_user_id = to_p.id +where a.from_user_id = ( select auth.uid() ) + or (a.to_user_id = ( select auth.uid() ) and a.event_name not like 'temporal_%') +group by a.created_at, a.event_name, a.from_user_id, a.to_user_id, from_p.id, from_p.name, from_p.avatar_url, + from_p.send_id, to_p.id, to_p.name, to_p.avatar_url, to_p.send_id, a.data; From 179ef9a13fdfa7b6260575308c690e62a7519ef6 Mon Sep 17 00:00:00 2001 From: youngkidwarrior Date: Mon, 10 Mar 2025 08:17:59 -0700 Subject: [PATCH 40/58] Add created_at_block_num column so delete trigger works --- packages/snaplet/.snaplet/dataModel.json | 18 ++++- .../src/transfer-workflow/activities.ts | 48 ++++++----- .../src/transfer-workflow/supabase.ts | 25 ++---- .../workflows/src/transfer-workflow/wagmi.ts | 4 + .../src/transfer-workflow/workflow.ts | 8 +- supabase/database-generated.types.ts | 17 ++-- ...021828_create_temporal_transfers_table.sql | 79 ++++++++++--------- 7 files changed, 113 insertions(+), 86 deletions(-) diff --git a/packages/snaplet/.snaplet/dataModel.json b/packages/snaplet/.snaplet/dataModel.json index d4ed1a03d..25f3559b1 100644 --- a/packages/snaplet/.snaplet/dataModel.json +++ b/packages/snaplet/.snaplet/dataModel.json @@ -6231,12 +6231,26 @@ 
"isId": false, "maxLength": null }, + { + "id": "temporal.send_account_transfers.created_at_block_num", + "name": "created_at_block_num", + "columnName": "created_at_block_num", + "type": "numeric", + "isRequired": true, + "kind": "scalar", + "isList": false, + "isGenerated": false, + "sequence": false, + "hasDefaultValue": false, + "isId": false, + "maxLength": null + }, { "id": "temporal.send_account_transfers.created_at", "name": "created_at", "columnName": "created_at", "type": "timestamptz", - "isRequired": false, + "isRequired": true, "kind": "scalar", "isList": false, "isGenerated": false, @@ -6250,7 +6264,7 @@ "name": "updated_at", "columnName": "updated_at", "type": "timestamptz", - "isRequired": false, + "isRequired": true, "kind": "scalar", "isList": false, "isGenerated": false, diff --git a/packages/workflows/src/transfer-workflow/activities.ts b/packages/workflows/src/transfer-workflow/activities.ts index 1414e55d1..7f8588d3f 100644 --- a/packages/workflows/src/transfer-workflow/activities.ts +++ b/packages/workflows/src/transfer-workflow/activities.ts @@ -1,17 +1,22 @@ import { log, ApplicationFailure } from '@temporalio/activity' import { insertTemporalTokenSendAccountTransfer, - updateTemporalSendAccountTransfer, insertTemporalEthSendAccountTransfer, - deleteTemporalTransfer, + updateTemporalSendAccountTransfer, } from './supabase' -import { simulateUserOperation, sendUserOperation, waitForTransactionReceipt } from './wagmi' +import { + simulateUserOperation, + sendUserOperation, + waitForTransactionReceipt, + getBlockNumber, +} from './wagmi' import type { UserOperation } from 'permissionless' import { bootstrap } from '@my/workflows/utils' import { decodeTransferUserOp } from 'app/utils/decodeTransferUserOp' import { hexToBytea } from 'app/utils/hexToBytea' import type { Json, Database, PgBytea } from '@my/supabase/database.types' import superjson from 'superjson' +import { byteaToHex } from 'app/utils/byteaToHex' type TransferActivities = { 
initializeTransferActivity: (userOp: UserOperation<'v0.7'>) => Promise<{ @@ -19,21 +24,20 @@ type TransferActivities = { to: PgBytea amount: bigint token: PgBytea | null + blockNumber: bigint }> insertTemporalSendAccountTransfer: ( workflowId: string, from: PgBytea, to: PgBytea, amount: bigint, - token: PgBytea | null + token: PgBytea | null, + blockNumber: bigint ) => Promise - sendUserOpActivity: ( - workflowId: string, - userOp: UserOperation<'v0.7'> - ) => Promise<{ hash: `0x${string}`; hashBytea: PgBytea }> + sendUserOpActivity: (workflowId: string, userOp: UserOperation<'v0.7'>) => Promise waitForTransactionReceiptActivity: ( workflowId: string, - hash: `0x${string}` + hash: PgBytea ) => Promise<{ transactionHash: `0x${string}` blockNumber: bigint @@ -41,7 +45,7 @@ type TransferActivities = { updateTemporalTransferActivity: (params: { workflowId: string status: Database['temporal']['Enums']['transfer_status'] - data?: Json + data: Json failureError?: { message?: string | null type?: string | null @@ -84,13 +88,16 @@ export const createTransferActivities = ( throw ApplicationFailure.nonRetryable('Invalid hex address format') } - return { from: fromBytea, to: toBytea, amount, token: tokenBytea } + const blockNumber = await getBlockNumber() + + return { from: fromBytea, to: toBytea, amount, token: tokenBytea, blockNumber } }, - async insertTemporalSendAccountTransfer(workflowId, from, to, amount, token) { + async insertTemporalSendAccountTransfer(workflowId, from, to, amount, token, blockNumber) { const { error } = token ? 
await insertTemporalTokenSendAccountTransfer({ workflow_id: workflowId, status: 'initialized', + block_num: blockNumber, f: from, t: to, v: amount, @@ -99,6 +106,7 @@ export const createTransferActivities = ( : await insertTemporalEthSendAccountTransfer({ workflow_id: workflowId, status: 'initialized', + block_num: blockNumber, sender: from, value: amount, log_addr: to, @@ -125,16 +133,19 @@ export const createTransferActivities = ( try { const hash = await sendUserOperation(userOp) const hashBytea = hexToBytea(hash) - return { hash, hashBytea } + return hashBytea } catch (error) { log.error('sendUserOpActivity failed', { error }) - const { error: deleteError } = await deleteTemporalTransfer(workflowId) - if (deleteError) { + const { error: updateError } = await updateTemporalSendAccountTransfer({ + workflow_id: workflowId, + status: 'failed', + }) + if (updateError) { throw ApplicationFailure.retryable( 'Error deleting transfer from temporal.send_account_transfers', - deleteError.code, + updateError.code, { - deleteError, + error: updateError, workflowId, } ) @@ -144,8 +155,9 @@ export const createTransferActivities = ( } }, async waitForTransactionReceiptActivity(workflowId, hash) { + const hexHash = byteaToHex(hash) try { - const bundlerReceipt = await waitForTransactionReceipt(hash) + const bundlerReceipt = await waitForTransactionReceipt(hexHash) if (!bundlerReceipt) { throw ApplicationFailure.retryable('No receipt returned from waitForTransactionReceipt') } diff --git a/packages/workflows/src/transfer-workflow/supabase.ts b/packages/workflows/src/transfer-workflow/supabase.ts index 731a5a144..09902fc3a 100644 --- a/packages/workflows/src/transfer-workflow/supabase.ts +++ b/packages/workflows/src/transfer-workflow/supabase.ts @@ -4,6 +4,7 @@ import { supabaseAdmin } from 'app/utils/supabase/admin' export async function insertTemporalTokenSendAccountTransfer({ workflow_id, status, + block_num, f, t, v, @@ -11,6 +12,7 @@ export async function 
insertTemporalTokenSendAccountTransfer({ }: { workflow_id: string status: Database['temporal']['Enums']['transfer_status'] + block_num: bigint f: PgBytea t: PgBytea v: bigint @@ -19,6 +21,7 @@ export async function insertTemporalTokenSendAccountTransfer({ return await supabaseAdmin.schema('temporal').rpc('insert_temporal_token_send_account_transfer', { workflow_id, status, + block_num: block_num.toString(), f, t, v: v.toString(), @@ -29,12 +32,14 @@ export async function insertTemporalTokenSendAccountTransfer({ export async function insertTemporalEthSendAccountTransfer({ workflow_id, status, + block_num, sender, log_addr, value, }: { workflow_id: string status: Database['temporal']['Enums']['transfer_status'] + block_num: bigint sender: PgBytea log_addr: PgBytea value: bigint @@ -42,6 +47,7 @@ export async function insertTemporalEthSendAccountTransfer({ return await supabaseAdmin.schema('temporal').rpc('insert_temporal_eth_send_account_transfer', { workflow_id, status, + block_num: block_num.toString(), sender, log_addr, value: value.toString(), @@ -63,22 +69,3 @@ export async function updateTemporalSendAccountTransfer({ data, }) } - -export async function deleteTemporalTransfer(workflow_id: string) { - return await supabaseAdmin - .schema('temporal') - .from('send_account_transfers') - .delete() - .eq('workflow_id', workflow_id) - .select('workflow_id') - .single() -} - -export async function deleteTemporalTransferFromActivityTable(workflow_id: string) { - return await supabaseAdmin - .from('activity') - .delete() - .eq('event_id', workflow_id) - .select('event_id') - .single() -} diff --git a/packages/workflows/src/transfer-workflow/wagmi.ts b/packages/workflows/src/transfer-workflow/wagmi.ts index 1ba62a75e..bd82c74d0 100644 --- a/packages/workflows/src/transfer-workflow/wagmi.ts +++ b/packages/workflows/src/transfer-workflow/wagmi.ts @@ -20,3 +20,7 @@ export async function waitForTransactionReceipt( ): Promise { return await 
baseMainnetBundlerClient.waitForUserOperationReceipt({ hash }) } + +export async function getBlockNumber() { + return await baseMainnetClient.getBlockNumber() +} diff --git a/packages/workflows/src/transfer-workflow/workflow.ts b/packages/workflows/src/transfer-workflow/workflow.ts index d0bea39de..0976fe482 100644 --- a/packages/workflows/src/transfer-workflow/workflow.ts +++ b/packages/workflows/src/transfer-workflow/workflow.ts @@ -21,18 +21,18 @@ const { export async function TransferWorkflow(userOp: UserOperation<'v0.7'>) { const workflowId = workflowInfo().workflowId log('SendTransferWorkflow Initializing with userOp:', workflowId) - const { token, from, to, amount } = await initializeTransferActivity(userOp) + const { token, from, to, amount, blockNumber } = await initializeTransferActivity(userOp) log('Inserting temporal transfer into temporal.send_account_transfers', workflowId) - await insertTemporalSendAccountTransfer(workflowId, from, to, amount, token) + await insertTemporalSendAccountTransfer(workflowId, from, to, amount, token, blockNumber) log('Sending UserOperation', superjson.stringify(userOp)) - const { hash, hashBytea } = await sendUserOpActivity(workflowId, userOp) + const hash = await sendUserOpActivity(workflowId, userOp) log('UserOperation sent, hash:', hash) await updateTemporalTransferActivity({ workflowId, status: 'sent', - data: { user_op_hash: hashBytea }, + data: { user_op_hash: hash }, }) const receipt = await waitForTransactionReceiptActivity(workflowId, hash) diff --git a/supabase/database-generated.types.ts b/supabase/database-generated.types.ts index 0951f90e1..dd409cf8e 100644 --- a/supabase/database-generated.types.ts +++ b/supabase/database-generated.types.ts @@ -1466,29 +1466,32 @@ export type Database = { Tables: { send_account_transfers: { Row: { - created_at: string | null + created_at: string + created_at_block_num: number data: Json id: number status: Database["temporal"]["Enums"]["transfer_status"] - updated_at: string | 
null + updated_at: string user_id: string workflow_id: string } Insert: { - created_at?: string | null + created_at?: string + created_at_block_num: number data: Json id?: number status: Database["temporal"]["Enums"]["transfer_status"] - updated_at?: string | null + updated_at?: string user_id: string workflow_id: string } Update: { - created_at?: string | null + created_at?: string + created_at_block_num?: number data?: Json id?: number status?: Database["temporal"]["Enums"]["transfer_status"] - updated_at?: string | null + updated_at?: string user_id?: string workflow_id?: string } @@ -1503,6 +1506,7 @@ export type Database = { Args: { workflow_id: string status: Database["temporal"]["Enums"]["transfer_status"] + block_num: string sender: string log_addr: string value: string @@ -1513,6 +1517,7 @@ export type Database = { Args: { workflow_id: string status: Database["temporal"]["Enums"]["transfer_status"] + block_num: string f: string t: string v: string diff --git a/supabase/migrations/20250307021828_create_temporal_transfers_table.sql b/supabase/migrations/20250307021828_create_temporal_transfers_table.sql index 4245c32dd..453a3dc0d 100644 --- a/supabase/migrations/20250307021828_create_temporal_transfers_table.sql +++ b/supabase/migrations/20250307021828_create_temporal_transfers_table.sql @@ -22,12 +22,13 @@ CREATE TYPE temporal.transfer_status AS ENUM( CREATE TABLE temporal.send_account_transfers( id serial primary key, - workflow_id text NOT NULL, - user_id uuid NOT NULL, - status temporal.transfer_status NOT NULL, - data jsonb NOT NULL, - created_at timestamptz DEFAULT (NOW() AT TIME ZONE 'UTC'), - updated_at timestamptz DEFAULT (NOW() AT TIME ZONE 'UTC') + workflow_id text not null, + user_id uuid not null, + status temporal.transfer_status not null, + data jsonb not null, + created_at_block_num numeric not null, + created_at timestamp with time zone not null default (now() AT TIME ZONE 'utc'::text), + updated_at timestamp with time zone not null default 
(now() AT TIME ZONE 'utc'::text) ); GRANT ALL ON TABLE temporal.send_account_transfers TO service_role; @@ -39,9 +40,16 @@ create policy "users can see their own temporal transfers" on "temporal"."send_account_transfers" as permissive for select to authenticated using ( - user_id = auth.uid() + auth.uid() = user_id ); +create policy "users can only see temporal transfers they initiated" +on "public"."activity" as permissive +for select to authenticated +using ( + (event_name != 'temporal_send_account_transfers') OR + (from_user_id = auth.uid()) +); CREATE INDEX temporal_send_account_transfers_user_id_idx ON temporal.send_account_transfers(user_id); CREATE INDEX temporal_send_account_transfers_created_at_idx ON temporal.send_account_transfers(created_at); @@ -50,6 +58,7 @@ CREATE UNIQUE INDEX temporal_send_account_transfers_workflow_id_idx ON temporal. CREATE OR REPLACE FUNCTION temporal.insert_temporal_token_send_account_transfer( workflow_id text, status temporal.transfer_status, + block_num text, f bytea, t bytea, v text, @@ -79,12 +88,14 @@ BEGIN workflow_id, user_id, status, + created_at_block_num, data ) VALUES ( workflow_id, _user_id, status, + block_num::numeric, _data ); END; @@ -93,6 +104,7 @@ $$; CREATE OR REPLACE FUNCTION temporal.insert_temporal_eth_send_account_transfer( workflow_id text, status temporal.transfer_status, + block_num text, sender bytea, log_addr bytea, value text @@ -120,12 +132,14 @@ BEGIN workflow_id, user_id, status, + created_at_block_num, data ) VALUES ( workflow_id, _user_id, status, + block_num::numeric, _data ); END; @@ -146,7 +160,7 @@ BEGIN -- Only construct _data if input data is not null IF data IS NOT NULL THEN _data := json_build_object( - 'user_op_hash', (data->>'user_op_hash'), + 'user_op_hash', (data->'user_op_hash'), 'tx_hash', (data->>'tx_hash'), 'block_num', data->>'block_num'::text ); @@ -189,7 +203,7 @@ BEGIN -- cast v to text to avoid losing precision when converting to json when sending to clients _data := 
json_build_object( 'status', NEW.status::text, - 'user_op_hash', (NEW.data->>'user_op_hash'), + 'user_op_hash', (NEW.data->'user_op_hash'), 'log_addr', (NEW.data->>'log_addr'), 'f', (NEW.data->>'f'), 't', (NEW.data->>'t'), @@ -203,16 +217,14 @@ BEGIN event_id, from_user_id, to_user_id, - data, - created_at + data ) VALUES ( 'temporal_send_account_transfers', NEW.workflow_id, _f_user_id, _t_user_id, - _data, - NEW.created_at + _data ); RETURN NEW; END; @@ -237,10 +249,10 @@ BEGIN FROM send_accounts WHERE address = concat('0x', encode((NEW.data->>'log_addr')::bytea, 'hex'))::citext; - -- cast v to text to avoid losing precision when converting to json when sending to clients + -- cast v to text to avoid losing precision when converting to json when sending to clients _data := json_build_object( 'status', NEW.status::text, - 'user_op_hash', (NEW.data->>'user_op_hash'), + 'user_op_hash', (NEW.data->'user_op_hash'), 'log_addr', (NEW.data->>'log_addr'), 'sender', (NEW.data->>'sender'), 'value', NEW.data->>'value'::text, @@ -253,16 +265,14 @@ BEGIN event_id, from_user_id, to_user_id, - data, - created_at + data ) VALUES ( 'temporal_send_account_transfers', NEW.workflow_id, _from_user_id, _to_user_id, - _data, - NEW.created_at + _data ); RETURN NEW; END; @@ -289,18 +299,12 @@ CREATE OR REPLACE FUNCTION temporal.temporal_send_account_transfers_trigger_upda DECLARE _data jsonb; BEGIN - IF EXISTS ( - SELECT 1 FROM activity - WHERE event_name = 'temporal_send_account_transfers' - AND event_id = NEW.workflow_id - ) THEN - _data := NEW.data || json_build_object('status', NEW.status::text)::jsonb; + _data := NEW.data || json_build_object('status', NEW.status::text)::jsonb; - UPDATE activity - SET data = _data - WHERE event_name = 'temporal_send_account_transfers' - AND event_id = NEW.workflow_id; - END IF; + UPDATE activity + SET data = _data + WHERE event_name = 'temporal_send_account_transfers' + AND event_id = NEW.workflow_id; RETURN NEW; END; @@ -338,22 +342,23 @@ create or 
replace function send_account_transfers_delete_temporal_activity() ret language plpgsql security definer as $$ - begin - delete from activity a - where event_name = 'temporal_send_account_transfers' and event_id in ( - select sat.workflow_id from temporal.send_account_transfers sat - where extract(epoch from sat.created_at)::numeric < NEW.block_time - and sat.status != 'failed' + delete from public.activity a + where a.event_name = 'temporal_send_account_transfers' and a.event_id in ( + select t_sat.workflow_id + from temporal.send_account_transfers t_sat + where t_sat.created_at_block_num <= NEW.block_num + and t_sat.status != 'failed' ); return NEW; end; $$; CREATE TRIGGER send_account_transfers_trigger_delete_temporal_activity - BEFORE INSERT ON send_account_transfers + BEFORE INSERT ON public.send_account_transfers FOR EACH ROW EXECUTE FUNCTION send_account_transfers_delete_temporal_activity(); + -- Add temporal filter (a.to_user_id = ( select auth.uid() ) and a.event_name not like 'temporal_%') create or replace view activity_feed with (security_barrier = on) as select a.created_at as created_at, From 0ec356f787495ec9da6ad9395b284aa8c0933f46 Mon Sep 17 00:00:00 2001 From: youngkidwarrior Date: Mon, 10 Mar 2025 08:18:21 -0700 Subject: [PATCH 41/58] Fix temporal_transfers_test --- supabase/tests/temporal_transfers_test.sql | 2 ++ 1 file changed, 2 insertions(+) diff --git a/supabase/tests/temporal_transfers_test.sql b/supabase/tests/temporal_transfers_test.sql index 3b3d6bc1b..fa34a8018 100644 --- a/supabase/tests/temporal_transfers_test.sql +++ b/supabase/tests/temporal_transfers_test.sql @@ -27,6 +27,7 @@ VALUES ( SELECT temporal.insert_temporal_token_send_account_transfer( 'test-workflow-1'::text, 'initialized'::temporal.transfer_status, + '123', '\x1234567890ABCDEF1234567890ABCDEF12345678'::bytea, '\xB0B7D5E8A4B6D534B3F608E9D27871F85A4E98DA'::bytea, '100'::text, @@ -62,6 +63,7 @@ SELECT results_eq( SELECT temporal.insert_temporal_eth_send_account_transfer( 
'test-workflow-2'::text, 'initialized'::temporal.transfer_status, + '123', '\x1234567890ABCDEF1234567890ABCDEF12345678'::bytea, '\xf39Fd6e51aad88F6F4ce6aB8827279cffFb92266'::bytea, '1000000000000000000'::text From 43bac8b8a5370dde121772bf33014df963e4bda2 Mon Sep 17 00:00:00 2001 From: youngkidwarrior Date: Mon, 10 Mar 2025 08:18:50 -0700 Subject: [PATCH 42/58] change workflow_id convention --- packages/api/src/routers/temporal.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/api/src/routers/temporal.ts b/packages/api/src/routers/temporal.ts index 4d17f34ab..4b5f5e280 100644 --- a/packages/api/src/routers/temporal.ts +++ b/packages/api/src/routers/temporal.ts @@ -35,7 +35,7 @@ export const temporalRouter = createTRPCRouter({ }) const { workflowId } = await client.workflow.start(TransferWorkflow, { taskQueue: 'monorepo', - workflowId: `transfer-workflow-${user.id}-${userOpHash}`, + workflowId: `temporal/transfer/${user.id}/${userOpHash}`, args: [userOp], }) log(`Workflow Created: ${workflowId}`) From e02b9a7aa6dca7b859a173a5a5ece1a1a59a54db Mon Sep 17 00:00:00 2001 From: youngkidwarrior Date: Mon, 10 Mar 2025 08:19:22 -0700 Subject: [PATCH 43/58] Remove temporal query filters in activity feed hook --- packages/app/features/home/utils/useTokenActivityFeed.ts | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/packages/app/features/home/utils/useTokenActivityFeed.ts b/packages/app/features/home/utils/useTokenActivityFeed.ts index c3b092b00..bde3268d4 100644 --- a/packages/app/features/home/utils/useTokenActivityFeed.ts +++ b/packages/app/features/home/utils/useTokenActivityFeed.ts @@ -30,9 +30,6 @@ export function useTokenActivityFeed(params: { }): UseInfiniteQueryResult, PostgrestError | ZodError> { const { pageSize = 10, address, refetchInterval = 30_000, enabled = true } = params const supabase = useSupabase() - const { data: sendAccount } = useSendAccount() - const senderBytea = sendAccount?.address ? 
hexToBytea(sendAccount.address) : null - async function fetchTokenActivityFeed({ pageParam }: { pageParam: number }): Promise { const from = pageParam * pageSize const to = (pageParam + 1) * pageSize - 1 @@ -42,13 +39,14 @@ export function useTokenActivityFeed(params: { query = query .eq('data->>log_addr', address) .or( - `event_name.eq.${Events.SendAccountTransfers},and(event_name.eq.${Events.TemporalSendAccountTransfers},data->>f.eq.${senderBytea})` + `event_name.eq.${Events.SendAccountTransfers},event_name.eq.${Events.TemporalSendAccountTransfers}` ) } else { query = query.or( - `event_name.eq.${Events.SendAccountReceive},and(event_name.eq.${Events.TemporalSendAccountTransfers},data->>sender.eq.${senderBytea})` + `event_name.eq.${Events.SendAccountReceive},event_name.eq.${Events.TemporalSendAccountTransfers}` ) } + 9 const paymasterAddresses = Object.values(tokenPaymasterAddress) const sendTokenV0LockboxAddresses = Object.values(sendTokenV0LockboxAddress) From 502dde2b02067d5501e35013e4ddfa06932eb1e2 Mon Sep 17 00:00:00 2001 From: youngkidwarrior Date: Mon, 10 Mar 2025 10:36:35 -0700 Subject: [PATCH 44/58] Move refetchInterval logic to hook --- packages/app/features/home/TokenActivity.tsx | 16 +--------------- .../features/home/utils/useTokenActivityFeed.ts | 17 +++++++++++++---- 2 files changed, 14 insertions(+), 19 deletions(-) diff --git a/packages/app/features/home/TokenActivity.tsx b/packages/app/features/home/TokenActivity.tsx index 0ad02c794..fe26cdc72 100644 --- a/packages/app/features/home/TokenActivity.tsx +++ b/packages/app/features/home/TokenActivity.tsx @@ -4,7 +4,7 @@ import { hexToBytea } from 'app/utils/hexToBytea' import { useEffect, useState } from 'react' import { useTokenActivityFeed } from './utils/useTokenActivityFeed' import { TokenActivityRow } from './TokenActivityRow' -import { Events, type Activity } from 'app/utils/zod/activity' +import type { Activity } from 'app/utils/zod/activity' import { ActivityDetails } from 
'../activity/ActivityDetails' import type { InfiniteData, UseInfiniteQueryResult } from '@tanstack/react-query' import type { ZodError } from 'zod' @@ -16,7 +16,6 @@ import { useScrollDirection } from 'app/provider/scroll' export const TokenActivity = ({ coin }: { coin: CoinWithBalance }) => { const [selectedActivity, setSelectedActivity] = useState(null) - const [refetchInterval, setRefetchInterval] = useState(30_000) const handleActivityPress = (activity: Activity) => { setSelectedActivity(activity) @@ -27,25 +26,12 @@ export const TokenActivity = ({ coin }: { coin: CoinWithBalance }) => { const tokenActivityFeedQuery = useTokenActivityFeed({ pageSize: 10, address: coin.token === 'eth' ? undefined : hexToBytea(coin.token), - refetchInterval, }) const { data, isLoading, error } = tokenActivityFeedQuery const { pages } = data ?? {} - useEffect(() => { - if (!pages || !pages[0]) return - - pages[0].find( - (a) => - a.event_name === Events.TemporalSendAccountTransfers && - !['cancelled', 'failed'].includes(a.data.status) - ) - ? 
setRefetchInterval(1000) - : setRefetchInterval(30_000) - }, [pages]) - if (isLoading) return return ( <> diff --git a/packages/app/features/home/utils/useTokenActivityFeed.ts b/packages/app/features/home/utils/useTokenActivityFeed.ts index bde3268d4..955c42ed3 100644 --- a/packages/app/features/home/utils/useTokenActivityFeed.ts +++ b/packages/app/features/home/utils/useTokenActivityFeed.ts @@ -6,15 +6,15 @@ import { type InfiniteData, type UseInfiniteQueryResult, } from '@tanstack/react-query' -import { hexToBytea } from 'app/utils/hexToBytea' import { pgAddrCondValues } from 'app/utils/pgAddrCondValues' -import { useSendAccount } from 'app/utils/send-accounts' import { squish } from 'app/utils/strings' import { useSupabase } from 'app/utils/supabase/useSupabase' import { throwIf } from 'app/utils/throwIf' import { EventArraySchema, Events, type Activity } from 'app/utils/zod/activity' import type { ZodError } from 'zod' +const PENDING_TRANSFERS_INTERVAL = 1_000 + /** * Infinite query to fetch ERC-20 token activity feed. * @@ -46,7 +46,6 @@ export function useTokenActivityFeed(params: { `event_name.eq.${Events.SendAccountReceive},event_name.eq.${Events.TemporalSendAccountTransfers}` ) } - 9 const paymasterAddresses = Object.values(tokenPaymasterAddress) const sendTokenV0LockboxAddresses = Object.values(sendTokenV0LockboxAddress) @@ -89,7 +88,17 @@ export function useTokenActivityFeed(params: { return firstPageParam - 1 }, queryFn: fetchTokenActivityFeed, - refetchInterval, + refetchInterval: ({ state: { data } }) => { + const { pages } = data ?? {} + if (!pages || !pages[0]) return refetchInterval + const activities = pages.flat() + const hasPendingTransfer = activities.some( + (a) => + a.event_name === Events.TemporalSendAccountTransfers && + !['cancelled', 'failed'].includes(a.data.status) + ) + return hasPendingTransfer ? 
PENDING_TRANSFERS_INTERVAL : refetchInterval + }, enabled, }) } From e41c3ea48885ca618b3688f84ea46e53202d0134 Mon Sep 17 00:00:00 2001 From: youngkidwarrior Date: Mon, 10 Mar 2025 10:41:50 -0700 Subject: [PATCH 45/58] Break up init activity to improve retryability --- .../src/transfer-workflow/activities.ts | 55 ++++++++++++------- .../workflows/src/transfer-workflow/wagmi.ts | 2 +- .../src/transfer-workflow/workflow.ts | 24 ++++++-- 3 files changed, 54 insertions(+), 27 deletions(-) diff --git a/packages/workflows/src/transfer-workflow/activities.ts b/packages/workflows/src/transfer-workflow/activities.ts index 7f8588d3f..40b411071 100644 --- a/packages/workflows/src/transfer-workflow/activities.ts +++ b/packages/workflows/src/transfer-workflow/activities.ts @@ -8,7 +8,7 @@ import { simulateUserOperation, sendUserOperation, waitForTransactionReceipt, - getBlockNumber, + getBaseBlockNumber, } from './wagmi' import type { UserOperation } from 'permissionless' import { bootstrap } from '@my/workflows/utils' @@ -19,14 +19,15 @@ import superjson from 'superjson' import { byteaToHex } from 'app/utils/byteaToHex' type TransferActivities = { - initializeTransferActivity: (userOp: UserOperation<'v0.7'>) => Promise<{ + simulateTransferActivity: (userOp: UserOperation<'v0.7'>) => Promise + getBaseBlockNumberActivity: () => Promise + decodeTransferUserOpActivity: (userOp: UserOperation<'v0.7'>) => Promise<{ from: PgBytea to: PgBytea amount: bigint token: PgBytea | null - blockNumber: bigint }> - insertTemporalSendAccountTransfer: ( + insertTemporalSendAccountTransferActivity: ( workflowId: string, from: PgBytea, to: PgBytea, @@ -60,39 +61,51 @@ export const createTransferActivities = ( bootstrap(env) return { - async initializeTransferActivity(userOp) { + async simulateTransferActivity(userOp) { + await simulateUserOperation(userOp).catch((error) => { + throw ApplicationFailure.nonRetryable('Error simulating user operation', error.code, error) + }) + }, + async 
getBaseBlockNumberActivity() { + try { + return await getBaseBlockNumber() + } catch (error) { + log.error('Failed to get block number', { code: error.code, error }) + throw ApplicationFailure.retryable('Failed to get block number') + } + }, + async decodeTransferUserOpActivity(userOp) { const { from, to, token, amount } = decodeTransferUserOp({ userOp }) if (!from || !to || !amount || !token) { + log.error('User Operation is not a valid transfer', { from, to, amount, token }) throw ApplicationFailure.nonRetryable('User Operation is not a valid transfer') } if (amount <= 0n) { + log.error('User Operation has amount <= 0', { amount }) throw ApplicationFailure.nonRetryable('User Operation has amount <= 0') } if (!userOp.signature) { + log.error('UserOp signature is required') throw ApplicationFailure.nonRetryable('UserOp signature is required') } - await simulateUserOperation(userOp).catch((error) => { - throw ApplicationFailure.nonRetryable('Error simulating user operation', error.code, error) - }) - - let fromBytea: PgBytea - let toBytea: PgBytea - let tokenBytea: PgBytea | null - try { - fromBytea = hexToBytea(from) - toBytea = hexToBytea(to) - tokenBytea = token === 'eth' ? null : hexToBytea(token) + const fromBytea = hexToBytea(from) + const toBytea = hexToBytea(to) + const tokenBytea = token === 'eth' ? null : hexToBytea(token) + return { from: fromBytea, to: toBytea, amount, token: tokenBytea } } catch (error) { throw ApplicationFailure.nonRetryable('Invalid hex address format') } - - const blockNumber = await getBlockNumber() - - return { from: fromBytea, to: toBytea, amount, token: tokenBytea, blockNumber } }, - async insertTemporalSendAccountTransfer(workflowId, from, to, amount, token, blockNumber) { + async insertTemporalSendAccountTransferActivity( + workflowId, + from, + to, + amount, + token, + blockNumber + ) { const { error } = token ? 
await insertTemporalTokenSendAccountTransfer({ workflow_id: workflowId, diff --git a/packages/workflows/src/transfer-workflow/wagmi.ts b/packages/workflows/src/transfer-workflow/wagmi.ts index bd82c74d0..eed7ca4da 100644 --- a/packages/workflows/src/transfer-workflow/wagmi.ts +++ b/packages/workflows/src/transfer-workflow/wagmi.ts @@ -21,6 +21,6 @@ export async function waitForTransactionReceipt( return await baseMainnetBundlerClient.waitForUserOperationReceipt({ hash }) } -export async function getBlockNumber() { +export async function getBaseBlockNumber() { return await baseMainnetClient.getBlockNumber() } diff --git a/packages/workflows/src/transfer-workflow/workflow.ts b/packages/workflows/src/transfer-workflow/workflow.ts index 0976fe482..2633b9d7a 100644 --- a/packages/workflows/src/transfer-workflow/workflow.ts +++ b/packages/workflows/src/transfer-workflow/workflow.ts @@ -8,8 +8,10 @@ import { hexToBytea } from 'app/utils/hexToBytea' const log = debug('workflows:transfer') const { - initializeTransferActivity, - insertTemporalSendAccountTransfer, + simulateTransferActivity, + getBaseBlockNumberActivity, + decodeTransferUserOpActivity, + insertTemporalSendAccountTransferActivity, sendUserOpActivity, updateTemporalTransferActivity, waitForTransactionReceiptActivity, @@ -20,11 +22,23 @@ const { export async function TransferWorkflow(userOp: UserOperation<'v0.7'>) { const workflowId = workflowInfo().workflowId - log('SendTransferWorkflow Initializing with userOp:', workflowId) - const { token, from, to, amount, blockNumber } = await initializeTransferActivity(userOp) + log('Starting SendTransfer Workflow with userOp:', workflowId) + + log('Simulating transfer', workflowId) + await simulateTransferActivity(userOp) + log('Successfully simulated transfer', workflowId) + + log('Getting latest base block', workflowId) + const blockNumber = await getBaseBlockNumberActivity() + log('Base block:', { workflowId, blockNumber: blockNumber.toString() }) + + log('Decoding 
transfer userOp', workflowId) + const { token, from, to, amount } = await decodeTransferUserOpActivity(userOp) + log('Decoded transfer userOp', { workflowId, token, from, to, amount: amount.toString() }) log('Inserting temporal transfer into temporal.send_account_transfers', workflowId) - await insertTemporalSendAccountTransfer(workflowId, from, to, amount, token, blockNumber) + await insertTemporalSendAccountTransferActivity(workflowId, from, to, amount, token, blockNumber) + log('Inserted temporal transfer into temporal.send_account_transfers', workflowId) log('Sending UserOperation', superjson.stringify(userOp)) const hash = await sendUserOpActivity(workflowId, userOp) From d623de1d591e6954151683b69c362e0ae2a9ba86 Mon Sep 17 00:00:00 2001 From: youngkidwarrior Date: Mon, 10 Mar 2025 11:17:33 -0700 Subject: [PATCH 46/58] Add max refetch count constraint --- .../features/home/utils/useTokenActivityFeed.ts | 16 +++++++++++++++- 1 file changed, 15 insertions(+), 1 deletion(-) diff --git a/packages/app/features/home/utils/useTokenActivityFeed.ts b/packages/app/features/home/utils/useTokenActivityFeed.ts index 955c42ed3..a905302da 100644 --- a/packages/app/features/home/utils/useTokenActivityFeed.ts +++ b/packages/app/features/home/utils/useTokenActivityFeed.ts @@ -11,9 +11,11 @@ import { squish } from 'app/utils/strings' import { useSupabase } from 'app/utils/supabase/useSupabase' import { throwIf } from 'app/utils/throwIf' import { EventArraySchema, Events, type Activity } from 'app/utils/zod/activity' +import { useRef } from 'react' import type { ZodError } from 'zod' const PENDING_TRANSFERS_INTERVAL = 1_000 +const MAX_REFETCHES = 10 // 10 seconds /** * Infinite query to fetch ERC-20 token activity feed. 
@@ -30,6 +32,7 @@ export function useTokenActivityFeed(params: { }): UseInfiniteQueryResult, PostgrestError | ZodError> { const { pageSize = 10, address, refetchInterval = 30_000, enabled = true } = params const supabase = useSupabase() + const refetchCount = useRef(0) async function fetchTokenActivityFeed({ pageParam }: { pageParam: number }): Promise { const from = pageParam * pageSize const to = (pageParam + 1) * pageSize - 1 @@ -97,7 +100,18 @@ export function useTokenActivityFeed(params: { a.event_name === Events.TemporalSendAccountTransfers && !['cancelled', 'failed'].includes(a.data.status) ) - return hasPendingTransfer ? PENDING_TRANSFERS_INTERVAL : refetchInterval + + if (hasPendingTransfer) { + if (refetchCount.current >= MAX_REFETCHES) { + return refetchInterval // Return to normal interval after max refetches + } + refetchCount.current += 1 + return PENDING_TRANSFERS_INTERVAL + } + + // Reset refetch count when there are no pending transfers + refetchCount.current = 0 + return refetchInterval }, enabled, }) From fa576677a0e7debcadc1481c1075ca24e01e63a5 Mon Sep 17 00:00:00 2001 From: youngkidwarrior Date: Wed, 12 Mar 2025 07:07:25 -0700 Subject: [PATCH 47/58] Use postgrest. 
remove insert and update functions better db error handling --- packages/snaplet/.snaplet/dataModel.json | 18 +- .../src/transfer-workflow/activities.ts | 107 +++++++----- .../src/transfer-workflow/supabase.ts | 80 ++++++--- .../src/transfer-workflow/workflow.ts | 17 +- supabase/database-generated.types.ts | 34 +--- ...021828_create_temporal_transfers_table.sql | 154 ++++-------------- supabase/tests/temporal_transfers_test.sql | 70 ++++---- 7 files changed, 216 insertions(+), 264 deletions(-) diff --git a/packages/snaplet/.snaplet/dataModel.json b/packages/snaplet/.snaplet/dataModel.json index 25f3559b1..6755c1d76 100644 --- a/packages/snaplet/.snaplet/dataModel.json +++ b/packages/snaplet/.snaplet/dataModel.json @@ -6199,7 +6199,7 @@ "isList": false, "isGenerated": false, "sequence": false, - "hasDefaultValue": false, + "hasDefaultValue": true, "isId": false, "maxLength": null }, @@ -6218,10 +6218,10 @@ "maxLength": null }, { - "id": "temporal.send_account_transfers.data", - "name": "data", - "columnName": "data", - "type": "jsonb", + "id": "temporal.send_account_transfers.created_at_block_num", + "name": "created_at_block_num", + "columnName": "created_at_block_num", + "type": "int8", "isRequired": true, "kind": "scalar", "isList": false, @@ -6232,10 +6232,10 @@ "maxLength": null }, { - "id": "temporal.send_account_transfers.created_at_block_num", - "name": "created_at_block_num", - "columnName": "created_at_block_num", - "type": "numeric", + "id": "temporal.send_account_transfers.data", + "name": "data", + "columnName": "data", + "type": "jsonb", "isRequired": true, "kind": "scalar", "isList": false, diff --git a/packages/workflows/src/transfer-workflow/activities.ts b/packages/workflows/src/transfer-workflow/activities.ts index 40b411071..8c2783fe4 100644 --- a/packages/workflows/src/transfer-workflow/activities.ts +++ b/packages/workflows/src/transfer-workflow/activities.ts @@ -3,6 +3,7 @@ import { insertTemporalTokenSendAccountTransfer, 
insertTemporalEthSendAccountTransfer, updateTemporalSendAccountTransfer, + isRetryableDBError, } from './supabase' import { simulateUserOperation, @@ -17,6 +18,9 @@ import { hexToBytea } from 'app/utils/hexToBytea' import type { Json, Database, PgBytea } from '@my/supabase/database.types' import superjson from 'superjson' import { byteaToHex } from 'app/utils/byteaToHex' +import { allCoins } from 'app/data/coins' + +type TemporalTransfer = Database['temporal']['Tables']['send_account_transfers']['Row'] type TransferActivities = { simulateTransferActivity: (userOp: UserOperation<'v0.7'>) => Promise @@ -34,7 +38,7 @@ type TransferActivities = { amount: bigint, token: PgBytea | null, blockNumber: bigint - ) => Promise + ) => Promise sendUserOpActivity: (workflowId: string, userOp: UserOperation<'v0.7'>) => Promise waitForTransactionReceiptActivity: ( workflowId: string, @@ -52,7 +56,7 @@ type TransferActivities = { type?: string | null details?: unknown[] } - }) => Promise + }) => Promise } export const createTransferActivities = ( @@ -75,27 +79,47 @@ export const createTransferActivities = ( } }, async decodeTransferUserOpActivity(userOp) { - const { from, to, token, amount } = decodeTransferUserOp({ userOp }) - if (!from || !to || !amount || !token) { - log.error('User Operation is not a valid transfer', { from, to, amount, token }) - throw ApplicationFailure.nonRetryable('User Operation is not a valid transfer') - } - if (amount <= 0n) { - log.error('User Operation has amount <= 0', { amount }) - throw ApplicationFailure.nonRetryable('User Operation has amount <= 0') - } - if (!userOp.signature) { - log.error('UserOp signature is required') - throw ApplicationFailure.nonRetryable('UserOp signature is required') - } - try { + const { from, to, token, amount } = decodeTransferUserOp({ userOp }) + if (!from || !to || !amount || !token) { + log.error('Failed to decode transfer user op', { from, to, amount, token }) + throw ApplicationFailure.nonRetryable('User 
Operation is not a valid transfer') + } + if (!allCoins.find((c) => c.token === token)) { + log.error(`Token ${token} is not supported`, { token }) + throw ApplicationFailure.nonRetryable(`Token ${token} is not supported`) + } + if (amount <= 0n) { + log.error('User Operation has amount <= 0', { amount }) + throw ApplicationFailure.nonRetryable('User Operation has amount <= 0') + } + if (!userOp.signature) { + log.error('UserOp signature is required') + throw ApplicationFailure.nonRetryable('UserOp signature is required') + } + const fromBytea = hexToBytea(from) const toBytea = hexToBytea(to) const tokenBytea = token === 'eth' ? null : hexToBytea(token) return { from: fromBytea, to: toBytea, amount, token: tokenBytea } } catch (error) { - throw ApplicationFailure.nonRetryable('Invalid hex address format') + // Handle viem decode errors + if ( + error.name === 'AbiFunctionSignatureNotFoundError' || + error.name === 'DecodeAbiParametersError' || + error.name === 'FormatAbiItemError' || + error.name === 'ToFunctionSelectorError' || + error.name === 'SliceError' + ) { + log.error('Failed to decode function data', { error }) + throw ApplicationFailure.nonRetryable('Invalid transfer function data', error.name, error) + } + // Handle hex conversion errors + if (error.message === 'Hex string must start with 0x') { + log.error('Invalid hex address format', { error }) + throw ApplicationFailure.nonRetryable('Invalid hex address format') + } + throw error } }, async insertTemporalSendAccountTransferActivity( @@ -106,7 +130,7 @@ export const createTransferActivities = ( token, blockNumber ) { - const { error } = token + const { data, error } = token ? 
await insertTemporalTokenSendAccountTransfer({ workflow_id: workflowId, status: 'initialized', @@ -126,21 +150,20 @@ export const createTransferActivities = ( }) if (error) { - if (error.code === '23505') { - throw ApplicationFailure.nonRetryable( - 'Duplicate entry for temporal.send_account_transfers', - error.code - ) - } - throw ApplicationFailure.retryable( - 'Error inserting transfer into temporal.send_account_transfers', - error.code, - { + if (isRetryableDBError(error)) { + throw ApplicationFailure.retryable('Database connection error, retrying...', error.code, { error, workflowId, - } - ) + }) + } + + throw ApplicationFailure.nonRetryable('Database error occurred', error.code, { + error, + workflowId, + }) } + + return data }, async sendUserOpActivity(workflowId, userOp) { try { @@ -191,28 +214,24 @@ export const createTransferActivities = ( } }, async updateTemporalTransferActivity({ workflowId, status, data, failureError }) { - const { error } = await updateTemporalSendAccountTransfer({ + const { data: updatedData, error } = await updateTemporalSendAccountTransfer({ workflow_id: workflowId, status, data, }) if (error) { - throw ApplicationFailure.retryable( - `Error updating entry in temporal_send_account_transfers with ${status} status`, - error.code, - { + if (isRetryableDBError(error)) { + throw ApplicationFailure.retryable('Database connection error, retrying...', error.code, { error, workflowId, - } - ) - } - if (status === 'failed') { - throw ApplicationFailure.nonRetryable( - failureError?.message ?? null, - failureError?.type ?? null, - ...(failureError?.details ?? 
[]) - ) + }) + } + throw ApplicationFailure.nonRetryable('Database error occurred', error.code, { + error, + workflowId, + }) } + return updatedData }, } } diff --git a/packages/workflows/src/transfer-workflow/supabase.ts b/packages/workflows/src/transfer-workflow/supabase.ts index 09902fc3a..7f3a23dd2 100644 --- a/packages/workflows/src/transfer-workflow/supabase.ts +++ b/packages/workflows/src/transfer-workflow/supabase.ts @@ -1,4 +1,5 @@ import type { PgBytea, Database } from '@my/supabase/database.types' +import type { PostgrestError } from '@supabase/supabase-js' import { supabaseAdmin } from 'app/utils/supabase/admin' export async function insertTemporalTokenSendAccountTransfer({ @@ -18,15 +19,22 @@ export async function insertTemporalTokenSendAccountTransfer({ v: bigint log_addr: PgBytea }) { - return await supabaseAdmin.schema('temporal').rpc('insert_temporal_token_send_account_transfer', { - workflow_id, - status, - block_num: block_num.toString(), - f, - t, - v: v.toString(), - log_addr, - }) + return await supabaseAdmin + .schema('temporal') + .from('send_account_transfers') + .insert({ + workflow_id, + status, + created_at_block_num: Number(block_num), + data: { + f, + t, + v: v.toString(), + log_addr, + }, + }) + .select('*') + .single() } export async function insertTemporalEthSendAccountTransfer({ @@ -44,14 +52,21 @@ export async function insertTemporalEthSendAccountTransfer({ log_addr: PgBytea value: bigint }) { - return await supabaseAdmin.schema('temporal').rpc('insert_temporal_eth_send_account_transfer', { - workflow_id, - status, - block_num: block_num.toString(), - sender, - log_addr, - value: value.toString(), - }) + return await supabaseAdmin + .schema('temporal') + .from('send_account_transfers') + .insert({ + workflow_id, + status, + created_at_block_num: Number(block_num), + data: { + sender, + value: value.toString(), + log_addr, + }, + }) + .select('*') + .single() } export async function updateTemporalSendAccountTransfer({ @@ -63,9 
+78,30 @@ export async function updateTemporalSendAccountTransfer({ status: Database['temporal']['Enums']['transfer_status'] data?: Database['temporal']['Tables']['send_account_transfers']['Row']['data'] }) { - return await supabaseAdmin.schema('temporal').rpc('update_temporal_send_account_transfer', { - workflow_id, - status, - data, - }) + return await supabaseAdmin + .schema('temporal') + .from('send_account_transfers') + .update({ + status, + data, + }) + .eq('workflow_id', workflow_id) + .select('*') + .single() +} + +export function isRetryableDBError(error: PostgrestError) { + // Network related errors should be retried + const retryableCodes = [ + '08000', // Connection error + '08006', // Connection failure + '08001', // SQL client unable to establish connection + '08004', // Rejected by server + '57P01', // Admin shutdown + '57P02', // Crash shutdown + '40001', // Serialization failure + '40P01', // Deadlock detected + ] + + return retryableCodes.includes(error.code) } diff --git a/packages/workflows/src/transfer-workflow/workflow.ts b/packages/workflows/src/transfer-workflow/workflow.ts index 2633b9d7a..7036730aa 100644 --- a/packages/workflows/src/transfer-workflow/workflow.ts +++ b/packages/workflows/src/transfer-workflow/workflow.ts @@ -37,16 +37,26 @@ export async function TransferWorkflow(userOp: UserOperation<'v0.7'>) { log('Decoded transfer userOp', { workflowId, token, from, to, amount: amount.toString() }) log('Inserting temporal transfer into temporal.send_account_transfers', workflowId) - await insertTemporalSendAccountTransferActivity(workflowId, from, to, amount, token, blockNumber) + const initialTransfer = await insertTemporalSendAccountTransferActivity( + workflowId, + from, + to, + amount, + token, + blockNumber + ) log('Inserted temporal transfer into temporal.send_account_transfers', workflowId) log('Sending UserOperation', superjson.stringify(userOp)) const hash = await sendUserOpActivity(workflowId, userOp) log('UserOperation sent, 
hash:', hash) - await updateTemporalTransferActivity({ + const sentTransfer = await updateTemporalTransferActivity({ workflowId, status: 'sent', - data: { user_op_hash: hash }, + data: { + ...(initialTransfer.data as Record), + user_op_hash: hash, + }, }) const receipt = await waitForTransactionReceiptActivity(workflowId, hash) @@ -56,6 +66,7 @@ export async function TransferWorkflow(userOp: UserOperation<'v0.7'>) { workflowId, status: 'confirmed', data: { + ...(sentTransfer.data as Record), tx_hash: hexToBytea(receipt.transactionHash), block_num: receipt.blockNumber.toString(), }, diff --git a/supabase/database-generated.types.ts b/supabase/database-generated.types.ts index dd409cf8e..94193176f 100644 --- a/supabase/database-generated.types.ts +++ b/supabase/database-generated.types.ts @@ -1482,7 +1482,7 @@ export type Database = { id?: number status: Database["temporal"]["Enums"]["transfer_status"] updated_at?: string - user_id: string + user_id?: string workflow_id: string } Update: { @@ -1502,37 +1502,7 @@ export type Database = { [_ in never]: never } Functions: { - insert_temporal_eth_send_account_transfer: { - Args: { - workflow_id: string - status: Database["temporal"]["Enums"]["transfer_status"] - block_num: string - sender: string - log_addr: string - value: string - } - Returns: undefined - } - insert_temporal_token_send_account_transfer: { - Args: { - workflow_id: string - status: Database["temporal"]["Enums"]["transfer_status"] - block_num: string - f: string - t: string - v: string - log_addr: string - } - Returns: undefined - } - update_temporal_send_account_transfer: { - Args: { - workflow_id: string - status: Database["temporal"]["Enums"]["transfer_status"] - data?: Json - } - Returns: undefined - } + [_ in never]: never } Enums: { transfer_status: diff --git a/supabase/migrations/20250307021828_create_temporal_transfers_table.sql b/supabase/migrations/20250307021828_create_temporal_transfers_table.sql index 453a3dc0d..b7d8c7953 100644 --- 
a/supabase/migrations/20250307021828_create_temporal_transfers_table.sql +++ b/supabase/migrations/20250307021828_create_temporal_transfers_table.sql @@ -23,15 +23,17 @@ CREATE TYPE temporal.transfer_status AS ENUM( CREATE TABLE temporal.send_account_transfers( id serial primary key, workflow_id text not null, - user_id uuid not null, + user_id uuid not null DEFAULT uuid_nil(), -- rely on trigger to set user_id status temporal.transfer_status not null, + created_at_block_num bigint not null, data jsonb not null, - created_at_block_num numeric not null, created_at timestamp with time zone not null default (now() AT TIME ZONE 'utc'::text), updated_at timestamp with time zone not null default (now() AT TIME ZONE 'utc'::text) ); GRANT ALL ON TABLE temporal.send_account_transfers TO service_role; +GRANT USAGE, SELECT ON SEQUENCE temporal.send_account_transfers_id_seq TO service_role; + alter table "temporal"."send_account_transfers" enable row level security; @@ -55,131 +57,45 @@ CREATE INDEX temporal_send_account_transfers_user_id_idx ON temporal.send_accoun CREATE INDEX temporal_send_account_transfers_created_at_idx ON temporal.send_account_transfers(created_at); CREATE UNIQUE INDEX temporal_send_account_transfers_workflow_id_idx ON temporal.send_account_transfers(workflow_id); -CREATE OR REPLACE FUNCTION temporal.insert_temporal_token_send_account_transfer( - workflow_id text, - status temporal.transfer_status, - block_num text, - f bytea, - t bytea, - v text, - log_addr bytea -) -RETURNS void -LANGUAGE plpgsql -SECURITY DEFINER -AS $$ +-- Temporal transfer insert user_id trigger +CREATE OR REPLACE FUNCTION temporal.temporal_send_account_transfers_trigger_insert_user_id() + RETURNS TRIGGER + LANGUAGE plpgsql + SECURITY DEFINER + AS $$ DECLARE _user_id uuid; - _data jsonb; BEGIN - SELECT user_id INTO _user_id - FROM send_accounts - WHERE address = concat('0x', encode(f, 'hex'))::citext; - - -- cast v to text to avoid losing precision when converting to json when 
sending to clients - _data := json_build_object( - 'f', f, - 't', t, - 'v', v::text, - 'log_addr', log_addr - ); - - INSERT INTO temporal.send_account_transfers( - workflow_id, - user_id, - status, - created_at_block_num, - data - ) - VALUES ( - workflow_id, - _user_id, - status, - block_num::numeric, - _data - ); -END; -$$; + -- Handle token transfers (has 'f' field) + IF NEW.data ? 'f' THEN + SELECT user_id INTO _user_id + FROM send_accounts + WHERE address = concat('0x', encode((NEW.data->>'f')::bytea, 'hex'))::citext; + + -- Handle ETH transfers (has 'sender' field) + ELSIF NEW.data ? 'sender' THEN + SELECT user_id INTO _user_id + FROM send_accounts + WHERE address = concat('0x', encode((NEW.data->>'sender')::bytea, 'hex'))::citext; + END IF; -CREATE OR REPLACE FUNCTION temporal.insert_temporal_eth_send_account_transfer( - workflow_id text, - status temporal.transfer_status, - block_num text, - sender bytea, - log_addr bytea, - value text -) -RETURNS void -LANGUAGE plpgsql -SECURITY DEFINER -AS $$ -DECLARE - _user_id uuid; - _data jsonb; -BEGIN - SELECT user_id INTO _user_id - FROM send_accounts - WHERE address = concat('0x', encode(sender, 'hex'))::citext; + -- If no user_id found, prevent insert and raise error + IF _user_id IS NULL THEN + RAISE EXCEPTION 'No user found for the given address'; + END IF; - -- cast v to text to avoid losing precision when converting to json when sending to clients - _data := json_build_object( - 'log_addr', log_addr, - 'sender', sender, - 'value', value::text - ); + -- Set the user_id before insert + NEW.user_id := _user_id; - INSERT INTO temporal.send_account_transfers( - workflow_id, - user_id, - status, - created_at_block_num, - data - ) - VALUES ( - workflow_id, - _user_id, - status, - block_num::numeric, - _data - ); + RETURN NEW; END; $$; -CREATE OR REPLACE FUNCTION temporal.update_temporal_send_account_transfer( - workflow_id text, - status temporal.transfer_status, - data jsonb DEFAULT NULL -) -RETURNS void -LANGUAGE 
plpgsql -SECURITY DEFINER -AS $$ -DECLARE - _data jsonb; -BEGIN - -- Only construct _data if input data is not null - IF data IS NOT NULL THEN - _data := json_build_object( - 'user_op_hash', (data->'user_op_hash'), - 'tx_hash', (data->>'tx_hash'), - 'block_num', data->>'block_num'::text - ); - ELSE - _data := '{}'::jsonb; - END IF; - - UPDATE temporal.send_account_transfers - SET - status = update_temporal_send_account_transfer.status, - data = CASE - WHEN _data = '{}'::jsonb THEN temporal.send_account_transfers.data - ELSE temporal.send_account_transfers.data || _data - END, - updated_at = (NOW() AT TIME ZONE 'UTC') - WHERE - temporal.send_account_transfers.workflow_id = update_temporal_send_account_transfer.workflow_id; -END; -$$; +-- Create trigger +CREATE TRIGGER temporal_send_account_transfers_trigger_insert_user_id + AFTER INSERT ON temporal.send_account_transfers + FOR EACH ROW + EXECUTE FUNCTION temporal.temporal_send_account_transfers_trigger_insert_user_id(); -- Token transfer triggers CREATE OR REPLACE FUNCTION temporal.temporal_token_send_account_transfers_trigger_insert_activity() @@ -320,7 +236,7 @@ CREATE OR REPLACE FUNCTION temporal.temporal_send_account_transfers_trigger_dele LANGUAGE plpgsql SECURITY DEFINER AS $$ -BEGIN +BEGIN DELETE FROM activity WHERE event_name = 'temporal_send_account_transfers' AND event_id = OLD.workflow_id; diff --git a/supabase/tests/temporal_transfers_test.sql b/supabase/tests/temporal_transfers_test.sql index fa34a8018..674e29dba 100644 --- a/supabase/tests/temporal_transfers_test.sql +++ b/supabase/tests/temporal_transfers_test.sql @@ -24,14 +24,21 @@ VALUES ( ); -- Test 1: Test token transfer insertion -SELECT temporal.insert_temporal_token_send_account_transfer( - 'test-workflow-1'::text, - 'initialized'::temporal.transfer_status, - '123', - '\x1234567890ABCDEF1234567890ABCDEF12345678'::bytea, - '\xB0B7D5E8A4B6D534B3F608E9D27871F85A4E98DA'::bytea, - '100'::text, - '\xf39Fd6e51aad88F6F4ce6aB8827279cffFb92266'::bytea 
+INSERT INTO temporal.send_account_transfers ( + workflow_id, + status, + created_at_block_num, + data +) VALUES ( + 'test-workflow-1', + 'initialized', + 123, + json_build_object( + 'f', '\x1234567890ABCDEF1234567890ABCDEF12345678'::bytea, + 't', '\xB0B7D5E8A4B6D534B3F608E9D27871F85A4E98DA'::bytea, + 'v', '100', + 'log_addr', '\xf39Fd6e51aad88F6F4ce6aB8827279cffFb92266'::bytea + ) ); SELECT results_eq( @@ -60,13 +67,20 @@ SELECT results_eq( ); -- Test 2: Test ETH transfer insertion -SELECT temporal.insert_temporal_eth_send_account_transfer( - 'test-workflow-2'::text, - 'initialized'::temporal.transfer_status, - '123', - '\x1234567890ABCDEF1234567890ABCDEF12345678'::bytea, - '\xf39Fd6e51aad88F6F4ce6aB8827279cffFb92266'::bytea, - '1000000000000000000'::text +INSERT INTO temporal.send_account_transfers ( + workflow_id, + status, + created_at_block_num, + data +) VALUES ( + 'test-workflow-2', + 'initialized', + 123, + json_build_object( + 'sender', '\x1234567890ABCDEF1234567890ABCDEF12345678'::bytea, + 'log_addr', '\xf39Fd6e51aad88F6F4ce6aB8827279cffFb92266'::bytea, + 'value', '1000000000000000000' + ) ); SELECT results_eq( @@ -92,16 +106,16 @@ SELECT results_eq( 'Test ETH transfer insertion' ); --- Test 3: Test update function -SELECT temporal.update_temporal_send_account_transfer( - 'test-workflow-1'::text, - 'sent'::temporal.transfer_status, - json_build_object( +-- Test 3: Test update +UPDATE temporal.send_account_transfers +SET + status = 'sent', + data = data || json_build_object( 'user_op_hash', '\x1234'::bytea, 'tx_hash', '\x5678'::bytea, 'block_num', '123' )::jsonb -); +WHERE workflow_id = 'test-workflow-1'; SELECT results_eq( $$ @@ -194,19 +208,5 @@ SELECT results_eq( 'Test activity update' ); --- @TODO update this to test send_account_transfer insert - --- SELECT temporal.delete_temporal_transfer_activity('test-workflow-1'); - --- SELECT is_empty( --- $$ --- SELECT * --- FROM activity --- WHERE event_name = 'temporal_send_account_transfers' --- AND event_id 
= 'test-workflow-1' --- $$, --- 'Test temporal transfer activity was deleted' --- ); - SELECT * FROM finish(); ROLLBACK; \ No newline at end of file From 4a1dd1cce217db2d2351d170561204fcad9f5aac Mon Sep 17 00:00:00 2001 From: youngkidwarrior Date: Wed, 12 Mar 2025 07:31:42 -0700 Subject: [PATCH 48/58] Add test for decodeTransferUserOp --- .../app/utils/decodeTransferUserOp.test.ts | 213 ++++++++++++++++++ 1 file changed, 213 insertions(+) create mode 100644 packages/app/utils/decodeTransferUserOp.test.ts diff --git a/packages/app/utils/decodeTransferUserOp.test.ts b/packages/app/utils/decodeTransferUserOp.test.ts new file mode 100644 index 000000000..ab65b37fd --- /dev/null +++ b/packages/app/utils/decodeTransferUserOp.test.ts @@ -0,0 +1,213 @@ +import { describe } from '@jest/globals' +import { sendAccountAbi } from '@my/wagmi' +import { decodeTransferUserOp } from './decodeTransferUserOp' +import { encodeFunctionData, erc20Abi } from 'viem' + +jest.mock('./userop', () => ({ + entrypoint: { + address: '0x0000000071727De22E5E9d8BAf0edAc6f37da032', + }, +})) +jest.mock('wagmi') +jest.mock('@my/wagmi', () => ({ + __esModule: true, + ...jest.requireActual('@my/wagmi'), + tokenPaymasterAddress: { + 1: '0xfbbC7F7da495c9957d491F40482710DC5DFd7d85', + 1337: '0xfbbC7F7da495c9957d491F40482710DC5DFd7d85', + 8453: '0xfbbC7F7da495c9957d491F40482710DC5DFd7d85', + 84532: '0xfbbC7F7da495c9957d491F40482710DC5DFd7d85', + 845337: '0xfbbC7F7da495c9957d491F40482710DC5DFd7d85', + }, +})) + +const defaultUserOp = { + callGasLimit: 100000n, + maxFeePerGas: 10000000n, + maxPriorityFeePerGas: 10000000n, + paymaster: '0xfbbC7F7da495c9957d491F40482710DC5DFd7d85', + paymasterData: '0x', + paymasterPostOpGasLimit: 50000n, + paymasterVerificationGasLimit: 150000n, + preVerificationGas: 70000n, + signature: '0x123', + verificationGasLimit: 550000n, +} as const + +describe('decodeTransferUserOp', () => { + it('should decode ETH transfer user operation', () => { + const sender = 
`0x${'1'.repeat(40)}` as `0x${string}` + const to = `0x${'2'.repeat(40)}` as `0x${string}` + const amount = 1n + const nonce = 0n + const callData = encodeFunctionData({ + abi: sendAccountAbi, + functionName: 'executeBatch', + args: [ + [ + { + dest: to, + value: amount, + data: '0x', + }, + ], + ], + }) + + const userOp = { + sender, + nonce, + callData, + ...defaultUserOp, + } + + const result = decodeTransferUserOp({ userOp }) + + expect(result).toEqual({ + from: sender, + to, + token: 'eth', + amount, + }) + }) + + it('should decode ERC20 transfer user operation', () => { + const sender = `0x${'1'.repeat(40)}` as `0x${string}` + const to = `0x${'2'.repeat(40)}` as `0x${string}` + const token = `0x${'3'.repeat(40)}` as `0x${string}` + const amount = 1n + const nonce = 0n + const callData = encodeFunctionData({ + abi: sendAccountAbi, + functionName: 'executeBatch', + args: [ + [ + { + dest: token, + value: 0n, + data: encodeFunctionData({ + abi: erc20Abi, + functionName: 'transfer', + args: [to, amount], + }), + }, + ], + ], + }) + const userOp = { + sender, + nonce, + callData, + ...defaultUserOp, + } + + const result = decodeTransferUserOp({ userOp }) + + expect(result).toEqual({ + from: sender, + to, + token, + amount, + }) + }) + it('should throw when callData is invalid hex', () => { + const sender = `0x${'1'.repeat(40)}` as `0x${string}` + const userOp = { + sender, + nonce: 0n, + callData: '0xinvalid' as `0x${string}`, + ...defaultUserOp, + } + + expect(() => decodeTransferUserOp({ userOp })).toThrow() + }) + + it('should throw when function signature is not found', () => { + const sender = `0x${'1'.repeat(40)}` as `0x${string}` + const userOp = { + sender, + nonce: 0n, + callData: '0x12345678' as `0x${string}`, // Invalid function selector + ...defaultUserOp, + } + + expect(() => decodeTransferUserOp({ userOp })).toThrow('Encoded function signature') + }) + + it('should throw when trying to decode invalid ERC20 transfer data', () => { + const sender = 
`0x${'1'.repeat(40)}` as `0x${string}` + const token = `0x${'3'.repeat(40)}` as `0x${string}` + + const callData = encodeFunctionData({ + abi: sendAccountAbi, + functionName: 'executeBatch', + args: [ + [ + { + dest: token, + value: 0n, + data: '0x12345678' as `0x${string}`, // Invalid ERC20 data + }, + ], + ], + }) + + const userOp = { + sender, + nonce: 0n, + callData, + ...defaultUserOp, + } + + expect(() => decodeTransferUserOp({ userOp })).toThrow('Encoded function signature') + }) + + it('should throw when executeBatch has no calls', () => { + const sender = `0x${'1'.repeat(40)}` as `0x${string}` + const callData = encodeFunctionData({ + abi: sendAccountAbi, + functionName: 'executeBatch', + args: [[]], // Empty calls array + }) + + const userOp = { + sender, + nonce: 0n, + callData, + ...defaultUserOp, + } + + expect(() => decodeTransferUserOp({ userOp })).toThrow() + }) + + it('should throw when ERC20 transfer data is malformed', () => { + const sender = `0x${'1'.repeat(40)}` as `0x${string}` + const token = `0x${'3'.repeat(40)}` as `0x${string}` + + // Using transfer function selector with invalid data + const invalidTransferData = '0xa9059cbb000000' as `0x${string}` + + const callData = encodeFunctionData({ + abi: sendAccountAbi, + functionName: 'executeBatch', + args: [ + [ + { + dest: token, + value: 0n, + data: invalidTransferData, + }, + ], + ], + }) + + const userOp = { + sender, + nonce: 0n, + callData, + ...defaultUserOp, + } + + expect(() => decodeTransferUserOp({ userOp })).toThrow() + }) +}) From 021ffd3fd321591385b8609df498f5a44f19a180 Mon Sep 17 00:00:00 2001 From: youngkidwarrior Date: Wed, 12 Mar 2025 07:34:45 -0700 Subject: [PATCH 49/58] remove unused css --- apps/next/styles/globals.css | 1 + 1 file changed, 1 insertion(+) diff --git a/apps/next/styles/globals.css b/apps/next/styles/globals.css index 5d49443c2..d94d3dc87 100644 --- a/apps/next/styles/globals.css +++ b/apps/next/styles/globals.css @@ -5,6 +5,7 @@ :root { height: 100%; 
background-color: transparent; + overflow: hidden; overscroll-behavior: none; } From 7f5cd7defd3f04b3dc2616783e35b49ad4d1f638 Mon Sep 17 00:00:00 2001 From: youngkidwarrior Date: Wed, 12 Mar 2025 07:45:35 -0700 Subject: [PATCH 50/58] Upsert temporal transfer if workflow_id conflict Combine update and insert triggers --- .../src/transfer-workflow/activities.ts | 12 +- .../src/transfer-workflow/supabase.ts | 8 +- .../src/transfer-workflow/workflow.ts | 4 +- ...021828_create_temporal_transfers_table.sql | 249 ++++++++---------- 4 files changed, 121 insertions(+), 152 deletions(-) diff --git a/packages/workflows/src/transfer-workflow/activities.ts b/packages/workflows/src/transfer-workflow/activities.ts index 8c2783fe4..c4ae20d0b 100644 --- a/packages/workflows/src/transfer-workflow/activities.ts +++ b/packages/workflows/src/transfer-workflow/activities.ts @@ -1,7 +1,7 @@ import { log, ApplicationFailure } from '@temporalio/activity' import { - insertTemporalTokenSendAccountTransfer, - insertTemporalEthSendAccountTransfer, + upsertTemporalTokenSendAccountTransfer, + upsertTemporalEthSendAccountTransfer, updateTemporalSendAccountTransfer, isRetryableDBError, } from './supabase' @@ -31,7 +31,7 @@ type TransferActivities = { amount: bigint token: PgBytea | null }> - insertTemporalSendAccountTransferActivity: ( + upsertTemporalSendAccountTransferActivity: ( workflowId: string, from: PgBytea, to: PgBytea, @@ -122,7 +122,7 @@ export const createTransferActivities = ( throw error } }, - async insertTemporalSendAccountTransferActivity( + async upsertTemporalSendAccountTransferActivity( workflowId, from, to, @@ -131,7 +131,7 @@ export const createTransferActivities = ( blockNumber ) { const { data, error } = token - ? await insertTemporalTokenSendAccountTransfer({ + ? 
await upsertTemporalTokenSendAccountTransfer({ workflow_id: workflowId, status: 'initialized', block_num: blockNumber, @@ -140,7 +140,7 @@ export const createTransferActivities = ( v: amount, log_addr: token, }) - : await insertTemporalEthSendAccountTransfer({ + : await upsertTemporalEthSendAccountTransfer({ workflow_id: workflowId, status: 'initialized', block_num: blockNumber, diff --git a/packages/workflows/src/transfer-workflow/supabase.ts b/packages/workflows/src/transfer-workflow/supabase.ts index 7f3a23dd2..c5022b3ef 100644 --- a/packages/workflows/src/transfer-workflow/supabase.ts +++ b/packages/workflows/src/transfer-workflow/supabase.ts @@ -2,7 +2,7 @@ import type { PgBytea, Database } from '@my/supabase/database.types' import type { PostgrestError } from '@supabase/supabase-js' import { supabaseAdmin } from 'app/utils/supabase/admin' -export async function insertTemporalTokenSendAccountTransfer({ +export async function upsertTemporalTokenSendAccountTransfer({ workflow_id, status, block_num, @@ -22,7 +22,7 @@ export async function insertTemporalTokenSendAccountTransfer({ return await supabaseAdmin .schema('temporal') .from('send_account_transfers') - .insert({ + .upsert({ workflow_id, status, created_at_block_num: Number(block_num), @@ -37,7 +37,7 @@ export async function insertTemporalTokenSendAccountTransfer({ .single() } -export async function insertTemporalEthSendAccountTransfer({ +export async function upsertTemporalEthSendAccountTransfer({ workflow_id, status, block_num, @@ -55,7 +55,7 @@ export async function insertTemporalEthSendAccountTransfer({ return await supabaseAdmin .schema('temporal') .from('send_account_transfers') - .insert({ + .upsert({ workflow_id, status, created_at_block_num: Number(block_num), diff --git a/packages/workflows/src/transfer-workflow/workflow.ts b/packages/workflows/src/transfer-workflow/workflow.ts index 7036730aa..977c22895 100644 --- a/packages/workflows/src/transfer-workflow/workflow.ts +++ 
b/packages/workflows/src/transfer-workflow/workflow.ts @@ -11,7 +11,7 @@ const { simulateTransferActivity, getBaseBlockNumberActivity, decodeTransferUserOpActivity, - insertTemporalSendAccountTransferActivity, + upsertTemporalSendAccountTransferActivity, sendUserOpActivity, updateTemporalTransferActivity, waitForTransactionReceiptActivity, @@ -37,7 +37,7 @@ export async function TransferWorkflow(userOp: UserOperation<'v0.7'>) { log('Decoded transfer userOp', { workflowId, token, from, to, amount: amount.toString() }) log('Inserting temporal transfer into temporal.send_account_transfers', workflowId) - const initialTransfer = await insertTemporalSendAccountTransferActivity( + const initialTransfer = await upsertTemporalSendAccountTransferActivity( workflowId, from, to, diff --git a/supabase/migrations/20250307021828_create_temporal_transfers_table.sql b/supabase/migrations/20250307021828_create_temporal_transfers_table.sql index b7d8c7953..c95e37ca7 100644 --- a/supabase/migrations/20250307021828_create_temporal_transfers_table.sql +++ b/supabase/migrations/20250307021828_create_temporal_transfers_table.sql @@ -42,112 +42,79 @@ create policy "users can see their own temporal transfers" on "temporal"."send_account_transfers" as permissive for select to authenticated using ( - auth.uid() = user_id -); - -create policy "users can only see temporal transfers they initiated" -on "public"."activity" as permissive -for select to authenticated -using ( - (event_name != 'temporal_send_account_transfers') OR - (from_user_id = auth.uid()) + (select auth.uid()) = user_id ); CREATE INDEX temporal_send_account_transfers_user_id_idx ON temporal.send_account_transfers(user_id); CREATE INDEX temporal_send_account_transfers_created_at_idx ON temporal.send_account_transfers(created_at); CREATE UNIQUE INDEX temporal_send_account_transfers_workflow_id_idx ON temporal.send_account_transfers(workflow_id); --- Temporal transfer insert user_id trigger -CREATE OR REPLACE FUNCTION 
temporal.temporal_send_account_transfers_trigger_insert_user_id() - RETURNS TRIGGER - LANGUAGE plpgsql - SECURITY DEFINER - AS $$ +CREATE OR REPLACE FUNCTION temporal.handle_transfer_upsert() +RETURNS TRIGGER AS $$ DECLARE _user_id uuid; BEGIN - -- Handle token transfers (has 'f' field) + -- Get user_id based on transfer type IF NEW.data ? 'f' THEN + -- Token transfer SELECT user_id INTO _user_id FROM send_accounts WHERE address = concat('0x', encode((NEW.data->>'f')::bytea, 'hex'))::citext; - - -- Handle ETH transfers (has 'sender' field) ELSIF NEW.data ? 'sender' THEN + -- ETH transfer SELECT user_id INTO _user_id FROM send_accounts WHERE address = concat('0x', encode((NEW.data->>'sender')::bytea, 'hex'))::citext; END IF; - -- If no user_id found, prevent insert and raise error + -- Validate user_id IF _user_id IS NULL THEN RAISE EXCEPTION 'No user found for the given address'; END IF; - -- Set the user_id before insert + -- Handle conflicts for INSERT operations + IF TG_OP = 'INSERT' THEN + -- Check if conflicting row exists with non-failed/cancelled status + IF EXISTS ( + SELECT 1 + FROM temporal.send_account_transfers + WHERE workflow_id = NEW.workflow_id + AND status NOT IN ('failed', 'cancelled') + ) THEN + RAISE EXCEPTION 'Workflow ID % already exists with status not failed/cancelled', NEW.workflow_id; + END IF; + + -- For existing failed/cancelled transfers, update instead of insert + IF EXISTS ( + SELECT 1 + FROM temporal.send_account_transfers + WHERE workflow_id = NEW.workflow_id + ) THEN + UPDATE temporal.send_account_transfers + SET status = NEW.status, + created_at_block_num = NEW.created_at_block_num, + data = NEW.data, + user_id = _user_id, + updated_at = now() + WHERE workflow_id = NEW.workflow_id; + + RETURN NULL; + END IF; + END IF; + + -- Set user_id for new inserts or updates NEW.user_id := _user_id; RETURN NEW; END; -$$; +$$ LANGUAGE plpgsql; --- Create trigger -CREATE TRIGGER temporal_send_account_transfers_trigger_insert_user_id - AFTER 
INSERT ON temporal.send_account_transfers +CREATE TRIGGER handle_transfer_upsert_trigger + BEFORE INSERT OR UPDATE ON temporal.send_account_transfers FOR EACH ROW - EXECUTE FUNCTION temporal.temporal_send_account_transfers_trigger_insert_user_id(); + EXECUTE FUNCTION temporal.handle_transfer_upsert(); --- Token transfer triggers -CREATE OR REPLACE FUNCTION temporal.temporal_token_send_account_transfers_trigger_insert_activity() - RETURNS TRIGGER - LANGUAGE plpgsql - SECURITY DEFINER - AS $$ -DECLARE - _f_user_id uuid; - _t_user_id uuid; - _data jsonb; -BEGIN - SELECT user_id INTO _f_user_id - FROM send_accounts - WHERE address = concat('0x', encode((NEW.data->>'f')::bytea, 'hex'))::citext; - - SELECT user_id INTO _t_user_id - FROM send_accounts - WHERE address = concat('0x', encode((NEW.data->>'t')::bytea, 'hex'))::citext; - - -- cast v to text to avoid losing precision when converting to json when sending to clients - _data := json_build_object( - 'status', NEW.status::text, - 'user_op_hash', (NEW.data->'user_op_hash'), - 'log_addr', (NEW.data->>'log_addr'), - 'f', (NEW.data->>'f'), - 't', (NEW.data->>'t'), - 'v', NEW.data->>'v'::text, - 'tx_hash', (NEW.data->>'tx_hash'), - 'block_num', NEW.data->>'block_num'::text - ); - - INSERT INTO activity( - event_name, - event_id, - from_user_id, - to_user_id, - data - ) - VALUES ( - 'temporal_send_account_transfers', - NEW.workflow_id, - _f_user_id, - _t_user_id, - _data - ); - RETURN NEW; -END; -$$; - --- ETH transfer triggers -CREATE OR REPLACE FUNCTION temporal.temporal_eth_send_account_transfers_trigger_insert_activity() +CREATE OR REPLACE FUNCTION temporal.temporal_send_account_transfers_trigger_activity() RETURNS TRIGGER LANGUAGE plpgsql SECURITY DEFINER @@ -157,79 +124,81 @@ DECLARE _to_user_id uuid; _data jsonb; BEGIN - SELECT user_id INTO _from_user_id - FROM send_accounts - WHERE address = concat('0x', encode((NEW.data->>'sender')::bytea, 'hex'))::citext; - - SELECT user_id INTO _to_user_id - FROM send_accounts - 
WHERE address = concat('0x', encode((NEW.data->>'log_addr')::bytea, 'hex'))::citext; - - -- cast v to text to avoid losing precision when converting to json when sending to clients - _data := json_build_object( - 'status', NEW.status::text, - 'user_op_hash', (NEW.data->'user_op_hash'), - 'log_addr', (NEW.data->>'log_addr'), - 'sender', (NEW.data->>'sender'), - 'value', NEW.data->>'value'::text, - 'tx_hash', (NEW.data->>'tx_hash'), - 'block_num', NEW.data->>'block_num'::text - ); - - INSERT INTO activity( - event_name, - event_id, - from_user_id, - to_user_id, - data - ) - VALUES ( - 'temporal_send_account_transfers', - NEW.workflow_id, - _from_user_id, - _to_user_id, - _data - ); - RETURN NEW; -END; -$$; - --- Create triggers with conditions -CREATE TRIGGER temporal_token_send_account_transfers_trigger_insert_activity - AFTER INSERT ON temporal.send_account_transfers - FOR EACH ROW - WHEN (NEW.data ? 'f') - EXECUTE FUNCTION temporal.temporal_token_send_account_transfers_trigger_insert_activity(); + -- Set user IDs based on whether it's a token or ETH transfer + IF NEW.data ? 'f' THEN + -- Token transfer + SELECT user_id INTO _from_user_id + FROM send_accounts + WHERE address = concat('0x', encode((NEW.data->>'f')::bytea, 'hex'))::citext; -CREATE TRIGGER temporal_eth_send_account_transfers_trigger_insert_activity - AFTER INSERT ON temporal.send_account_transfers - FOR EACH ROW - WHEN (NEW.data ? 
'sender') - EXECUTE FUNCTION temporal.temporal_eth_send_account_transfers_trigger_insert_activity(); + SELECT user_id INTO _to_user_id + FROM send_accounts + WHERE address = concat('0x', encode((NEW.data->>'t')::bytea, 'hex'))::citext; + + _data := json_build_object( + 'status', NEW.status::text, + 'user_op_hash', (NEW.data->'user_op_hash'), + 'log_addr', (NEW.data->>'log_addr'), + 'f', (NEW.data->>'f'), + 't', (NEW.data->>'t'), + 'v', NEW.data->>'v'::text, + 'tx_hash', (NEW.data->>'tx_hash'), + 'block_num', NEW.data->>'block_num'::text + ); + ELSE + -- ETH transfer + SELECT user_id INTO _from_user_id + FROM send_accounts + WHERE address = concat('0x', encode((NEW.data->>'sender')::bytea, 'hex'))::citext; -CREATE OR REPLACE FUNCTION temporal.temporal_send_account_transfers_trigger_update_activity() - RETURNS TRIGGER - LANGUAGE plpgsql - SECURITY DEFINER - AS $$ -DECLARE - _data jsonb; -BEGIN - _data := NEW.data || json_build_object('status', NEW.status::text)::jsonb; + SELECT user_id INTO _to_user_id + FROM send_accounts + WHERE address = concat('0x', encode((NEW.data->>'log_addr')::bytea, 'hex'))::citext; + + _data := json_build_object( + 'status', NEW.status::text, + 'user_op_hash', (NEW.data->'user_op_hash'), + 'log_addr', (NEW.data->>'log_addr'), + 'sender', (NEW.data->>'sender'), + 'value', NEW.data->>'value'::text, + 'tx_hash', (NEW.data->>'tx_hash'), + 'block_num', NEW.data->>'block_num'::text + ); + END IF; - UPDATE activity - SET data = _data - WHERE event_name = 'temporal_send_account_transfers' - AND event_id = NEW.workflow_id; + -- For INSERT operations + IF TG_OP = 'INSERT' THEN + INSERT INTO activity( + event_name, + event_id, + from_user_id, + to_user_id, + data + ) + VALUES ( + 'temporal_send_account_transfers', + NEW.workflow_id, + _from_user_id, + _to_user_id, + _data + ); + -- For UPDATE operations + ELSIF TG_OP = 'UPDATE' THEN + UPDATE activity + SET data = _data + WHERE event_name = 'temporal_send_account_transfers' + AND event_id = 
NEW.workflow_id; + END IF; RETURN NEW; END; $$; -CREATE TRIGGER temporal_send_account_transfers_trigger_update_activity - AFTER UPDATE ON temporal.send_account_transfers +-- Single trigger for both token and ETH transfers, handling both INSERT and UPDATE +CREATE TRIGGER temporal_send_account_transfers_trigger_activity + AFTER INSERT OR UPDATE ON temporal.send_account_transfers FOR EACH ROW - EXECUTE FUNCTION temporal.temporal_send_account_transfers_trigger_update_activity(); + EXECUTE FUNCTION temporal.temporal_send_account_transfers_trigger_activity(); CREATE OR REPLACE FUNCTION temporal.temporal_send_account_transfers_trigger_delete_activity() RETURNS TRIGGER From fb6e623450bfd42fb4d2db2585ad6dedd1e8196c Mon Sep 17 00:00:00 2001 From: youngkidwarrior Date: Wed, 12 Mar 2025 07:45:54 -0700 Subject: [PATCH 51/58] Remove uneccessary with-env script --- packages/workflows/package.json | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/packages/workflows/package.json b/packages/workflows/package.json index 5b706dc92..8563e6ff7 100644 --- a/packages/workflows/package.json +++ b/packages/workflows/package.json @@ -26,8 +26,7 @@ "lint": "tsc", "test": "jest", "build": "yarn bundle", - "bundle": "yarn with-env node --import 'data:text/javascript,import { register } from \"node:module\"; import { pathToFileURL } from \"node:url\"; register(\"ts-node/esm\", pathToFileURL(\"./\"));' src/scripts/build-workflow-bundle.ts", - "with-env": "dotenv -e ../../.env -c --" + "bundle": "yarn --import 'data:text/javascript,import { register } from \"node:module\"; import { pathToFileURL } from \"node:url\"; register(\"ts-node/esm\", pathToFileURL(\"./\"));' src/scripts/build-workflow-bundle.ts" }, "devDependencies": { "@jest/globals": "^29.7.0", From 966a92c6a875afc955e28cdc3c7de0d3cee80804 Mon Sep 17 00:00:00 2001 From: youngkidwarrior Date: Wed, 12 Mar 2025 09:13:43 -0700 Subject: [PATCH 52/58] make the dist directory during build --- 
packages/workflows/src/scripts/build-workflow-bundle.ts | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/packages/workflows/src/scripts/build-workflow-bundle.ts b/packages/workflows/src/scripts/build-workflow-bundle.ts index 6bec5e9be..297642382 100644 --- a/packages/workflows/src/scripts/build-workflow-bundle.ts +++ b/packages/workflows/src/scripts/build-workflow-bundle.ts @@ -1,5 +1,5 @@ import { bundleWorkflowCode } from '@temporalio/worker' -import { writeFile } from 'node:fs/promises' +import { mkdir, writeFile } from 'node:fs/promises' import path, { dirname } from 'node:path' import { createRequire } from 'node:module' const require = createRequire(import.meta.url) @@ -14,6 +14,8 @@ async function bundle() { }) const codePath = path.join(__dirname, '../../dist/workflow-bundle.js') + await mkdir(dirname(codePath), { recursive: true }) + await writeFile(codePath, code) console.log(`Bundle written to ${codePath}`) } From 2c16d30ca7a4a5b571526f2adf4b4c0f1416e83c Mon Sep 17 00:00:00 2001 From: youngkidwarrior Date: Wed, 12 Mar 2025 09:14:23 -0700 Subject: [PATCH 53/58] fix workflows:build script --- packages/workflows/package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/workflows/package.json b/packages/workflows/package.json index 8563e6ff7..3a16390be 100644 --- a/packages/workflows/package.json +++ b/packages/workflows/package.json @@ -26,7 +26,7 @@ "lint": "tsc", "test": "jest", "build": "yarn bundle", - "bundle": "yarn --import 'data:text/javascript,import { register } from \"node:module\"; import { pathToFileURL } from \"node:url\"; register(\"ts-node/esm\", pathToFileURL(\"./\"));' src/scripts/build-workflow-bundle.ts" + "bundle": "yarn src/scripts/build-workflow-bundle.ts" }, "devDependencies": { "@jest/globals": "^29.7.0", From c61cfd0585d326d2cf79613d0d63bd8221f50443 Mon Sep 17 00:00:00 2001 From: youngkidwarrior Date: Thu, 13 Mar 2025 15:18:59 -0700 Subject: [PATCH 54/58] Clean up 
decodeTransferUserOp --- .../app/utils/decodeTransferUserOp.test.ts | 4 +-- packages/app/utils/decodeTransferUserOp.ts | 30 ++++++++++++------- 2 files changed, 21 insertions(+), 13 deletions(-) diff --git a/packages/app/utils/decodeTransferUserOp.test.ts b/packages/app/utils/decodeTransferUserOp.test.ts index ab65b37fd..dce1b2727 100644 --- a/packages/app/utils/decodeTransferUserOp.test.ts +++ b/packages/app/utils/decodeTransferUserOp.test.ts @@ -1,5 +1,5 @@ import { describe } from '@jest/globals' -import { sendAccountAbi } from '@my/wagmi' +import { baseMainnet, sendAccountAbi, sendTokenAddress } from '@my/wagmi' import { decodeTransferUserOp } from './decodeTransferUserOp' import { encodeFunctionData, erc20Abi } from 'viem' @@ -74,7 +74,7 @@ describe('decodeTransferUserOp', () => { it('should decode ERC20 transfer user operation', () => { const sender = `0x${'1'.repeat(40)}` as `0x${string}` const to = `0x${'2'.repeat(40)}` as `0x${string}` - const token = `0x${'3'.repeat(40)}` as `0x${string}` + const token = sendTokenAddress[baseMainnet.id] const amount = 1n const nonce = 0n const callData = encodeFunctionData({ diff --git a/packages/app/utils/decodeTransferUserOp.ts b/packages/app/utils/decodeTransferUserOp.ts index 83035043a..8c9c088d3 100644 --- a/packages/app/utils/decodeTransferUserOp.ts +++ b/packages/app/utils/decodeTransferUserOp.ts @@ -1,23 +1,31 @@ -import { decodeFunctionData } from 'viem' +import { decodeFunctionData, isAddress } from 'viem' import { sendAccountAbi, erc20Abi } from '@my/wagmi' import type { UserOperation } from 'permissionless' -import type { allCoinsDict } from 'app/data/coins' +import { allCoinsDict } from 'app/data/coins' export function decodeTransferUserOp({ userOp }: { userOp: UserOperation<'v0.7'> }) { const { args } = decodeFunctionData({ abi: sendAccountAbi, data: userOp.callData }) + const from = isAddress(userOp.sender) ? userOp.sender : null + const decodedTokenTransfer = args?.[0]?.[0].data !== '0x' ? 
decodeFunctionData({ abi: erc20Abi, data: args?.[0]?.[0].data }) - : undefined + : null + + const rawAmount = decodedTokenTransfer ? decodedTokenTransfer.args?.[1] : args?.[0]?.[0].value + const amount = typeof rawAmount === 'bigint' ? rawAmount : null + + const rawTo = decodedTokenTransfer ? decodedTokenTransfer.args[0] : args?.[0]?.[0].dest + const to = isAddress(rawTo) ? rawTo : null - const amount = ( - decodedTokenTransfer ? decodedTokenTransfer.args[1] : args?.[0]?.[0].value - ) as bigint + const rawToken = decodedTokenTransfer ? args?.[0]?.[0].dest : 'eth' + const token = rawToken in allCoinsDict || rawToken === 'eth' ? rawToken : null - const to = ( - decodedTokenTransfer ? decodedTokenTransfer.args[0] : args?.[0]?.[0].dest - ) as `0x${string}` - const token = (decodedTokenTransfer ? args?.[0]?.[0].dest : 'eth') as keyof allCoinsDict - return { from: userOp.sender, to, token, amount } + return { + from, + to, + token, + amount, + } } From 486a77c75ae085a424322106cd94b38d05fa3f9c Mon Sep 17 00:00:00 2001 From: youngkidwarrior Date: Thu, 13 Mar 2025 15:20:56 -0700 Subject: [PATCH 55/58] Always track failed workflows --- .../activity/TemporalTransfersEventSchema.ts | 4 +- packages/snaplet/.snaplet/dataModel.json | 25 +- .../src/transfer-workflow/activities.ts | 234 ++++++++++-------- .../src/transfer-workflow/supabase.ts | 92 ++----- .../src/transfer-workflow/workflow.ts | 56 +++-- supabase/database-generated.types.ts | 21 +- ...021828_create_temporal_transfers_table.sql | 138 ++++------- 7 files changed, 273 insertions(+), 297 deletions(-) diff --git a/packages/app/utils/zod/activity/TemporalTransfersEventSchema.ts b/packages/app/utils/zod/activity/TemporalTransfersEventSchema.ts index 8e740ad96..88696dfba 100644 --- a/packages/app/utils/zod/activity/TemporalTransfersEventSchema.ts +++ b/packages/app/utils/zod/activity/TemporalTransfersEventSchema.ts @@ -10,6 +10,7 @@ import type { Database } from '@my/supabase/database-generated.types' /** Temporal transfers 
status */ export const temporalTransferStatus = z.enum([ 'initialized', + 'submitted', 'sent', 'confirmed', 'failed', @@ -21,10 +22,10 @@ export const temporalTransferStatus = z.enum([ */ const BaseTemporalTransfersDataSchema = z.object({ status: temporalTransferStatus, + log_addr: byteaToHexEthAddress, user_op_hash: byteaToHexTxHash.nullable(), tx_hash: byteaToHexTxHash.nullable(), block_num: decimalStrToBigInt.nullable(), - log_addr: byteaToHexEthAddress, }) /** @@ -98,6 +99,7 @@ export const temporalEventNameFromStatus = ( ) => { switch (status) { case 'initialized': + case 'submitted': return 'Sending...' case 'sent': return 'Confirming...' diff --git a/packages/snaplet/.snaplet/dataModel.json b/packages/snaplet/.snaplet/dataModel.json index 6755c1d76..4e3023e8a 100644 --- a/packages/snaplet/.snaplet/dataModel.json +++ b/packages/snaplet/.snaplet/dataModel.json @@ -6190,10 +6190,10 @@ "maxLength": null }, { - "id": "temporal.send_account_transfers.user_id", - "name": "user_id", - "columnName": "user_id", - "type": "uuid", + "id": "temporal.send_account_transfers.status", + "name": "status", + "columnName": "status", + "type": "transfer_status", "isRequired": true, "kind": "scalar", "isList": false, @@ -6204,11 +6204,11 @@ "maxLength": null }, { - "id": "temporal.send_account_transfers.status", - "name": "status", - "columnName": "status", - "type": "transfer_status", - "isRequired": true, + "id": "temporal.send_account_transfers.user_id", + "name": "user_id", + "columnName": "user_id", + "type": "uuid", + "isRequired": false, "kind": "scalar", "isList": false, "isGenerated": false, @@ -6222,7 +6222,7 @@ "name": "created_at_block_num", "columnName": "created_at_block_num", "type": "int8", - "isRequired": true, + "isRequired": false, "kind": "scalar", "isList": false, "isGenerated": false, @@ -6236,7 +6236,7 @@ "name": "data", "columnName": "data", "type": "jsonb", - "isRequired": true, + "isRequired": false, "kind": "scalar", "isList": false, "isGenerated": 
false, @@ -9761,6 +9761,9 @@ }, { "name": "sent" + }, + { + "name": "submitted" } ] } diff --git a/packages/workflows/src/transfer-workflow/activities.ts b/packages/workflows/src/transfer-workflow/activities.ts index c4ae20d0b..ea203a1d7 100644 --- a/packages/workflows/src/transfer-workflow/activities.ts +++ b/packages/workflows/src/transfer-workflow/activities.ts @@ -1,9 +1,11 @@ import { log, ApplicationFailure } from '@temporalio/activity' import { - upsertTemporalTokenSendAccountTransfer, - upsertTemporalEthSendAccountTransfer, + upsertTemporalSendAccountTransfer, updateTemporalSendAccountTransfer, isRetryableDBError, + type TemporalTransfer, + type TemporalTransferInsert, + type TemporalTransferUpdate, } from './supabase' import { simulateUserOperation, @@ -15,30 +17,25 @@ import type { UserOperation } from 'permissionless' import { bootstrap } from '@my/workflows/utils' import { decodeTransferUserOp } from 'app/utils/decodeTransferUserOp' import { hexToBytea } from 'app/utils/hexToBytea' -import type { Json, Database, PgBytea } from '@my/supabase/database.types' +import type { PgBytea } from '@my/supabase/database.types' import superjson from 'superjson' import { byteaToHex } from 'app/utils/byteaToHex' import { allCoins } from 'app/data/coins' -type TemporalTransfer = Database['temporal']['Tables']['send_account_transfers']['Row'] - type TransferActivities = { - simulateTransferActivity: (userOp: UserOperation<'v0.7'>) => Promise + upsertTemporalSendAccountTransferActivity: (TemporalTransferInsert) => Promise + simulateTransferActivity: (workflowId: string, userOp: UserOperation<'v0.7'>) => Promise getBaseBlockNumberActivity: () => Promise - decodeTransferUserOpActivity: (userOp: UserOperation<'v0.7'>) => Promise<{ + decodeTransferUserOpActivity: ( + workflowId: string, + userOp: UserOperation<'v0.7'> + ) => Promise<{ from: PgBytea to: PgBytea amount: bigint token: PgBytea | null }> - upsertTemporalSendAccountTransferActivity: ( - workflowId: string, - from: 
PgBytea, - to: PgBytea, - amount: bigint, - token: PgBytea | null, - blockNumber: bigint - ) => Promise + updateTemporalSendAccountTransferActivity: (TemporalTransferUpdate) => Promise sendUserOpActivity: (workflowId: string, userOp: UserOperation<'v0.7'>) => Promise waitForTransactionReceiptActivity: ( workflowId: string, @@ -47,16 +44,6 @@ type TransferActivities = { transactionHash: `0x${string}` blockNumber: bigint }> - updateTemporalTransferActivity: (params: { - workflowId: string - status: Database['temporal']['Enums']['transfer_status'] - data: Json - failureError?: { - message?: string | null - type?: string | null - details?: unknown[] - } - }) => Promise } export const createTransferActivities = ( @@ -65,8 +52,60 @@ export const createTransferActivities = ( bootstrap(env) return { - async simulateTransferActivity(userOp) { - await simulateUserOperation(userOp).catch((error) => { + async upsertTemporalSendAccountTransferActivity({ workflowId, data }) { + const { data: upsertData, error } = await upsertTemporalSendAccountTransfer({ + workflow_id: workflowId, + status: 'initialized', + data, + }) + + if (error) { + if (isRetryableDBError(error)) { + throw ApplicationFailure.retryable('Database connection error, retrying...', error.code, { + error, + workflowId, + }) + } + + const { error: upsertFailedError } = await upsertTemporalSendAccountTransfer({ + workflow_id: workflowId, + status: 'failed', + }) + if (upsertFailedError) { + throw ApplicationFailure.retryable( + 'Error upserting failed transfer from temporal.send_account_transfers', + upsertFailedError.code, + { + error: upsertFailedError, + workflowId, + } + ) + } + throw ApplicationFailure.nonRetryable('Database error occurred', error.code, { + error, + workflowId, + }) + } + + return upsertData + }, + async simulateTransferActivity(workflowId, userOp) { + await simulateUserOperation(userOp).catch(async (error) => { + log.error('decodeTransferUserOpActivity failed', { error }) + const { error: 
updateError } = await updateTemporalSendAccountTransfer({ + workflow_id: workflowId, + status: 'failed', + }) + if (updateError) { + throw ApplicationFailure.retryable( + 'Error updating transfer status to failed from temporal.send_account_transferss', + updateError.code, + { + error: updateError, + workflowId, + } + ) + } throw ApplicationFailure.nonRetryable('Error simulating user operation', error.code, error) }) }, @@ -78,24 +117,24 @@ export const createTransferActivities = ( throw ApplicationFailure.retryable('Failed to get block number') } }, - async decodeTransferUserOpActivity(userOp) { + async decodeTransferUserOpActivity(workflowId, userOp) { try { const { from, to, token, amount } = decodeTransferUserOp({ userOp }) if (!from || !to || !amount || !token) { log.error('Failed to decode transfer user op', { from, to, amount, token }) - throw ApplicationFailure.nonRetryable('User Operation is not a valid transfer') + throw new Error('User Operation is not a valid transfer') } if (!allCoins.find((c) => c.token === token)) { log.error('Token ${token} is not a supported', { token }) - throw ApplicationFailure.nonRetryable('Token ${token} is not a supported') + throw new Error(`Token ${token} is not a supported`) } - if (amount <= 0n) { - log.error('User Operation has amount <= 0', { amount }) - throw ApplicationFailure.nonRetryable('User Operation has amount <= 0') + if (amount < 0n) { + log.error('User Operation has amount < 0', { amount }) + throw new Error('User Operation has amount < 0') } if (!userOp.signature) { log.error('UserOp signature is required') - throw ApplicationFailure.nonRetryable('UserOp signature is required') + throw new Error('UserOp signature is required') } const fromBytea = hexToBytea(from) @@ -103,51 +142,46 @@ export const createTransferActivities = ( const tokenBytea = token === 'eth' ? 
null : hexToBytea(token) return { from: fromBytea, to: toBytea, amount, token: tokenBytea } } catch (error) { - // Handle viem decode errors - if ( - error.name === 'AbiFunctionSignatureNotFoundError' || - error.name === 'DecodeAbiParametersError' || - error.name === 'FormatAbiItemError' || - error.name === 'ToFunctionSelectorError' || - error.name === 'SliceError' - ) { - log.error('Failed to decode function data', { error }) - throw ApplicationFailure.nonRetryable('Invalid transfer function data', error.name, error) - } - // Handle hex conversion errors - if (error.message === 'Hex string must start with 0x') { - log.error('Invalid hex address format', { error }) - throw ApplicationFailure.nonRetryable('Invalid hex address format') + log.error('decodeTransferUserOpActivity failed', { error }) + const { error: updateError } = await updateTemporalSendAccountTransfer({ + workflow_id: workflowId, + status: 'failed', + }) + if (updateError) { + throw ApplicationFailure.retryable( + 'Error updating transfer status to failed from temporal.send_account_transfers', + updateError.code, + { + error: updateError, + workflowId, + } + ) } - throw error + log.error('Error decoding user operation:', { + code: error.code, + name: error.name, + message: error.message, + }) + throw ApplicationFailure.nonRetryable( + 'Error decoding user operation', + error.code, + error.name, + error.message + ) } }, - async upsertTemporalSendAccountTransferActivity( + async updateTemporalSendAccountTransferActivity({ workflowId, - from, - to, - amount, - token, - blockNumber - ) { - const { data, error } = token - ? 
await upsertTemporalTokenSendAccountTransfer({ - workflow_id: workflowId, - status: 'initialized', - block_num: blockNumber, - f: from, - t: to, - v: amount, - log_addr: token, - }) - : await upsertTemporalEthSendAccountTransfer({ - workflow_id: workflowId, - status: 'initialized', - block_num: blockNumber, - sender: from, - value: amount, - log_addr: to, - }) + status, + createdAtBlockNum, + data, + }) { + const { data: upsertedData, error } = await updateTemporalSendAccountTransfer({ + workflow_id: workflowId, + status, + created_at_block_num: createdAtBlockNum ? Number(createdAtBlockNum) : null, + data, + }) if (error) { if (isRetryableDBError(error)) { @@ -157,13 +191,28 @@ export const createTransferActivities = ( }) } + const { error: updateError } = await updateTemporalSendAccountTransfer({ + workflow_id: workflowId, + status: 'failed', + }) + if (updateError) { + throw ApplicationFailure.retryable( + 'Error updating transfer status to failed from temporal.send_account_transfers', + updateError.code, + { + error: updateError, + workflowId, + } + ) + } + throw ApplicationFailure.nonRetryable('Database error occurred', error.code, { error, workflowId, }) } - return data + return upsertedData }, async sendUserOpActivity(workflowId, userOp) { try { @@ -178,7 +227,7 @@ export const createTransferActivities = ( }) if (updateError) { throw ApplicationFailure.retryable( - 'Error deleting transfer from temporal.send_account_transfers', + 'Error updating transfer status to failed from temporal.send_account_transfers', updateError.code, { error: updateError, @@ -210,28 +259,19 @@ export const createTransferActivities = ( workflow_id: workflowId, status: 'failed', }) - throw ApplicationFailure.nonRetryable(updateError?.message) - } - }, - async updateTemporalTransferActivity({ workflowId, status, data, failureError }) { - const { data: updatedData, error } = await updateTemporalSendAccountTransfer({ - workflow_id: workflowId, - status, - data, - }) - if (error) { - if 
(isRetryableDBError(error)) { - throw ApplicationFailure.retryable('Database connection error, retrying...', error.code, { - error, - workflowId, - }) + if (updateError) { + throw ApplicationFailure.retryable( + 'Error updating transfer status to failed from temporal.send_account_transfers', + updateError.code, + { + error: updateError, + workflowId, + } + ) } - throw ApplicationFailure.nonRetryable('Database error occurred', error.code, { - error, - workflowId, - }) + + throw ApplicationFailure.nonRetryable('Error sending user operation', error.code, error) } - return updatedData }, } } diff --git a/packages/workflows/src/transfer-workflow/supabase.ts b/packages/workflows/src/transfer-workflow/supabase.ts index c5022b3ef..dda9de26a 100644 --- a/packages/workflows/src/transfer-workflow/supabase.ts +++ b/packages/workflows/src/transfer-workflow/supabase.ts @@ -1,70 +1,28 @@ -import type { PgBytea, Database } from '@my/supabase/database.types' +import type { Database } from '@my/supabase/database.types' import type { PostgrestError } from '@supabase/supabase-js' import { supabaseAdmin } from 'app/utils/supabase/admin' -export async function upsertTemporalTokenSendAccountTransfer({ - workflow_id, - status, - block_num, - f, - t, - v, - log_addr, -}: { - workflow_id: string - status: Database['temporal']['Enums']['transfer_status'] - block_num: bigint - f: PgBytea - t: PgBytea - v: bigint - log_addr: PgBytea -}) { - return await supabaseAdmin - .schema('temporal') - .from('send_account_transfers') - .upsert({ - workflow_id, - status, - created_at_block_num: Number(block_num), - data: { - f, - t, - v: v.toString(), - log_addr, - }, - }) - .select('*') - .single() -} +export type TemporalTransfer = Database['temporal']['Tables']['send_account_transfers']['Row'] +export type TemporalTransferInsert = + Database['temporal']['Tables']['send_account_transfers']['Insert'] +export type TemporalTransferUpdate = + Database['temporal']['Tables']['send_account_transfers']['Update'] 
-export async function upsertTemporalEthSendAccountTransfer({ +export async function upsertTemporalSendAccountTransfer({ workflow_id, status, - block_num, - sender, - log_addr, - value, -}: { - workflow_id: string - status: Database['temporal']['Enums']['transfer_status'] - block_num: bigint - sender: PgBytea - log_addr: PgBytea - value: bigint -}) { + data, +}: TemporalTransferInsert) { return await supabaseAdmin .schema('temporal') .from('send_account_transfers') - .upsert({ - workflow_id, - status, - created_at_block_num: Number(block_num), - data: { - sender, - value: value.toString(), - log_addr, - }, - }) + .upsert( + { workflow_id, status, data }, + { + onConflict: 'workflow_id', + ignoreDuplicates: false, // false means do update on conflict + } + ) .select('*') .single() } @@ -72,19 +30,19 @@ export async function upsertTemporalEthSendAccountTransfer({ export async function updateTemporalSendAccountTransfer({ workflow_id, status, + created_at_block_num, data, -}: { - workflow_id: string - status: Database['temporal']['Enums']['transfer_status'] - data?: Database['temporal']['Tables']['send_account_transfers']['Row']['data'] -}) { +}: TemporalTransferUpdate) { + if (!workflow_id) throw new Error('Workflow ID is required to update temporal transfer') + const payload = { + status, + } as TemporalTransferUpdate + if (created_at_block_num) payload.created_at_block_num = created_at_block_num + if (data) payload.data = data return await supabaseAdmin .schema('temporal') .from('send_account_transfers') - .update({ - status, - data, - }) + .update(payload) .eq('workflow_id', workflow_id) .select('*') .single() diff --git a/packages/workflows/src/transfer-workflow/workflow.ts b/packages/workflows/src/transfer-workflow/workflow.ts index 977c22895..056cb1f04 100644 --- a/packages/workflows/src/transfer-workflow/workflow.ts +++ b/packages/workflows/src/transfer-workflow/workflow.ts @@ -8,53 +8,73 @@ import { hexToBytea } from 'app/utils/hexToBytea' const log = 
debug('workflows:transfer') const { + upsertTemporalSendAccountTransferActivity, simulateTransferActivity, getBaseBlockNumberActivity, decodeTransferUserOpActivity, - upsertTemporalSendAccountTransferActivity, + updateTemporalSendAccountTransferActivity, sendUserOpActivity, - updateTemporalTransferActivity, waitForTransactionReceiptActivity, } = proxyActivities>({ - // TODO: make this configurable + // TODO: make this configurablea startToCloseTimeout: '10 minutes', }) export async function TransferWorkflow(userOp: UserOperation<'v0.7'>) { const workflowId = workflowInfo().workflowId log('Starting SendTransfer Workflow with userOp:', workflowId) + await upsertTemporalSendAccountTransferActivity({ + workflowId, + data: { + sender: hexToBytea(userOp.sender), + }, + }) log('Simulating transfer', workflowId) - await simulateTransferActivity(userOp) + const _ = await simulateTransferActivity(workflowId, userOp) log('Successfully simulated transfer', workflowId) log('Getting latest base block', workflowId) - const blockNumber = await getBaseBlockNumberActivity() - log('Base block:', { workflowId, blockNumber: blockNumber.toString() }) + const createdAtBlockNum = await getBaseBlockNumberActivity() + log('Base block:', { workflowId, createdAtBlockNum: createdAtBlockNum.toString() }) log('Decoding transfer userOp', workflowId) - const { token, from, to, amount } = await decodeTransferUserOpActivity(userOp) + const { token, from, to, amount } = await decodeTransferUserOpActivity(workflowId, userOp) log('Decoded transfer userOp', { workflowId, token, from, to, amount: amount.toString() }) log('Inserting temporal transfer into temporal.send_account_transfers', workflowId) - const initialTransfer = await upsertTemporalSendAccountTransferActivity( - workflowId, - from, - to, - amount, - token, - blockNumber - ) + const submittedTransfer = token + ? 
await updateTemporalSendAccountTransferActivity({ + workflowId, + status: 'submitted', + createdAtBlockNum, + data: { + f: from, + t: to, + v: amount.toString(), + log_addr: token, + }, + }) + : await updateTemporalSendAccountTransferActivity({ + workflowId, + status: 'submitted', + createdAtBlockNum, + data: { + sender: from, + value: amount.toString(), + log_addr: to, + }, + }) log('Inserted temporal transfer into temporal.send_account_transfers', workflowId) log('Sending UserOperation', superjson.stringify(userOp)) const hash = await sendUserOpActivity(workflowId, userOp) log('UserOperation sent, hash:', hash) - const sentTransfer = await updateTemporalTransferActivity({ + const sentTransfer = await updateTemporalSendAccountTransferActivity({ workflowId, status: 'sent', data: { - ...(initialTransfer.data as Record), + ...(submittedTransfer.data as Record), user_op_hash: hash, }, }) @@ -62,7 +82,7 @@ export async function TransferWorkflow(userOp: UserOperation<'v0.7'>) { const receipt = await waitForTransactionReceiptActivity(workflowId, hash) log('Receipt received:', { tx_hash: receipt.transactionHash }) - await updateTemporalTransferActivity({ + await updateTemporalSendAccountTransferActivity({ workflowId, status: 'confirmed', data: { diff --git a/supabase/database-generated.types.ts b/supabase/database-generated.types.ts index 94193176f..795a2f294 100644 --- a/supabase/database-generated.types.ts +++ b/supabase/database-generated.types.ts @@ -1467,32 +1467,32 @@ export type Database = { send_account_transfers: { Row: { created_at: string - created_at_block_num: number - data: Json + created_at_block_num: number | null + data: Json | null id: number status: Database["temporal"]["Enums"]["transfer_status"] updated_at: string - user_id: string + user_id: string | null workflow_id: string } Insert: { created_at?: string - created_at_block_num: number - data: Json + created_at_block_num?: number | null + data?: Json | null id?: number - status: 
Database["temporal"]["Enums"]["transfer_status"] + status?: Database["temporal"]["Enums"]["transfer_status"] updated_at?: string - user_id?: string + user_id?: string | null workflow_id: string } Update: { created_at?: string - created_at_block_num?: number - data?: Json + created_at_block_num?: number | null + data?: Json | null id?: number status?: Database["temporal"]["Enums"]["transfer_status"] updated_at?: string - user_id?: string + user_id?: string | null workflow_id?: string } Relationships: [] @@ -1507,6 +1507,7 @@ export type Database = { Enums: { transfer_status: | "initialized" + | "submitted" | "sent" | "confirmed" | "failed" diff --git a/supabase/migrations/20250307021828_create_temporal_transfers_table.sql b/supabase/migrations/20250307021828_create_temporal_transfers_table.sql index c95e37ca7..3d4f833b5 100644 --- a/supabase/migrations/20250307021828_create_temporal_transfers_table.sql +++ b/supabase/migrations/20250307021828_create_temporal_transfers_table.sql @@ -14,6 +14,7 @@ ALTER DEFAULT PRIVILEGES IN SCHEMA temporal CREATE TYPE temporal.transfer_status AS ENUM( 'initialized', + 'submitted', 'sent', 'confirmed', 'failed', @@ -23,10 +24,10 @@ CREATE TYPE temporal.transfer_status AS ENUM( CREATE TABLE temporal.send_account_transfers( id serial primary key, workflow_id text not null, - user_id uuid not null DEFAULT uuid_nil(), -- rely on trigger to set user_id - status temporal.transfer_status not null, - created_at_block_num bigint not null, - data jsonb not null, + status temporal.transfer_status not null default 'initialized', + user_id uuid, -- rely on trigger to set user_id + created_at_block_num bigint, + data jsonb, created_at timestamp with time zone not null default (now() AT TIME ZONE 'utc'::text), updated_at timestamp with time zone not null default (now() AT TIME ZONE 'utc'::text) ); @@ -49,88 +50,52 @@ CREATE INDEX temporal_send_account_transfers_user_id_idx ON temporal.send_accoun CREATE INDEX 
temporal_send_account_transfers_created_at_idx ON temporal.send_account_transfers(created_at); CREATE UNIQUE INDEX temporal_send_account_transfers_workflow_id_idx ON temporal.send_account_transfers(workflow_id); -CREATE OR REPLACE FUNCTION temporal.handle_transfer_upsert() +CREATE OR REPLACE FUNCTION temporal.temporal_transfer_before_insert() RETURNS TRIGGER AS $$ DECLARE _user_id uuid; + _address text; BEGIN - -- Get user_id based on transfer type IF NEW.data ? 'f' THEN - -- Token transfer - SELECT user_id INTO _user_id - FROM send_accounts - WHERE address = concat('0x', encode((NEW.data->>'f')::bytea, 'hex'))::citext; + _address := concat('0x', encode((NEW.data->>'f')::bytea, 'hex')); ELSIF NEW.data ? 'sender' THEN - -- ETH transfer - SELECT user_id INTO _user_id - FROM send_accounts - WHERE address = concat('0x', encode((NEW.data->>'sender')::bytea, 'hex'))::citext; + _address := concat('0x', encode((NEW.data->>'sender')::bytea, 'hex')); + ELSE + RAISE NOTICE 'No address given'; + RETURN NEW; END IF; + SELECT user_id INTO _user_id + FROM send_accounts + WHERE address = _address::citext; + -- Validate user_id IF _user_id IS NULL THEN - RAISE EXCEPTION 'No user found for the given address'; + RAISE NOTICE 'No user found for address: %', _address; + RETURN NEW; END IF; - -- Handle conflicts for INSERT operations - IF TG_OP = 'INSERT' THEN - -- Check if conflicting row exists with non-failed/cancelled status - IF EXISTS ( - SELECT 1 - FROM temporal.send_account_transfers - WHERE workflow_id = NEW.workflow_id - AND status NOT IN ('failed', 'cancelled') - ) THEN - RAISE EXCEPTION 'Workflow ID % already exists with status not failed/cancelled', NEW.workflow_id; - END IF; - - -- For existing failed/cancelled transfers, update instead of insert - IF EXISTS ( - SELECT 1 - FROM temporal.send_account_transfers - WHERE workflow_id = NEW.workflow_id - ) THEN - UPDATE temporal.send_account_transfers - SET status = NEW.status, - created_at_block_num = NEW.created_at_block_num, 
- data = NEW.data, - user_id = _user_id, - updated_at = now() - WHERE workflow_id = NEW.workflow_id; - - RETURN NULL; - END IF; - END IF; - - -- Set user_id for new inserts or updates - NEW.user_id := _user_id; + NEW.user_id = _user_id; RETURN NEW; END; $$ LANGUAGE plpgsql; -CREATE TRIGGER handle_transfer_upsert_trigger - BEFORE INSERT OR UPDATE ON temporal.send_account_transfers +CREATE TRIGGER temporal_send_account_transfers_trigger_before_insert + BEFORE INSERT ON temporal.send_account_transfers FOR EACH ROW - EXECUTE FUNCTION temporal.handle_transfer_upsert(); + EXECUTE FUNCTION temporal.temporal_transfer_before_insert(); -CREATE OR REPLACE FUNCTION temporal.temporal_send_account_transfers_trigger_activity() +CREATE OR REPLACE FUNCTION temporal.temporal_transfer_after_update() RETURNS TRIGGER LANGUAGE plpgsql SECURITY DEFINER AS $$ DECLARE - _from_user_id uuid; _to_user_id uuid; _data jsonb; BEGIN - -- Set user IDs based on whether it's a token or ETH transfer - IF NEW.data ? 'f' THEN - -- Token transfer - SELECT user_id INTO _from_user_id - FROM send_accounts - WHERE address = concat('0x', encode((NEW.data->>'f')::bytea, 'hex'))::citext; - + IF NEW.data ? 
't' THEN SELECT user_id INTO _to_user_id FROM send_accounts WHERE address = concat('0x', encode((NEW.data->>'t')::bytea, 'hex'))::citext; @@ -146,11 +111,6 @@ BEGIN 'block_num', NEW.data->>'block_num'::text ); ELSE - -- ETH transfer - SELECT user_id INTO _from_user_id - FROM send_accounts - WHERE address = concat('0x', encode((NEW.data->>'sender')::bytea, 'hex'))::citext; - SELECT user_id INTO _to_user_id FROM send_accounts WHERE address = concat('0x', encode((NEW.data->>'log_addr')::bytea, 'hex'))::citext; @@ -166,46 +126,38 @@ BEGIN ); END IF; - -- For INSERT operations - IF TG_OP = 'INSERT' THEN - INSERT INTO activity( - event_name, - event_id, - from_user_id, - to_user_id, - data - ) - VALUES ( - 'temporal_send_account_transfers', - NEW.workflow_id, - _from_user_id, - _to_user_id, - _data - ); - -- For UPDATE operations - ELSIF TG_OP = 'UPDATE' THEN - UPDATE activity - SET data = _data - WHERE event_name = 'temporal_send_account_transfers' - AND event_id = NEW.workflow_id; - END IF; - + INSERT INTO activity( + event_name, + event_id, + from_user_id, + to_user_id, + data + ) + VALUES ( + 'temporal_send_account_transfers', + NEW.workflow_id, + NEW.user_id, + _to_user_id, + _data + ) + ON CONFLICT (event_name, event_id) + DO UPDATE SET + data = EXCLUDED.data; RETURN NEW; END; $$; --- Single trigger for both token and ETH transfers, handling both INSERT and UPDATE -CREATE TRIGGER temporal_send_account_transfers_trigger_activity - AFTER INSERT OR UPDATE ON temporal.send_account_transfers +CREATE TRIGGER temporal_send_account_transfers_trigger_after_update + AFTER UPDATE ON temporal.send_account_transfers FOR EACH ROW - EXECUTE FUNCTION temporal.temporal_send_account_transfers_trigger_activity(); + EXECUTE FUNCTION temporal.temporal_transfer_after_update(); CREATE OR REPLACE FUNCTION temporal.temporal_send_account_transfers_trigger_delete_activity() RETURNS TRIGGER LANGUAGE plpgsql SECURITY DEFINER AS $$ -BEGINa +BEGIN DELETE FROM activity WHERE event_name = 
'temporal_send_account_transfers' AND event_id = OLD.workflow_id; From 88e55be7862fb0b93e352e6c3b6acdf84cb0e07f Mon Sep 17 00:00:00 2001 From: youngkidwarrior Date: Thu, 13 Mar 2025 15:21:13 -0700 Subject: [PATCH 56/58] Query temporal status before navigating --- packages/api/src/routers/temporal.ts | 24 +- packages/app/features/send/confirm/screen.tsx | 208 +++++++----------- packages/app/features/send/screen.tsx | 88 +++++++- packages/app/utils/useTemporalStatus.ts | 42 ++++ 4 files changed, 234 insertions(+), 128 deletions(-) create mode 100644 packages/app/utils/useTemporalStatus.ts diff --git a/packages/api/src/routers/temporal.ts b/packages/api/src/routers/temporal.ts index 4b5f5e280..ed8c252fd 100644 --- a/packages/api/src/routers/temporal.ts +++ b/packages/api/src/routers/temporal.ts @@ -7,6 +7,7 @@ import type { UserOperation } from 'permissionless' import { TransferWorkflow } from '@my/workflows/all-workflows' import { baseMainnetClient, entryPointAddress } from '@my/wagmi' import { getUserOperationHash } from 'permissionless/utils' +import { supabaseAdmin } from 'app/utils/supabase/admin' const log = debug('api:temporal') @@ -38,8 +39,29 @@ export const temporalRouter = createTRPCRouter({ workflowId: `temporal/transfer/${user.id}/${userOpHash}`, args: [userOp], }) + await baseMainnetClient.call({ + account: entryPointAddress[baseMainnetClient.chain.id], + to: userOp.sender, + data: userOp.callData, + }) log(`Workflow Created: ${workflowId}`) - return workflowId + const { data: transfer, error: transferError } = await supabaseAdmin + .schema('temporal') + .from('send_account_transfers') + .select('status') + .eq('workflow_id', workflowId) + .single() + + if (transferError) { + log('Error fetching transfer status', transferError) + return { workflowId, status: null } + } + + if (!transfer) { + return { workflowId, status: null } + } + + return { workflowId, status: transfer.status } } catch (error) { throw new TRPCError({ code: 'INTERNAL_SERVER_ERROR', 
diff --git a/packages/app/features/send/confirm/screen.tsx b/packages/app/features/send/confirm/screen.tsx index 5563c5b29..537d4e137 100644 --- a/packages/app/features/send/confirm/screen.tsx +++ b/packages/app/features/send/confirm/screen.tsx @@ -1,8 +1,6 @@ import { Avatar, Button, - ButtonText, - Label, LinkableAvatar, Paragraph, type ParagraphProps, @@ -12,10 +10,9 @@ import { XStack, YStack, type TamaguiElement, - type YStackProps, } from '@my/ui' import { baseMainnet, baseMainnetClient, entryPointAddress } from '@my/wagmi' -import { useQueryClient } from '@tanstack/react-query' +import { useQueryClient, useMutation } from '@tanstack/react-query' import { IconAccount } from 'app/components/icons' import { IconCoin } from 'app/components/icons/IconCoin' import { useSendScreenParams } from 'app/routers/params' @@ -34,7 +31,7 @@ import { formatUnits, isAddress } from 'viem' import { useEstimateFeesPerGas } from 'wagmi' import { useCoin } from 'app/provider/coins' import { useCoinFromSendTokenParam } from 'app/utils/useCoinFromTokenParam' -import { allCoinsDict } from 'app/data/coins' +import { allCoins, allCoinsDict } from 'app/data/coins' import debug from 'debug' import { useTokenPrices } from 'app/utils/useTokenPrices' @@ -42,6 +39,9 @@ import { useTokenPrices } from 'app/utils/useTokenPrices' const log = debug('app:features:send:confirm:screen') import { api } from 'app/utils/api' import { signUserOp } from 'app/utils/signUserOp' +import { decodeTransferUserOp } from 'app/utils/decodeTransferUserOp' +import type { UserOperation } from 'permissionless' +import { TEMPORAL_STATUS_INTERVAL, useTemporalStatus } from 'app/utils/useTemporalStatus' export function SendConfirmScreen() { const [queryParams] = useSendScreenParams() @@ -75,8 +75,11 @@ export function SendConfirm() { const [queryParams] = useSendScreenParams() const { sendToken, recipient, idType, amount } = queryParams const { data: sendAccount, isLoading: isSendAccountLoading } = useSendAccount() - 
const { coin: selectedCoin, tokensQuery, ethQuery } = useCoinFromSendTokenParam() + const { coin: selectedCoin } = useCoinFromSendTokenParam() + const [workflowId, setWorkflowId] = useState(null) + // states for auth flow + const [error, setError] = useState(null) const { mutateAsync: transfer, isPending: isTransferPending, @@ -106,7 +109,9 @@ export function SendConfirm() { } = useAccountNonce({ sender: sendAccount?.address, }) - const { data: userOp } = useGenerateTransferUserOp({ + + // Only generate UserOp when signing starts + const { data: userOp, isLoading: isGeneratingUserOp } = useGenerateTransferUserOp({ sender: sendAccount?.address, // @ts-expect-error some work to` do here to: profile?.address ?? recipient, @@ -115,6 +120,15 @@ export function SendConfirm() { nonce, }) + const { mutateAsync: validateUserOp, isPending: isValidatePending } = useValidateTransferUserOp() + + const { data: transferStatus, isLoading: isTransferStatusLoading } = useTemporalStatus({ + workflowId: workflowId, + table: 'send_account_transfers', + enabled: workflowId !== null, + refetchInterval: TEMPORAL_STATUS_INTERVAL, + }) + const { data: usdcFees, isLoading: isFeesLoading, @@ -131,14 +145,24 @@ export function SendConfirm() { chainId: baseMainnet.id, }) - const [error, setError] = useState() - const hasEnoughBalance = selectedCoin?.balance && selectedCoin.balance >= BigInt(amount ?? '0') const gas = usdcFees ? usdcFees.baseFee + usdcFees.gasFees : BigInt(Number.MAX_SAFE_INTEGER) const hasEnoughGas = (usdc?.balance ?? BigInt(0)) > (isUSDCSelected ? BigInt(amount ?? '0') + gas : gas) - const canSubmit = BigInt(queryParams.amount ?? 
'0') > 0 && hasEnoughGas && hasEnoughBalance + const isLoading = + nonceIsLoading || + isProfileLoading || + isSendAccountLoading || + isGeneratingUserOp || + isValidatePending || + isGasLoading || + isFeesLoading || + isTransferPending || + isTransferInitialized || + isTransferStatusLoading + + const canSubmit = !isLoading && hasEnoughBalance && hasEnoughGas && feesPerGas const localizedAmount = localizeAmount( formatUnits( @@ -201,18 +225,28 @@ export function SendConfirm() { }) userOp.signature = signature - const workflowId = await transfer({ userOp }) + const validatedUserOp = await validateUserOp(userOp) + assert(!!validatedUserOp, 'Operation expected to fail') - if (workflowId) { + const { workflowId, status } = await transfer({ userOp: validatedUserOp }) + + if (workflowId && status !== 'initialized') { router.replace({ pathname: '/', query: { token: sendToken } }) } + setWorkflowId(workflowId) } catch (e) { console.error(e) setError(e) + setWorkflowId(null) await queryClient.invalidateQueries({ queryKey: [useAccountNonce.queryKey] }) } } + useEffect(() => { + if (!transferStatus || transferStatus === 'initialized') return + router.replace({ pathname: '/', query: { token: sendToken } }) + }, [transferStatus, router, sendToken]) + useEffect(() => { if (submitButtonRef.current) { submitButtonRef.current.focus() @@ -223,8 +257,10 @@ export function SendConfirm() { nonceIsLoading || isProfileLoading || isSendAccountLoading || + isValidatePending || isTransferPending || - isTransferInitialized + isTransferInitialized || + isTransferStatusLoading ) return @@ -365,9 +401,9 @@ export function SendConfirm() { ref={submitButtonRef} theme={canSubmit ? 
'green' : 'red_alt1'} onPress={onSubmit} - br={'$4'} disabledStyle={{ opacity: 0.7, cursor: 'not-allowed', pointerEvents: 'none' }} disabled={!canSubmit} + br={'$4'} gap={4} py={'$5'} width={'100%'} @@ -389,122 +425,42 @@ export function SendConfirm() { } })()} - {/* TODO add this back when backend is ready - - - setParams({ note: text }, { webBehavior: 'replace' })} - fontSize={20} - fontWeight="400" - lineHeight={1} - color="$color12" - borderColor="transparent" - outlineColor="transparent" - $theme-light={{ bc: '$gray3Light' }} - br={'$3'} - bc="$metalTouch" - hoverStyle={{ - borderColor: 'transparent', - outlineColor: 'transparent', - }} - focusStyle={{ - borderColor: 'transparent', - outlineColor: 'transparent', - }} - fontFamily="$mono" - /> - */} ) } -export function SendRecipient({ ...props }: YStackProps) { - const [queryParams] = useSendScreenParams() - const { recipient, idType } = queryParams - const router = useRouter() - const { data: profile, isLoading, error } = useProfileLookup(idType ?? 'tag', recipient ?? '') - const href = profile ? 
`/profile/${profile?.sendid}` : '' - - if (isLoading) return - if (error) throw new Error(error.message) - - return ( - - - - - - - - - - - - - - - {profile?.name} - - - {(() => { - switch (true) { - case idType === 'address': - return shorten(recipient, 5, 4) - case !!profile?.tag: - return `/${profile?.tag}` - default: - return `#${profile?.sendid}` - } - })()} - - - - - ) +function useValidateTransferUserOp() { + return useMutation({ + mutationFn: async (userOp?: UserOperation<'v0.7'>) => { + if (!userOp?.signature) return null + + try { + await baseMainnetClient.call({ + account: entryPointAddress[baseMainnetClient.chain.id], + to: userOp.sender, + data: userOp.callData, + }) + + const { from, to, token, amount } = decodeTransferUserOp({ userOp }) + if (!from || !to || !amount || !token) { + log('Failed to decode transfer user op', { from, to, amount, token }) + throw new Error('Not a valid transfer') + } + if (!allCoins.find((c) => c.token === token)) { + log('Token ${token} is not a supported', { token }) + throw new Error(`Token ${token} is not a supported`) + } + if (amount < 0n) { + log('User Operation has amount < 0', { amount }) + throw new Error('User Operation has amount < 0') + } + return userOp + } catch (e) { + const error = e instanceof Error ? 
e : new Error('Validation failed') + throw error + } + }, + }) } function ErrorMessage({ error, ...props }: ParagraphProps & { error?: string }) { diff --git a/packages/app/features/send/screen.tsx b/packages/app/features/send/screen.tsx index 42aeb8fb4..5a91b00ea 100644 --- a/packages/app/features/send/screen.tsx +++ b/packages/app/features/send/screen.tsx @@ -2,13 +2,18 @@ import type { Functions } from '@my/supabase/database.types' import { Anchor, AnimatePresence, + Avatar, Button, Fade, H4, + Label, + LinkableAvatar, Paragraph, Spinner, Text, + XStack, YStack, + type YStackProps, useToastController, } from '@my/ui' import Search from 'app/components/SearchBar' @@ -17,8 +22,10 @@ import { useSendScreenParams } from 'app/routers/params' import { useProfileLookup } from 'app/utils/useProfileLookup' import { useState } from 'react' import { SendAmountForm } from './SendAmountForm' -import { SendRecipient } from './confirm/screen' import { type Address, isAddress } from 'viem' +import { useRouter } from 'solito/router' +import { IconAccount } from 'app/components/icons' +import { shorten } from 'app/utils/strings' export const SendScreen = () => { const [{ recipient, idType }] = useSendScreenParams() @@ -69,6 +76,85 @@ function SendSearchBody() { ) } +export function SendRecipient({ ...props }: YStackProps) { + const [queryParams] = useSendScreenParams() + const { recipient, idType } = queryParams + const router = useRouter() + const { data: profile, isLoading, error } = useProfileLookup(idType ?? 'tag', recipient ?? '') + const href = profile ? 
`/profile/${profile?.sendid}` : '' + + if (isLoading) return + if (error) throw new Error(error.message) + + return ( + + + + + + + + + + + + + + + {profile?.name} + + + {(() => { + switch (true) { + case idType === 'address': + return shorten(recipient, 5, 4) + case !!profile?.tag: + return `/${profile?.tag}` + default: + return `#${profile?.sendid}` + } + })()} + + + + + ) +} + function NoSendAccount({ profile }: { profile: Functions<'profile_lookup'>[number] }) { const toast = useToastController() const [clicked, setClicked] = useState(false) diff --git a/packages/app/utils/useTemporalStatus.ts b/packages/app/utils/useTemporalStatus.ts new file mode 100644 index 000000000..1498198d8 --- /dev/null +++ b/packages/app/utils/useTemporalStatus.ts @@ -0,0 +1,42 @@ +import { useQuery, type UseQueryOptions } from '@tanstack/react-query' +import { useSupabase } from './supabase/useSupabase' +import type { Database } from '@my/supabase/database-generated.types' + +type TemporalTables = Database['temporal']['Tables'] +type TableNames = keyof TemporalTables + +export type TemporalStatus = { + [Table in TableNames]: TemporalTables[Table]['Row']['status'] +} + +export const TEMPORAL_STATUS_INTERVAL = 1000 + +export function useTemporalStatus({ + workflowId, + table, + ...options +}: { workflowId: string | null; table: TableNames } & Omit< + UseQueryOptions, + 'queryKey' | 'queryFn' +>) { + const supabase = useSupabase() + + return useQuery({ + ...options, + queryKey: ['workflowStatus', workflowId, table], + queryFn: async () => { + if (!workflowId) return null + + const { data, error } = await supabase + .schema('temporal') + .from(table) + .select('status') + .eq('workflow_id', workflowId) + .single() + + if (error) throw error + + return data.status + }, + }) +} From 6c10ae3873e64acbe4ba6d8ddd55fde1d331e52b Mon Sep 17 00:00:00 2001 From: youngkidwarrior Date: Fri, 14 Mar 2025 13:11:56 -0700 Subject: [PATCH 57/58] extend trigger activity insert and update --- 
...0250307021828_create_temporal_transfers_table.sql | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/supabase/migrations/20250307021828_create_temporal_transfers_table.sql b/supabase/migrations/20250307021828_create_temporal_transfers_table.sql index 3d4f833b5..ceb8a7b32 100644 --- a/supabase/migrations/20250307021828_create_temporal_transfers_table.sql +++ b/supabase/migrations/20250307021828_create_temporal_transfers_table.sql @@ -86,7 +86,7 @@ CREATE TRIGGER temporal_send_account_transfers_trigger_before_insert FOR EACH ROW EXECUTE FUNCTION temporal.temporal_transfer_before_insert(); -CREATE OR REPLACE FUNCTION temporal.temporal_transfer_after_update() +CREATE OR REPLACE FUNCTION temporal.temporal_transfer_after_upsert() RETURNS TRIGGER LANGUAGE plpgsql SECURITY DEFINER @@ -95,6 +95,10 @@ DECLARE _to_user_id uuid; _data jsonb; BEGIN + IF NOT NEW.data ? 'log_addr' THEN + RETURN NEW; + END IF; + IF NEW.data ? 't' THEN SELECT user_id INTO _to_user_id FROM send_accounts @@ -147,10 +151,10 @@ BEGIN END; $$; -CREATE TRIGGER temporal_send_account_transfers_trigger_after_update - AFTER UPDATE ON temporal.send_account_transfers +CREATE TRIGGER temporal_send_account_transfers_trigger_after_upsert + AFTER INSERT OR UPDATE ON temporal.send_account_transfers FOR EACH ROW - EXECUTE FUNCTION temporal.temporal_transfer_after_update(); + EXECUTE FUNCTION temporal.temporal_transfer_after_upsert(); CREATE OR REPLACE FUNCTION temporal.temporal_send_account_transfers_trigger_delete_activity() RETURNS TRIGGER From b528196fbc18aaeadaa56de1ce205d8b0c66f90f Mon Sep 17 00:00:00 2001 From: youngkidwarrior Date: Fri, 14 Mar 2025 13:14:46 -0700 Subject: [PATCH 58/58] Fix TokenDetails test --- .../__snapshots__/TokenDetails.test.tsx.snap | 2490 ++++++----------- .../utils/__mocks__/useTokenActivityFeed.ts | 8 +- 2 files changed, 788 insertions(+), 1710 deletions(-) diff --git a/packages/app/features/home/__snapshots__/TokenDetails.test.tsx.snap 
b/packages/app/features/home/__snapshots__/TokenDetails.test.tsx.snap index b4cb0f2ff..4e6fc9b4a 100644 --- a/packages/app/features/home/__snapshots__/TokenDetails.test.tsx.snap +++ b/packages/app/features/home/__snapshots__/TokenDetails.test.tsx.snap @@ -238,45 +238,36 @@ exports[`TokenDetails renders correctly 1`] = ` } } > - - Activity - + + Activity + - - - - - - + + + + + + + + + + + + - + + Withdraw + + + 10 USDC + + + + > + + 0x93F...761a + + + 7 mon ago + + - - +
- - Withdraw - - - 10 USDC - - - - - 0x93F...761a - - - 7 mon ago - - - - - - - - - - - - - - + + + + + + + + + + + + - + + Deposit + + + 20 USDC + + + + > + + 0xa71...0000 + + + 7 mon ago + + - - +
- - Deposit - - - 20 USDC - - - - - 0xa71...0000 - - - 7 mon ago - - - - - - - - - - - - - - - - - - - - - - - - - Received - - - 30 USDC - - - - - - /alice - - - - 7 mon ago - - - - - - - - - "backgroundColor": "#111f22", - "borderBottomLeftRadius": 16, - "borderBottomRightRadius": 16, - "borderTopLeftRadius": 16, - "borderTopRightRadius": 16, - "flexDirection": "column", - "paddingBottom": 7, - "paddingLeft": 7, - "paddingRight": 7, - "paddingTop": 7, - "position": "relative", - } - } - testID="TokenDetailsHistory" - > - - - - - - - - - - - - - - - - - - - Withdraw - - - 10 USDC - - - - - 0x93F...761a - - - 7 mon ago - - - - - - - - - - - - - - - - - - - - - - - - - Deposit - - - 20 USDC - - - - - 0xa71...0000 - - - 7 mon ago - - - - - - - - - - - - - - - - + > + + + + + + + + + + + + + + + - + + Received + + + 30 USDC + + + + > + + + + /alice + + + + + 7 mon ago + + - - - - - - Received - - - 30 USDC - - - - - - - /alice - - - - - 7 mon ago - - - - +
+
+
-
diff --git a/packages/app/features/home/utils/__mocks__/useTokenActivityFeed.ts b/packages/app/features/home/utils/__mocks__/useTokenActivityFeed.ts index 8fe6970ee..59f2b667f 100644 --- a/packages/app/features/home/utils/__mocks__/useTokenActivityFeed.ts +++ b/packages/app/features/home/utils/__mocks__/useTokenActivityFeed.ts @@ -1,6 +1,5 @@ import { SendAccountTransfersEventSchema } from 'app/utils/zod/activity' import { mockUsdcTransfers } from './mock-usdc-transfers' -import { hexToBytea } from 'app/utils/hexToBytea' const tokenTransfersByLogAddr = { '\\x833589fcd6edb6e08f4c7c32d4f71b54bda02913': mockUsdcTransfers.map((t) => @@ -8,13 +7,12 @@ const tokenTransfersByLogAddr = { ), } -const mockUseTokenActivityFeed = jest.fn(({ token }) => { - const logAddress = hexToBytea(token) - const pages = tokenTransfersByLogAddr[logAddress] +const mockUseTokenActivityFeed = jest.fn(({ address }) => { + const pages = tokenTransfersByLogAddr[address] if (!pages) throw new Error('No pages found') return { data: { - pages: [tokenTransfersByLogAddr[logAddress]], + pages: [tokenTransfersByLogAddr[address]], }, isLoading: false, error: null,