From 118a8f095d60f3ee39df604d8061d05abd171b99 Mon Sep 17 00:00:00 2001 From: waleed Date: Thu, 14 May 2026 13:49:16 -0700 Subject: [PATCH 01/12] fix(security): supabase rpc path validation, ssh stream byte cap, storage quota coverage --- apps/sim/app/api/files/multipart/route.ts | 27 ++++++++++++------- .../api/tools/ssh/read-file-content/route.ts | 7 +++++ apps/sim/lib/uploads/shared/types.ts | 11 ++++++++ apps/sim/tools/supabase/vector_search.ts | 6 +++-- biome.json | 5 ++-- 5 files changed, 41 insertions(+), 15 deletions(-) diff --git a/apps/sim/app/api/files/multipart/route.ts b/apps/sim/app/api/files/multipart/route.ts index 836fc40ad6b..235c357cd44 100644 --- a/apps/sim/app/api/files/multipart/route.ts +++ b/apps/sim/app/api/files/multipart/route.ts @@ -22,7 +22,7 @@ import { type UploadTokenPayload, verifyUploadToken, } from '@/lib/uploads/core/upload-token' -import type { StorageConfig } from '@/lib/uploads/shared/types' +import { QUOTA_EXEMPT_STORAGE_CONTEXTS, type StorageConfig } from '@/lib/uploads/shared/types' import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils' const logger = createLogger('MultipartUploadAPI') @@ -135,6 +135,22 @@ export const POST = withRouteHandler(async (request: NextRequest) => { const config = getStorageConfig(storageContext) + // Apply storage quota to all user-driven upload contexts (not system/metadata contexts) + if ( + !QUOTA_EXEMPT_STORAGE_CONTEXTS.has(context as StorageContext) && + typeof fileSize === 'number' && + fileSize > 0 + ) { + const { checkStorageQuota } = await import('@/lib/billing/storage') + const quotaCheck = await checkStorageQuota(userId, fileSize) + if (!quotaCheck.allowed) { + return NextResponse.json( + { error: quotaCheck.error || 'Storage limit exceeded' }, + { status: 413 } + ) + } + } + let customKey: string | undefined if (context === 'workspace' || context === 'mothership') { const { MAX_WORKSPACE_FILE_SIZE } = await import('@/lib/uploads/shared/types') @@ -149,15 +165,6 
@@ export const POST = withRouteHandler(async (request: NextRequest) => { '@/lib/uploads/contexts/workspace/workspace-file-manager' ) customKey = generateWorkspaceFileKey(workspaceId, fileName) - - const { checkStorageQuota } = await import('@/lib/billing/storage') - const quotaCheck = await checkStorageQuota(userId, fileSize) - if (!quotaCheck.allowed) { - return NextResponse.json( - { error: quotaCheck.error || 'Storage limit exceeded' }, - { status: 413 } - ) - } } else if (context === 'execution') { const workflowId = (data as { workflowId?: unknown }).workflowId const executionId = (data as { executionId?: unknown }).executionId diff --git a/apps/sim/app/api/tools/ssh/read-file-content/route.ts b/apps/sim/app/api/tools/ssh/read-file-content/route.ts index 8a91bf6edd5..778bbc1f634 100644 --- a/apps/sim/app/api/tools/ssh/read-file-content/route.ts +++ b/apps/sim/app/api/tools/ssh/read-file-content/route.ts @@ -73,9 +73,16 @@ export const POST = withRouteHandler(async (request: NextRequest) => { const content = await new Promise((resolve, reject) => { const chunks: Buffer[] = [] + let totalBytes = 0 const readStream = sftp.createReadStream(filePath) readStream.on('data', (chunk: Buffer) => { + totalBytes += chunk.length + if (totalBytes > maxBytes) { + readStream.destroy() + reject(new Error(`File exceeds maximum allowed size of ${params.maxSize}MB`)) + return + } chunks.push(chunk) }) diff --git a/apps/sim/lib/uploads/shared/types.ts b/apps/sim/lib/uploads/shared/types.ts index 30b5cd6b7b6..976a8104b10 100644 --- a/apps/sim/lib/uploads/shared/types.ts +++ b/apps/sim/lib/uploads/shared/types.ts @@ -23,6 +23,17 @@ export type StorageContext = | 'logs' | 'workspace-logos' +/** + * Contexts exempt from storage quota checks — small metadata assets not managed + * by the user (profile pictures, logos, OG images). All other contexts represent + * user-driven uploads and must pass quota validation before upload is initiated. 
+ */ +export const QUOTA_EXEMPT_STORAGE_CONTEXTS = new Set([ + 'profile-pictures', + 'workspace-logos', + 'og-images', +]) + export interface FileInfo { path: string key: string diff --git a/apps/sim/tools/supabase/vector_search.ts b/apps/sim/tools/supabase/vector_search.ts index ba9a3a1b8ae..e8c8cc5d166 100644 --- a/apps/sim/tools/supabase/vector_search.ts +++ b/apps/sim/tools/supabase/vector_search.ts @@ -1,3 +1,4 @@ +import { validateDatabaseIdentifier } from '@/lib/core/security/input-validation' import type { SupabaseVectorSearchParams, SupabaseVectorSearchResponse, @@ -56,8 +57,9 @@ export const vectorSearchTool: ToolConfig< request: { url: (params) => { - // Use RPC endpoint for calling PostgreSQL functions - return `${supabaseBaseUrl(params.projectId)}/rest/v1/rpc/${params.functionName}` + const fnValidation = validateDatabaseIdentifier(params.functionName, 'functionName') + if (!fnValidation.isValid) throw new Error(fnValidation.error) + return `${supabaseBaseUrl(params.projectId)}/rest/v1/rpc/${encodeURIComponent(params.functionName)}` }, method: 'POST', headers: (params) => ({ diff --git a/biome.json b/biome.json index f240ffb0979..19d6eea37d3 100644 --- a/biome.json +++ b/biome.json @@ -6,7 +6,7 @@ "includes": [ "**", "!**/.next", - "!**/.next/**", + "!**/.next", "!**/next-env.d.ts", "!**/out", "!**/dist", @@ -69,8 +69,7 @@ "rules": { "recommended": true, "nursery": { - "useSortedClasses": "warn", - "noNestedComponentDefinitions": "off" + "useSortedClasses": "warn" }, "a11y": { "noSvgWithoutTitle": "off", From 1469ba16805f80e513d751d89a8c0f7287c642f7 Mon Sep 17 00:00:00 2001 From: waleed Date: Thu, 14 May 2026 14:07:29 -0700 Subject: [PATCH 02/12] fix(security): scope execution log writes to owning workflow; add env-var workspace membership guard Closes two cross-tenant vulnerabilities: 1. 
Workflow log cross-tenant write (route.ts + logging-session.ts): - Route: SELECT before creating LoggingSession to verify executionId belongs to the claimed workflowId; reject with 404 if owned by a different workflow. - LoggingSession: add workflow_id to all UPDATE/SELECT WHERE clauses (raw SQL marker queries, flushAccumulatedCost, loadExistingCost) so writes are a no-op if executionId was somehow injected. 2. Env-var workspace membership guard (environment/utils.ts): - getPersonalAndWorkspaceEnv now calls checkWorkspaceAccess when workspaceId is provided; throws if the userId is not a member, preventing any future caller from reading another workspace's decrypted secrets without explicit membership verification at the call site. --- apps/sim/app/api/workflows/[id]/log/route.ts | 18 +++++++++++++++ apps/sim/lib/environment/utils.ts | 8 +++++++ .../sim/lib/logs/execution/logging-session.ts | 22 ++++++++++++++++--- 3 files changed, 45 insertions(+), 3 deletions(-) diff --git a/apps/sim/app/api/workflows/[id]/log/route.ts b/apps/sim/app/api/workflows/[id]/log/route.ts index 266f53771b7..765797c567f 100644 --- a/apps/sim/app/api/workflows/[id]/log/route.ts +++ b/apps/sim/app/api/workflows/[id]/log/route.ts @@ -1,4 +1,7 @@ +import { db } from '@sim/db' +import { workflowExecutionLogs } from '@sim/db/schema' import { createLogger } from '@sim/logger' +import { eq } from 'drizzle-orm' import type { NextRequest } from 'next/server' import { workflowLogContract } from '@/lib/api/contracts/workflows' import { parseRequest } from '@/lib/api/server' @@ -40,6 +43,21 @@ export const POST = withRouteHandler( return createErrorResponse('executionId is required when logging results', 400) } + // Verify that if executionId already exists in the DB it belongs to this workflow, + // preventing a cross-tenant log write by a caller who owns a different workflow. 
+ const [existingLog] = await db + .select({ workflowId: workflowExecutionLogs.workflowId }) + .from(workflowExecutionLogs) + .where(eq(workflowExecutionLogs.executionId, executionId)) + .limit(1) + + if (existingLog && existingLog.workflowId !== id) { + logger.warn( + `[${requestId}] executionId ${executionId} belongs to workflow ${existingLog.workflowId}, not ${id}` + ) + return createErrorResponse('Execution not found', 404) + } + logger.info(`[${requestId}] Persisting execution result for workflow: ${id}`, { executionId, success: result.success, diff --git a/apps/sim/lib/environment/utils.ts b/apps/sim/lib/environment/utils.ts index 7442506b8f1..6f26baae83c 100644 --- a/apps/sim/lib/environment/utils.ts +++ b/apps/sim/lib/environment/utils.ts @@ -9,6 +9,7 @@ import { getAccessibleEnvCredentials, syncPersonalEnvCredentialsForUser, } from '@/lib/credentials/environment' +import { checkWorkspaceAccess } from '@/lib/workspaces/permissions/utils' const logger = createLogger('EnvironmentUtils') const EFFECTIVE_ENV_CACHE_TTL_MS = 15_000 @@ -72,6 +73,13 @@ export async function getPersonalAndWorkspaceEnv( conflicts: string[] decryptionFailures: string[] }> { + if (workspaceId) { + const access = await checkWorkspaceAccess(workspaceId, userId) + if (!access.hasAccess) { + throw new Error(`Access denied to workspace ${workspaceId}`) + } + } + const [personalRows, workspaceRows, accessibleEnvCredentials] = await Promise.all([ db.select().from(environment).where(eq(environment.userId, userId)).limit(1), workspaceId diff --git a/apps/sim/lib/logs/execution/logging-session.ts b/apps/sim/lib/logs/execution/logging-session.ts index a2df1c58b7c..6c8452d177d 100644 --- a/apps/sim/lib/logs/execution/logging-session.ts +++ b/apps/sim/lib/logs/execution/logging-session.ts @@ -2,7 +2,7 @@ import { db } from '@sim/db' import { workflowExecutionLogs } from '@sim/db/schema' import { createLogger } from '@sim/logger' import { toError } from '@sim/utils/errors' -import { eq, sql } from 
'drizzle-orm' +import { and, eq, sql } from 'drizzle-orm' import { BASE_EXECUTION_CHARGE } from '@/lib/billing/constants' import { executionLogger } from '@/lib/logs/execution/logger' import { @@ -29,6 +29,7 @@ type TriggerData = Record & { function buildStartedMarkerPersistenceQuery(params: { executionId: string + workflowId: string marker: ExecutionLastStartedBlock }) { const markerJson = JSON.stringify(params.marker) @@ -41,6 +42,7 @@ function buildStartedMarkerPersistenceQuery(params: { true ) WHERE execution_id = ${params.executionId} + AND workflow_id = ${params.workflowId} AND COALESCE( jsonb_extract_path_text(COALESCE(execution_data, '{}'::jsonb), 'lastStartedBlock', 'startedAt'), '' @@ -49,6 +51,7 @@ function buildStartedMarkerPersistenceQuery(params: { function buildCompletedMarkerPersistenceQuery(params: { executionId: string + workflowId: string marker: ExecutionLastCompletedBlock }) { const markerJson = JSON.stringify(params.marker) @@ -61,6 +64,7 @@ function buildCompletedMarkerPersistenceQuery(params: { true ) WHERE execution_id = ${params.executionId} + AND workflow_id = ${params.workflowId} AND COALESCE( jsonb_extract_path_text(COALESCE(execution_data, '{}'::jsonb), 'lastCompletedBlock', 'endedAt'), '' @@ -190,6 +194,7 @@ export class LoggingSession { await db.execute( buildStartedMarkerPersistenceQuery({ executionId: this.executionId, + workflowId: this.workflowId, marker, }) ) @@ -205,6 +210,7 @@ export class LoggingSession { await db.execute( buildCompletedMarkerPersistenceQuery({ executionId: this.executionId, + workflowId: this.workflowId, marker, }) ) @@ -357,7 +363,12 @@ export class LoggingSession { models: this.accumulatedCost.models, }, }) - .where(eq(workflowExecutionLogs.executionId, this.executionId)) + .where( + and( + eq(workflowExecutionLogs.workflowId, this.workflowId), + eq(workflowExecutionLogs.executionId, this.executionId) + ) + ) this.costFlushed = true } catch (error) { @@ -372,7 +383,12 @@ export class LoggingSession { const 
[existing] = await db .select({ cost: workflowExecutionLogs.cost }) .from(workflowExecutionLogs) - .where(eq(workflowExecutionLogs.executionId, this.executionId)) + .where( + and( + eq(workflowExecutionLogs.workflowId, this.workflowId), + eq(workflowExecutionLogs.executionId, this.executionId) + ) + ) .limit(1) if (existing?.cost) { From cc09c93b2d2810083d61c76fdb66ab3d864d9be7 Mon Sep 17 00:00:00 2001 From: waleed Date: Thu, 14 May 2026 14:24:51 -0700 Subject: [PATCH 03/12] fix(security): remove fileSize > 0 quota bypass gate; exempt logs context from quota --- apps/sim/app/api/files/multipart/route.ts | 3 +-- apps/sim/lib/uploads/shared/types.ts | 1 + 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/apps/sim/app/api/files/multipart/route.ts b/apps/sim/app/api/files/multipart/route.ts index 235c357cd44..465a1898051 100644 --- a/apps/sim/app/api/files/multipart/route.ts +++ b/apps/sim/app/api/files/multipart/route.ts @@ -138,8 +138,7 @@ export const POST = withRouteHandler(async (request: NextRequest) => { // Apply storage quota to all user-driven upload contexts (not system/metadata contexts) if ( !QUOTA_EXEMPT_STORAGE_CONTEXTS.has(context as StorageContext) && - typeof fileSize === 'number' && - fileSize > 0 + typeof fileSize === 'number' ) { const { checkStorageQuota } = await import('@/lib/billing/storage') const quotaCheck = await checkStorageQuota(userId, fileSize) diff --git a/apps/sim/lib/uploads/shared/types.ts b/apps/sim/lib/uploads/shared/types.ts index 976a8104b10..0cd8705b1cb 100644 --- a/apps/sim/lib/uploads/shared/types.ts +++ b/apps/sim/lib/uploads/shared/types.ts @@ -32,6 +32,7 @@ export const QUOTA_EXEMPT_STORAGE_CONTEXTS = new Set([ 'profile-pictures', 'workspace-logos', 'og-images', + 'logs', ]) export interface FileInfo { From 5effc1f2bc7d7d3842a9a494f8ddb42ec890cae9 Mon Sep 17 00:00:00 2001 From: waleed Date: Thu, 14 May 2026 14:34:40 -0700 Subject: [PATCH 04/12] chore: remove extraneous inline comments --- 
apps/sim/app/api/files/multipart/route.ts | 1 - apps/sim/app/api/workflows/[id]/log/route.ts | 2 -- 2 files changed, 3 deletions(-) diff --git a/apps/sim/app/api/files/multipart/route.ts b/apps/sim/app/api/files/multipart/route.ts index 465a1898051..71cd8b82287 100644 --- a/apps/sim/app/api/files/multipart/route.ts +++ b/apps/sim/app/api/files/multipart/route.ts @@ -135,7 +135,6 @@ export const POST = withRouteHandler(async (request: NextRequest) => { const config = getStorageConfig(storageContext) - // Apply storage quota to all user-driven upload contexts (not system/metadata contexts) if ( !QUOTA_EXEMPT_STORAGE_CONTEXTS.has(context as StorageContext) && typeof fileSize === 'number' diff --git a/apps/sim/app/api/workflows/[id]/log/route.ts b/apps/sim/app/api/workflows/[id]/log/route.ts index 765797c567f..8d319910a88 100644 --- a/apps/sim/app/api/workflows/[id]/log/route.ts +++ b/apps/sim/app/api/workflows/[id]/log/route.ts @@ -43,8 +43,6 @@ export const POST = withRouteHandler( return createErrorResponse('executionId is required when logging results', 400) } - // Verify that if executionId already exists in the DB it belongs to this workflow, - // preventing a cross-tenant log write by a caller who owns a different workflow. 
const [existingLog] = await db .select({ workflowId: workflowExecutionLogs.workflowId }) .from(workflowExecutionLogs) From f7e587a9b53b811a9d31593a2f7807da37371f31 Mon Sep 17 00:00:00 2001 From: waleed Date: Thu, 14 May 2026 14:35:53 -0700 Subject: [PATCH 05/12] fix(security): scope markExecutionAsFailed UPDATE by workflowId; thread workflowId through HITL callers --- apps/sim/lib/logs/execution/logging-session.ts | 17 ++++++++++++++--- .../executor/human-in-the-loop-manager.ts | 8 ++++++-- 2 files changed, 20 insertions(+), 5 deletions(-) diff --git a/apps/sim/lib/logs/execution/logging-session.ts b/apps/sim/lib/logs/execution/logging-session.ts index 6c8452d177d..e517d6bc1ac 100644 --- a/apps/sim/lib/logs/execution/logging-session.ts +++ b/apps/sim/lib/logs/execution/logging-session.ts @@ -1080,13 +1080,19 @@ export class LoggingSession { async markAsFailed(errorMessage?: string): Promise { await this.waitForCompletion() - await LoggingSession.markExecutionAsFailed(this.executionId, errorMessage, this.requestId) + await LoggingSession.markExecutionAsFailed( + this.executionId, + errorMessage, + this.requestId, + this.workflowId + ) } static async markExecutionAsFailed( executionId: string, errorMessage?: string, - requestId?: string + requestId?: string, + workflowId?: string ): Promise { try { const message = errorMessage || 'Run failed' @@ -1109,7 +1115,12 @@ export class LoggingSession { to_jsonb('force_failed'::text) )`, }) - .where(eq(workflowExecutionLogs.executionId, executionId)) + .where( + and( + eq(workflowExecutionLogs.executionId, executionId), + workflowId ? 
eq(workflowExecutionLogs.workflowId, workflowId) : undefined + ) + ) logger.info(`[${requestId || 'unknown'}] Marked execution ${executionId} as failed`) } catch (error) { diff --git a/apps/sim/lib/workflows/executor/human-in-the-loop-manager.ts b/apps/sim/lib/workflows/executor/human-in-the-loop-manager.ts index 06b67ab1806..96054e5108e 100644 --- a/apps/sim/lib/workflows/executor/human-in-the-loop-manager.ts +++ b/apps/sim/lib/workflows/executor/human-in-the-loop-manager.ts @@ -444,7 +444,9 @@ export class PauseResumeManager { }) await LoggingSession.markExecutionAsFailed( effectiveExecutionId, - 'Missing snapshot seed for paused execution' + 'Missing snapshot seed for paused execution', + undefined, + pausedExecution.workflowId ) } else { try { @@ -462,7 +464,9 @@ export class PauseResumeManager { }) await LoggingSession.markExecutionAsFailed( effectiveExecutionId, - `Failed to persist pause state: ${toError(pauseError).message}` + `Failed to persist pause state: ${toError(pauseError).message}`, + undefined, + pausedExecution.workflowId ) } } From 0402e4320c1843aa29ee682546401912485a349d Mon Sep 17 00:00:00 2001 From: waleed Date: Thu, 14 May 2026 14:38:24 -0700 Subject: [PATCH 06/12] fix(security): add personal credential ownership check in sharepoint site route; scope markExecutionAsFailed by workflowId --- apps/sim/app/api/tools/sharepoint/site/route.ts | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/apps/sim/app/api/tools/sharepoint/site/route.ts b/apps/sim/app/api/tools/sharepoint/site/route.ts index e69c854b026..c422c35e285 100644 --- a/apps/sim/app/api/tools/sharepoint/site/route.ts +++ b/apps/sim/app/api/tools/sharepoint/site/route.ts @@ -70,6 +70,10 @@ export const GET = withRouteHandler(async (request: NextRequest) => { const accountRow = credentials[0] + if (!resolved.workspaceId && accountRow.userId !== session.user.id) { + return NextResponse.json({ error: 'Forbidden' }, { status: 403 }) + } + const accessToken = await 
refreshAccessTokenIfNeeded( resolved.accountId, accountRow.userId, From 737489e4fbf2f1632483317ab9dccfca5c60bbff Mon Sep 17 00:00:00 2001 From: waleed Date: Thu, 14 May 2026 14:39:36 -0700 Subject: [PATCH 07/12] fix: remove logs from user-accessible upload contexts; restore distinct biome .next glob --- apps/sim/app/api/files/multipart/route.ts | 1 - 1 file changed, 1 deletion(-) diff --git a/apps/sim/app/api/files/multipart/route.ts b/apps/sim/app/api/files/multipart/route.ts index 71cd8b82287..c6fd224afb1 100644 --- a/apps/sim/app/api/files/multipart/route.ts +++ b/apps/sim/app/api/files/multipart/route.ts @@ -36,7 +36,6 @@ const ALLOWED_UPLOAD_CONTEXTS = new Set([ 'workspace', 'profile-pictures', 'og-images', - 'logs', 'workspace-logos', ]) From 3e9a5ad156959960324a39c41e13644761db195a Mon Sep 17 00:00:00 2001 From: waleed Date: Thu, 14 May 2026 14:48:23 -0700 Subject: [PATCH 08/12] fix(sharepoint): migrate site route to authorizeCredentialUse The previous fix only checked userId equality for personal credentials and workspace membership (via getUserEntityPermissions) for workspace credentials. authorizeCredentialUse additionally enforces credentialMember access for workspace-scoped credentials, matching the standard pattern used by all other tool selector routes. 
--- .../app/api/tools/sharepoint/site/route.ts | 49 +++---------------- 1 file changed, 7 insertions(+), 42 deletions(-) diff --git a/apps/sim/app/api/tools/sharepoint/site/route.ts b/apps/sim/app/api/tools/sharepoint/site/route.ts index c422c35e285..ce7bcefcbc9 100644 --- a/apps/sim/app/api/tools/sharepoint/site/route.ts +++ b/apps/sim/app/api/tools/sharepoint/site/route.ts @@ -1,15 +1,12 @@ -import { db } from '@sim/db' -import { account } from '@sim/db/schema' import { createLogger } from '@sim/logger' import { generateId } from '@sim/utils/id' -import { eq } from 'drizzle-orm' import { type NextRequest, NextResponse } from 'next/server' import { sharepointSiteQuerySchema } from '@/lib/api/contracts/selectors/sharepoint' import { getValidationErrorMessage } from '@/lib/api/server' -import { getSession } from '@/lib/auth' +import { authorizeCredentialUse } from '@/lib/auth/credential-access' import { validateMicrosoftGraphId } from '@/lib/core/security/input-validation' import { withRouteHandler } from '@/lib/core/utils/with-route-handler' -import { refreshAccessTokenIfNeeded, resolveOAuthAccountId } from '@/app/api/auth/oauth/utils' +import { refreshAccessTokenIfNeeded } from '@/app/api/auth/oauth/utils' export const dynamic = 'force-dynamic' @@ -19,11 +16,6 @@ export const GET = withRouteHandler(async (request: NextRequest) => { const requestId = generateId().slice(0, 8) try { - const session = await getSession() - if (!session?.user?.id) { - return NextResponse.json({ error: 'User not authenticated' }, { status: 401 }) - } - const { searchParams } = new URL(request.url) const validation = sharepointSiteQuerySchema.safeParse({ credentialId: searchParams.get('credentialId') ?? 
'', @@ -42,41 +34,14 @@ export const GET = withRouteHandler(async (request: NextRequest) => { return NextResponse.json({ error: siteIdValidation.error }, { status: 400 }) } - const resolved = await resolveOAuthAccountId(credentialId) - if (!resolved) { - return NextResponse.json({ error: 'Credential not found' }, { status: 404 }) - } - - if (resolved.workspaceId) { - const { getUserEntityPermissions } = await import('@/lib/workspaces/permissions/utils') - const perm = await getUserEntityPermissions( - session.user.id, - 'workspace', - resolved.workspaceId - ) - if (perm === null) { - return NextResponse.json({ error: 'Forbidden' }, { status: 403 }) - } - } - - const credentials = await db - .select() - .from(account) - .where(eq(account.id, resolved.accountId)) - .limit(1) - if (!credentials.length) { - return NextResponse.json({ error: 'Credential not found' }, { status: 404 }) - } - - const accountRow = credentials[0] - - if (!resolved.workspaceId && accountRow.userId !== session.user.id) { - return NextResponse.json({ error: 'Forbidden' }, { status: 403 }) + const authz = await authorizeCredentialUse(request, { credentialId }) + if (!authz.ok || !authz.credentialOwnerUserId || !authz.resolvedCredentialId) { + return NextResponse.json({ error: authz.error || 'Unauthorized' }, { status: 403 }) } const accessToken = await refreshAccessTokenIfNeeded( - resolved.accountId, - accountRow.userId, + authz.resolvedCredentialId, + authz.credentialOwnerUserId, requestId ) if (!accessToken) { From 98c52f0caeb8f2efe6cd161bd54c041a0fd01e8f Mon Sep 17 00:00:00 2001 From: waleed Date: Thu, 14 May 2026 14:54:11 -0700 Subject: [PATCH 09/12] fix(logging): make workflowId required in markExecutionAsFailed MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Making workflowId optional left a footgun — future callers could silently omit it and the WHERE clause would degrade to executionId-only, losing the cross-tenant scoping guarantee. 
All callers already supply workflowId, so making it required (with string | undefined for the middle params to keep call sites unchanged) closes the gap without touching any caller. --- apps/sim/lib/logs/execution/logging-session.ts | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/apps/sim/lib/logs/execution/logging-session.ts b/apps/sim/lib/logs/execution/logging-session.ts index e517d6bc1ac..88ac4cbcad2 100644 --- a/apps/sim/lib/logs/execution/logging-session.ts +++ b/apps/sim/lib/logs/execution/logging-session.ts @@ -1090,9 +1090,9 @@ export class LoggingSession { static async markExecutionAsFailed( executionId: string, - errorMessage?: string, - requestId?: string, - workflowId?: string + errorMessage: string | undefined, + requestId: string | undefined, + workflowId: string ): Promise { try { const message = errorMessage || 'Run failed' @@ -1118,7 +1118,7 @@ export class LoggingSession { .where( and( eq(workflowExecutionLogs.executionId, executionId), - workflowId ? 
eq(workflowExecutionLogs.workflowId, workflowId) : undefined + eq(workflowExecutionLogs.workflowId, workflowId) ) ) From be7e6431499976c9319c758c079067cdc7b5ac4a Mon Sep 17 00:00:00 2001 From: waleed Date: Thu, 14 May 2026 15:08:25 -0700 Subject: [PATCH 10/12] test(security): add tests for cross-tenant log guard, quota bypass fix, and workflowId scoping - log/route.test.ts: verifies cross-tenant executionId guard returns 404 when the execution belongs to a different workflow, and passes for same workflow or fresh executions - multipart/route.test.ts: verifies fileSize:0 no longer bypasses quota check and that the logs context is rejected at the endpoint level - logging-session.test.ts: verifies markExecutionAsFailed scopes by both executionId and workflowId, and that the instance method forwards workflowId --- .../sim/app/api/files/multipart/route.test.ts | 75 ++++++++++++ .../app/api/workflows/[id]/log/route.test.ts | 108 ++++++++++++++++++ .../logs/execution/logging-session.test.ts | 42 +++++++ 3 files changed, 225 insertions(+) create mode 100644 apps/sim/app/api/workflows/[id]/log/route.test.ts diff --git a/apps/sim/app/api/files/multipart/route.test.ts b/apps/sim/app/api/files/multipart/route.test.ts index b70fed81b82..520a05dd065 100644 --- a/apps/sim/app/api/files/multipart/route.test.ts +++ b/apps/sim/app/api/files/multipart/route.test.ts @@ -53,6 +53,15 @@ vi.mock('@/lib/uploads/providers/blob/client', () => ({ vi.mock('@/lib/workspaces/permissions/utils', () => permissionsMock) +const { mockCheckStorageQuota, mockInitiateS3MultipartUpload } = vi.hoisted(() => ({ + mockCheckStorageQuota: vi.fn(), + mockInitiateS3MultipartUpload: vi.fn(), +})) + +vi.mock('@/lib/billing/storage', () => ({ + checkStorageQuota: mockCheckStorageQuota, +})) + import { POST } from '@/app/api/files/multipart/route' const tokenPayload = { @@ -200,3 +209,69 @@ describe('POST /api/files/multipart action=complete', () => { expect(mockCompleteS3MultipartUpload).toHaveBeenCalledTimes(2) 
}) }) + +describe('POST /api/files/multipart action=initiate quota enforcement', () => { + const makeInitiateRequest = (body: unknown) => + new NextRequest('http://localhost/api/files/multipart?action=initiate', { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify(body), + }) + + beforeEach(() => { + vi.clearAllMocks() + authMockFns.mockGetSession.mockResolvedValue({ user: { id: 'user-1' } }) + permissionsMockFns.mockGetUserEntityPermissions.mockResolvedValue('write') + mockIsUsingCloudStorage.mockReturnValue(true) + mockGetStorageProvider.mockReturnValue('s3') + mockGetStorageConfig.mockReturnValue({ bucket: 'b', region: 'r' }) + mockSignUploadToken.mockReturnValue('signed-token') + mockCheckStorageQuota.mockResolvedValue({ allowed: true }) + mockInitiateS3MultipartUpload.mockResolvedValue({ uploadId: 'up-1', key: 'k/file.bin' }) + }) + + it('blocks upload when fileSize: 0 exceeds quota', async () => { + mockCheckStorageQuota.mockResolvedValue({ allowed: false, error: 'Storage limit exceeded' }) + + const res = await makeInitiateRequest({ + fileName: 'file.bin', + contentType: 'application/octet-stream', + fileSize: 0, + workspaceId: 'ws-1', + context: 'knowledge-base', + }) + + const response = await POST(res) + expect(response.status).toBe(413) + const body = await response.json() + expect(body.error).toContain('Storage limit exceeded') + }) + + it('does not check quota for quota-exempt contexts (og-images)', async () => { + const res = await makeInitiateRequest({ + fileName: 'img.png', + contentType: 'image/png', + fileSize: 99999, + workspaceId: 'ws-1', + context: 'og-images', + }) + + const response = await POST(res) + expect(mockCheckStorageQuota).not.toHaveBeenCalled() + }) + + it('rejects logs context — not allowed via the multipart endpoint', async () => { + const res = await makeInitiateRequest({ + fileName: 'exec.log', + contentType: 'text/plain', + fileSize: 1000, + workspaceId: 'ws-1', + context: 'logs', + }) + 
+ const response = await POST(res) + expect(response.status).toBe(400) + const body = await response.json() + expect(body.error).toMatch(/invalid storage context/i) + }) +}) diff --git a/apps/sim/app/api/workflows/[id]/log/route.test.ts b/apps/sim/app/api/workflows/[id]/log/route.test.ts new file mode 100644 index 00000000000..f607ba3d0f0 --- /dev/null +++ b/apps/sim/app/api/workflows/[id]/log/route.test.ts @@ -0,0 +1,108 @@ +/** + * @vitest-environment node + */ +import { authMockFns, dbChainMock, dbChainMockFns, resetDbChainMock } from '@sim/testing' +import { NextRequest } from 'next/server' +import { beforeEach, describe, expect, it, vi } from 'vitest' + +// Override global db mock with the configurable chain mock +vi.mock('@sim/db', () => dbChainMock) + +const { mockValidateWorkflowAccess, mockGetWorkspaceBilledAccountUserId } = vi.hoisted(() => ({ + mockValidateWorkflowAccess: vi.fn(), + mockGetWorkspaceBilledAccountUserId: vi.fn(), +})) + +vi.mock('@/app/api/workflows/middleware', () => ({ + validateWorkflowAccess: mockValidateWorkflowAccess, +})) + +vi.mock('@/lib/workspaces/utils', () => ({ + getWorkspaceBilledAccountUserId: mockGetWorkspaceBilledAccountUserId, +})) + +vi.mock('@/lib/logs/execution/logging-session', () => ({ + LoggingSession: vi.fn().mockImplementation(() => ({ + start: vi.fn().mockResolvedValue(undefined), + markAsFailed: vi.fn().mockResolvedValue(undefined), + safeCompleteWithError: vi.fn().mockResolvedValue(undefined), + safeComplete: vi.fn().mockResolvedValue(undefined), + })), +})) + +vi.mock('@/lib/logs/execution/trace-spans/trace-spans', () => ({ + buildTraceSpans: vi.fn().mockReturnValue([]), +})) + +import { POST } from './route' + +const makeRequest = (workflowId: string, body: unknown) => + new NextRequest(`http://localhost/api/workflows/${workflowId}/log`, { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify(body), + }) + +const validResult = { success: true, output: { value: 42 } } + 
+describe('POST /api/workflows/[id]/log cross-tenant guard', () => { + const OWNER_WORKFLOW_ID = 'wf-owner' + const ATTACKER_WORKFLOW_ID = 'wf-attacker' + const VICTIM_EXECUTION_ID = 'exec-victim-uuid' + + beforeEach(() => { + vi.clearAllMocks() + resetDbChainMock() + authMockFns.mockGetSession.mockResolvedValue({ user: { id: 'user-1' } }) + mockValidateWorkflowAccess.mockResolvedValue({ error: null }) + mockGetWorkspaceBilledAccountUserId.mockResolvedValue('user-1') + // Default: no existing log (fresh execution) + dbChainMockFns.limit.mockResolvedValue([]) + }) + + it('returns 404 when executionId belongs to a different workflow', async () => { + dbChainMockFns.limit.mockResolvedValueOnce([{ workflowId: OWNER_WORKFLOW_ID }]) + + const res = await POST( + makeRequest(ATTACKER_WORKFLOW_ID, { + executionId: VICTIM_EXECUTION_ID, + result: validResult, + }), + { params: Promise.resolve({ id: ATTACKER_WORKFLOW_ID }) } + ) + + expect(res.status).toBe(404) + const body = await res.json() + expect(body.error).toBe('Execution not found') + }) + + it('proceeds when executionId belongs to the same workflow', async () => { + dbChainMockFns.limit.mockResolvedValueOnce([{ workflowId: OWNER_WORKFLOW_ID }]) + + const res = await POST( + makeRequest(OWNER_WORKFLOW_ID, { + executionId: VICTIM_EXECUTION_ID, + result: validResult, + }), + { params: Promise.resolve({ id: OWNER_WORKFLOW_ID }) } + ) + + expect(res.status).not.toBe(404) + expect(res.status).not.toBe(403) + }) + + it('proceeds when executionId has no existing log row (fresh execution)', async () => { + dbChainMockFns.limit.mockResolvedValueOnce([]) + + const res = await POST( + makeRequest(OWNER_WORKFLOW_ID, { + executionId: 'brand-new-execution-id', + result: validResult, + }), + { params: Promise.resolve({ id: OWNER_WORKFLOW_ID }) } + ) + + expect(res.status).not.toBe(404) + expect(res.status).not.toBe(403) + }) +}) diff --git a/apps/sim/lib/logs/execution/logging-session.test.ts 
b/apps/sim/lib/logs/execution/logging-session.test.ts index 33d854e6b87..b4a42952b58 100644 --- a/apps/sim/lib/logs/execution/logging-session.test.ts +++ b/apps/sim/lib/logs/execution/logging-session.test.ts @@ -10,6 +10,7 @@ const dbMocks = vi.hoisted(() => { const update = vi.fn() const execute = vi.fn() const eq = vi.fn() + const and = vi.fn((...args: unknown[]) => ({ type: 'and', args })) const sql = vi.fn((strings: TemplateStringsArray, ...values: unknown[]) => ({ strings, values })) select.mockReturnValue({ from: selectFrom }) @@ -29,6 +30,7 @@ const dbMocks = vi.hoisted(() => { updateWhere, execute, eq, + and, sql, } }) @@ -47,6 +49,7 @@ vi.mock('@sim/db', () => ({ vi.mock('drizzle-orm', () => ({ eq: dbMocks.eq, + and: dbMocks.and, sql: dbMocks.sql, })) @@ -449,3 +452,42 @@ describe('LoggingSession completion retries', () => { expect(session.completing).toBe(true) }) }) + +describe('LoggingSession.markExecutionAsFailed workflowId scoping', () => { + beforeEach(() => { + vi.clearAllMocks() + dbMocks.updateWhere.mockResolvedValue(undefined) + }) + + it('scopes UPDATE by both executionId and workflowId', async () => { + await LoggingSession.markExecutionAsFailed('exec-1', undefined, undefined, 'wf-1') + + expect(dbMocks.update).toHaveBeenCalledTimes(1) + expect(dbMocks.updateSet).toHaveBeenCalledTimes(1) + expect(dbMocks.updateWhere).toHaveBeenCalledTimes(1) + + const whereArgs = dbMocks.updateWhere.mock.calls[0] + expect(whereArgs).toBeDefined() + }) + + it('instance markAsFailed forwards workflowId to the static method', async () => { + const updateWhereSpy = dbMocks.updateWhere + dbMocks.selectLimit.mockResolvedValue([{ executionData: {} }]) + + const session = new LoggingSession('wf-42', 'exec-42', 'api', 'req-1') + await session.markAsFailed('something went wrong') + + expect(updateWhereSpy).toHaveBeenCalledTimes(1) + }) + + it('uses the provided errorMessage in the SQL set', async () => { + const sqlMock = dbMocks.sql + await 
LoggingSession.markExecutionAsFailed('exec-2', 'custom error', undefined, 'wf-2') + + expect(sqlMock).toHaveBeenCalled() + const lastCall = sqlMock.mock.calls.at(-1)! + const [strings, ...values] = lastCall + const combined = String(Array.from(strings)).toLowerCase() + values.join(' ').toLowerCase() + expect(combined).toContain('force_failed') + }) +}) From 102cf76370a6a25c945a08cc115010ff5e80e8a0 Mon Sep 17 00:00:00 2001 From: waleed Date: Thu, 14 May 2026 16:36:31 -0700 Subject: [PATCH 11/12] fix(lint): move IconComponent outside ToolInput to fix noNestedComponentDefinitions --- .../components/tool-input/tool-input.tsx | 23 ++++++++++--------- 1 file changed, 12 insertions(+), 11 deletions(-) diff --git a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/tool-input/tool-input.tsx b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/tool-input/tool-input.tsx index 4462bcda2c5..1fa4255a6f1 100644 --- a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/tool-input/tool-input.tsx +++ b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/tool-input/tool-input.tsx @@ -416,6 +416,18 @@ function createToolIcon( * - Allows drag-and-drop reordering of selected tools * - Supports tool usage control (auto/force/none) for compatible LLM providers */ + +function IconComponent({ + icon: Icon, + className, +}: { + icon?: React.ComponentType<{ className?: string }> + className?: string +}) { + if (!Icon) return null + return <Icon className={className} /> +} + export const ToolInput = memo(function ToolInput({ blockId, subBlockId, @@ -1071,17 +1083,6 @@ export const ToolInput = memo(function ToolInput({ setDragOverIndex(null) } - const IconComponent = ({ - icon: Icon, - className, - }: { - icon?: React.ComponentType<{ className?: string }> - 
className?: string - }) => { - if (!Icon) return null - return <Icon className={className} /> - } - const evaluateParameterCondition = (param: ToolParameterConfig, tool: StoredTool): boolean => { if (!('uiComponent' in param) || !param.uiComponent?.condition) return true const currentValues: Record<string, unknown> = { operation: tool.operation, ...tool.params } From 1f7c652b6a857996a781739563fd8bf4913bc507 Mon Sep 17 00:00:00 2001 From: waleed Date: Thu, 14 May 2026 16:55:56 -0700 Subject: [PATCH 12/12] fix(logging): scope completeWithCancellation and completeWithPause reads by workflowId Both SELECT queries that check execution status before writing a terminal result were only filtering on executionId. Adds workflowId to the WHERE clause so all seven reads and writes in LoggingSession consistently scope by (workflowId, executionId). --- apps/sim/lib/logs/execution/logging-session.ts | 14 ++++++++++++-- 1 file changed, 12 insertions(+), 2 deletions(-) diff --git a/apps/sim/lib/logs/execution/logging-session.ts b/apps/sim/lib/logs/execution/logging-session.ts index 88ac4cbcad2..571242afb63 100644 --- a/apps/sim/lib/logs/execution/logging-session.ts +++ b/apps/sim/lib/logs/execution/logging-session.ts @@ -671,7 +671,12 @@ export class LoggingSession { const currentLog = await db .select({ status: workflowExecutionLogs.status }) .from(workflowExecutionLogs) - .where(eq(workflowExecutionLogs.executionId, this.executionId)) + .where( + and( + eq(workflowExecutionLogs.workflowId, this.workflowId), + eq(workflowExecutionLogs.executionId, this.executionId) + ) + ) .limit(1) .then((rows) => rows[0]) @@ -770,7 +775,12 @@ export class LoggingSession { const currentLog = await db .select({ status: workflowExecutionLogs.status }) .from(workflowExecutionLogs) - .where(eq(workflowExecutionLogs.executionId, this.executionId)) + .where( + and( + eq(workflowExecutionLogs.workflowId, this.workflowId), + eq(workflowExecutionLogs.executionId, this.executionId) + ) + ) .limit(1) .then((rows) => rows[0])