From 3788660366664cb070e13bc4873d0f6897a266ac Mon Sep 17 00:00:00 2001 From: Waleed Date: Sat, 28 Feb 2026 13:58:21 -0800 Subject: [PATCH 001/152] fix(monitoring): set MemoryTelemetry logger to INFO level for production visibility (#3386) Production defaults to ERROR-only logging. Without this override, memory snapshots would be silently suppressed. --- apps/sim/lib/monitoring/memory-telemetry.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/apps/sim/lib/monitoring/memory-telemetry.ts b/apps/sim/lib/monitoring/memory-telemetry.ts index a730e0b9761..d9383c64117 100644 --- a/apps/sim/lib/monitoring/memory-telemetry.ts +++ b/apps/sim/lib/monitoring/memory-telemetry.ts @@ -12,7 +12,7 @@ import { getActiveSSEConnectionsByRoute, } from '@/lib/monitoring/sse-connections' -const logger = createLogger('MemoryTelemetry') +const logger = createLogger('MemoryTelemetry', { logLevel: 'INFO' }) const MB = 1024 * 1024 From ee20e119deac25a08860375b4bf17af3add9963c Mon Sep 17 00:00:00 2001 From: Waleed Date: Sat, 28 Feb 2026 18:56:34 -0800 Subject: [PATCH 002/152] feat(integrations): add amplitude, google pagespeed insights, and pagerduty integrations (#3385) * feat(integrations): add amplitude and google pagespeed insights integrations * verified and regen docs * fix icons * fix(integrations): add pagerduty to tool and block registries Re-add registry entries that were reverted after initial commit. 
Co-Authored-By: Claude Opus 4.6 * more updates * ack comments --------- Co-authored-by: Claude Opus 4.6 --- apps/docs/components/icons.tsx | 69 +- apps/docs/components/ui/icon-mapping.ts | 6 + apps/docs/content/docs/en/tools/amplitude.mdx | 313 ++++++++ .../docs/en/tools/google_pagespeed.mdx | 84 ++ apps/docs/content/docs/en/tools/meta.json | 3 + apps/docs/content/docs/en/tools/pagerduty.mdx | 217 +++++ apps/sim/blocks/blocks/amplitude.ts | 745 ++++++++++++++++++ apps/sim/blocks/blocks/google_pagespeed.ts | 86 ++ apps/sim/blocks/blocks/pagerduty.ts | 482 +++++++++++ apps/sim/blocks/registry.ts | 6 + apps/sim/components/icons.tsx | 69 +- .../sim/tools/amplitude/event_segmentation.ts | 134 ++++ apps/sim/tools/amplitude/get_active_users.ts | 105 +++ apps/sim/tools/amplitude/get_revenue.ts | 102 +++ apps/sim/tools/amplitude/group_identify.ts | 99 +++ apps/sim/tools/amplitude/identify_user.ts | 97 +++ apps/sim/tools/amplitude/index.ts | 23 + apps/sim/tools/amplitude/list_events.ts | 79 ++ .../tools/amplitude/realtime_active_users.ts | 74 ++ apps/sim/tools/amplitude/send_event.ts | 214 +++++ apps/sim/tools/amplitude/types.ts | 241 ++++++ apps/sim/tools/amplitude/user_activity.ts | 144 ++++ apps/sim/tools/amplitude/user_profile.ts | 120 +++ apps/sim/tools/amplitude/user_search.ts | 89 +++ apps/sim/tools/google_pagespeed/analyze.ts | 223 ++++++ apps/sim/tools/google_pagespeed/index.ts | 5 + apps/sim/tools/google_pagespeed/types.ts | 37 + apps/sim/tools/pagerduty/add_note.ts | 78 ++ apps/sim/tools/pagerduty/create_incident.ts | 149 ++++ apps/sim/tools/pagerduty/index.ts | 13 + apps/sim/tools/pagerduty/list_incidents.ts | 161 ++++ apps/sim/tools/pagerduty/list_oncalls.ts | 145 ++++ apps/sim/tools/pagerduty/list_services.ts | 108 +++ apps/sim/tools/pagerduty/types.ts | 169 ++++ apps/sim/tools/pagerduty/update_incident.ts | 117 +++ apps/sim/tools/registry.ts | 40 + 36 files changed, 4826 insertions(+), 20 deletions(-) create mode 100644 
apps/docs/content/docs/en/tools/amplitude.mdx create mode 100644 apps/docs/content/docs/en/tools/google_pagespeed.mdx create mode 100644 apps/docs/content/docs/en/tools/pagerduty.mdx create mode 100644 apps/sim/blocks/blocks/amplitude.ts create mode 100644 apps/sim/blocks/blocks/google_pagespeed.ts create mode 100644 apps/sim/blocks/blocks/pagerduty.ts create mode 100644 apps/sim/tools/amplitude/event_segmentation.ts create mode 100644 apps/sim/tools/amplitude/get_active_users.ts create mode 100644 apps/sim/tools/amplitude/get_revenue.ts create mode 100644 apps/sim/tools/amplitude/group_identify.ts create mode 100644 apps/sim/tools/amplitude/identify_user.ts create mode 100644 apps/sim/tools/amplitude/index.ts create mode 100644 apps/sim/tools/amplitude/list_events.ts create mode 100644 apps/sim/tools/amplitude/realtime_active_users.ts create mode 100644 apps/sim/tools/amplitude/send_event.ts create mode 100644 apps/sim/tools/amplitude/types.ts create mode 100644 apps/sim/tools/amplitude/user_activity.ts create mode 100644 apps/sim/tools/amplitude/user_profile.ts create mode 100644 apps/sim/tools/amplitude/user_search.ts create mode 100644 apps/sim/tools/google_pagespeed/analyze.ts create mode 100644 apps/sim/tools/google_pagespeed/index.ts create mode 100644 apps/sim/tools/google_pagespeed/types.ts create mode 100644 apps/sim/tools/pagerduty/add_note.ts create mode 100644 apps/sim/tools/pagerduty/create_incident.ts create mode 100644 apps/sim/tools/pagerduty/index.ts create mode 100644 apps/sim/tools/pagerduty/list_incidents.ts create mode 100644 apps/sim/tools/pagerduty/list_oncalls.ts create mode 100644 apps/sim/tools/pagerduty/list_services.ts create mode 100644 apps/sim/tools/pagerduty/types.ts create mode 100644 apps/sim/tools/pagerduty/update_incident.ts diff --git a/apps/docs/components/icons.tsx b/apps/docs/components/icons.tsx index 51cd709bb71..c4666fba176 100644 --- a/apps/docs/components/icons.tsx +++ b/apps/docs/components/icons.tsx @@ -1209,6 
+1209,17 @@ export function AlgoliaIcon(props: SVGProps) { ) } +export function AmplitudeIcon(props: SVGProps) { + return ( + + + + ) +} + export function GoogleBooksIcon(props: SVGProps) { return ( @@ -1938,13 +1949,11 @@ export function ElevenLabsIcon(props: SVGProps) { export function LinkupIcon(props: SVGProps) { return ( - - - - + + ) } @@ -2453,6 +2462,17 @@ export function OutlookIcon(props: SVGProps) { ) } +export function PagerDutyIcon(props: SVGProps) { + return ( + + + + ) +} + export function MicrosoftExcelIcon(props: SVGProps) { const id = useId() const gradientId = `excel_gradient_${id}` @@ -3996,10 +4016,10 @@ export function IntercomIcon(props: SVGProps) { export function LoopsIcon(props: SVGProps) { return ( - + ) @@ -5578,6 +5598,35 @@ export function GoogleMapsIcon(props: SVGProps) { ) } +export function GooglePagespeedIcon(props: SVGProps) { + return ( + + + + + + + + + + ) +} + export function GoogleTranslateIcon(props: SVGProps) { return ( diff --git a/apps/docs/components/ui/icon-mapping.ts b/apps/docs/components/ui/icon-mapping.ts index a69b0d90f5b..6d15e539dc2 100644 --- a/apps/docs/components/ui/icon-mapping.ts +++ b/apps/docs/components/ui/icon-mapping.ts @@ -9,6 +9,7 @@ import { AirtableIcon, AirweaveIcon, AlgoliaIcon, + AmplitudeIcon, ApifyIcon, ApolloIcon, ArxivIcon, @@ -56,6 +57,7 @@ import { GoogleGroupsIcon, GoogleIcon, GoogleMapsIcon, + GooglePagespeedIcon, GoogleSheetsIcon, GoogleSlidesIcon, GoogleTasksIcon, @@ -102,6 +104,7 @@ import { OpenAIIcon, OutlookIcon, PackageSearchIcon, + PagerDutyIcon, ParallelIcon, PerplexityIcon, PineconeIcon, @@ -167,6 +170,7 @@ export const blockTypeToIconMap: Record = { airtable: AirtableIcon, airweave: AirweaveIcon, algolia: AlgoliaIcon, + amplitude: AmplitudeIcon, apify: ApifyIcon, apollo: ApolloIcon, arxiv: ArxivIcon, @@ -211,6 +215,7 @@ export const blockTypeToIconMap: Record = { google_forms: GoogleFormsIcon, google_groups: GoogleGroupsIcon, google_maps: GoogleMapsIcon, + google_pagespeed: 
GooglePagespeedIcon, google_search: GoogleIcon, google_sheets_v2: GoogleSheetsIcon, google_slides_v2: GoogleSlidesIcon, @@ -258,6 +263,7 @@ export const blockTypeToIconMap: Record = { onepassword: OnePasswordIcon, openai: OpenAIIcon, outlook: OutlookIcon, + pagerduty: PagerDutyIcon, parallel_ai: ParallelIcon, perplexity: PerplexityIcon, pinecone: PineconeIcon, diff --git a/apps/docs/content/docs/en/tools/amplitude.mdx b/apps/docs/content/docs/en/tools/amplitude.mdx new file mode 100644 index 00000000000..177b5c5455e --- /dev/null +++ b/apps/docs/content/docs/en/tools/amplitude.mdx @@ -0,0 +1,313 @@ +--- +title: Amplitude +description: Track events and query analytics from Amplitude +--- + +import { BlockInfoCard } from "@/components/ui/block-info-card" + + + +{/* MANUAL-CONTENT-START:intro */} +[Amplitude](https://amplitude.com/) is a leading digital analytics platform that helps teams understand user behavior, measure product performance, and make data-driven decisions at scale. + +The Amplitude integration in Sim connects with the Amplitude HTTP and Dashboard REST APIs using API key and secret key authentication, allowing your agents to track events, manage user properties, and query analytics data programmatically. This API-based approach ensures secure access to Amplitude's full suite of analytics capabilities. 
+ +With the Amplitude integration, your agents can: + +- **Track events**: Send custom events to Amplitude with rich properties, revenue data, and user context directly from your workflows +- **Identify users**: Set and update user properties using operations like $set, $setOnce, $add, $append, and $unset to maintain detailed user profiles +- **Search for users**: Look up users by User ID, Device ID, or Amplitude ID to retrieve profile information and metadata +- **Query event analytics**: Run event segmentation queries with grouping, custom metrics (uniques, totals, averages, DAU percentages), and flexible date ranges +- **Monitor user activity**: Retrieve event streams for specific users to understand individual user journeys and behavior patterns +- **Analyze active users**: Get active or new user counts over time with daily, weekly, or monthly granularity +- **Track revenue**: Access revenue LTV metrics including ARPU, ARPPU, total revenue, and paying user counts + +In Sim, the Amplitude integration enables powerful analytics automation scenarios. Your agents can track product events in real time based on workflow triggers, enrich user profiles as new data becomes available, query segmentation data to inform downstream decisions, or build monitoring workflows that alert on changes in key metrics. By connecting Sim with Amplitude, you can build intelligent agents that bridge the gap between analytics insights and automated action, enabling data-driven workflows that respond to user behavior patterns and product performance trends. +{/* MANUAL-CONTENT-END */} + + +## Usage Instructions + +Integrate Amplitude into your workflow to track events, identify users and groups, search for users, query analytics, and retrieve revenue data. + + + +## Tools + +### `amplitude_send_event` + +Track an event in Amplitude using the HTTP V2 API. 
+ +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `apiKey` | string | Yes | Amplitude API Key | +| `userId` | string | No | User ID \(required if no device_id\) | +| `deviceId` | string | No | Device ID \(required if no user_id\) | +| `eventType` | string | Yes | Name of the event \(e.g., "page_view", "purchase"\) | +| `eventProperties` | string | No | JSON object of custom event properties | +| `userProperties` | string | No | JSON object of user properties to set \(supports $set, $setOnce, $add, $append, $unset\) | +| `time` | string | No | Event timestamp in milliseconds since epoch | +| `sessionId` | string | No | Session start time in milliseconds since epoch | +| `insertId` | string | No | Unique ID for deduplication \(within 7-day window\) | +| `appVersion` | string | No | Application version string | +| `platform` | string | No | Platform \(e.g., "Web", "iOS", "Android"\) | +| `country` | string | No | Two-letter country code | +| `language` | string | No | Language code \(e.g., "en"\) | +| `ip` | string | No | IP address for geo-location | +| `price` | string | No | Price of the item purchased | +| `quantity` | string | No | Quantity of items purchased | +| `revenue` | string | No | Revenue amount | +| `productId` | string | No | Product identifier | +| `revenueType` | string | No | Revenue type \(e.g., "purchase", "refund"\) | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `code` | number | Response code \(200 for success\) | +| `eventsIngested` | number | Number of events ingested | +| `payloadSizeBytes` | number | Size of the payload in bytes | +| `serverUploadTime` | number | Server upload timestamp | + +### `amplitude_identify_user` + +Set user properties in Amplitude using the Identify API. Supports $set, $setOnce, $add, $append, $unset operations. 
+ +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `apiKey` | string | Yes | Amplitude API Key | +| `userId` | string | No | User ID \(required if no device_id\) | +| `deviceId` | string | No | Device ID \(required if no user_id\) | +| `userProperties` | string | Yes | JSON object of user properties. Use operations like $set, $setOnce, $add, $append, $unset. | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `code` | number | HTTP response status code | +| `message` | string | Response message | + +### `amplitude_group_identify` + +Set group-level properties in Amplitude. Supports $set, $setOnce, $add, $append, $unset operations. + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `apiKey` | string | Yes | Amplitude API Key | +| `groupType` | string | Yes | Group classification \(e.g., "company", "org_id"\) | +| `groupValue` | string | Yes | Specific group identifier \(e.g., "Acme Corp"\) | +| `groupProperties` | string | Yes | JSON object of group properties. Use operations like $set, $setOnce, $add, $append, $unset. | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `code` | number | HTTP response status code | +| `message` | string | Response message | + +### `amplitude_user_search` + +Search for a user by User ID, Device ID, or Amplitude ID using the Dashboard REST API. 
+ +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `apiKey` | string | Yes | Amplitude API Key | +| `secretKey` | string | Yes | Amplitude Secret Key | +| `user` | string | Yes | User ID, Device ID, or Amplitude ID to search for | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `matches` | array | List of matching users | +| ↳ `amplitudeId` | number | Amplitude internal user ID | +| ↳ `userId` | string | External user ID | +| `type` | string | Match type \(e.g., match_user_or_device_id\) | + +### `amplitude_user_activity` + +Get the event stream for a specific user by their Amplitude ID. + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `apiKey` | string | Yes | Amplitude API Key | +| `secretKey` | string | Yes | Amplitude Secret Key | +| `amplitudeId` | string | Yes | Amplitude internal user ID | +| `offset` | string | No | Offset for pagination \(default 0\) | +| `limit` | string | No | Maximum number of events to return \(default 1000, max 1000\) | +| `direction` | string | No | Sort direction: "latest" or "earliest" \(default: latest\) | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `events` | array | List of user events | +| ↳ `eventType` | string | Type of event | +| ↳ `eventTime` | string | Event timestamp | +| ↳ `eventProperties` | json | Custom event properties | +| ↳ `userProperties` | json | User properties at event time | +| ↳ `sessionId` | number | Session ID | +| ↳ `platform` | string | Platform | +| ↳ `country` | string | Country | +| ↳ `city` | string | City | +| `userData` | json | User metadata | +| ↳ `userId` | string | External user ID | +| ↳ `canonicalAmplitudeId` | number | Canonical Amplitude ID | +| ↳ `numEvents` | number | Total event count | +| ↳ `numSessions` | number | Total session count | +| ↳ `platform` | string | Primary 
platform | +| ↳ `country` | string | Country | + +### `amplitude_user_profile` + +Get a user profile including properties, cohort memberships, and computed properties. + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `secretKey` | string | Yes | Amplitude Secret Key | +| `userId` | string | No | External user ID \(required if no device_id\) | +| `deviceId` | string | No | Device ID \(required if no user_id\) | +| `getAmpProps` | string | No | Include Amplitude user properties \(true/false, default: false\) | +| `getCohortIds` | string | No | Include cohort IDs the user belongs to \(true/false, default: false\) | +| `getComputations` | string | No | Include computed user properties \(true/false, default: false\) | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `userId` | string | External user ID | +| `deviceId` | string | Device ID | +| `ampProps` | json | Amplitude user properties \(library, first_used, last_used, custom properties\) | +| `cohortIds` | array | List of cohort IDs the user belongs to | +| `computations` | json | Computed user properties | + +### `amplitude_event_segmentation` + +Query event analytics data with segmentation. Get event counts, uniques, averages, and more. 
+ +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `apiKey` | string | Yes | Amplitude API Key | +| `secretKey` | string | Yes | Amplitude Secret Key | +| `eventType` | string | Yes | Event type name to analyze | +| `start` | string | Yes | Start date in YYYYMMDD format | +| `end` | string | Yes | End date in YYYYMMDD format | +| `metric` | string | No | Metric type: uniques, totals, pct_dau, average, histogram, sums, value_avg, or formula \(default: uniques\) | +| `interval` | string | No | Time interval: 1 \(daily\), 7 \(weekly\), or 30 \(monthly\) | +| `groupBy` | string | No | Property name to group by \(prefix custom user properties with "gp:"\) | +| `limit` | string | No | Maximum number of group-by values \(max 1000\) | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `series` | json | Time-series data arrays indexed by series | +| `seriesLabels` | array | Labels for each data series | +| `seriesCollapsed` | json | Collapsed aggregate totals per series | +| `xValues` | array | Date values for the x-axis | + +### `amplitude_get_active_users` + +Get active or new user counts over a date range from the Dashboard REST API. 
+ +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `apiKey` | string | Yes | Amplitude API Key | +| `secretKey` | string | Yes | Amplitude Secret Key | +| `start` | string | Yes | Start date in YYYYMMDD format | +| `end` | string | Yes | End date in YYYYMMDD format | +| `metric` | string | No | Metric type: "active" or "new" \(default: active\) | +| `interval` | string | No | Time interval: 1 \(daily\), 7 \(weekly\), or 30 \(monthly\) | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `series` | json | Array of data series with user counts per time interval | +| `seriesMeta` | array | Metadata labels for each data series \(e.g., segment names\) | +| `xValues` | array | Date values for the x-axis | + +### `amplitude_realtime_active_users` + +Get real-time active user counts at 5-minute granularity for the last 2 days. + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `apiKey` | string | Yes | Amplitude API Key | +| `secretKey` | string | Yes | Amplitude Secret Key | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `series` | json | Array of data series with active user counts at 5-minute intervals | +| `seriesLabels` | array | Labels for each series \(e.g., "Today", "Yesterday"\) | +| `xValues` | array | Time values for the x-axis \(e.g., "15:00", "15:05"\) | + +### `amplitude_list_events` + +List all event types in the Amplitude project with their weekly totals and unique counts. 
+ +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `apiKey` | string | Yes | Amplitude API Key | +| `secretKey` | string | Yes | Amplitude Secret Key | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `events` | array | List of event types in the project | +| ↳ `value` | string | Event type name | +| ↳ `displayName` | string | Event display name | +| ↳ `totals` | number | Weekly total count | +| ↳ `hidden` | boolean | Whether the event is hidden | +| ↳ `deleted` | boolean | Whether the event is deleted | + +### `amplitude_get_revenue` + +Get revenue LTV data including ARPU, ARPPU, total revenue, and paying user counts. + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `apiKey` | string | Yes | Amplitude API Key | +| `secretKey` | string | Yes | Amplitude Secret Key | +| `start` | string | Yes | Start date in YYYYMMDD format | +| `end` | string | Yes | End date in YYYYMMDD format | +| `metric` | string | No | Metric: 0 \(ARPU\), 1 \(ARPPU\), 2 \(Total Revenue\), 3 \(Paying Users\) | +| `interval` | string | No | Time interval: 1 \(daily\), 7 \(weekly\), or 30 \(monthly\) | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `series` | json | Array of revenue data series | +| `seriesLabels` | array | Labels for each data series | +| `xValues` | array | Date values for the x-axis | + + diff --git a/apps/docs/content/docs/en/tools/google_pagespeed.mdx b/apps/docs/content/docs/en/tools/google_pagespeed.mdx new file mode 100644 index 00000000000..65b62e0e750 --- /dev/null +++ b/apps/docs/content/docs/en/tools/google_pagespeed.mdx @@ -0,0 +1,84 @@ +--- +title: Google PageSpeed +description: Analyze webpage performance with Google PageSpeed Insights +--- + +import { BlockInfoCard } from "@/components/ui/block-info-card" + + + +{/* MANUAL-CONTENT-START:intro */} 
+[Google PageSpeed Insights](https://pagespeed.web.dev/) is a web performance analysis tool powered by Lighthouse that evaluates the quality of web pages across multiple dimensions including performance, accessibility, SEO, and best practices. + +With the Google PageSpeed integration in Sim, you can: + +- **Analyze webpage performance**: Get detailed performance scores and metrics for any public URL, including First Contentful Paint, Largest Contentful Paint, and Speed Index +- **Evaluate accessibility**: Check how well a webpage meets accessibility standards and identify areas for improvement +- **Audit SEO**: Assess a page's search engine optimization and discover opportunities to improve rankings +- **Review best practices**: Verify that a webpage follows modern web development best practices +- **Compare strategies**: Run analyses using either desktop or mobile strategies to understand performance across device types +- **Localize results**: Retrieve analysis results in different locales for internationalized reporting + +In Sim, the Google PageSpeed integration enables your agents to programmatically audit web pages as part of automated workflows. This is useful for monitoring site performance over time, triggering alerts when scores drop below thresholds, generating performance reports, and ensuring that deployed changes meet quality standards before release. + +### Getting Your API Key + +1. Go to the [Google Cloud Console](https://console.cloud.google.com/) +2. Create or select a project +3. Enable the **PageSpeed Insights API** from the API Library +4. Navigate to **Credentials** and create an API key +5. Use the API key in the Sim block configuration +{/* MANUAL-CONTENT-END */} + + +## Usage Instructions + +Analyze web pages for performance, accessibility, SEO, and best practices using Google PageSpeed Insights API powered by Lighthouse. 
+ + + +## Tools + +### `google_pagespeed_analyze` + +Analyze a webpage for performance, accessibility, SEO, and best practices using Google PageSpeed Insights. + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `apiKey` | string | Yes | Google PageSpeed Insights API Key | +| `url` | string | Yes | The URL of the webpage to analyze | +| `category` | string | No | Lighthouse categories to analyze \(comma-separated\): performance, accessibility, best-practices, seo | +| `strategy` | string | No | Analysis strategy: desktop or mobile | +| `locale` | string | No | Locale for results \(e.g., en, fr, de\) | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `finalUrl` | string | The final URL after redirects | +| `performanceScore` | number | Performance category score \(0-1\) | +| `accessibilityScore` | number | Accessibility category score \(0-1\) | +| `bestPracticesScore` | number | Best Practices category score \(0-1\) | +| `seoScore` | number | SEO category score \(0-1\) | +| `firstContentfulPaint` | string | Time to First Contentful Paint \(display value\) | +| `firstContentfulPaintMs` | number | Time to First Contentful Paint in milliseconds | +| `largestContentfulPaint` | string | Time to Largest Contentful Paint \(display value\) | +| `largestContentfulPaintMs` | number | Time to Largest Contentful Paint in milliseconds | +| `totalBlockingTime` | string | Total Blocking Time \(display value\) | +| `totalBlockingTimeMs` | number | Total Blocking Time in milliseconds | +| `cumulativeLayoutShift` | string | Cumulative Layout Shift \(display value\) | +| `cumulativeLayoutShiftValue` | number | Cumulative Layout Shift numeric value | +| `speedIndex` | string | Speed Index \(display value\) | +| `speedIndexMs` | number | Speed Index in milliseconds | +| `interactive` | string | Time to Interactive \(display value\) | +| `interactiveMs` | number | Time to Interactive 
in milliseconds | +| `overallCategory` | string | Overall loading experience category \(FAST, AVERAGE, SLOW, or NONE\) | +| `analysisTimestamp` | string | UTC timestamp of the analysis | +| `lighthouseVersion` | string | Version of Lighthouse used for the analysis | + + diff --git a/apps/docs/content/docs/en/tools/meta.json b/apps/docs/content/docs/en/tools/meta.json index 3a1917fc19a..612cba972c1 100644 --- a/apps/docs/content/docs/en/tools/meta.json +++ b/apps/docs/content/docs/en/tools/meta.json @@ -6,6 +6,7 @@ "airtable", "airweave", "algolia", + "amplitude", "apify", "apollo", "arxiv", @@ -50,6 +51,7 @@ "google_forms", "google_groups", "google_maps", + "google_pagespeed", "google_search", "google_sheets", "google_slides", @@ -97,6 +99,7 @@ "onepassword", "openai", "outlook", + "pagerduty", "parallel_ai", "perplexity", "pinecone", diff --git a/apps/docs/content/docs/en/tools/pagerduty.mdx b/apps/docs/content/docs/en/tools/pagerduty.mdx new file mode 100644 index 00000000000..5876c1cd7da --- /dev/null +++ b/apps/docs/content/docs/en/tools/pagerduty.mdx @@ -0,0 +1,217 @@ +--- +title: PagerDuty +description: Manage incidents and on-call schedules with PagerDuty +--- + +import { BlockInfoCard } from "@/components/ui/block-info-card" + + + +{/* MANUAL-CONTENT-START:intro */} +[PagerDuty](https://www.pagerduty.com/) is a leading incident management platform that helps engineering and operations teams detect, triage, and resolve infrastructure and application issues in real time. PagerDuty integrates with monitoring tools, orchestrates on-call schedules, and ensures the right people are alerted when incidents occur. + +The PagerDuty integration in Sim connects with the PagerDuty REST API v2 using API key authentication, enabling your agents to manage the full incident lifecycle and query on-call information programmatically. 
+ +With the PagerDuty integration, your agents can: + +- **List and filter incidents**: Retrieve incidents filtered by status (triggered, acknowledged, resolved), service, date range, and sort order to monitor your operational health +- **Create incidents**: Trigger new incidents on specific services with custom titles, descriptions, urgency levels, and assignees directly from your workflows +- **Update incidents**: Acknowledge or resolve incidents, change urgency, and add resolution notes to keep your incident management in sync with automated processes +- **Add notes to incidents**: Attach contextual information, investigation findings, or automated diagnostics as notes on existing incidents +- **List services**: Query your PagerDuty service catalog to discover service IDs and metadata for use in other operations +- **Check on-call schedules**: Retrieve current on-call entries filtered by escalation policy or schedule to determine who is responsible at any given time + +In Sim, the PagerDuty integration enables powerful incident automation scenarios. Your agents can automatically create incidents based on monitoring alerts, enrich incidents with diagnostic data from other tools, resolve incidents when automated remediation succeeds, or build escalation workflows that check on-call schedules and route notifications accordingly. By connecting Sim with PagerDuty, you can build intelligent agents that bridge the gap between detection and response, reducing mean time to resolution and ensuring consistent incident handling across your organization. +{/* MANUAL-CONTENT-END */} + + +## Usage Instructions + +Integrate PagerDuty into your workflow to list, create, and update incidents, add notes, list services, and check on-call schedules. + + + +## Tools + +### `pagerduty_list_incidents` + +List incidents from PagerDuty with optional filters. 
+ +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `apiKey` | string | Yes | PagerDuty REST API Key | +| `statuses` | string | No | Comma-separated statuses to filter \(triggered, acknowledged, resolved\) | +| `serviceIds` | string | No | Comma-separated service IDs to filter | +| `since` | string | No | Start date filter \(ISO 8601 format\) | +| `until` | string | No | End date filter \(ISO 8601 format\) | +| `sortBy` | string | No | Sort field \(e.g., created_at:desc\) | +| `limit` | string | No | Maximum number of results \(max 100\) | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `incidents` | array | Array of incidents | +| ↳ `id` | string | Incident ID | +| ↳ `incidentNumber` | number | Incident number | +| ↳ `title` | string | Incident title | +| ↳ `status` | string | Incident status | +| ↳ `urgency` | string | Incident urgency | +| ↳ `createdAt` | string | Creation timestamp | +| ↳ `updatedAt` | string | Last updated timestamp | +| ↳ `serviceName` | string | Service name | +| ↳ `serviceId` | string | Service ID | +| ↳ `assigneeName` | string | Assignee name | +| ↳ `assigneeId` | string | Assignee ID | +| ↳ `escalationPolicyName` | string | Escalation policy name | +| ↳ `htmlUrl` | string | PagerDuty web URL | +| `total` | number | Total number of matching incidents | +| `more` | boolean | Whether more results are available | + +### `pagerduty_create_incident` + +Create a new incident in PagerDuty. 
+ +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `apiKey` | string | Yes | PagerDuty REST API Key | +| `fromEmail` | string | Yes | Email address of a valid PagerDuty user | +| `title` | string | Yes | Incident title/summary | +| `serviceId` | string | Yes | ID of the PagerDuty service | +| `urgency` | string | No | Urgency level \(high or low\) | +| `body` | string | No | Detailed description of the incident | +| `escalationPolicyId` | string | No | Escalation policy ID to assign | +| `assigneeId` | string | No | User ID to assign the incident to | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `id` | string | Created incident ID | +| `incidentNumber` | number | Incident number | +| `title` | string | Incident title | +| `status` | string | Incident status | +| `urgency` | string | Incident urgency | +| `createdAt` | string | Creation timestamp | +| `serviceName` | string | Service name | +| `serviceId` | string | Service ID | +| `htmlUrl` | string | PagerDuty web URL | + +### `pagerduty_update_incident` + +Update an incident in PagerDuty (acknowledge, resolve, change urgency, etc.). 
+ +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `apiKey` | string | Yes | PagerDuty REST API Key | +| `fromEmail` | string | Yes | Email address of a valid PagerDuty user | +| `incidentId` | string | Yes | ID of the incident to update | +| `status` | string | No | New status \(acknowledged or resolved\) | +| `title` | string | No | New incident title | +| `urgency` | string | No | New urgency \(high or low\) | +| `escalationLevel` | string | No | Escalation level to escalate to | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `id` | string | Incident ID | +| `incidentNumber` | number | Incident number | +| `title` | string | Incident title | +| `status` | string | Updated status | +| `urgency` | string | Updated urgency | +| `updatedAt` | string | Last updated timestamp | +| `htmlUrl` | string | PagerDuty web URL | + +### `pagerduty_add_note` + +Add a note to an existing PagerDuty incident. + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `apiKey` | string | Yes | PagerDuty REST API Key | +| `fromEmail` | string | Yes | Email address of a valid PagerDuty user | +| `incidentId` | string | Yes | ID of the incident to add the note to | +| `content` | string | Yes | Note content text | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `id` | string | Note ID | +| `content` | string | Note content | +| `createdAt` | string | Creation timestamp | +| `userName` | string | Name of the user who created the note | + +### `pagerduty_list_services` + +List services from PagerDuty with optional name filter. 
+ +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `apiKey` | string | Yes | PagerDuty REST API Key | +| `query` | string | No | Filter services by name | +| `limit` | string | No | Maximum number of results \(max 100\) | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `services` | array | Array of services | +| ↳ `id` | string | Service ID | +| ↳ `name` | string | Service name | +| ↳ `description` | string | Service description | +| ↳ `status` | string | Service status | +| ↳ `escalationPolicyName` | string | Escalation policy name | +| ↳ `escalationPolicyId` | string | Escalation policy ID | +| ↳ `createdAt` | string | Creation timestamp | +| ↳ `htmlUrl` | string | PagerDuty web URL | +| `total` | number | Total number of matching services | +| `more` | boolean | Whether more results are available | + +### `pagerduty_list_oncalls` + +List current on-call entries from PagerDuty. + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `apiKey` | string | Yes | PagerDuty REST API Key | +| `escalationPolicyIds` | string | No | Comma-separated escalation policy IDs to filter | +| `scheduleIds` | string | No | Comma-separated schedule IDs to filter | +| `since` | string | No | Start time filter \(ISO 8601 format\) | +| `until` | string | No | End time filter \(ISO 8601 format\) | +| `limit` | string | No | Maximum number of results \(max 100\) | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `oncalls` | array | Array of on-call entries | +| ↳ `userName` | string | On-call user name | +| ↳ `userId` | string | On-call user ID | +| ↳ `escalationLevel` | number | Escalation level | +| ↳ `escalationPolicyName` | string | Escalation policy name | +| ↳ `escalationPolicyId` | string | Escalation policy ID | +| ↳ `scheduleName` | string | Schedule name | +| ↳ `scheduleId` | 
string | Schedule ID | +| ↳ `start` | string | On-call start time | +| ↳ `end` | string | On-call end time | +| `total` | number | Total number of matching on-call entries | +| `more` | boolean | Whether more results are available | + + diff --git a/apps/sim/blocks/blocks/amplitude.ts b/apps/sim/blocks/blocks/amplitude.ts new file mode 100644 index 00000000000..e9cbf618342 --- /dev/null +++ b/apps/sim/blocks/blocks/amplitude.ts @@ -0,0 +1,745 @@ +import { AmplitudeIcon } from '@/components/icons' +import { AuthMode, type BlockConfig } from '@/blocks/types' + +export const AmplitudeBlock: BlockConfig = { + type: 'amplitude', + name: 'Amplitude', + description: 'Track events and query analytics from Amplitude', + longDescription: + 'Integrate Amplitude into your workflow to track events, identify users and groups, search for users, query analytics, and retrieve revenue data.', + docsLink: 'https://docs.sim.ai/tools/amplitude', + category: 'tools', + bgColor: '#1B1F3B', + icon: AmplitudeIcon, + authMode: AuthMode.ApiKey, + + subBlocks: [ + { + id: 'operation', + title: 'Operation', + type: 'dropdown', + options: [ + { label: 'Send Event', id: 'send_event' }, + { label: 'Identify User', id: 'identify_user' }, + { label: 'Group Identify', id: 'group_identify' }, + { label: 'User Search', id: 'user_search' }, + { label: 'User Activity', id: 'user_activity' }, + { label: 'User Profile', id: 'user_profile' }, + { label: 'Event Segmentation', id: 'event_segmentation' }, + { label: 'Get Active Users', id: 'get_active_users' }, + { label: 'Real-time Active Users', id: 'realtime_active_users' }, + { label: 'List Events', id: 'list_events' }, + { label: 'Get Revenue', id: 'get_revenue' }, + ], + value: () => 'send_event', + }, + + // API Key (required for all operations) + { + id: 'apiKey', + title: 'API Key', + type: 'short-input', + required: true, + placeholder: 'Enter your Amplitude API Key', + password: true, + condition: { + field: 'operation', + value: 'user_profile', + 
not: true, + }, + }, + + // API Key for user_profile (not required - uses only secretKey) + // User Profile uses Api-Key header with secret key only + + // Secret Key (required for Dashboard REST API operations + User Profile) + { + id: 'secretKey', + title: 'Secret Key', + type: 'short-input', + required: { + field: 'operation', + value: [ + 'user_search', + 'user_activity', + 'user_profile', + 'event_segmentation', + 'get_active_users', + 'realtime_active_users', + 'list_events', + 'get_revenue', + ], + }, + placeholder: 'Enter your Amplitude Secret Key', + password: true, + condition: { + field: 'operation', + value: [ + 'user_search', + 'user_activity', + 'user_profile', + 'event_segmentation', + 'get_active_users', + 'realtime_active_users', + 'list_events', + 'get_revenue', + ], + }, + }, + + // --- Send Event fields --- + { + id: 'eventType', + title: 'Event Type', + type: 'short-input', + required: { field: 'operation', value: 'send_event' }, + placeholder: 'e.g., page_view, purchase, signup', + condition: { field: 'operation', value: 'send_event' }, + }, + { + id: 'userId', + title: 'User ID', + type: 'short-input', + placeholder: 'User identifier', + condition: { field: 'operation', value: ['send_event', 'identify_user'] }, + }, + { + id: 'profileUserId', + title: 'User ID', + type: 'short-input', + placeholder: 'External user ID (required if no Device ID)', + condition: { field: 'operation', value: 'user_profile' }, + }, + { + id: 'deviceId', + title: 'Device ID', + type: 'short-input', + placeholder: 'Device identifier', + condition: { field: 'operation', value: ['send_event', 'identify_user'] }, + mode: 'advanced', + }, + { + id: 'profileDeviceId', + title: 'Device ID', + type: 'short-input', + placeholder: 'Device ID (required if no User ID)', + condition: { field: 'operation', value: 'user_profile' }, + mode: 'advanced', + }, + { + id: 'eventProperties', + title: 'Event Properties', + type: 'long-input', + placeholder: '{"button": "signup", "page": 
"/home"}', + condition: { field: 'operation', value: 'send_event' }, + wandConfig: { + enabled: true, + prompt: + 'Generate a JSON object of event properties for an Amplitude event. Return ONLY the JSON object - no explanations, no extra text.', + generationType: 'json-object', + }, + }, + { + id: 'sendEventUserProperties', + title: 'User Properties', + type: 'long-input', + placeholder: '{"$set": {"plan": "premium"}}', + condition: { field: 'operation', value: 'send_event' }, + mode: 'advanced', + wandConfig: { + enabled: true, + prompt: + 'Generate a JSON object of user properties for Amplitude. Use $set, $setOnce, $add, $append, or $unset operations. Return ONLY the JSON object - no explanations, no extra text.', + generationType: 'json-object', + }, + }, + { + id: 'platform', + title: 'Platform', + type: 'short-input', + placeholder: 'e.g., Web, iOS, Android', + condition: { field: 'operation', value: 'send_event' }, + mode: 'advanced', + }, + { + id: 'appVersion', + title: 'App Version', + type: 'short-input', + placeholder: 'e.g., 1.0.0', + condition: { field: 'operation', value: 'send_event' }, + mode: 'advanced', + }, + { + id: 'insertId', + title: 'Insert ID', + type: 'short-input', + placeholder: 'Unique ID for deduplication', + condition: { field: 'operation', value: 'send_event' }, + mode: 'advanced', + }, + { + id: 'price', + title: 'Price', + type: 'short-input', + placeholder: '9.99', + condition: { field: 'operation', value: 'send_event' }, + mode: 'advanced', + }, + { + id: 'quantity', + title: 'Quantity', + type: 'short-input', + placeholder: '1', + condition: { field: 'operation', value: 'send_event' }, + mode: 'advanced', + }, + { + id: 'revenue', + title: 'Revenue', + type: 'short-input', + placeholder: '9.99', + condition: { field: 'operation', value: 'send_event' }, + mode: 'advanced', + }, + { + id: 'productId', + title: 'Product ID', + type: 'short-input', + placeholder: 'Product identifier', + condition: { field: 'operation', value: 
'send_event' }, + mode: 'advanced', + }, + { + id: 'revenueType', + title: 'Revenue Type', + type: 'short-input', + placeholder: 'e.g., purchase, refund', + condition: { field: 'operation', value: 'send_event' }, + mode: 'advanced', + }, + { + id: 'country', + title: 'Country', + type: 'short-input', + placeholder: 'Two-letter country code (e.g., US)', + condition: { field: 'operation', value: 'send_event' }, + mode: 'advanced', + }, + { + id: 'language', + title: 'Language', + type: 'short-input', + placeholder: 'Language code (e.g., en)', + condition: { field: 'operation', value: 'send_event' }, + mode: 'advanced', + }, + { + id: 'ip', + title: 'IP Address', + type: 'short-input', + placeholder: 'IP for geo-location (use "$remote" for request IP)', + condition: { field: 'operation', value: 'send_event' }, + mode: 'advanced', + }, + { + id: 'time', + title: 'Timestamp', + type: 'short-input', + placeholder: 'Milliseconds since epoch', + condition: { field: 'operation', value: 'send_event' }, + mode: 'advanced', + wandConfig: { + enabled: true, + prompt: + 'Generate a timestamp in milliseconds since epoch for the current time. Return ONLY the number - no explanations, no extra text.', + generationType: 'timestamp', + }, + }, + { + id: 'sessionId', + title: 'Session ID', + type: 'short-input', + placeholder: 'Session start time in milliseconds (-1 for no session)', + condition: { field: 'operation', value: 'send_event' }, + mode: 'advanced', + }, + + // --- Identify User fields --- + { + id: 'identifyUserProperties', + title: 'User Properties', + type: 'long-input', + required: { field: 'operation', value: 'identify_user' }, + placeholder: '{"$set": {"plan": "premium", "company": "Acme"}}', + condition: { field: 'operation', value: 'identify_user' }, + wandConfig: { + enabled: true, + prompt: + 'Generate a JSON object of user properties for Amplitude Identify API. Use $set, $setOnce, $add, $append, or $unset operations. 
Return ONLY the JSON object - no explanations, no extra text.', + generationType: 'json-object', + }, + }, + + // --- Group Identify fields --- + { + id: 'groupType', + title: 'Group Type', + type: 'short-input', + required: { field: 'operation', value: 'group_identify' }, + placeholder: 'e.g., company, org_id', + condition: { field: 'operation', value: 'group_identify' }, + }, + { + id: 'groupValue', + title: 'Group Value', + type: 'short-input', + required: { field: 'operation', value: 'group_identify' }, + placeholder: 'e.g., Acme Corp', + condition: { field: 'operation', value: 'group_identify' }, + }, + { + id: 'groupProperties', + title: 'Group Properties', + type: 'long-input', + required: { field: 'operation', value: 'group_identify' }, + placeholder: '{"$set": {"industry": "tech", "employee_count": 500}}', + condition: { field: 'operation', value: 'group_identify' }, + wandConfig: { + enabled: true, + prompt: + 'Generate a JSON object of group properties for Amplitude Group Identify API. Use $set, $setOnce, $add, $append, or $unset operations. 
Return ONLY the JSON object - no explanations, no extra text.', + generationType: 'json-object', + }, + }, + + // --- User Search fields --- + { + id: 'searchUser', + title: 'User', + type: 'short-input', + required: { field: 'operation', value: 'user_search' }, + placeholder: 'User ID, Device ID, or Amplitude ID', + condition: { field: 'operation', value: 'user_search' }, + }, + + // --- User Activity fields --- + { + id: 'amplitudeId', + title: 'Amplitude ID', + type: 'short-input', + required: { field: 'operation', value: 'user_activity' }, + placeholder: 'Amplitude internal user ID', + condition: { field: 'operation', value: 'user_activity' }, + }, + { + id: 'activityOffset', + title: 'Offset', + type: 'short-input', + placeholder: '0', + condition: { field: 'operation', value: 'user_activity' }, + mode: 'advanced', + }, + { + id: 'activityLimit', + title: 'Limit', + type: 'short-input', + placeholder: '1000', + condition: { field: 'operation', value: 'user_activity' }, + mode: 'advanced', + }, + { + id: 'activityDirection', + title: 'Direction', + type: 'dropdown', + options: [ + { label: 'Latest First', id: 'latest' }, + { label: 'Earliest First', id: 'earliest' }, + ], + value: () => 'latest', + condition: { field: 'operation', value: 'user_activity' }, + mode: 'advanced', + }, + + // --- User Profile fields --- + { + id: 'getAmpProps', + title: 'Include User Properties', + type: 'dropdown', + options: [ + { label: 'No', id: 'false' }, + { label: 'Yes', id: 'true' }, + ], + value: () => 'false', + condition: { field: 'operation', value: 'user_profile' }, + mode: 'advanced', + }, + { + id: 'getCohortIds', + title: 'Include Cohort IDs', + type: 'dropdown', + options: [ + { label: 'No', id: 'false' }, + { label: 'Yes', id: 'true' }, + ], + value: () => 'false', + condition: { field: 'operation', value: 'user_profile' }, + mode: 'advanced', + }, + { + id: 'getComputations', + title: 'Include Computed Properties', + type: 'dropdown', + options: [ + { label: 'No', 
id: 'false' }, + { label: 'Yes', id: 'true' }, + ], + value: () => 'false', + condition: { field: 'operation', value: 'user_profile' }, + mode: 'advanced', + }, + + // --- Event Segmentation fields --- + { + id: 'segmentationEventType', + title: 'Event Type', + type: 'short-input', + required: { field: 'operation', value: 'event_segmentation' }, + placeholder: 'Event type to analyze', + condition: { field: 'operation', value: 'event_segmentation' }, + }, + { + id: 'segmentationStart', + title: 'Start Date', + type: 'short-input', + required: { field: 'operation', value: 'event_segmentation' }, + placeholder: 'YYYYMMDD', + condition: { field: 'operation', value: 'event_segmentation' }, + wandConfig: { + enabled: true, + prompt: + 'Generate a date in YYYYMMDD format. Return ONLY the date string - no explanations, no extra text.', + generationType: 'timestamp', + }, + }, + { + id: 'segmentationEnd', + title: 'End Date', + type: 'short-input', + required: { field: 'operation', value: 'event_segmentation' }, + placeholder: 'YYYYMMDD', + condition: { field: 'operation', value: 'event_segmentation' }, + wandConfig: { + enabled: true, + prompt: + 'Generate a date in YYYYMMDD format. 
Return ONLY the date string - no explanations, no extra text.', + generationType: 'timestamp', + }, + }, + { + id: 'segmentationMetric', + title: 'Metric', + type: 'dropdown', + options: [ + { label: 'Uniques', id: 'uniques' }, + { label: 'Totals', id: 'totals' }, + { label: '% DAU', id: 'pct_dau' }, + { label: 'Average', id: 'average' }, + { label: 'Histogram', id: 'histogram' }, + { label: 'Sums', id: 'sums' }, + { label: 'Value Average', id: 'value_avg' }, + { label: 'Formula', id: 'formula' }, + ], + value: () => 'uniques', + condition: { field: 'operation', value: 'event_segmentation' }, + mode: 'advanced', + }, + { + id: 'segmentationInterval', + title: 'Interval', + type: 'dropdown', + options: [ + { label: 'Daily', id: '1' }, + { label: 'Weekly', id: '7' }, + { label: 'Monthly', id: '30' }, + ], + value: () => '1', + condition: { field: 'operation', value: 'event_segmentation' }, + mode: 'advanced', + }, + { + id: 'segmentationGroupBy', + title: 'Group By', + type: 'short-input', + placeholder: 'Property name (prefix custom with "gp:")', + condition: { field: 'operation', value: 'event_segmentation' }, + mode: 'advanced', + }, + { + id: 'segmentationLimit', + title: 'Limit', + type: 'short-input', + placeholder: 'Max group-by values (max 1000)', + condition: { field: 'operation', value: 'event_segmentation' }, + mode: 'advanced', + }, + + // --- Get Active Users fields --- + { + id: 'activeUsersStart', + title: 'Start Date', + type: 'short-input', + required: { field: 'operation', value: 'get_active_users' }, + placeholder: 'YYYYMMDD', + condition: { field: 'operation', value: 'get_active_users' }, + wandConfig: { + enabled: true, + prompt: + 'Generate a date in YYYYMMDD format. 
Return ONLY the date string - no explanations, no extra text.', + generationType: 'timestamp', + }, + }, + { + id: 'activeUsersEnd', + title: 'End Date', + type: 'short-input', + required: { field: 'operation', value: 'get_active_users' }, + placeholder: 'YYYYMMDD', + condition: { field: 'operation', value: 'get_active_users' }, + wandConfig: { + enabled: true, + prompt: + 'Generate a date in YYYYMMDD format. Return ONLY the date string - no explanations, no extra text.', + generationType: 'timestamp', + }, + }, + { + id: 'activeUsersMetric', + title: 'Metric', + type: 'dropdown', + options: [ + { label: 'Active Users', id: 'active' }, + { label: 'New Users', id: 'new' }, + ], + value: () => 'active', + condition: { field: 'operation', value: 'get_active_users' }, + mode: 'advanced', + }, + { + id: 'activeUsersInterval', + title: 'Interval', + type: 'dropdown', + options: [ + { label: 'Daily', id: '1' }, + { label: 'Weekly', id: '7' }, + { label: 'Monthly', id: '30' }, + ], + value: () => '1', + condition: { field: 'operation', value: 'get_active_users' }, + mode: 'advanced', + }, + + // --- Get Revenue fields --- + { + id: 'revenueStart', + title: 'Start Date', + type: 'short-input', + required: { field: 'operation', value: 'get_revenue' }, + placeholder: 'YYYYMMDD', + condition: { field: 'operation', value: 'get_revenue' }, + wandConfig: { + enabled: true, + prompt: + 'Generate a date in YYYYMMDD format. Return ONLY the date string - no explanations, no extra text.', + generationType: 'timestamp', + }, + }, + { + id: 'revenueEnd', + title: 'End Date', + type: 'short-input', + required: { field: 'operation', value: 'get_revenue' }, + placeholder: 'YYYYMMDD', + condition: { field: 'operation', value: 'get_revenue' }, + wandConfig: { + enabled: true, + prompt: + 'Generate a date in YYYYMMDD format. 
Return ONLY the date string - no explanations, no extra text.', + generationType: 'timestamp', + }, + }, + { + id: 'revenueMetric', + title: 'Metric', + type: 'dropdown', + options: [ + { label: 'ARPU', id: '0' }, + { label: 'ARPPU', id: '1' }, + { label: 'Total Revenue', id: '2' }, + { label: 'Paying Users', id: '3' }, + ], + value: () => '2', + condition: { field: 'operation', value: 'get_revenue' }, + mode: 'advanced', + }, + { + id: 'revenueInterval', + title: 'Interval', + type: 'dropdown', + options: [ + { label: 'Daily', id: '1' }, + { label: 'Weekly', id: '7' }, + { label: 'Monthly', id: '30' }, + ], + value: () => '1', + condition: { field: 'operation', value: 'get_revenue' }, + mode: 'advanced', + }, + ], + + tools: { + access: [ + 'amplitude_send_event', + 'amplitude_identify_user', + 'amplitude_group_identify', + 'amplitude_user_search', + 'amplitude_user_activity', + 'amplitude_user_profile', + 'amplitude_event_segmentation', + 'amplitude_get_active_users', + 'amplitude_realtime_active_users', + 'amplitude_list_events', + 'amplitude_get_revenue', + ], + config: { + tool: (params) => `amplitude_${params.operation}`, + params: (params) => { + const result: Record = {} + + switch (params.operation) { + case 'send_event': + if (params.sendEventUserProperties) + result.userProperties = params.sendEventUserProperties + break + + case 'identify_user': + if (params.identifyUserProperties) result.userProperties = params.identifyUserProperties + break + + case 'user_search': + if (params.searchUser) result.user = params.searchUser + break + + case 'user_activity': + if (params.activityOffset) result.offset = params.activityOffset + if (params.activityLimit) result.limit = params.activityLimit + if (params.activityDirection) result.direction = params.activityDirection + break + + case 'user_profile': + if (params.profileUserId) result.userId = params.profileUserId + if (params.profileDeviceId) result.deviceId = params.profileDeviceId + break + + case 
'event_segmentation': + if (params.segmentationEventType) result.eventType = params.segmentationEventType + if (params.segmentationStart) result.start = params.segmentationStart + if (params.segmentationEnd) result.end = params.segmentationEnd + if (params.segmentationMetric) result.metric = params.segmentationMetric + if (params.segmentationInterval) result.interval = params.segmentationInterval + if (params.segmentationGroupBy) result.groupBy = params.segmentationGroupBy + if (params.segmentationLimit) result.limit = params.segmentationLimit + break + + case 'get_active_users': + if (params.activeUsersStart) result.start = params.activeUsersStart + if (params.activeUsersEnd) result.end = params.activeUsersEnd + if (params.activeUsersMetric) result.metric = params.activeUsersMetric + if (params.activeUsersInterval) result.interval = params.activeUsersInterval + break + + case 'get_revenue': + if (params.revenueStart) result.start = params.revenueStart + if (params.revenueEnd) result.end = params.revenueEnd + if (params.revenueMetric) result.metric = params.revenueMetric + if (params.revenueInterval) result.interval = params.revenueInterval + break + } + + return result + }, + }, + }, + + inputs: { + operation: { type: 'string', description: 'Operation to perform' }, + apiKey: { type: 'string', description: 'Amplitude API Key' }, + secretKey: { type: 'string', description: 'Amplitude Secret Key' }, + eventType: { type: 'string', description: 'Event type name' }, + userId: { type: 'string', description: 'User ID' }, + deviceId: { type: 'string', description: 'Device ID' }, + eventProperties: { type: 'string', description: 'Event properties JSON' }, + sendEventUserProperties: { type: 'string', description: 'User properties for send event' }, + identifyUserProperties: { type: 'string', description: 'User properties for identify' }, + groupType: { type: 'string', description: 'Group type classification' }, + groupValue: { type: 'string', description: 'Group identifier 
value' }, + groupProperties: { type: 'string', description: 'Group properties JSON' }, + searchUser: { type: 'string', description: 'User to search for' }, + amplitudeId: { type: 'string', description: 'Amplitude internal user ID' }, + profileUserId: { type: 'string', description: 'User ID for profile lookup' }, + profileDeviceId: { type: 'string', description: 'Device ID for profile lookup' }, + segmentationEventType: { type: 'string', description: 'Event type to analyze' }, + segmentationStart: { type: 'string', description: 'Segmentation start date' }, + segmentationEnd: { type: 'string', description: 'Segmentation end date' }, + activeUsersStart: { type: 'string', description: 'Active users start date' }, + activeUsersEnd: { type: 'string', description: 'Active users end date' }, + revenueStart: { type: 'string', description: 'Revenue start date' }, + revenueEnd: { type: 'string', description: 'Revenue end date' }, + }, + + outputs: { + code: { + type: 'number', + description: 'Response status code', + }, + message: { + type: 'string', + description: 'Response message (identify_user, group_identify)', + }, + eventsIngested: { + type: 'number', + description: 'Number of events ingested (send_event)', + }, + matches: { + type: 'json', + description: 'User search matches (amplitudeId, userId)', + }, + events: { + type: 'json', + description: 'Event list (list_events, user_activity)', + }, + userData: { + type: 'json', + description: 'User metadata (user_activity)', + }, + series: { + type: 'json', + description: 'Time-series data (segmentation, active_users, revenue, realtime)', + }, + seriesLabels: { + type: 'json', + description: 'Labels for each data series (segmentation, realtime, revenue)', + }, + seriesMeta: { + type: 'json', + description: 'Metadata labels for data series (active_users)', + }, + seriesCollapsed: { + type: 'json', + description: 'Collapsed aggregate totals per series (segmentation)', + }, + xValues: { + type: 'json', + description: 'X-axis 
date/time values for chart data', + }, + }, +} diff --git a/apps/sim/blocks/blocks/google_pagespeed.ts b/apps/sim/blocks/blocks/google_pagespeed.ts new file mode 100644 index 00000000000..955b895cbd0 --- /dev/null +++ b/apps/sim/blocks/blocks/google_pagespeed.ts @@ -0,0 +1,86 @@ +import { GooglePagespeedIcon } from '@/components/icons' +import { AuthMode, type BlockConfig } from '@/blocks/types' +import type { GooglePagespeedAnalyzeResponse } from '@/tools/google_pagespeed/types' + +export const GooglePagespeedBlock: BlockConfig = { + type: 'google_pagespeed', + name: 'Google PageSpeed', + description: 'Analyze webpage performance with Google PageSpeed Insights', + longDescription: + 'Analyze web pages for performance, accessibility, SEO, and best practices using Google PageSpeed Insights API powered by Lighthouse.', + docsLink: 'https://docs.sim.ai/tools/google_pagespeed', + category: 'tools', + bgColor: '#E0E0E0', + icon: GooglePagespeedIcon, + authMode: AuthMode.ApiKey, + + subBlocks: [ + { + id: 'url', + title: 'URL', + type: 'short-input', + required: true, + placeholder: 'https://example.com', + }, + { + id: 'strategy', + title: 'Strategy', + type: 'dropdown', + options: [ + { label: 'Desktop', id: 'desktop' }, + { label: 'Mobile', id: 'mobile' }, + ], + value: () => 'desktop', + }, + { + id: 'category', + title: 'Categories', + type: 'short-input', + placeholder: 'performance, accessibility, best-practices, seo', + mode: 'advanced', + wandConfig: { + enabled: true, + prompt: + 'Generate a comma-separated list of Google PageSpeed Insights categories to analyze. Valid values are: performance, accessibility, best-practices, seo. 
Return ONLY the comma-separated list - no explanations, no extra text.', + }, + }, + { + id: 'locale', + title: 'Locale', + type: 'short-input', + placeholder: 'en', + mode: 'advanced', + }, + { + id: 'apiKey', + title: 'API Key', + type: 'short-input', + required: true, + placeholder: 'Enter your Google PageSpeed API key', + password: true, + }, + ], + + tools: { + access: ['google_pagespeed_analyze'], + config: { + tool: () => 'google_pagespeed_analyze', + }, + }, + + inputs: { + url: { type: 'string', description: 'URL to analyze' }, + strategy: { type: 'string', description: 'Analysis strategy (desktop or mobile)' }, + category: { type: 'string', description: 'Comma-separated categories to analyze' }, + locale: { type: 'string', description: 'Locale for results' }, + apiKey: { type: 'string', description: 'Google PageSpeed API key' }, + }, + + outputs: { + response: { + type: 'json', + description: + 'PageSpeed analysis results including category scores (performanceScore, accessibilityScore, bestPracticesScore, seoScore), Core Web Vitals display values and numeric values (firstContentfulPaint, largestContentfulPaint, totalBlockingTime, cumulativeLayoutShift, speedIndex, interactive), and metadata (finalUrl, overallCategory, analysisTimestamp, lighthouseVersion)', + }, + }, +} diff --git a/apps/sim/blocks/blocks/pagerduty.ts b/apps/sim/blocks/blocks/pagerduty.ts new file mode 100644 index 00000000000..34e1336bb92 --- /dev/null +++ b/apps/sim/blocks/blocks/pagerduty.ts @@ -0,0 +1,482 @@ +import { PagerDutyIcon } from '@/components/icons' +import { AuthMode, type BlockConfig } from '@/blocks/types' + +export const PagerDutyBlock: BlockConfig = { + type: 'pagerduty', + name: 'PagerDuty', + description: 'Manage incidents and on-call schedules with PagerDuty', + longDescription: + 'Integrate PagerDuty into your workflow to list, create, and update incidents, add notes, list services, and check on-call schedules.', + docsLink: 'https://docs.sim.ai/tools/pagerduty', + 
category: 'tools', + bgColor: '#06AC38', + icon: PagerDutyIcon, + authMode: AuthMode.ApiKey, + + subBlocks: [ + { + id: 'operation', + title: 'Operation', + type: 'dropdown', + options: [ + { label: 'List Incidents', id: 'list_incidents' }, + { label: 'Create Incident', id: 'create_incident' }, + { label: 'Update Incident', id: 'update_incident' }, + { label: 'Add Note', id: 'add_note' }, + { label: 'List Services', id: 'list_services' }, + { label: 'List On-Calls', id: 'list_oncalls' }, + ], + value: () => 'list_incidents', + }, + + { + id: 'apiKey', + title: 'API Key', + type: 'short-input', + required: true, + placeholder: 'Enter your PagerDuty REST API Key', + password: true, + }, + + { + id: 'fromEmail', + title: 'From Email', + type: 'short-input', + required: { + field: 'operation', + value: ['create_incident', 'update_incident', 'add_note'], + }, + placeholder: 'Valid PagerDuty user email (required for write operations)', + condition: { + field: 'operation', + value: ['create_incident', 'update_incident', 'add_note'], + }, + }, + + // --- List Incidents fields --- + { + id: 'statuses', + title: 'Statuses', + type: 'dropdown', + options: [ + { label: 'All', id: '' }, + { label: 'Triggered', id: 'triggered' }, + { label: 'Acknowledged', id: 'acknowledged' }, + { label: 'Resolved', id: 'resolved' }, + ], + value: () => '', + condition: { field: 'operation', value: 'list_incidents' }, + }, + { + id: 'listServiceIds', + title: 'Service IDs', + type: 'short-input', + placeholder: 'Comma-separated service IDs to filter', + condition: { field: 'operation', value: 'list_incidents' }, + mode: 'advanced', + }, + { + id: 'listSince', + title: 'Since', + type: 'short-input', + placeholder: 'Start date (ISO 8601, e.g., 2024-01-01T00:00:00Z)', + condition: { field: 'operation', value: 'list_incidents' }, + mode: 'advanced', + wandConfig: { + enabled: true, + prompt: + 'Generate an ISO 8601 timestamp. 
Return ONLY the timestamp string - no explanations, no extra text.', + generationType: 'timestamp', + }, + }, + { + id: 'listUntil', + title: 'Until', + type: 'short-input', + placeholder: 'End date (ISO 8601, e.g., 2024-12-31T23:59:59Z)', + condition: { field: 'operation', value: 'list_incidents' }, + mode: 'advanced', + wandConfig: { + enabled: true, + prompt: + 'Generate an ISO 8601 timestamp. Return ONLY the timestamp string - no explanations, no extra text.', + generationType: 'timestamp', + }, + }, + { + id: 'listSortBy', + title: 'Sort By', + type: 'dropdown', + options: [ + { label: 'Created At (newest)', id: 'created_at:desc' }, + { label: 'Created At (oldest)', id: 'created_at:asc' }, + ], + value: () => 'created_at:desc', + condition: { field: 'operation', value: 'list_incidents' }, + mode: 'advanced', + }, + { + id: 'listLimit', + title: 'Limit', + type: 'short-input', + placeholder: '25', + condition: { field: 'operation', value: 'list_incidents' }, + mode: 'advanced', + }, + + // --- Create Incident fields --- + { + id: 'title', + title: 'Title', + type: 'short-input', + required: { field: 'operation', value: 'create_incident' }, + placeholder: 'Incident title/summary', + condition: { field: 'operation', value: 'create_incident' }, + }, + { + id: 'createServiceId', + title: 'Service ID', + type: 'short-input', + required: { field: 'operation', value: 'create_incident' }, + placeholder: 'PagerDuty service ID', + condition: { field: 'operation', value: 'create_incident' }, + }, + { + id: 'createUrgency', + title: 'Urgency', + type: 'dropdown', + options: [ + { label: 'High', id: 'high' }, + { label: 'Low', id: 'low' }, + ], + value: () => 'high', + condition: { field: 'operation', value: 'create_incident' }, + }, + { + id: 'body', + title: 'Description', + type: 'long-input', + placeholder: 'Detailed description of the incident', + condition: { field: 'operation', value: 'create_incident' }, + }, + { + id: 'escalationPolicyId', + title: 'Escalation 
Policy ID', + type: 'short-input', + placeholder: 'Escalation policy ID (optional)', + condition: { field: 'operation', value: 'create_incident' }, + mode: 'advanced', + }, + { + id: 'assigneeId', + title: 'Assignee User ID', + type: 'short-input', + placeholder: 'User ID to assign (optional)', + condition: { field: 'operation', value: 'create_incident' }, + mode: 'advanced', + }, + + // --- Update Incident fields --- + { + id: 'updateIncidentId', + title: 'Incident ID', + type: 'short-input', + required: { field: 'operation', value: 'update_incident' }, + placeholder: 'ID of the incident to update', + condition: { field: 'operation', value: 'update_incident' }, + }, + { + id: 'updateStatus', + title: 'Status', + type: 'dropdown', + options: [ + { label: 'No Change', id: '' }, + { label: 'Acknowledged', id: 'acknowledged' }, + { label: 'Resolved', id: 'resolved' }, + ], + value: () => '', + condition: { field: 'operation', value: 'update_incident' }, + }, + { + id: 'updateTitle', + title: 'New Title', + type: 'short-input', + placeholder: 'New incident title (optional)', + condition: { field: 'operation', value: 'update_incident' }, + mode: 'advanced', + }, + { + id: 'updateUrgency', + title: 'Urgency', + type: 'dropdown', + options: [ + { label: 'No Change', id: '' }, + { label: 'High', id: 'high' }, + { label: 'Low', id: 'low' }, + ], + value: () => '', + condition: { field: 'operation', value: 'update_incident' }, + mode: 'advanced', + }, + { + id: 'updateEscalationLevel', + title: 'Escalation Level', + type: 'short-input', + placeholder: 'Escalation level number (e.g., 2)', + condition: { field: 'operation', value: 'update_incident' }, + mode: 'advanced', + }, + // --- Add Note fields --- + { + id: 'noteIncidentId', + title: 'Incident ID', + type: 'short-input', + required: { field: 'operation', value: 'add_note' }, + placeholder: 'ID of the incident', + condition: { field: 'operation', value: 'add_note' }, + }, + { + id: 'noteContent', + title: 'Note Content', 
+ type: 'long-input', + required: { field: 'operation', value: 'add_note' }, + placeholder: 'Note text to add to the incident', + condition: { field: 'operation', value: 'add_note' }, + }, + + // --- List Services fields --- + { + id: 'serviceQuery', + title: 'Search Query', + type: 'short-input', + placeholder: 'Filter services by name', + condition: { field: 'operation', value: 'list_services' }, + }, + { + id: 'serviceLimit', + title: 'Limit', + type: 'short-input', + placeholder: '25', + condition: { field: 'operation', value: 'list_services' }, + mode: 'advanced', + }, + + // --- List On-Calls fields --- + { + id: 'oncallEscalationPolicyIds', + title: 'Escalation Policy IDs', + type: 'short-input', + placeholder: 'Comma-separated escalation policy IDs', + condition: { field: 'operation', value: 'list_oncalls' }, + }, + { + id: 'oncallScheduleIds', + title: 'Schedule IDs', + type: 'short-input', + placeholder: 'Comma-separated schedule IDs', + condition: { field: 'operation', value: 'list_oncalls' }, + mode: 'advanced', + }, + { + id: 'oncallLimit', + title: 'Limit', + type: 'short-input', + placeholder: '25', + condition: { field: 'operation', value: 'list_oncalls' }, + mode: 'advanced', + }, + { + id: 'oncallSince', + title: 'Since', + type: 'short-input', + placeholder: 'Start time (ISO 8601)', + condition: { field: 'operation', value: 'list_oncalls' }, + mode: 'advanced', + wandConfig: { + enabled: true, + prompt: + 'Generate an ISO 8601 timestamp. Return ONLY the timestamp string - no explanations, no extra text.', + generationType: 'timestamp', + }, + }, + { + id: 'oncallUntil', + title: 'Until', + type: 'short-input', + placeholder: 'End time (ISO 8601)', + condition: { field: 'operation', value: 'list_oncalls' }, + mode: 'advanced', + wandConfig: { + enabled: true, + prompt: + 'Generate an ISO 8601 timestamp. 
Return ONLY the timestamp string - no explanations, no extra text.', + generationType: 'timestamp', + }, + }, + ], + + tools: { + access: [ + 'pagerduty_list_incidents', + 'pagerduty_create_incident', + 'pagerduty_update_incident', + 'pagerduty_add_note', + 'pagerduty_list_services', + 'pagerduty_list_oncalls', + ], + config: { + tool: (params) => `pagerduty_${params.operation}`, + params: (params) => { + const result: Record = {} + + switch (params.operation) { + case 'list_incidents': + if (params.statuses) result.statuses = params.statuses + if (params.listServiceIds) result.serviceIds = params.listServiceIds + if (params.listSince) result.since = params.listSince + if (params.listUntil) result.until = params.listUntil + if (params.listSortBy) result.sortBy = params.listSortBy + if (params.listLimit) result.limit = params.listLimit + break + + case 'create_incident': + if (params.createServiceId) result.serviceId = params.createServiceId + if (params.createUrgency) result.urgency = params.createUrgency + break + + case 'update_incident': + if (params.updateIncidentId) result.incidentId = params.updateIncidentId + if (params.updateStatus) result.status = params.updateStatus + if (params.updateTitle) result.title = params.updateTitle + if (params.updateUrgency) result.urgency = params.updateUrgency + if (params.updateEscalationLevel) result.escalationLevel = params.updateEscalationLevel + break + + case 'add_note': + if (params.noteIncidentId) result.incidentId = params.noteIncidentId + if (params.noteContent) result.content = params.noteContent + break + + case 'list_services': + if (params.serviceQuery) result.query = params.serviceQuery + if (params.serviceLimit) result.limit = params.serviceLimit + break + + case 'list_oncalls': + if (params.oncallEscalationPolicyIds) + result.escalationPolicyIds = params.oncallEscalationPolicyIds + if (params.oncallScheduleIds) result.scheduleIds = params.oncallScheduleIds + if (params.oncallSince) result.since = 
params.oncallSince + if (params.oncallUntil) result.until = params.oncallUntil + if (params.oncallLimit) result.limit = params.oncallLimit + break + } + + return result + }, + }, + }, + + inputs: { + operation: { type: 'string', description: 'Operation to perform' }, + apiKey: { type: 'string', description: 'PagerDuty REST API Key' }, + fromEmail: { type: 'string', description: 'Valid PagerDuty user email' }, + statuses: { type: 'string', description: 'Status filter for incidents' }, + listServiceIds: { type: 'string', description: 'Service IDs filter' }, + listSince: { type: 'string', description: 'Start date filter' }, + listUntil: { type: 'string', description: 'End date filter' }, + title: { type: 'string', description: 'Incident title' }, + createServiceId: { type: 'string', description: 'Service ID for new incident' }, + createUrgency: { type: 'string', description: 'Urgency level' }, + body: { type: 'string', description: 'Incident description' }, + updateIncidentId: { type: 'string', description: 'Incident ID to update' }, + updateStatus: { type: 'string', description: 'New status' }, + noteIncidentId: { type: 'string', description: 'Incident ID for note' }, + noteContent: { type: 'string', description: 'Note content' }, + escalationPolicyId: { type: 'string', description: 'Escalation policy ID' }, + assigneeId: { type: 'string', description: 'Assignee user ID' }, + updateTitle: { type: 'string', description: 'New incident title' }, + updateUrgency: { type: 'string', description: 'New urgency level' }, + updateEscalationLevel: { type: 'string', description: 'Escalation level number' }, + listSortBy: { type: 'string', description: 'Sort field' }, + listLimit: { type: 'string', description: 'Max results for incidents' }, + serviceQuery: { type: 'string', description: 'Service name filter' }, + serviceLimit: { type: 'string', description: 'Max results for services' }, + oncallEscalationPolicyIds: { type: 'string', description: 'Escalation policy IDs filter' }, 
+ oncallScheduleIds: { type: 'string', description: 'Schedule IDs filter' }, + oncallSince: { type: 'string', description: 'On-call start time filter' }, + oncallUntil: { type: 'string', description: 'On-call end time filter' }, + oncallLimit: { type: 'string', description: 'Max results for on-calls' }, + }, + + outputs: { + incidents: { + type: 'json', + description: 'Array of incidents (list_incidents)', + }, + total: { + type: 'number', + description: 'Total count of results', + }, + more: { + type: 'boolean', + description: 'Whether more results are available', + }, + id: { + type: 'string', + description: 'Created/updated resource ID', + }, + incidentNumber: { + type: 'number', + description: 'Incident number', + }, + title: { + type: 'string', + description: 'Incident title', + }, + status: { + type: 'string', + description: 'Incident status', + }, + urgency: { + type: 'string', + description: 'Incident urgency', + }, + createdAt: { + type: 'string', + description: 'Creation timestamp', + }, + updatedAt: { + type: 'string', + description: 'Last updated timestamp', + }, + serviceName: { + type: 'string', + description: 'Service name', + }, + serviceId: { + type: 'string', + description: 'Service ID', + }, + htmlUrl: { + type: 'string', + description: 'PagerDuty web URL', + }, + content: { + type: 'string', + description: 'Note content (add_note)', + }, + userName: { + type: 'string', + description: 'User name (add_note)', + }, + services: { + type: 'json', + description: 'Array of services (list_services)', + }, + oncalls: { + type: 'json', + description: 'Array of on-call entries (list_oncalls)', + }, + }, +} diff --git a/apps/sim/blocks/registry.ts b/apps/sim/blocks/registry.ts index b65005b316c..0e3e2c695ea 100644 --- a/apps/sim/blocks/registry.ts +++ b/apps/sim/blocks/registry.ts @@ -4,6 +4,7 @@ import { AhrefsBlock } from '@/blocks/blocks/ahrefs' import { AirtableBlock } from '@/blocks/blocks/airtable' import { AirweaveBlock } from 
'@/blocks/blocks/airweave' import { AlgoliaBlock } from '@/blocks/blocks/algolia' +import { AmplitudeBlock } from '@/blocks/blocks/amplitude' import { ApiBlock } from '@/blocks/blocks/api' import { ApiTriggerBlock } from '@/blocks/blocks/api_trigger' import { ApifyBlock } from '@/blocks/blocks/apify' @@ -56,6 +57,7 @@ import { GoogleDriveBlock } from '@/blocks/blocks/google_drive' import { GoogleFormsBlock } from '@/blocks/blocks/google_forms' import { GoogleGroupsBlock } from '@/blocks/blocks/google_groups' import { GoogleMapsBlock } from '@/blocks/blocks/google_maps' +import { GooglePagespeedBlock } from '@/blocks/blocks/google_pagespeed' import { GoogleSheetsBlock, GoogleSheetsV2Block } from '@/blocks/blocks/google_sheets' import { GoogleSlidesBlock, GoogleSlidesV2Block } from '@/blocks/blocks/google_slides' import { GoogleTasksBlock } from '@/blocks/blocks/google_tasks' @@ -112,6 +114,7 @@ import { OneDriveBlock } from '@/blocks/blocks/onedrive' import { OnePasswordBlock } from '@/blocks/blocks/onepassword' import { OpenAIBlock } from '@/blocks/blocks/openai' import { OutlookBlock } from '@/blocks/blocks/outlook' +import { PagerDutyBlock } from '@/blocks/blocks/pagerduty' import { ParallelBlock } from '@/blocks/blocks/parallel' import { PerplexityBlock } from '@/blocks/blocks/perplexity' import { PineconeBlock } from '@/blocks/blocks/pinecone' @@ -193,6 +196,7 @@ export const registry: Record = { airtable: AirtableBlock, airweave: AirweaveBlock, algolia: AlgoliaBlock, + amplitude: AmplitudeBlock, api: ApiBlock, api_trigger: ApiTriggerBlock, apify: ApifyBlock, @@ -250,6 +254,7 @@ export const registry: Record = { google_forms: GoogleFormsBlock, google_groups: GoogleGroupsBlock, google_maps: GoogleMapsBlock, + google_pagespeed: GooglePagespeedBlock, google_tasks: GoogleTasksBlock, google_translate: GoogleTranslateBlock, gong: GongBlock, @@ -313,6 +318,7 @@ export const registry: Record = { onedrive: OneDriveBlock, openai: OpenAIBlock, outlook: OutlookBlock, + 
pagerduty: PagerDutyBlock, parallel_ai: ParallelBlock, perplexity: PerplexityBlock, pinecone: PineconeBlock, diff --git a/apps/sim/components/icons.tsx b/apps/sim/components/icons.tsx index 51cd709bb71..c4666fba176 100644 --- a/apps/sim/components/icons.tsx +++ b/apps/sim/components/icons.tsx @@ -1209,6 +1209,17 @@ export function AlgoliaIcon(props: SVGProps) { ) } +export function AmplitudeIcon(props: SVGProps) { + return ( + + + + ) +} + export function GoogleBooksIcon(props: SVGProps) { return ( @@ -1938,13 +1949,11 @@ export function ElevenLabsIcon(props: SVGProps) { export function LinkupIcon(props: SVGProps) { return ( - - - - + + ) } @@ -2453,6 +2462,17 @@ export function OutlookIcon(props: SVGProps) { ) } +export function PagerDutyIcon(props: SVGProps) { + return ( + + + + ) +} + export function MicrosoftExcelIcon(props: SVGProps) { const id = useId() const gradientId = `excel_gradient_${id}` @@ -3996,10 +4016,10 @@ export function IntercomIcon(props: SVGProps) { export function LoopsIcon(props: SVGProps) { return ( - + ) @@ -5578,6 +5598,35 @@ export function GoogleMapsIcon(props: SVGProps) { ) } +export function GooglePagespeedIcon(props: SVGProps) { + return ( + + + + + + + + + + ) +} + export function GoogleTranslateIcon(props: SVGProps) { return ( diff --git a/apps/sim/tools/amplitude/event_segmentation.ts b/apps/sim/tools/amplitude/event_segmentation.ts new file mode 100644 index 00000000000..d5cbab3357a --- /dev/null +++ b/apps/sim/tools/amplitude/event_segmentation.ts @@ -0,0 +1,134 @@ +import type { + AmplitudeEventSegmentationParams, + AmplitudeEventSegmentationResponse, +} from '@/tools/amplitude/types' +import type { ToolConfig } from '@/tools/types' + +export const eventSegmentationTool: ToolConfig< + AmplitudeEventSegmentationParams, + AmplitudeEventSegmentationResponse +> = { + id: 'amplitude_event_segmentation', + name: 'Amplitude Event Segmentation', + description: + 'Query event analytics data with segmentation. 
Get event counts, uniques, averages, and more.', + version: '1.0.0', + + params: { + apiKey: { + type: 'string', + required: true, + visibility: 'user-only', + description: 'Amplitude API Key', + }, + secretKey: { + type: 'string', + required: true, + visibility: 'user-only', + description: 'Amplitude Secret Key', + }, + eventType: { + type: 'string', + required: true, + visibility: 'user-or-llm', + description: 'Event type name to analyze', + }, + start: { + type: 'string', + required: true, + visibility: 'user-or-llm', + description: 'Start date in YYYYMMDD format', + }, + end: { + type: 'string', + required: true, + visibility: 'user-or-llm', + description: 'End date in YYYYMMDD format', + }, + metric: { + type: 'string', + required: false, + visibility: 'user-or-llm', + description: + 'Metric type: uniques, totals, pct_dau, average, histogram, sums, value_avg, or formula (default: uniques)', + }, + interval: { + type: 'string', + required: false, + visibility: 'user-or-llm', + description: 'Time interval: 1 (daily), 7 (weekly), or 30 (monthly)', + }, + groupBy: { + type: 'string', + required: false, + visibility: 'user-or-llm', + description: 'Property name to group by (prefix custom user properties with "gp:")', + }, + limit: { + type: 'string', + required: false, + visibility: 'user-or-llm', + description: 'Maximum number of group-by values (max 1000)', + }, + }, + + request: { + url: (params) => { + const url = new URL('https://amplitude.com/api/2/events/segmentation') + const eventObj = JSON.stringify({ event_type: params.eventType }) + url.searchParams.set('e', eventObj) + url.searchParams.set('start', params.start) + url.searchParams.set('end', params.end) + if (params.metric) url.searchParams.set('m', params.metric) + if (params.interval) url.searchParams.set('i', params.interval) + if (params.groupBy) url.searchParams.set('g', params.groupBy) + if (params.limit) url.searchParams.set('limit', params.limit) + return url.toString() + }, + method: 'GET', + 
headers: (params) => ({ + Authorization: `Basic ${btoa(`${params.apiKey}:${params.secretKey}`)}`, + }), + }, + + transformResponse: async (response: Response) => { + const data = await response.json() + + if (!response.ok) { + throw new Error(data.error || `Amplitude Event Segmentation API error: ${response.status}`) + } + + const result = data.data ?? {} + + return { + success: true, + output: { + series: result.series ?? [], + seriesLabels: result.seriesLabels ?? [], + seriesCollapsed: result.seriesCollapsed ?? [], + xValues: result.xValues ?? [], + }, + } + }, + + outputs: { + series: { + type: 'json', + description: 'Time-series data arrays indexed by series', + }, + seriesLabels: { + type: 'array', + description: 'Labels for each data series', + items: { type: 'string' }, + }, + seriesCollapsed: { + type: 'json', + description: 'Collapsed aggregate totals per series', + }, + xValues: { + type: 'array', + description: 'Date values for the x-axis', + items: { type: 'string' }, + }, + }, +} diff --git a/apps/sim/tools/amplitude/get_active_users.ts b/apps/sim/tools/amplitude/get_active_users.ts new file mode 100644 index 00000000000..6670e5ab150 --- /dev/null +++ b/apps/sim/tools/amplitude/get_active_users.ts @@ -0,0 +1,105 @@ +import type { + AmplitudeGetActiveUsersParams, + AmplitudeGetActiveUsersResponse, +} from '@/tools/amplitude/types' +import type { ToolConfig } from '@/tools/types' + +export const getActiveUsersTool: ToolConfig< + AmplitudeGetActiveUsersParams, + AmplitudeGetActiveUsersResponse +> = { + id: 'amplitude_get_active_users', + name: 'Amplitude Get Active Users', + description: 'Get active or new user counts over a date range from the Dashboard REST API.', + version: '1.0.0', + + params: { + apiKey: { + type: 'string', + required: true, + visibility: 'user-only', + description: 'Amplitude API Key', + }, + secretKey: { + type: 'string', + required: true, + visibility: 'user-only', + description: 'Amplitude Secret Key', + }, + start: { + type: 
'string', + required: true, + visibility: 'user-or-llm', + description: 'Start date in YYYYMMDD format', + }, + end: { + type: 'string', + required: true, + visibility: 'user-or-llm', + description: 'End date in YYYYMMDD format', + }, + metric: { + type: 'string', + required: false, + visibility: 'user-or-llm', + description: 'Metric type: "active" or "new" (default: active)', + }, + interval: { + type: 'string', + required: false, + visibility: 'user-or-llm', + description: 'Time interval: 1 (daily), 7 (weekly), or 30 (monthly)', + }, + }, + + request: { + url: (params) => { + const url = new URL('https://amplitude.com/api/2/users') + url.searchParams.set('start', params.start) + url.searchParams.set('end', params.end) + if (params.metric) url.searchParams.set('m', params.metric) + if (params.interval) url.searchParams.set('i', params.interval) + return url.toString() + }, + method: 'GET', + headers: (params) => ({ + Authorization: `Basic ${btoa(`${params.apiKey}:${params.secretKey}`)}`, + }), + }, + + transformResponse: async (response: Response) => { + const data = await response.json() + + if (!response.ok) { + throw new Error(data.error || `Amplitude Active Users API error: ${response.status}`) + } + + const result = data.data ?? {} + + return { + success: true, + output: { + series: result.series ?? [], + seriesMeta: result.seriesMeta ?? [], + xValues: result.xValues ?? 
[], + }, + } + }, + + outputs: { + series: { + type: 'json', + description: 'Array of data series with user counts per time interval', + }, + seriesMeta: { + type: 'array', + description: 'Metadata labels for each data series (e.g., segment names)', + items: { type: 'string' }, + }, + xValues: { + type: 'array', + description: 'Date values for the x-axis', + items: { type: 'string' }, + }, + }, +} diff --git a/apps/sim/tools/amplitude/get_revenue.ts b/apps/sim/tools/amplitude/get_revenue.ts new file mode 100644 index 00000000000..264eb1e0f95 --- /dev/null +++ b/apps/sim/tools/amplitude/get_revenue.ts @@ -0,0 +1,102 @@ +import type { + AmplitudeGetRevenueParams, + AmplitudeGetRevenueResponse, +} from '@/tools/amplitude/types' +import type { ToolConfig } from '@/tools/types' + +export const getRevenueTool: ToolConfig = { + id: 'amplitude_get_revenue', + name: 'Amplitude Get Revenue', + description: 'Get revenue LTV data including ARPU, ARPPU, total revenue, and paying user counts.', + version: '1.0.0', + + params: { + apiKey: { + type: 'string', + required: true, + visibility: 'user-only', + description: 'Amplitude API Key', + }, + secretKey: { + type: 'string', + required: true, + visibility: 'user-only', + description: 'Amplitude Secret Key', + }, + start: { + type: 'string', + required: true, + visibility: 'user-or-llm', + description: 'Start date in YYYYMMDD format', + }, + end: { + type: 'string', + required: true, + visibility: 'user-or-llm', + description: 'End date in YYYYMMDD format', + }, + metric: { + type: 'string', + required: false, + visibility: 'user-or-llm', + description: 'Metric: 0 (ARPU), 1 (ARPPU), 2 (Total Revenue), 3 (Paying Users)', + }, + interval: { + type: 'string', + required: false, + visibility: 'user-or-llm', + description: 'Time interval: 1 (daily), 7 (weekly), or 30 (monthly)', + }, + }, + + request: { + url: (params) => { + const url = new URL('https://amplitude.com/api/2/revenue/ltv') + url.searchParams.set('start', params.start) + 
url.searchParams.set('end', params.end) + if (params.metric) url.searchParams.set('m', params.metric) + if (params.interval) url.searchParams.set('i', params.interval) + return url.toString() + }, + method: 'GET', + headers: (params) => ({ + Authorization: `Basic ${btoa(`${params.apiKey}:${params.secretKey}`)}`, + }), + }, + + transformResponse: async (response: Response) => { + const data = await response.json() + + if (!response.ok) { + throw new Error(data.error || `Amplitude Revenue API error: ${response.status}`) + } + + const result = data.data ?? {} + + return { + success: true, + output: { + series: result.series ?? [], + seriesLabels: result.seriesLabels ?? [], + xValues: result.xValues ?? [], + }, + } + }, + + outputs: { + series: { + type: 'json', + description: 'Array of revenue data series', + }, + seriesLabels: { + type: 'array', + description: 'Labels for each data series', + items: { type: 'string' }, + }, + xValues: { + type: 'array', + description: 'Date values for the x-axis', + items: { type: 'string' }, + }, + }, +} diff --git a/apps/sim/tools/amplitude/group_identify.ts b/apps/sim/tools/amplitude/group_identify.ts new file mode 100644 index 00000000000..b0bd548c49d --- /dev/null +++ b/apps/sim/tools/amplitude/group_identify.ts @@ -0,0 +1,99 @@ +import type { + AmplitudeGroupIdentifyParams, + AmplitudeGroupIdentifyResponse, +} from '@/tools/amplitude/types' +import type { ToolConfig } from '@/tools/types' + +export const groupIdentifyTool: ToolConfig< + AmplitudeGroupIdentifyParams, + AmplitudeGroupIdentifyResponse +> = { + id: 'amplitude_group_identify', + name: 'Amplitude Group Identify', + description: + 'Set group-level properties in Amplitude. 
Supports $set, $setOnce, $add, $append, $unset operations.', + version: '1.0.0', + + params: { + apiKey: { + type: 'string', + required: true, + visibility: 'user-only', + description: 'Amplitude API Key', + }, + groupType: { + type: 'string', + required: true, + visibility: 'user-or-llm', + description: 'Group classification (e.g., "company", "org_id")', + }, + groupValue: { + type: 'string', + required: true, + visibility: 'user-or-llm', + description: 'Specific group identifier (e.g., "Acme Corp")', + }, + groupProperties: { + type: 'string', + required: true, + visibility: 'user-or-llm', + description: + 'JSON object of group properties. Use operations like $set, $setOnce, $add, $append, $unset.', + }, + }, + + request: { + url: 'https://api2.amplitude.com/groupidentify', + method: 'POST', + headers: () => ({ + 'Content-Type': 'application/json', + }), + body: (params) => { + let groupProperties: Record = {} + try { + groupProperties = JSON.parse(params.groupProperties) + } catch { + groupProperties = {} + } + + return { + api_key: params.apiKey, + identification: [ + { + group_type: params.groupType, + group_value: params.groupValue, + group_properties: groupProperties, + }, + ], + } + }, + }, + + transformResponse: async (response: Response) => { + const text = await response.text() + + if (!response.ok) { + throw new Error(`Amplitude Group Identify API error: ${text}`) + } + + return { + success: true, + output: { + code: response.status, + message: text || null, + }, + } + }, + + outputs: { + code: { + type: 'number', + description: 'HTTP response status code', + }, + message: { + type: 'string', + description: 'Response message', + optional: true, + }, + }, +} diff --git a/apps/sim/tools/amplitude/identify_user.ts b/apps/sim/tools/amplitude/identify_user.ts new file mode 100644 index 00000000000..a0cb0316805 --- /dev/null +++ b/apps/sim/tools/amplitude/identify_user.ts @@ -0,0 +1,97 @@ +import type { + AmplitudeIdentifyUserParams, + 
AmplitudeIdentifyUserResponse, +} from '@/tools/amplitude/types' +import type { ToolConfig } from '@/tools/types' + +export const identifyUserTool: ToolConfig< + AmplitudeIdentifyUserParams, + AmplitudeIdentifyUserResponse +> = { + id: 'amplitude_identify_user', + name: 'Amplitude Identify User', + description: + 'Set user properties in Amplitude using the Identify API. Supports $set, $setOnce, $add, $append, $unset operations.', + version: '1.0.0', + + params: { + apiKey: { + type: 'string', + required: true, + visibility: 'user-only', + description: 'Amplitude API Key', + }, + userId: { + type: 'string', + required: false, + visibility: 'user-or-llm', + description: 'User ID (required if no device_id)', + }, + deviceId: { + type: 'string', + required: false, + visibility: 'user-or-llm', + description: 'Device ID (required if no user_id)', + }, + userProperties: { + type: 'string', + required: true, + visibility: 'user-or-llm', + description: + 'JSON object of user properties. Use operations like $set, $setOnce, $add, $append, $unset.', + }, + }, + + request: { + url: 'https://api2.amplitude.com/identify', + method: 'POST', + headers: () => ({ + 'Content-Type': 'application/json', + }), + body: (params) => { + const identification: Record = {} + + if (params.userId) identification.user_id = params.userId + if (params.deviceId) identification.device_id = params.deviceId + + try { + identification.user_properties = JSON.parse(params.userProperties) + } catch { + identification.user_properties = {} + } + + return { + api_key: params.apiKey, + identification: [identification], + } + }, + }, + + transformResponse: async (response: Response) => { + const text = await response.text() + + if (!response.ok) { + throw new Error(`Amplitude Identify API error: ${text}`) + } + + return { + success: true, + output: { + code: response.status, + message: text || null, + }, + } + }, + + outputs: { + code: { + type: 'number', + description: 'HTTP response status code', + }, + 
message: { + type: 'string', + description: 'Response message', + optional: true, + }, + }, +} diff --git a/apps/sim/tools/amplitude/index.ts b/apps/sim/tools/amplitude/index.ts new file mode 100644 index 00000000000..b0f1aab792f --- /dev/null +++ b/apps/sim/tools/amplitude/index.ts @@ -0,0 +1,23 @@ +import { eventSegmentationTool } from '@/tools/amplitude/event_segmentation' +import { getActiveUsersTool } from '@/tools/amplitude/get_active_users' +import { getRevenueTool } from '@/tools/amplitude/get_revenue' +import { groupIdentifyTool } from '@/tools/amplitude/group_identify' +import { identifyUserTool } from '@/tools/amplitude/identify_user' +import { listEventsTool } from '@/tools/amplitude/list_events' +import { realtimeActiveUsersTool } from '@/tools/amplitude/realtime_active_users' +import { sendEventTool } from '@/tools/amplitude/send_event' +import { userActivityTool } from '@/tools/amplitude/user_activity' +import { userProfileTool } from '@/tools/amplitude/user_profile' +import { userSearchTool } from '@/tools/amplitude/user_search' + +export const amplitudeSendEventTool = sendEventTool +export const amplitudeIdentifyUserTool = identifyUserTool +export const amplitudeGroupIdentifyTool = groupIdentifyTool +export const amplitudeUserSearchTool = userSearchTool +export const amplitudeUserActivityTool = userActivityTool +export const amplitudeUserProfileTool = userProfileTool +export const amplitudeEventSegmentationTool = eventSegmentationTool +export const amplitudeGetActiveUsersTool = getActiveUsersTool +export const amplitudeRealtimeActiveUsersTool = realtimeActiveUsersTool +export const amplitudeListEventsTool = listEventsTool +export const amplitudeGetRevenueTool = getRevenueTool diff --git a/apps/sim/tools/amplitude/list_events.ts b/apps/sim/tools/amplitude/list_events.ts new file mode 100644 index 00000000000..fa89d3ddf11 --- /dev/null +++ b/apps/sim/tools/amplitude/list_events.ts @@ -0,0 +1,79 @@ +import type { + AmplitudeListEventsParams, + 
AmplitudeListEventsResponse, +} from '@/tools/amplitude/types' +import type { ToolConfig } from '@/tools/types' + +export const listEventsTool: ToolConfig = { + id: 'amplitude_list_events', + name: 'Amplitude List Events', + description: + 'List all event types in the Amplitude project with their weekly totals and unique counts.', + version: '1.0.0', + + params: { + apiKey: { + type: 'string', + required: true, + visibility: 'user-only', + description: 'Amplitude API Key', + }, + secretKey: { + type: 'string', + required: true, + visibility: 'user-only', + description: 'Amplitude Secret Key', + }, + }, + + request: { + url: 'https://amplitude.com/api/2/events/list', + method: 'GET', + headers: (params) => ({ + Authorization: `Basic ${btoa(`${params.apiKey}:${params.secretKey}`)}`, + }), + }, + + transformResponse: async (response: Response) => { + const data = await response.json() + + if (!response.ok) { + throw new Error(data.error || `Amplitude List Events API error: ${response.status}`) + } + + const events = (data.data ?? []).map( + (e: Record) => + ({ + value: (e.value as string) ?? '', + displayName: (e.display as string) ?? null, + totals: (e.totals as number) ?? 0, + hidden: (e.hidden as boolean) ?? false, + deleted: (e.deleted as boolean) ?? 
false, + }) as const + ) + + return { + success: true, + output: { + events, + }, + } + }, + + outputs: { + events: { + type: 'array', + description: 'List of event types in the project', + items: { + type: 'object', + properties: { + value: { type: 'string', description: 'Event type name' }, + displayName: { type: 'string', description: 'Event display name' }, + totals: { type: 'number', description: 'Weekly total count' }, + hidden: { type: 'boolean', description: 'Whether the event is hidden' }, + deleted: { type: 'boolean', description: 'Whether the event is deleted' }, + }, + }, + }, + }, +} diff --git a/apps/sim/tools/amplitude/realtime_active_users.ts b/apps/sim/tools/amplitude/realtime_active_users.ts new file mode 100644 index 00000000000..0462d699dcc --- /dev/null +++ b/apps/sim/tools/amplitude/realtime_active_users.ts @@ -0,0 +1,74 @@ +import type { + AmplitudeRealtimeActiveUsersParams, + AmplitudeRealtimeActiveUsersResponse, +} from '@/tools/amplitude/types' +import type { ToolConfig } from '@/tools/types' + +export const realtimeActiveUsersTool: ToolConfig< + AmplitudeRealtimeActiveUsersParams, + AmplitudeRealtimeActiveUsersResponse +> = { + id: 'amplitude_realtime_active_users', + name: 'Amplitude Real-time Active Users', + description: 'Get real-time active user counts at 5-minute granularity for the last 2 days.', + version: '1.0.0', + + params: { + apiKey: { + type: 'string', + required: true, + visibility: 'user-only', + description: 'Amplitude API Key', + }, + secretKey: { + type: 'string', + required: true, + visibility: 'user-only', + description: 'Amplitude Secret Key', + }, + }, + + request: { + url: 'https://amplitude.com/api/2/realtime', + method: 'GET', + headers: (params) => ({ + Authorization: `Basic ${btoa(`${params.apiKey}:${params.secretKey}`)}`, + }), + }, + + transformResponse: async (response: Response) => { + const data = await response.json() + + if (!response.ok) { + throw new Error(data.error || `Amplitude Real-time API error: 
${response.status}`) + } + + const result = data.data ?? {} + + return { + success: true, + output: { + series: result.series ?? [], + seriesLabels: result.seriesLabels ?? [], + xValues: result.xValues ?? [], + }, + } + }, + + outputs: { + series: { + type: 'json', + description: 'Array of data series with active user counts at 5-minute intervals', + }, + seriesLabels: { + type: 'array', + description: 'Labels for each series (e.g., "Today", "Yesterday")', + items: { type: 'string' }, + }, + xValues: { + type: 'array', + description: 'Time values for the x-axis (e.g., "15:00", "15:05")', + items: { type: 'string' }, + }, + }, +} diff --git a/apps/sim/tools/amplitude/send_event.ts b/apps/sim/tools/amplitude/send_event.ts new file mode 100644 index 00000000000..a24c0a45f32 --- /dev/null +++ b/apps/sim/tools/amplitude/send_event.ts @@ -0,0 +1,214 @@ +import type { AmplitudeSendEventParams, AmplitudeSendEventResponse } from '@/tools/amplitude/types' +import type { ToolConfig } from '@/tools/types' + +export const sendEventTool: ToolConfig = { + id: 'amplitude_send_event', + name: 'Amplitude Send Event', + description: 'Track an event in Amplitude using the HTTP V2 API.', + version: '1.0.0', + + params: { + apiKey: { + type: 'string', + required: true, + visibility: 'user-only', + description: 'Amplitude API Key', + }, + userId: { + type: 'string', + required: false, + visibility: 'user-or-llm', + description: 'User ID (required if no device_id)', + }, + deviceId: { + type: 'string', + required: false, + visibility: 'user-or-llm', + description: 'Device ID (required if no user_id)', + }, + eventType: { + type: 'string', + required: true, + visibility: 'user-or-llm', + description: 'Name of the event (e.g., "page_view", "purchase")', + }, + eventProperties: { + type: 'string', + required: false, + visibility: 'user-or-llm', + description: 'JSON object of custom event properties', + }, + userProperties: { + type: 'string', + required: false, + visibility: 'user-or-llm', + 
description: + 'JSON object of user properties to set (supports $set, $setOnce, $add, $append, $unset)', + }, + time: { + type: 'string', + required: false, + visibility: 'user-or-llm', + description: 'Event timestamp in milliseconds since epoch', + }, + sessionId: { + type: 'string', + required: false, + visibility: 'user-or-llm', + description: 'Session start time in milliseconds since epoch', + }, + insertId: { + type: 'string', + required: false, + visibility: 'user-or-llm', + description: 'Unique ID for deduplication (within 7-day window)', + }, + appVersion: { + type: 'string', + required: false, + visibility: 'user-or-llm', + description: 'Application version string', + }, + platform: { + type: 'string', + required: false, + visibility: 'user-or-llm', + description: 'Platform (e.g., "Web", "iOS", "Android")', + }, + country: { + type: 'string', + required: false, + visibility: 'user-or-llm', + description: 'Two-letter country code', + }, + language: { + type: 'string', + required: false, + visibility: 'user-or-llm', + description: 'Language code (e.g., "en")', + }, + ip: { + type: 'string', + required: false, + visibility: 'user-or-llm', + description: 'IP address for geo-location', + }, + price: { + type: 'string', + required: false, + visibility: 'user-or-llm', + description: 'Price of the item purchased', + }, + quantity: { + type: 'string', + required: false, + visibility: 'user-or-llm', + description: 'Quantity of items purchased', + }, + revenue: { + type: 'string', + required: false, + visibility: 'user-or-llm', + description: 'Revenue amount', + }, + productId: { + type: 'string', + required: false, + visibility: 'user-or-llm', + description: 'Product identifier', + }, + revenueType: { + type: 'string', + required: false, + visibility: 'user-or-llm', + description: 'Revenue type (e.g., "purchase", "refund")', + }, + }, + + request: { + url: 'https://api2.amplitude.com/2/httpapi', + method: 'POST', + headers: () => ({ + 'Content-Type': 
'application/json', + }), + body: (params) => { + const event: Record = { + event_type: params.eventType, + } + + if (params.userId) event.user_id = params.userId + if (params.deviceId) event.device_id = params.deviceId + if (params.time) event.time = Number(params.time) + if (params.sessionId) event.session_id = Number(params.sessionId) + if (params.insertId) event.insert_id = params.insertId + if (params.appVersion) event.app_version = params.appVersion + if (params.platform) event.platform = params.platform + if (params.country) event.country = params.country + if (params.language) event.language = params.language + if (params.ip) event.ip = params.ip + if (params.price) event.price = Number(params.price) + if (params.quantity) event.quantity = Number(params.quantity) + if (params.revenue) event.revenue = Number(params.revenue) + if (params.productId) event.product_id = params.productId + if (params.revenueType) event.revenue_type = params.revenueType + + if (params.eventProperties) { + try { + event.event_properties = JSON.parse(params.eventProperties) + } catch { + event.event_properties = {} + } + } + + if (params.userProperties) { + try { + event.user_properties = JSON.parse(params.userProperties) + } catch { + event.user_properties = {} + } + } + + return { + api_key: params.apiKey, + events: [event], + } + }, + }, + + transformResponse: async (response: Response) => { + const data = await response.json() + + if (data.code !== 200) { + throw new Error(data.error || `Amplitude API error: code ${data.code}`) + } + + return { + success: true, + output: { + code: data.code ?? 200, + eventsIngested: data.events_ingested ?? 0, + payloadSizeBytes: data.payload_size_bytes ?? 0, + serverUploadTime: data.server_upload_time ?? 
0, + }, + } + }, + + outputs: { + code: { + type: 'number', + description: 'Response code (200 for success)', + }, + eventsIngested: { + type: 'number', + description: 'Number of events ingested', + }, + payloadSizeBytes: { + type: 'number', + description: 'Size of the payload in bytes', + }, + serverUploadTime: { + type: 'number', + description: 'Server upload timestamp', + }, + }, +} diff --git a/apps/sim/tools/amplitude/types.ts b/apps/sim/tools/amplitude/types.ts new file mode 100644 index 00000000000..737de7c8e48 --- /dev/null +++ b/apps/sim/tools/amplitude/types.ts @@ -0,0 +1,241 @@ +import type { ToolResponse } from '@/tools/types' + +/** + * Base params shared by endpoints using API key in body. + */ +export interface AmplitudeApiKeyParams { + apiKey: string +} + +/** + * Base params shared by endpoints using Basic Auth (api_key:secret_key). + */ +export interface AmplitudeBasicAuthParams { + apiKey: string + secretKey: string +} + +/** + * Send Event params (HTTP V2 API). + */ +export interface AmplitudeSendEventParams extends AmplitudeApiKeyParams { + userId?: string + deviceId?: string + eventType: string + eventProperties?: string + userProperties?: string + time?: string + sessionId?: string + insertId?: string + appVersion?: string + platform?: string + country?: string + language?: string + ip?: string + price?: string + quantity?: string + revenue?: string + productId?: string + revenueType?: string +} + +export interface AmplitudeSendEventResponse extends ToolResponse { + output: { + code: number + eventsIngested: number + payloadSizeBytes: number + serverUploadTime: number + } +} + +/** + * Identify User params (Identify API). 
+ */ +export interface AmplitudeIdentifyUserParams extends AmplitudeApiKeyParams { + userId?: string + deviceId?: string + userProperties: string +} + +export interface AmplitudeIdentifyUserResponse extends ToolResponse { + output: { + code: number + message: string | null + } +} + +/** + * Group Identify params (Group Identify API). + */ +export interface AmplitudeGroupIdentifyParams extends AmplitudeApiKeyParams { + groupType: string + groupValue: string + groupProperties: string +} + +export interface AmplitudeGroupIdentifyResponse extends ToolResponse { + output: { + code: number + message: string | null + } +} + +/** + * User Search params (Dashboard REST API). + */ +export interface AmplitudeUserSearchParams extends AmplitudeBasicAuthParams { + user: string +} + +export interface AmplitudeUserSearchResponse extends ToolResponse { + output: { + matches: Array<{ + amplitudeId: number + userId: string | null + }> + type: string | null + } +} + +/** + * User Activity params (Dashboard REST API). + */ +export interface AmplitudeUserActivityParams extends AmplitudeBasicAuthParams { + amplitudeId: string + offset?: string + limit?: string + direction?: string +} + +export interface AmplitudeUserActivityResponse extends ToolResponse { + output: { + events: Array<{ + eventType: string + eventTime: string + eventProperties: Record + userProperties: Record + sessionId: number | null + platform: string | null + country: string | null + city: string | null + }> + userData: { + userId: string | null + canonicalAmplitudeId: number | null + numEvents: number | null + numSessions: number | null + platform: string | null + country: string | null + } | null + } +} + +/** + * User Profile params (User Profile API). 
+ */ +export interface AmplitudeUserProfileParams { + secretKey: string + userId?: string + deviceId?: string + getAmpProps?: string + getCohortIds?: string + getComputations?: string +} + +export interface AmplitudeUserProfileResponse extends ToolResponse { + output: { + userId: string | null + deviceId: string | null + ampProps: Record | null + cohortIds: string[] | null + computations: Record | null + } +} + +/** + * Event Segmentation params (Dashboard REST API). + */ +export interface AmplitudeEventSegmentationParams extends AmplitudeBasicAuthParams { + eventType: string + start: string + end: string + metric?: string + interval?: string + groupBy?: string + limit?: string +} + +export interface AmplitudeEventSegmentationResponse extends ToolResponse { + output: { + series: unknown[] + seriesLabels: string[] + seriesCollapsed: unknown[] + xValues: string[] + } +} + +/** + * Get Active Users params (Dashboard REST API). + */ +export interface AmplitudeGetActiveUsersParams extends AmplitudeBasicAuthParams { + start: string + end: string + metric?: string + interval?: string +} + +export interface AmplitudeGetActiveUsersResponse extends ToolResponse { + output: { + series: number[][] + seriesMeta: string[] + xValues: string[] + } +} + +/** + * Real-time Active Users params (Dashboard REST API). + */ +export interface AmplitudeRealtimeActiveUsersParams extends AmplitudeBasicAuthParams {} + +export interface AmplitudeRealtimeActiveUsersResponse extends ToolResponse { + output: { + series: number[][] + seriesLabels: string[] + xValues: string[] + } +} + +/** + * List Events params (Dashboard REST API). + */ +export interface AmplitudeListEventsParams extends AmplitudeBasicAuthParams {} + +export interface AmplitudeListEventsResponse extends ToolResponse { + output: { + events: Array<{ + value: string + displayName: string | null + totals: number + hidden: boolean + deleted: boolean + }> + } +} + +/** + * Get Revenue params (Dashboard REST API). 
+ */ +export interface AmplitudeGetRevenueParams extends AmplitudeBasicAuthParams { + start: string + end: string + metric?: string + interval?: string +} + +export interface AmplitudeGetRevenueResponse extends ToolResponse { + output: { + series: unknown[] + seriesLabels: string[] + xValues: string[] + } +} diff --git a/apps/sim/tools/amplitude/user_activity.ts b/apps/sim/tools/amplitude/user_activity.ts new file mode 100644 index 00000000000..1dfa504e553 --- /dev/null +++ b/apps/sim/tools/amplitude/user_activity.ts @@ -0,0 +1,144 @@ +import type { + AmplitudeUserActivityParams, + AmplitudeUserActivityResponse, +} from '@/tools/amplitude/types' +import type { ToolConfig } from '@/tools/types' + +export const userActivityTool: ToolConfig< + AmplitudeUserActivityParams, + AmplitudeUserActivityResponse +> = { + id: 'amplitude_user_activity', + name: 'Amplitude User Activity', + description: 'Get the event stream for a specific user by their Amplitude ID.', + version: '1.0.0', + + params: { + apiKey: { + type: 'string', + required: true, + visibility: 'user-only', + description: 'Amplitude API Key', + }, + secretKey: { + type: 'string', + required: true, + visibility: 'user-only', + description: 'Amplitude Secret Key', + }, + amplitudeId: { + type: 'string', + required: true, + visibility: 'user-or-llm', + description: 'Amplitude internal user ID', + }, + offset: { + type: 'string', + required: false, + visibility: 'user-or-llm', + description: 'Offset for pagination (default 0)', + }, + limit: { + type: 'string', + required: false, + visibility: 'user-or-llm', + description: 'Maximum number of events to return (default 1000, max 1000)', + }, + direction: { + type: 'string', + required: false, + visibility: 'user-or-llm', + description: 'Sort direction: "latest" or "earliest" (default: latest)', + }, + }, + + request: { + url: (params) => { + const url = new URL('https://amplitude.com/api/2/useractivity') + url.searchParams.set('user', params.amplitudeId.trim()) + if 
(params.offset) url.searchParams.set('offset', params.offset) + if (params.limit) url.searchParams.set('limit', params.limit) + if (params.direction) url.searchParams.set('direction', params.direction) + return url.toString() + }, + method: 'GET', + headers: (params) => ({ + Authorization: `Basic ${btoa(`${params.apiKey}:${params.secretKey}`)}`, + }), + }, + + transformResponse: async (response: Response) => { + const data = await response.json() + + if (!response.ok) { + throw new Error(data.error || `Amplitude User Activity API error: ${response.status}`) + } + + const events = (data.events ?? []).map( + (e: Record) => + ({ + eventType: (e.event_type as string) ?? '', + eventTime: (e.event_time as string) ?? '', + eventProperties: (e.event_properties as Record) ?? {}, + userProperties: (e.user_properties as Record) ?? {}, + sessionId: (e.session_id as number) ?? null, + platform: (e.platform as string) ?? null, + country: (e.country as string) ?? null, + city: (e.city as string) ?? null, + }) as const + ) + + const ud = data.userData as Record | undefined + const userData = ud + ? { + userId: (ud.user_id as string) ?? null, + canonicalAmplitudeId: (ud.canonical_amplitude_id as number) ?? null, + numEvents: (ud.num_events as number) ?? null, + numSessions: (ud.num_sessions as number) ?? null, + platform: (ud.platform as string) ?? null, + country: (ud.country as string) ?? 
null, + } + : null + + return { + success: true, + output: { + events, + userData, + }, + } + }, + + outputs: { + events: { + type: 'array', + description: 'List of user events', + items: { + type: 'object', + properties: { + eventType: { type: 'string', description: 'Type of event' }, + eventTime: { type: 'string', description: 'Event timestamp' }, + eventProperties: { type: 'json', description: 'Custom event properties' }, + userProperties: { type: 'json', description: 'User properties at event time' }, + sessionId: { type: 'number', description: 'Session ID' }, + platform: { type: 'string', description: 'Platform' }, + country: { type: 'string', description: 'Country' }, + city: { type: 'string', description: 'City' }, + }, + }, + }, + userData: { + type: 'json', + description: 'User metadata', + optional: true, + properties: { + userId: { type: 'string', description: 'External user ID' }, + canonicalAmplitudeId: { type: 'number', description: 'Canonical Amplitude ID' }, + numEvents: { type: 'number', description: 'Total event count' }, + numSessions: { type: 'number', description: 'Total session count' }, + platform: { type: 'string', description: 'Primary platform' }, + country: { type: 'string', description: 'Country' }, + }, + }, + }, +} diff --git a/apps/sim/tools/amplitude/user_profile.ts b/apps/sim/tools/amplitude/user_profile.ts new file mode 100644 index 00000000000..05395d7e4ca --- /dev/null +++ b/apps/sim/tools/amplitude/user_profile.ts @@ -0,0 +1,120 @@ +import type { + AmplitudeUserProfileParams, + AmplitudeUserProfileResponse, +} from '@/tools/amplitude/types' +import type { ToolConfig } from '@/tools/types' + +export const userProfileTool: ToolConfig = + { + id: 'amplitude_user_profile', + name: 'Amplitude User Profile', + description: + 'Get a user profile including properties, cohort memberships, and computed properties.', + version: '1.0.0', + + params: { + secretKey: { + type: 'string', + required: true, + visibility: 'user-only', + 
description: 'Amplitude Secret Key', + }, + userId: { + type: 'string', + required: false, + visibility: 'user-or-llm', + description: 'External user ID (required if no device_id)', + }, + deviceId: { + type: 'string', + required: false, + visibility: 'user-or-llm', + description: 'Device ID (required if no user_id)', + }, + getAmpProps: { + type: 'string', + required: false, + visibility: 'user-or-llm', + description: 'Include Amplitude user properties (true/false, default: false)', + }, + getCohortIds: { + type: 'string', + required: false, + visibility: 'user-or-llm', + description: 'Include cohort IDs the user belongs to (true/false, default: false)', + }, + getComputations: { + type: 'string', + required: false, + visibility: 'user-or-llm', + description: 'Include computed user properties (true/false, default: false)', + }, + }, + + request: { + url: (params) => { + const url = new URL('https://profile-api.amplitude.com/v1/userprofile') + if (params.userId) url.searchParams.set('user_id', params.userId.trim()) + if (params.deviceId) url.searchParams.set('device_id', params.deviceId.trim()) + if (params.getAmpProps) url.searchParams.set('get_amp_props', params.getAmpProps) + if (params.getCohortIds) url.searchParams.set('get_cohort_ids', params.getCohortIds) + if (params.getComputations) url.searchParams.set('get_computations', params.getComputations) + return url.toString() + }, + method: 'GET', + headers: (params) => ({ + Authorization: `Api-Key ${params.secretKey}`, + }), + }, + + transformResponse: async (response: Response) => { + const data = await response.json() + + if (!response.ok) { + throw new Error(data.error || `Amplitude User Profile API error: ${response.status}`) + } + + const userData = data.userData ?? {} + + return { + success: true, + output: { + userId: (userData.user_id as string) ?? null, + deviceId: (userData.device_id as string) ?? null, + ampProps: (userData.amp_props as Record) ?? 
null, + cohortIds: (userData.cohort_ids as string[]) ?? null, + computations: (userData.computations as Record) ?? null, + }, + } + }, + + outputs: { + userId: { + type: 'string', + description: 'External user ID', + optional: true, + }, + deviceId: { + type: 'string', + description: 'Device ID', + optional: true, + }, + ampProps: { + type: 'json', + description: + 'Amplitude user properties (library, first_used, last_used, custom properties)', + optional: true, + }, + cohortIds: { + type: 'array', + description: 'List of cohort IDs the user belongs to', + optional: true, + items: { type: 'string' }, + }, + computations: { + type: 'json', + description: 'Computed user properties', + optional: true, + }, + }, + } diff --git a/apps/sim/tools/amplitude/user_search.ts b/apps/sim/tools/amplitude/user_search.ts new file mode 100644 index 00000000000..91d8d2b9af6 --- /dev/null +++ b/apps/sim/tools/amplitude/user_search.ts @@ -0,0 +1,89 @@ +import type { + AmplitudeUserSearchParams, + AmplitudeUserSearchResponse, +} from '@/tools/amplitude/types' +import type { ToolConfig } from '@/tools/types' + +export const userSearchTool: ToolConfig = { + id: 'amplitude_user_search', + name: 'Amplitude User Search', + description: + 'Search for a user by User ID, Device ID, or Amplitude ID using the Dashboard REST API.', + version: '1.0.0', + + params: { + apiKey: { + type: 'string', + required: true, + visibility: 'user-only', + description: 'Amplitude API Key', + }, + secretKey: { + type: 'string', + required: true, + visibility: 'user-only', + description: 'Amplitude Secret Key', + }, + user: { + type: 'string', + required: true, + visibility: 'user-or-llm', + description: 'User ID, Device ID, or Amplitude ID to search for', + }, + }, + + request: { + url: (params) => { + const url = new URL('https://amplitude.com/api/2/usersearch') + url.searchParams.set('user', params.user.trim()) + return url.toString() + }, + method: 'GET', + headers: (params) => ({ + Authorization: `Basic 
${btoa(`${params.apiKey}:${params.secretKey}`)}`, + }), + }, + + transformResponse: async (response: Response) => { + const data = await response.json() + + if (!response.ok) { + throw new Error(data.error || `Amplitude User Search API error: ${response.status}`) + } + + const matches = (data.matches ?? []).map( + (m: Record) => + ({ + amplitudeId: (m.amplitude_id as number) ?? 0, + userId: (m.user_id as string) ?? null, + }) as const + ) + + return { + success: true, + output: { + matches, + type: (data.type as string) ?? null, + }, + } + }, + + outputs: { + matches: { + type: 'array', + description: 'List of matching users', + items: { + type: 'object', + properties: { + amplitudeId: { type: 'number', description: 'Amplitude internal user ID' }, + userId: { type: 'string', description: 'External user ID' }, + }, + }, + }, + type: { + type: 'string', + description: 'Match type (e.g., match_user_or_device_id)', + optional: true, + }, + }, +} diff --git a/apps/sim/tools/google_pagespeed/analyze.ts b/apps/sim/tools/google_pagespeed/analyze.ts new file mode 100644 index 00000000000..a5fc0cfa5e2 --- /dev/null +++ b/apps/sim/tools/google_pagespeed/analyze.ts @@ -0,0 +1,223 @@ +import type { + GooglePagespeedAnalyzeParams, + GooglePagespeedAnalyzeResponse, +} from '@/tools/google_pagespeed/types' +import type { ToolConfig } from '@/tools/types' + +export const analyzeTool: ToolConfig = + { + id: 'google_pagespeed_analyze', + name: 'Google PageSpeed Analyze', + description: + 'Analyze a webpage for performance, accessibility, SEO, and best practices using Google PageSpeed Insights.', + version: '1.0.0', + + params: { + apiKey: { + type: 'string', + required: true, + visibility: 'user-only', + description: 'Google PageSpeed Insights API Key', + }, + url: { + type: 'string', + required: true, + visibility: 'user-or-llm', + description: 'The URL of the webpage to analyze', + }, + category: { + type: 'string', + required: false, + visibility: 'user-or-llm', + description: + 
'Lighthouse categories to analyze (comma-separated): performance, accessibility, best-practices, seo', + }, + strategy: { + type: 'string', + required: false, + visibility: 'user-or-llm', + description: 'Analysis strategy: desktop or mobile', + }, + locale: { + type: 'string', + required: false, + visibility: 'user-or-llm', + description: 'Locale for results (e.g., en, fr, de)', + }, + }, + + request: { + url: (params) => { + const url = new URL('https://www.googleapis.com/pagespeedonline/v5/runPagespeed') + url.searchParams.append('url', params.url.trim()) + url.searchParams.append('key', params.apiKey) + + if (params.category) { + const categories = params.category.split(',').map((c) => c.trim()) + for (const cat of categories) { + url.searchParams.append('category', cat) + } + } else { + url.searchParams.append('category', 'performance') + url.searchParams.append('category', 'accessibility') + url.searchParams.append('category', 'best-practices') + url.searchParams.append('category', 'seo') + } + + if (params.strategy) { + url.searchParams.append('strategy', params.strategy) + } + if (params.locale) { + url.searchParams.append('locale', params.locale) + } + + return url.toString() + }, + method: 'GET', + headers: () => ({ + Accept: 'application/json', + }), + }, + + transformResponse: async (response: Response) => { + const data = await response.json() + + if (!response.ok) { + throw new Error(data.error?.message ?? 'Failed to analyze page') + } + + const lighthouse = data.lighthouseResult ?? {} + const categories = lighthouse.categories ?? {} + const audits = lighthouse.audits ?? {} + const loadingExperience = data.loadingExperience ?? {} + + return { + success: true, + output: { + finalUrl: data.id ?? null, + performanceScore: categories.performance?.score ?? null, + accessibilityScore: categories.accessibility?.score ?? null, + bestPracticesScore: categories['best-practices']?.score ?? null, + seoScore: categories.seo?.score ?? 
null, + firstContentfulPaint: audits['first-contentful-paint']?.displayValue ?? null, + firstContentfulPaintMs: audits['first-contentful-paint']?.numericValue ?? null, + largestContentfulPaint: audits['largest-contentful-paint']?.displayValue ?? null, + largestContentfulPaintMs: audits['largest-contentful-paint']?.numericValue ?? null, + totalBlockingTime: audits['total-blocking-time']?.displayValue ?? null, + totalBlockingTimeMs: audits['total-blocking-time']?.numericValue ?? null, + cumulativeLayoutShift: audits['cumulative-layout-shift']?.displayValue ?? null, + cumulativeLayoutShiftValue: audits['cumulative-layout-shift']?.numericValue ?? null, + speedIndex: audits['speed-index']?.displayValue ?? null, + speedIndexMs: audits['speed-index']?.numericValue ?? null, + interactive: audits.interactive?.displayValue ?? null, + interactiveMs: audits.interactive?.numericValue ?? null, + overallCategory: loadingExperience.overall_category ?? null, + analysisTimestamp: data.analysisUTCTimestamp ?? null, + lighthouseVersion: lighthouse.lighthouseVersion ?? 
null, + }, + } + }, + + outputs: { + finalUrl: { + type: 'string', + description: 'The final URL after redirects', + optional: true, + }, + performanceScore: { + type: 'number', + description: 'Performance category score (0-1)', + optional: true, + }, + accessibilityScore: { + type: 'number', + description: 'Accessibility category score (0-1)', + optional: true, + }, + bestPracticesScore: { + type: 'number', + description: 'Best Practices category score (0-1)', + optional: true, + }, + seoScore: { + type: 'number', + description: 'SEO category score (0-1)', + optional: true, + }, + firstContentfulPaint: { + type: 'string', + description: 'Time to First Contentful Paint (display value)', + optional: true, + }, + firstContentfulPaintMs: { + type: 'number', + description: 'Time to First Contentful Paint in milliseconds', + optional: true, + }, + largestContentfulPaint: { + type: 'string', + description: 'Time to Largest Contentful Paint (display value)', + optional: true, + }, + largestContentfulPaintMs: { + type: 'number', + description: 'Time to Largest Contentful Paint in milliseconds', + optional: true, + }, + totalBlockingTime: { + type: 'string', + description: 'Total Blocking Time (display value)', + optional: true, + }, + totalBlockingTimeMs: { + type: 'number', + description: 'Total Blocking Time in milliseconds', + optional: true, + }, + cumulativeLayoutShift: { + type: 'string', + description: 'Cumulative Layout Shift (display value)', + optional: true, + }, + cumulativeLayoutShiftValue: { + type: 'number', + description: 'Cumulative Layout Shift numeric value', + optional: true, + }, + speedIndex: { + type: 'string', + description: 'Speed Index (display value)', + optional: true, + }, + speedIndexMs: { + type: 'number', + description: 'Speed Index in milliseconds', + optional: true, + }, + interactive: { + type: 'string', + description: 'Time to Interactive (display value)', + optional: true, + }, + interactiveMs: { + type: 'number', + description: 'Time 
to Interactive in milliseconds', + optional: true, + }, + overallCategory: { + type: 'string', + description: 'Overall loading experience category (FAST, AVERAGE, SLOW, or NONE)', + optional: true, + }, + analysisTimestamp: { + type: 'string', + description: 'UTC timestamp of the analysis', + optional: true, + }, + lighthouseVersion: { + type: 'string', + description: 'Version of Lighthouse used for the analysis', + optional: true, + }, + }, + } diff --git a/apps/sim/tools/google_pagespeed/index.ts b/apps/sim/tools/google_pagespeed/index.ts new file mode 100644 index 00000000000..61d688f0bee --- /dev/null +++ b/apps/sim/tools/google_pagespeed/index.ts @@ -0,0 +1,5 @@ +import { analyzeTool } from '@/tools/google_pagespeed/analyze' + +export const googlePagespeedAnalyzeTool = analyzeTool + +export * from '@/tools/google_pagespeed/types' diff --git a/apps/sim/tools/google_pagespeed/types.ts b/apps/sim/tools/google_pagespeed/types.ts new file mode 100644 index 00000000000..77f49aeee61 --- /dev/null +++ b/apps/sim/tools/google_pagespeed/types.ts @@ -0,0 +1,37 @@ +import type { ToolResponse } from '@/tools/types' + +export interface GooglePagespeedBaseParams { + apiKey: string +} + +export interface GooglePagespeedAnalyzeParams extends GooglePagespeedBaseParams { + url: string + category?: string + strategy?: string + locale?: string +} + +export interface GooglePagespeedAnalyzeResponse extends ToolResponse { + output: { + finalUrl: string | null + performanceScore: number | null + accessibilityScore: number | null + bestPracticesScore: number | null + seoScore: number | null + firstContentfulPaint: string | null + firstContentfulPaintMs: number | null + largestContentfulPaint: string | null + largestContentfulPaintMs: number | null + totalBlockingTime: string | null + totalBlockingTimeMs: number | null + cumulativeLayoutShift: string | null + cumulativeLayoutShiftValue: number | null + speedIndex: string | null + speedIndexMs: number | null + interactive: string | null 
+ interactiveMs: number | null + overallCategory: string | null + analysisTimestamp: string | null + lighthouseVersion: string | null + } +} diff --git a/apps/sim/tools/pagerduty/add_note.ts b/apps/sim/tools/pagerduty/add_note.ts new file mode 100644 index 00000000000..5c900c15618 --- /dev/null +++ b/apps/sim/tools/pagerduty/add_note.ts @@ -0,0 +1,78 @@ +import type { PagerDutyAddNoteParams, PagerDutyAddNoteResponse } from '@/tools/pagerduty/types' +import type { ToolConfig } from '@/tools/types' + +export const addNoteTool: ToolConfig = { + id: 'pagerduty_add_note', + name: 'PagerDuty Add Note', + description: 'Add a note to an existing PagerDuty incident.', + version: '1.0.0', + + params: { + apiKey: { + type: 'string', + required: true, + visibility: 'user-only', + description: 'PagerDuty REST API Key', + }, + fromEmail: { + type: 'string', + required: true, + visibility: 'user-or-llm', + description: 'Email address of a valid PagerDuty user', + }, + incidentId: { + type: 'string', + required: true, + visibility: 'user-or-llm', + description: 'ID of the incident to add the note to', + }, + content: { + type: 'string', + required: true, + visibility: 'user-or-llm', + description: 'Note content text', + }, + }, + + request: { + url: (params) => `https://api.pagerduty.com/incidents/${params.incidentId.trim()}/notes`, + method: 'POST', + headers: (params) => ({ + Authorization: `Token token=${params.apiKey}`, + Accept: 'application/vnd.pagerduty+json;version=2', + 'Content-Type': 'application/json', + From: params.fromEmail, + }), + body: (params) => ({ + note: { + content: params.content, + }, + }), + }, + + transformResponse: async (response: Response) => { + const data = await response.json() + + if (!response.ok) { + throw new Error(data.error?.message || `PagerDuty API error: ${response.status}`) + } + + const note = data.note ?? {} + return { + success: true, + output: { + id: note.id ?? null, + content: note.content ?? null, + createdAt: note.created_at ?? 
null, + userName: note.user?.summary ?? null, + }, + } + }, + + outputs: { + id: { type: 'string', description: 'Note ID' }, + content: { type: 'string', description: 'Note content' }, + createdAt: { type: 'string', description: 'Creation timestamp' }, + userName: { type: 'string', description: 'Name of the user who created the note' }, + }, +} diff --git a/apps/sim/tools/pagerduty/create_incident.ts b/apps/sim/tools/pagerduty/create_incident.ts new file mode 100644 index 00000000000..6a4c98854f5 --- /dev/null +++ b/apps/sim/tools/pagerduty/create_incident.ts @@ -0,0 +1,149 @@ +import type { + PagerDutyCreateIncidentParams, + PagerDutyCreateIncidentResponse, +} from '@/tools/pagerduty/types' +import type { ToolConfig } from '@/tools/types' + +export const createIncidentTool: ToolConfig< + PagerDutyCreateIncidentParams, + PagerDutyCreateIncidentResponse +> = { + id: 'pagerduty_create_incident', + name: 'PagerDuty Create Incident', + description: 'Create a new incident in PagerDuty.', + version: '1.0.0', + + params: { + apiKey: { + type: 'string', + required: true, + visibility: 'user-only', + description: 'PagerDuty REST API Key', + }, + fromEmail: { + type: 'string', + required: true, + visibility: 'user-or-llm', + description: 'Email address of a valid PagerDuty user', + }, + title: { + type: 'string', + required: true, + visibility: 'user-or-llm', + description: 'Incident title/summary', + }, + serviceId: { + type: 'string', + required: true, + visibility: 'user-or-llm', + description: 'ID of the PagerDuty service', + }, + urgency: { + type: 'string', + required: false, + visibility: 'user-or-llm', + description: 'Urgency level (high or low)', + }, + body: { + type: 'string', + required: false, + visibility: 'user-or-llm', + description: 'Detailed description of the incident', + }, + escalationPolicyId: { + type: 'string', + required: false, + visibility: 'user-or-llm', + description: 'Escalation policy ID to assign', + }, + assigneeId: { + type: 'string', + 
required: false, + visibility: 'user-or-llm', + description: 'User ID to assign the incident to', + }, + }, + + request: { + url: 'https://api.pagerduty.com/incidents', + method: 'POST', + headers: (params) => ({ + Authorization: `Token token=${params.apiKey}`, + Accept: 'application/vnd.pagerduty+json;version=2', + 'Content-Type': 'application/json', + From: params.fromEmail, + }), + body: (params) => { + const incident: Record = { + type: 'incident', + title: params.title, + service: { + id: params.serviceId, + type: 'service_reference', + }, + } + + if (params.urgency) incident.urgency = params.urgency + if (params.body) { + incident.body = { + type: 'incident_body', + details: params.body, + } + } + if (params.escalationPolicyId) { + incident.escalation_policy = { + id: params.escalationPolicyId, + type: 'escalation_policy_reference', + } + } + if (params.assigneeId) { + incident.assignments = [ + { + assignee: { + id: params.assigneeId, + type: 'user_reference', + }, + }, + ] + } + + return { incident } + }, + }, + + transformResponse: async (response: Response) => { + const data = await response.json() + + if (!response.ok) { + throw new Error(data.error?.message || `PagerDuty API error: ${response.status}`) + } + + const inc = data.incident ?? {} + return { + success: true, + output: { + id: inc.id ?? null, + incidentNumber: inc.incident_number ?? null, + title: inc.title ?? null, + status: inc.status ?? null, + urgency: inc.urgency ?? null, + createdAt: inc.created_at ?? null, + serviceName: inc.service?.summary ?? null, + serviceId: inc.service?.id ?? null, + htmlUrl: inc.html_url ?? 
null, + }, + } + }, + + outputs: { + id: { type: 'string', description: 'Created incident ID' }, + incidentNumber: { type: 'number', description: 'Incident number' }, + title: { type: 'string', description: 'Incident title' }, + status: { type: 'string', description: 'Incident status' }, + urgency: { type: 'string', description: 'Incident urgency' }, + createdAt: { type: 'string', description: 'Creation timestamp' }, + serviceName: { type: 'string', description: 'Service name' }, + serviceId: { type: 'string', description: 'Service ID' }, + htmlUrl: { type: 'string', description: 'PagerDuty web URL' }, + }, +} diff --git a/apps/sim/tools/pagerduty/index.ts b/apps/sim/tools/pagerduty/index.ts new file mode 100644 index 00000000000..e6ee2bc34b4 --- /dev/null +++ b/apps/sim/tools/pagerduty/index.ts @@ -0,0 +1,13 @@ +import { addNoteTool } from '@/tools/pagerduty/add_note' +import { createIncidentTool } from '@/tools/pagerduty/create_incident' +import { listIncidentsTool } from '@/tools/pagerduty/list_incidents' +import { listOncallsTool } from '@/tools/pagerduty/list_oncalls' +import { listServicesTool } from '@/tools/pagerduty/list_services' +import { updateIncidentTool } from '@/tools/pagerduty/update_incident' + +export const pagerdutyListIncidentsTool = listIncidentsTool +export const pagerdutyCreateIncidentTool = createIncidentTool +export const pagerdutyUpdateIncidentTool = updateIncidentTool +export const pagerdutyAddNoteTool = addNoteTool +export const pagerdutyListServicesTool = listServicesTool +export const pagerdutyListOncallsTool = listOncallsTool diff --git a/apps/sim/tools/pagerduty/list_incidents.ts b/apps/sim/tools/pagerduty/list_incidents.ts new file mode 100644 index 00000000000..a2ed3530761 --- /dev/null +++ b/apps/sim/tools/pagerduty/list_incidents.ts @@ -0,0 +1,161 @@ +import type { + PagerDutyListIncidentsParams, + PagerDutyListIncidentsResponse, +} from '@/tools/pagerduty/types' +import type { ToolConfig } from '@/tools/types' + +export const 
listIncidentsTool: ToolConfig< + PagerDutyListIncidentsParams, + PagerDutyListIncidentsResponse +> = { + id: 'pagerduty_list_incidents', + name: 'PagerDuty List Incidents', + description: 'List incidents from PagerDuty with optional filters.', + version: '1.0.0', + + params: { + apiKey: { + type: 'string', + required: true, + visibility: 'user-only', + description: 'PagerDuty REST API Key', + }, + statuses: { + type: 'string', + required: false, + visibility: 'user-or-llm', + description: 'Comma-separated statuses to filter (triggered, acknowledged, resolved)', + }, + serviceIds: { + type: 'string', + required: false, + visibility: 'user-or-llm', + description: 'Comma-separated service IDs to filter', + }, + since: { + type: 'string', + required: false, + visibility: 'user-or-llm', + description: 'Start date filter (ISO 8601 format)', + }, + until: { + type: 'string', + required: false, + visibility: 'user-or-llm', + description: 'End date filter (ISO 8601 format)', + }, + sortBy: { + type: 'string', + required: false, + visibility: 'user-or-llm', + description: 'Sort field (e.g., created_at:desc)', + }, + limit: { + type: 'string', + required: false, + visibility: 'user-or-llm', + description: 'Maximum number of results (max 100)', + }, + }, + + request: { + url: (params) => { + const query = new URLSearchParams() + if (params.statuses) { + for (const s of params.statuses.split(',')) { + query.append('statuses[]', s.trim()) + } + } + if (params.serviceIds) { + for (const id of params.serviceIds.split(',')) { + query.append('service_ids[]', id.trim()) + } + } + if (params.since) query.set('since', params.since) + if (params.until) query.set('until', params.until) + if (params.sortBy) query.set('sort_by', params.sortBy) + if (params.limit) query.set('limit', params.limit) + query.append('include[]', 'services') + const qs = query.toString() + return `https://api.pagerduty.com/incidents${qs ? 
`?${qs}` : ''}` + }, + method: 'GET', + headers: (params) => ({ + Authorization: `Token token=${params.apiKey}`, + Accept: 'application/vnd.pagerduty+json;version=2', + 'Content-Type': 'application/json', + }), + }, + + transformResponse: async (response: Response) => { + const data = await response.json() + + if (!response.ok) { + throw new Error(data.error?.message || `PagerDuty API error: ${response.status}`) + } + + return { + success: true, + output: { + incidents: (data.incidents ?? []).map( + ( + inc: Record & { + service?: Record + assignments?: Array & { assignee?: Record }> + escalation_policy?: Record + } + ) => ({ + id: inc.id ?? null, + incidentNumber: inc.incident_number ?? null, + title: inc.title ?? null, + status: inc.status ?? null, + urgency: inc.urgency ?? null, + createdAt: inc.created_at ?? null, + updatedAt: inc.updated_at ?? null, + serviceName: inc.service?.summary ?? null, + serviceId: inc.service?.id ?? null, + assigneeName: inc.assignments?.[0]?.assignee?.summary ?? null, + assigneeId: inc.assignments?.[0]?.assignee?.id ?? null, + escalationPolicyName: inc.escalation_policy?.summary ?? null, + htmlUrl: inc.html_url ?? null, + }) + ), + total: data.total ?? 0, + more: data.more ?? 
false, + }, + } + }, + + outputs: { + incidents: { + type: 'array', + description: 'Array of incidents', + items: { + type: 'object', + properties: { + id: { type: 'string', description: 'Incident ID' }, + incidentNumber: { type: 'number', description: 'Incident number' }, + title: { type: 'string', description: 'Incident title' }, + status: { type: 'string', description: 'Incident status' }, + urgency: { type: 'string', description: 'Incident urgency' }, + createdAt: { type: 'string', description: 'Creation timestamp' }, + updatedAt: { type: 'string', description: 'Last updated timestamp' }, + serviceName: { type: 'string', description: 'Service name' }, + serviceId: { type: 'string', description: 'Service ID' }, + assigneeName: { type: 'string', description: 'Assignee name' }, + assigneeId: { type: 'string', description: 'Assignee ID' }, + escalationPolicyName: { type: 'string', description: 'Escalation policy name' }, + htmlUrl: { type: 'string', description: 'PagerDuty web URL' }, + }, + }, + }, + total: { + type: 'number', + description: 'Total number of matching incidents', + }, + more: { + type: 'boolean', + description: 'Whether more results are available', + }, + }, +} diff --git a/apps/sim/tools/pagerduty/list_oncalls.ts b/apps/sim/tools/pagerduty/list_oncalls.ts new file mode 100644 index 00000000000..92f436b9c39 --- /dev/null +++ b/apps/sim/tools/pagerduty/list_oncalls.ts @@ -0,0 +1,145 @@ +import type { + PagerDutyListOncallsParams, + PagerDutyListOncallsResponse, +} from '@/tools/pagerduty/types' +import type { ToolConfig } from '@/tools/types' + +export const listOncallsTool: ToolConfig = + { + id: 'pagerduty_list_oncalls', + name: 'PagerDuty List On-Calls', + description: 'List current on-call entries from PagerDuty.', + version: '1.0.0', + + params: { + apiKey: { + type: 'string', + required: true, + visibility: 'user-only', + description: 'PagerDuty REST API Key', + }, + escalationPolicyIds: { + type: 'string', + required: false, + visibility: 
'user-or-llm', + description: 'Comma-separated escalation policy IDs to filter', + }, + scheduleIds: { + type: 'string', + required: false, + visibility: 'user-or-llm', + description: 'Comma-separated schedule IDs to filter', + }, + since: { + type: 'string', + required: false, + visibility: 'user-or-llm', + description: 'Start time filter (ISO 8601 format)', + }, + until: { + type: 'string', + required: false, + visibility: 'user-or-llm', + description: 'End time filter (ISO 8601 format)', + }, + limit: { + type: 'string', + required: false, + visibility: 'user-or-llm', + description: 'Maximum number of results (max 100)', + }, + }, + + request: { + url: (params) => { + const query = new URLSearchParams() + if (params.escalationPolicyIds) { + for (const id of params.escalationPolicyIds.split(',')) { + query.append('escalation_policy_ids[]', id.trim()) + } + } + if (params.scheduleIds) { + for (const id of params.scheduleIds.split(',')) { + query.append('schedule_ids[]', id.trim()) + } + } + if (params.since) query.set('since', params.since) + if (params.until) query.set('until', params.until) + if (params.limit) query.set('limit', params.limit) + const qs = query.toString() + return `https://api.pagerduty.com/oncalls${qs ? `?${qs}` : ''}` + }, + method: 'GET', + headers: (params) => ({ + Authorization: `Token token=${params.apiKey}`, + Accept: 'application/vnd.pagerduty+json;version=2', + 'Content-Type': 'application/json', + }), + }, + + transformResponse: async (response: Response) => { + const data = await response.json() + + if (!response.ok) { + throw new Error(data.error?.message || `PagerDuty API error: ${response.status}`) + } + + const oncalls = (data.oncalls ?? []).map( + ( + oc: Record & { + user?: Record + escalation_policy?: Record + schedule?: Record + } + ) => ({ + userName: oc.user?.summary ?? null, + userId: oc.user?.id ?? null, + escalationLevel: oc.escalation_level ?? 0, + escalationPolicyName: oc.escalation_policy?.summary ?? 
null, + escalationPolicyId: oc.escalation_policy?.id ?? null, + scheduleName: oc.schedule?.summary ?? null, + scheduleId: oc.schedule?.id ?? null, + start: oc.start ?? null, + end: oc.end ?? null, + }) + ) + + return { + success: true, + output: { + oncalls, + total: data.total ?? oncalls.length, + more: data.more ?? false, + }, + } + }, + + outputs: { + oncalls: { + type: 'array', + description: 'Array of on-call entries', + items: { + type: 'object', + properties: { + userName: { type: 'string', description: 'On-call user name' }, + userId: { type: 'string', description: 'On-call user ID' }, + escalationLevel: { type: 'number', description: 'Escalation level' }, + escalationPolicyName: { type: 'string', description: 'Escalation policy name' }, + escalationPolicyId: { type: 'string', description: 'Escalation policy ID' }, + scheduleName: { type: 'string', description: 'Schedule name' }, + scheduleId: { type: 'string', description: 'Schedule ID' }, + start: { type: 'string', description: 'On-call start time' }, + end: { type: 'string', description: 'On-call end time' }, + }, + }, + }, + total: { + type: 'number', + description: 'Total number of matching on-call entries', + }, + more: { + type: 'boolean', + description: 'Whether more results are available', + }, + }, + } diff --git a/apps/sim/tools/pagerduty/list_services.ts b/apps/sim/tools/pagerduty/list_services.ts new file mode 100644 index 00000000000..af281ffc837 --- /dev/null +++ b/apps/sim/tools/pagerduty/list_services.ts @@ -0,0 +1,108 @@ +import type { + PagerDutyListServicesParams, + PagerDutyListServicesResponse, +} from '@/tools/pagerduty/types' +import type { ToolConfig } from '@/tools/types' + +export const listServicesTool: ToolConfig< + PagerDutyListServicesParams, + PagerDutyListServicesResponse +> = { + id: 'pagerduty_list_services', + name: 'PagerDuty List Services', + description: 'List services from PagerDuty with optional name filter.', + version: '1.0.0', + + params: { + apiKey: { + type: 
'string', + required: true, + visibility: 'user-only', + description: 'PagerDuty REST API Key', + }, + query: { + type: 'string', + required: false, + visibility: 'user-or-llm', + description: 'Filter services by name', + }, + limit: { + type: 'string', + required: false, + visibility: 'user-or-llm', + description: 'Maximum number of results (max 100)', + }, + }, + + request: { + url: (params) => { + const query = new URLSearchParams() + if (params.query) query.set('query', params.query) + if (params.limit) query.set('limit', params.limit) + const qs = query.toString() + return `https://api.pagerduty.com/services${qs ? `?${qs}` : ''}` + }, + method: 'GET', + headers: (params) => ({ + Authorization: `Token token=${params.apiKey}`, + Accept: 'application/vnd.pagerduty+json;version=2', + 'Content-Type': 'application/json', + }), + }, + + transformResponse: async (response: Response) => { + const data = await response.json() + + if (!response.ok) { + throw new Error(data.error?.message || `PagerDuty API error: ${response.status}`) + } + + return { + success: true, + output: { + services: (data.services ?? []).map( + (svc: Record & { escalation_policy?: Record }) => ({ + id: svc.id ?? null, + name: svc.name ?? null, + description: svc.description ?? null, + status: svc.status ?? null, + escalationPolicyName: svc.escalation_policy?.summary ?? null, + escalationPolicyId: svc.escalation_policy?.id ?? null, + createdAt: svc.created_at ?? null, + htmlUrl: svc.html_url ?? null, + }) + ), + total: data.total ?? 0, + more: data.more ?? 
false, + }, + } + }, + + outputs: { + services: { + type: 'array', + description: 'Array of services', + items: { + type: 'object', + properties: { + id: { type: 'string', description: 'Service ID' }, + name: { type: 'string', description: 'Service name' }, + description: { type: 'string', description: 'Service description' }, + status: { type: 'string', description: 'Service status' }, + escalationPolicyName: { type: 'string', description: 'Escalation policy name' }, + escalationPolicyId: { type: 'string', description: 'Escalation policy ID' }, + createdAt: { type: 'string', description: 'Creation timestamp' }, + htmlUrl: { type: 'string', description: 'PagerDuty web URL' }, + }, + }, + }, + total: { + type: 'number', + description: 'Total number of matching services', + }, + more: { + type: 'boolean', + description: 'Whether more results are available', + }, + }, +} diff --git a/apps/sim/tools/pagerduty/types.ts b/apps/sim/tools/pagerduty/types.ts new file mode 100644 index 00000000000..ab800bf1885 --- /dev/null +++ b/apps/sim/tools/pagerduty/types.ts @@ -0,0 +1,169 @@ +import type { ToolResponse } from '@/tools/types' + +/** + * Base params shared by all PagerDuty endpoints. + */ +export interface PagerDutyBaseParams { + apiKey: string +} + +/** + * Params that require a From header for write operations. + */ +export interface PagerDutyWriteParams extends PagerDutyBaseParams { + fromEmail: string +} + +/** + * List Incidents params. 
+ */ +export interface PagerDutyListIncidentsParams extends PagerDutyBaseParams { + statuses?: string + serviceIds?: string + since?: string + until?: string + sortBy?: string + limit?: string +} + +export interface PagerDutyListIncidentsResponse extends ToolResponse { + output: { + incidents: Array<{ + id: string + incidentNumber: number + title: string + status: string + urgency: string + createdAt: string + updatedAt: string | null + serviceName: string | null + serviceId: string | null + assigneeName: string | null + assigneeId: string | null + escalationPolicyName: string | null + htmlUrl: string | null + }> + total: number + more: boolean + } +} + +/** + * Create Incident params. + */ +export interface PagerDutyCreateIncidentParams extends PagerDutyWriteParams { + title: string + serviceId: string + urgency?: string + body?: string + escalationPolicyId?: string + assigneeId?: string +} + +export interface PagerDutyCreateIncidentResponse extends ToolResponse { + output: { + id: string + incidentNumber: number + title: string + status: string + urgency: string + createdAt: string + serviceName: string | null + serviceId: string | null + htmlUrl: string | null + } +} + +/** + * Update Incident params. + */ +export interface PagerDutyUpdateIncidentParams extends PagerDutyWriteParams { + incidentId: string + status?: string + title?: string + urgency?: string + escalationLevel?: string +} + +export interface PagerDutyUpdateIncidentResponse extends ToolResponse { + output: { + id: string + incidentNumber: number + title: string + status: string + urgency: string + updatedAt: string | null + htmlUrl: string | null + } +} + +/** + * Add Note to Incident params. 
+ */ +export interface PagerDutyAddNoteParams extends PagerDutyWriteParams { + incidentId: string + content: string +} + +export interface PagerDutyAddNoteResponse extends ToolResponse { + output: { + id: string + content: string + createdAt: string + userName: string | null + } +} + +/** + * List Services params. + */ +export interface PagerDutyListServicesParams extends PagerDutyBaseParams { + query?: string + limit?: string +} + +export interface PagerDutyListServicesResponse extends ToolResponse { + output: { + services: Array<{ + id: string + name: string + description: string | null + status: string + escalationPolicyName: string | null + escalationPolicyId: string | null + createdAt: string + htmlUrl: string | null + }> + total: number + more: boolean + } +} + +/** + * List On-Calls params. + */ +export interface PagerDutyListOncallsParams extends PagerDutyBaseParams { + escalationPolicyIds?: string + scheduleIds?: string + since?: string + until?: string + limit?: string +} + +export interface PagerDutyListOncallsResponse extends ToolResponse { + output: { + oncalls: Array<{ + userName: string | null + userId: string | null + escalationLevel: number + escalationPolicyName: string | null + escalationPolicyId: string | null + scheduleName: string | null + scheduleId: string | null + start: string | null + end: string | null + }> + total: number + more: boolean + } +} diff --git a/apps/sim/tools/pagerduty/update_incident.ts b/apps/sim/tools/pagerduty/update_incident.ts new file mode 100644 index 00000000000..156b5a1ad57 --- /dev/null +++ b/apps/sim/tools/pagerduty/update_incident.ts @@ -0,0 +1,117 @@ +import type { + PagerDutyUpdateIncidentParams, + PagerDutyUpdateIncidentResponse, +} from '@/tools/pagerduty/types' +import type { ToolConfig } from '@/tools/types' + +export const updateIncidentTool: ToolConfig< + PagerDutyUpdateIncidentParams, + PagerDutyUpdateIncidentResponse +> = { + id: 'pagerduty_update_incident', + name: 'PagerDuty Update Incident', + 
description: 'Update an incident in PagerDuty (acknowledge, resolve, change urgency, etc.).', + version: '1.0.0', + + params: { + apiKey: { + type: 'string', + required: true, + visibility: 'user-only', + description: 'PagerDuty REST API Key', + }, + fromEmail: { + type: 'string', + required: true, + visibility: 'user-or-llm', + description: 'Email address of a valid PagerDuty user', + }, + incidentId: { + type: 'string', + required: true, + visibility: 'user-or-llm', + description: 'ID of the incident to update', + }, + status: { + type: 'string', + required: false, + visibility: 'user-or-llm', + description: 'New status (acknowledged or resolved)', + }, + title: { + type: 'string', + required: false, + visibility: 'user-or-llm', + description: 'New incident title', + }, + urgency: { + type: 'string', + required: false, + visibility: 'user-or-llm', + description: 'New urgency (high or low)', + }, + escalationLevel: { + type: 'string', + required: false, + visibility: 'user-or-llm', + description: 'Escalation level to escalate to', + }, + }, + + request: { + url: (params) => `https://api.pagerduty.com/incidents/${params.incidentId.trim()}`, + method: 'PUT', + headers: (params) => ({ + Authorization: `Token token=${params.apiKey}`, + Accept: 'application/vnd.pagerduty+json;version=2', + 'Content-Type': 'application/json', + From: params.fromEmail, + }), + body: (params) => { + const incident: Record = { + id: params.incidentId, + type: 'incident', + } + + if (params.status) incident.status = params.status + if (params.title) incident.title = params.title + if (params.urgency) incident.urgency = params.urgency + if (params.escalationLevel) { + incident.escalation_level = Number(params.escalationLevel) + } + return { incident } + }, + }, + + transformResponse: async (response: Response) => { + const data = await response.json() + + if (!response.ok) { + throw new Error(data.error?.message || `PagerDuty API error: ${response.status}`) + } + + const inc = data.incident 
?? {} + return { + success: true, + output: { + id: inc.id ?? null, + incidentNumber: inc.incident_number ?? null, + title: inc.title ?? null, + status: inc.status ?? null, + urgency: inc.urgency ?? null, + updatedAt: inc.updated_at ?? null, + htmlUrl: inc.html_url ?? null, + }, + } + }, + + outputs: { + id: { type: 'string', description: 'Incident ID' }, + incidentNumber: { type: 'number', description: 'Incident number' }, + title: { type: 'string', description: 'Incident title' }, + status: { type: 'string', description: 'Updated status' }, + urgency: { type: 'string', description: 'Updated urgency' }, + updatedAt: { type: 'string', description: 'Last updated timestamp' }, + htmlUrl: { type: 'string', description: 'PagerDuty web URL' }, + }, +} diff --git a/apps/sim/tools/registry.ts b/apps/sim/tools/registry.ts index 00b9d1e47cd..e44e35ed1b5 100644 --- a/apps/sim/tools/registry.ts +++ b/apps/sim/tools/registry.ts @@ -42,6 +42,19 @@ import { algoliaSearchTool, algoliaUpdateSettingsTool, } from '@/tools/algolia' +import { + amplitudeEventSegmentationTool, + amplitudeGetActiveUsersTool, + amplitudeGetRevenueTool, + amplitudeGroupIdentifyTool, + amplitudeIdentifyUserTool, + amplitudeListEventsTool, + amplitudeRealtimeActiveUsersTool, + amplitudeSendEventTool, + amplitudeUserActivityTool, + amplitudeUserProfileTool, + amplitudeUserSearchTool, +} from '@/tools/amplitude' import { apifyRunActorAsyncTool, apifyRunActorSyncTool } from '@/tools/apify' import { apolloAccountBulkCreateTool, @@ -786,6 +799,7 @@ import { googleMapsTimezoneTool, googleMapsValidateAddressTool, } from '@/tools/google_maps' +import { googlePagespeedAnalyzeTool } from '@/tools/google_pagespeed' import { googleSheetsAppendTool, googleSheetsAppendV2Tool, @@ -1444,6 +1458,14 @@ import { outlookReadTool, outlookSendTool, } from '@/tools/outlook' +import { + pagerdutyAddNoteTool, + pagerdutyCreateIncidentTool, + pagerdutyListIncidentsTool, + pagerdutyListOncallsTool, + pagerdutyListServicesTool, + 
pagerdutyUpdateIncidentTool, +} from '@/tools/pagerduty' import { parallelDeepResearchTool, parallelExtractTool, parallelSearchTool } from '@/tools/parallel' import { perplexityChatTool, perplexitySearchTool } from '@/tools/perplexity' import { @@ -2248,6 +2270,17 @@ export const tools: Record = { a2a_send_message: a2aSendMessageTool, a2a_set_push_notification: a2aSetPushNotificationTool, airweave_search: airweaveSearchTool, + amplitude_send_event: amplitudeSendEventTool, + amplitude_identify_user: amplitudeIdentifyUserTool, + amplitude_group_identify: amplitudeGroupIdentifyTool, + amplitude_user_search: amplitudeUserSearchTool, + amplitude_user_activity: amplitudeUserActivityTool, + amplitude_user_profile: amplitudeUserProfileTool, + amplitude_event_segmentation: amplitudeEventSegmentationTool, + amplitude_get_active_users: amplitudeGetActiveUsersTool, + amplitude_realtime_active_users: amplitudeRealtimeActiveUsersTool, + amplitude_list_events: amplitudeListEventsTool, + amplitude_get_revenue: amplitudeGetRevenueTool, arxiv_get_author_papers: arxivGetAuthorPapersTool, arxiv_get_paper: arxivGetPaperTool, arxiv_search: arxivSearchTool, @@ -3163,6 +3196,7 @@ export const tools: Record = { google_maps_speed_limits: googleMapsSpeedLimitsTool, google_maps_timezone: googleMapsTimezoneTool, google_maps_validate_address: googleMapsValidateAddressTool, + google_pagespeed_analyze: googlePagespeedAnalyzeTool, google_tasks_create: googleTasksCreateTool, google_tasks_delete: googleTasksDeleteTool, google_tasks_get: googleTasksGetTool, @@ -3637,6 +3671,12 @@ export const tools: Record = { outlook_mark_unread: outlookMarkUnreadTool, outlook_delete: outlookDeleteTool, outlook_copy: outlookCopyTool, + pagerduty_list_incidents: pagerdutyListIncidentsTool, + pagerduty_create_incident: pagerdutyCreateIncidentTool, + pagerduty_update_incident: pagerdutyUpdateIncidentTool, + pagerduty_add_note: pagerdutyAddNoteTool, + pagerduty_list_services: pagerdutyListServicesTool, + 
pagerduty_list_oncalls: pagerdutyListOncallsTool, linear_read_issues: linearReadIssuesTool, linear_create_issue: linearCreateIssueTool, linear_get_issue: linearGetIssueTool, From 79bb4e5ad8b8d3d6bce2c7501d24d258c6fef978 Mon Sep 17 00:00:00 2001 From: Waleed Date: Sun, 1 Mar 2026 22:53:18 -0800 Subject: [PATCH 003/152] feat(docs): add API reference with OpenAPI spec and auto-generated endpoint pages (#3388) * feat(docs): add API reference with OpenAPI spec and auto-generated endpoint pages * multiline curl * random improvements * cleanup * update docs copy * fix build * cast * fix builg --------- Co-authored-by: Claude Opus 4.6 Co-authored-by: Lakee Sivaraya <71339072+lakeesiv@users.noreply.github.com> Co-authored-by: Vikhyath Mondreti Co-authored-by: Vikhyath Mondreti --- apps/docs/app/[lang]/[[...slug]]/page.tsx | 169 +- apps/docs/app/[lang]/layout.tsx | 6 +- apps/docs/app/global.css | 712 ++++++- apps/docs/app/layout.config.tsx | 21 - .../docs-layout/sidebar-components.tsx | 35 +- .../components/docs-layout/toc-footer.tsx | 20 +- apps/docs/components/navbar/navbar.tsx | 29 +- apps/docs/components/structured-data.tsx | 19 +- apps/docs/components/ui/response-section.tsx | 169 ++ .../docs/de/api-reference/authentication.mdx | 94 + .../docs/de/api-reference/getting-started.mdx | 210 ++ .../content/docs/de/api-reference/meta.json | 16 + .../content/docs/de/api-reference/python.mdx | 766 +++++++ .../docs/de/api-reference/typescript.mdx | 1052 +++++++++ apps/docs/content/docs/de/meta.json | 24 + .../(generated)/workflows/meta.json | 3 + .../docs/en/api-reference/authentication.mdx | 94 + .../docs/en/api-reference/getting-started.mdx | 210 ++ .../content/docs/en/api-reference/meta.json | 16 + .../content/docs/en/api-reference/python.mdx | 761 +++++++ .../docs/en/api-reference/typescript.mdx | 1035 +++++++++ apps/docs/content/docs/en/execution/api.mdx | 2 +- apps/docs/content/docs/en/meta.json | 1 - .../en/permissions/roles-and-permissions.mdx | 2 +- 
.../docs/es/api-reference/authentication.mdx | 94 + .../docs/es/api-reference/getting-started.mdx | 210 ++ .../content/docs/es/api-reference/meta.json | 16 + .../content/docs/es/api-reference/python.mdx | 766 +++++++ .../docs/es/api-reference/typescript.mdx | 1052 +++++++++ apps/docs/content/docs/es/meta.json | 24 + .../docs/fr/api-reference/authentication.mdx | 94 + .../docs/fr/api-reference/getting-started.mdx | 210 ++ .../content/docs/fr/api-reference/meta.json | 16 + .../content/docs/fr/api-reference/python.mdx | 766 +++++++ .../docs/fr/api-reference/typescript.mdx | 1052 +++++++++ apps/docs/content/docs/fr/meta.json | 24 + .../docs/ja/api-reference/authentication.mdx | 94 + .../docs/ja/api-reference/getting-started.mdx | 210 ++ .../content/docs/ja/api-reference/meta.json | 16 + .../content/docs/ja/api-reference/python.mdx | 766 +++++++ .../docs/ja/api-reference/typescript.mdx | 1052 +++++++++ apps/docs/content/docs/ja/meta.json | 24 + .../docs/zh/api-reference/authentication.mdx | 94 + .../docs/zh/api-reference/getting-started.mdx | 210 ++ .../content/docs/zh/api-reference/meta.json | 16 + .../content/docs/zh/api-reference/python.mdx | 766 +++++++ .../docs/zh/api-reference/typescript.mdx | 1052 +++++++++ apps/docs/content/docs/zh/meta.json | 24 + apps/docs/lib/llms.ts | 3 +- apps/docs/lib/openapi.ts | 132 ++ apps/docs/lib/source.ts | 92 +- apps/docs/openapi.json | 1893 +++++++++++++++++ apps/docs/package.json | 8 +- bun.lock | 164 +- 54 files changed, 16235 insertions(+), 171 deletions(-) delete mode 100644 apps/docs/app/layout.config.tsx create mode 100644 apps/docs/components/ui/response-section.tsx create mode 100644 apps/docs/content/docs/de/api-reference/authentication.mdx create mode 100644 apps/docs/content/docs/de/api-reference/getting-started.mdx create mode 100644 apps/docs/content/docs/de/api-reference/meta.json create mode 100644 apps/docs/content/docs/de/api-reference/python.mdx create mode 100644 
apps/docs/content/docs/de/api-reference/typescript.mdx create mode 100644 apps/docs/content/docs/de/meta.json create mode 100644 apps/docs/content/docs/en/api-reference/(generated)/workflows/meta.json create mode 100644 apps/docs/content/docs/en/api-reference/authentication.mdx create mode 100644 apps/docs/content/docs/en/api-reference/getting-started.mdx create mode 100644 apps/docs/content/docs/en/api-reference/meta.json create mode 100644 apps/docs/content/docs/en/api-reference/python.mdx create mode 100644 apps/docs/content/docs/en/api-reference/typescript.mdx create mode 100644 apps/docs/content/docs/es/api-reference/authentication.mdx create mode 100644 apps/docs/content/docs/es/api-reference/getting-started.mdx create mode 100644 apps/docs/content/docs/es/api-reference/meta.json create mode 100644 apps/docs/content/docs/es/api-reference/python.mdx create mode 100644 apps/docs/content/docs/es/api-reference/typescript.mdx create mode 100644 apps/docs/content/docs/es/meta.json create mode 100644 apps/docs/content/docs/fr/api-reference/authentication.mdx create mode 100644 apps/docs/content/docs/fr/api-reference/getting-started.mdx create mode 100644 apps/docs/content/docs/fr/api-reference/meta.json create mode 100644 apps/docs/content/docs/fr/api-reference/python.mdx create mode 100644 apps/docs/content/docs/fr/api-reference/typescript.mdx create mode 100644 apps/docs/content/docs/fr/meta.json create mode 100644 apps/docs/content/docs/ja/api-reference/authentication.mdx create mode 100644 apps/docs/content/docs/ja/api-reference/getting-started.mdx create mode 100644 apps/docs/content/docs/ja/api-reference/meta.json create mode 100644 apps/docs/content/docs/ja/api-reference/python.mdx create mode 100644 apps/docs/content/docs/ja/api-reference/typescript.mdx create mode 100644 apps/docs/content/docs/ja/meta.json create mode 100644 apps/docs/content/docs/zh/api-reference/authentication.mdx create mode 100644 
apps/docs/content/docs/zh/api-reference/getting-started.mdx create mode 100644 apps/docs/content/docs/zh/api-reference/meta.json create mode 100644 apps/docs/content/docs/zh/api-reference/python.mdx create mode 100644 apps/docs/content/docs/zh/api-reference/typescript.mdx create mode 100644 apps/docs/content/docs/zh/meta.json create mode 100644 apps/docs/lib/openapi.ts create mode 100644 apps/docs/openapi.json diff --git a/apps/docs/app/[lang]/[[...slug]]/page.tsx b/apps/docs/app/[lang]/[[...slug]]/page.tsx index 0a4afc98186..461baf2f549 100644 --- a/apps/docs/app/[lang]/[[...slug]]/page.tsx +++ b/apps/docs/app/[lang]/[[...slug]]/page.tsx @@ -1,5 +1,8 @@ import type React from 'react' +import type { Root } from 'fumadocs-core/page-tree' import { findNeighbour } from 'fumadocs-core/page-tree' +import type { ApiPageProps } from 'fumadocs-openapi/ui' +import { createAPIPage } from 'fumadocs-openapi/ui' import { Pre } from 'fumadocs-ui/components/codeblock' import defaultMdxComponents from 'fumadocs-ui/mdx' import { DocsBody, DocsDescription, DocsPage, DocsTitle } from 'fumadocs-ui/page' @@ -12,28 +15,75 @@ import { LLMCopyButton } from '@/components/page-actions' import { StructuredData } from '@/components/structured-data' import { CodeBlock } from '@/components/ui/code-block' import { Heading } from '@/components/ui/heading' +import { ResponseSection } from '@/components/ui/response-section' +import { i18n } from '@/lib/i18n' +import { getApiSpecContent, openapi } from '@/lib/openapi' import { type PageData, source } from '@/lib/source' +const SUPPORTED_LANGUAGES: Set = new Set(i18n.languages) +const BASE_URL = 'https://docs.sim.ai' + +function resolveLangAndSlug(params: { slug?: string[]; lang: string }) { + const isValidLang = SUPPORTED_LANGUAGES.has(params.lang) + const lang = isValidLang ? params.lang : 'en' + const slug = isValidLang ? params.slug : [params.lang, ...(params.slug ?? 
[])] + return { lang, slug } +} + +const APIPage = createAPIPage(openapi, { + playground: { enabled: false }, + content: { + renderOperationLayout: async (slots) => { + return ( +
+
+ {slots.header} + {slots.apiPlayground} + {slots.authSchemes &&
{slots.authSchemes}
} + {slots.paremeters} + {slots.body &&
{slots.body}
} + {slots.responses} + {slots.callbacks} +
+
+ {slots.apiExample} +
+
+ ) + }, + }, +}) + export default async function Page(props: { params: Promise<{ slug?: string[]; lang: string }> }) { const params = await props.params - const page = source.getPage(params.slug, params.lang) + const { lang, slug } = resolveLangAndSlug(params) + const page = source.getPage(slug, lang) if (!page) notFound() - const data = page.data as PageData - const MDX = data.body - const baseUrl = 'https://docs.sim.ai' - const markdownContent = await data.getText('processed') + const data = page.data as unknown as PageData & { + _openapi?: { method?: string } + getAPIPageProps?: () => ApiPageProps + } + const isOpenAPI = '_openapi' in data && data._openapi != null + const isApiReference = slug?.some((s) => s === 'api-reference') ?? false - const pageTreeRecord = source.pageTree as Record - const pageTree = - pageTreeRecord[params.lang] ?? pageTreeRecord.en ?? Object.values(pageTreeRecord)[0] - const neighbours = pageTree ? findNeighbour(pageTree, page.url) : null + const pageTreeRecord = source.pageTree as Record + const pageTree = pageTreeRecord[lang] ?? pageTreeRecord.en ?? Object.values(pageTreeRecord)[0] + const rawNeighbours = pageTree ? findNeighbour(pageTree, page.url) : null + const neighbours = isApiReference + ? { + previous: rawNeighbours?.previous?.url.includes('/api-reference/') + ? rawNeighbours.previous + : undefined, + next: rawNeighbours?.next?.url.includes('/api-reference/') ? 
rawNeighbours.next : undefined, + } + : rawNeighbours const generateBreadcrumbs = () => { const breadcrumbs: Array<{ name: string; url: string }> = [ { name: 'Home', - url: baseUrl, + url: BASE_URL, }, ] @@ -41,7 +91,7 @@ export default async function Page(props: { params: Promise<{ slug?: string[]; l let currentPath = '' urlParts.forEach((part, index) => { - if (index === 0 && ['en', 'es', 'fr', 'de', 'ja', 'zh'].includes(part)) { + if (index === 0 && SUPPORTED_LANGUAGES.has(part)) { currentPath = `/${part}` return } @@ -56,12 +106,12 @@ export default async function Page(props: { params: Promise<{ slug?: string[]; l if (index === urlParts.length - 1) { breadcrumbs.push({ name: data.title, - url: `${baseUrl}${page.url}`, + url: `${BASE_URL}${page.url}`, }) } else { breadcrumbs.push({ name: name, - url: `${baseUrl}${currentPath}`, + url: `${BASE_URL}${currentPath}`, }) } }) @@ -73,7 +123,6 @@ export default async function Page(props: { params: Promise<{ slug?: string[]; l const CustomFooter = () => (
- {/* Navigation links */}
{neighbours?.previous ? ( - {/* Divider line */}
- {/* Social icons */}
) + if (isOpenAPI && data.getAPIPageProps) { + const apiProps = data.getAPIPageProps() + const apiPageContent = getApiSpecContent( + data.title, + data.description, + apiProps.operations ?? [] + ) + + return ( + <> + + , + }} + > +
+
+
+ +
+ +
+ {data.title} + {data.description} +
+ + + +
+ + ) + } + + const MDX = data.body + const markdownContent = await data.getText('processed') + return ( <> }) { const params = await props.params - const page = source.getPage(params.slug, params.lang) + const { lang, slug } = resolveLangAndSlug(params) + const page = source.getPage(slug, lang) if (!page) notFound() - const data = page.data as PageData - const baseUrl = 'https://docs.sim.ai' - const fullUrl = `${baseUrl}${page.url}` + const data = page.data as unknown as PageData + const fullUrl = `${BASE_URL}${page.url}` - const ogImageUrl = `${baseUrl}/api/og?title=${encodeURIComponent(data.title)}` + const ogImageUrl = `${BASE_URL}/api/og?title=${encodeURIComponent(data.title)}` return { title: data.title, @@ -286,10 +389,10 @@ export async function generateMetadata(props: { url: fullUrl, siteName: 'Sim Documentation', type: 'article', - locale: params.lang === 'en' ? 'en_US' : `${params.lang}_${params.lang.toUpperCase()}`, + locale: lang === 'en' ? 'en_US' : `${lang}_${lang.toUpperCase()}`, alternateLocale: ['en', 'es', 'fr', 'de', 'ja', 'zh'] - .filter((lang) => lang !== params.lang) - .map((lang) => (lang === 'en' ? 'en_US' : `${lang}_${lang.toUpperCase()}`)), + .filter((l) => l !== lang) + .map((l) => (l === 'en' ? 
'en_US' : `${l}_${l.toUpperCase()}`)), images: [ { url: ogImageUrl, @@ -323,13 +426,13 @@ export async function generateMetadata(props: { alternates: { canonical: fullUrl, languages: { - 'x-default': `${baseUrl}${page.url.replace(`/${params.lang}`, '')}`, - en: `${baseUrl}${page.url.replace(`/${params.lang}`, '')}`, - es: `${baseUrl}/es${page.url.replace(`/${params.lang}`, '')}`, - fr: `${baseUrl}/fr${page.url.replace(`/${params.lang}`, '')}`, - de: `${baseUrl}/de${page.url.replace(`/${params.lang}`, '')}`, - ja: `${baseUrl}/ja${page.url.replace(`/${params.lang}`, '')}`, - zh: `${baseUrl}/zh${page.url.replace(`/${params.lang}`, '')}`, + 'x-default': `${BASE_URL}${page.url.replace(`/${lang}`, '')}`, + en: `${BASE_URL}${page.url.replace(`/${lang}`, '')}`, + es: `${BASE_URL}/es${page.url.replace(`/${lang}`, '')}`, + fr: `${BASE_URL}/fr${page.url.replace(`/${lang}`, '')}`, + de: `${BASE_URL}/de${page.url.replace(`/${lang}`, '')}`, + ja: `${BASE_URL}/ja${page.url.replace(`/${lang}`, '')}`, + zh: `${BASE_URL}/zh${page.url.replace(`/${lang}`, '')}`, }, }, } diff --git a/apps/docs/app/[lang]/layout.tsx b/apps/docs/app/[lang]/layout.tsx index d76a11f103a..250e249c7bb 100644 --- a/apps/docs/app/[lang]/layout.tsx +++ b/apps/docs/app/[lang]/layout.tsx @@ -55,8 +55,11 @@ type LayoutProps = { params: Promise<{ lang: string }> } +const SUPPORTED_LANGUAGES: Set = new Set(i18n.languages) + export default async function Layout({ children, params }: LayoutProps) { - const { lang } = await params + const { lang: rawLang } = await params + const lang = SUPPORTED_LANGUAGES.has(rawLang) ? 
rawLang : 'en' const structuredData = { '@context': 'https://schema.org', @@ -107,6 +110,7 @@ export default async function Layout({ children, params }: LayoutProps) { title: , }} sidebar={{ + tabs: false, defaultOpenLevel: 0, collapsible: false, footer: null, diff --git a/apps/docs/app/global.css b/apps/docs/app/global.css index 70ec578bf95..120feee2567 100644 --- a/apps/docs/app/global.css +++ b/apps/docs/app/global.css @@ -1,6 +1,7 @@ @import "tailwindcss"; @import "fumadocs-ui/css/neutral.css"; @import "fumadocs-ui/css/preset.css"; +@import "fumadocs-openapi/css/preset.css"; /* Prevent overscroll bounce effect on the page */ html, @@ -8,18 +9,12 @@ body { overscroll-behavior: none; } -@theme { - --color-fd-primary: #33c482; /* Green from Sim logo */ - --font-geist-sans: var(--font-geist-sans); - --font-geist-mono: var(--font-geist-mono); +/* Reserve scrollbar space to prevent layout jitter between pages */ +html { + scrollbar-gutter: stable; } -/* Ensure primary color is set in both light and dark modes */ -:root { - --color-fd-primary: #33c482; -} - -.dark { +@theme { --color-fd-primary: #33c482; } @@ -34,12 +29,6 @@ body { "Liberation Mono", "Courier New", monospace; } -/* Target any potential border classes */ -* { - --fd-border-sidebar: transparent !important; -} - -/* Override any CSS custom properties for borders */ :root { --fd-border: transparent !important; --fd-border-sidebar: transparent !important; @@ -86,7 +75,6 @@ body { [data-sidebar-container], #nd-sidebar { background: transparent !important; - background-color: transparent !important; border: none !important; --color-fd-muted: transparent !important; --color-fd-card: transparent !important; @@ -96,9 +84,7 @@ body { aside[data-sidebar], aside#nd-sidebar { background: transparent !important; - background-color: transparent !important; border: none !important; - border-right: none !important; } /* Fumadocs v16: Add sidebar placeholder styling for grid area */ @@ -157,7 +143,6 @@ aside#nd-sidebar 
{ #nd-sidebar > div { padding: 0.5rem 12px 12px; background: transparent !important; - background-color: transparent !important; } /* Override sidebar item styling to match Raindrop */ @@ -434,10 +419,6 @@ aside[data-sidebar], #nd-sidebar, #nd-sidebar * { border: none !important; - border-right: none !important; - border-left: none !important; - border-top: none !important; - border-bottom: none !important; } /* Override fumadocs background colors for sidebar */ @@ -447,7 +428,6 @@ aside[data-sidebar], --color-fd-muted: transparent !important; --color-fd-secondary: transparent !important; background: transparent !important; - background-color: transparent !important; } /* Force normal text flow in sidebar */ @@ -564,16 +544,682 @@ main[data-main] { padding-top: 1.5rem !important; } -/* Override Fumadocs default content padding */ -article[data-content], -div[data-content] { - padding-top: 1.5rem !important; -} - -/* Remove any unwanted borders/outlines from video elements */ +/* Remove any unwanted outlines from video elements */ video { outline: none !important; - border-style: solid !important; +} + +/* API Reference Pages — Mintlify-style overrides */ + +/* OpenAPI pages: span main + TOC grid columns for wide two-column layout. + The grid has columns: spacer | sidebar | main | toc | spacer. + By spanning columns 3-4, the article fills both main and toc areas, + while the grid structure stays identical to non-OpenAPI pages (no jitter). 
*/ +#nd-page:has(.api-page-header) { + grid-column: 3 / span 2 !important; + max-width: 1400px !important; +} + +/* Hide the empty TOC aside on OpenAPI pages so it doesn't overlay content */ +#nd-docs-layout:has(#nd-page .api-page-header) #nd-toc { + display: none; +} + +/* Hide the default "Response Body" heading rendered by fumadocs-openapi */ +.response-section-wrapper > .response-section-content > h2, +.response-section-wrapper > .response-section-content > h3 { + display: none !important; +} + +/* Hide default accordion triggers (status code rows) — we show our own dropdown */ +.response-section-wrapper [data-orientation="vertical"] > [data-state] > h3 { + display: none !important; +} + +/* Ensure API reference pages use the same font as the rest of the docs */ +#nd-page:has(.api-page-header), +#nd-page:has(.api-page-header) h2, +#nd-page:has(.api-page-header) h3, +#nd-page:has(.api-page-header) h4, +#nd-page:has(.api-page-header) p, +#nd-page:has(.api-page-header) span, +#nd-page:has(.api-page-header) div, +#nd-page:has(.api-page-header) label, +#nd-page:has(.api-page-header) button { + font-family: var(--font-geist-sans), ui-sans-serif, system-ui, -apple-system, BlinkMacSystemFont, + "Segoe UI", Roboto, "Helvetica Neue", Arial, sans-serif; +} + +/* Method badge pills in page content — colored background pills */ +#nd-page span.font-mono.font-medium[class*="text-green"] { + background-color: rgb(220 252 231 / 0.6); + padding: 0.125rem 0.5rem; + border-radius: 0.375rem; + font-size: 0.75rem; +} +html.dark #nd-page span.font-mono.font-medium[class*="text-green"] { + background-color: rgb(34 197 94 / 0.15); +} + +#nd-page span.font-mono.font-medium[class*="text-blue"] { + background-color: rgb(219 234 254 / 0.6); + padding: 0.125rem 0.5rem; + border-radius: 0.375rem; + font-size: 0.75rem; +} +html.dark #nd-page span.font-mono.font-medium[class*="text-blue"] { + background-color: rgb(59 130 246 / 0.15); +} + +#nd-page 
span.font-mono.font-medium[class*="text-orange"] { + background-color: rgb(255 237 213 / 0.6); + padding: 0.125rem 0.5rem; + border-radius: 0.375rem; + font-size: 0.75rem; +} +html.dark #nd-page span.font-mono.font-medium[class*="text-orange"] { + background-color: rgb(249 115 22 / 0.15); +} + +#nd-page span.font-mono.font-medium[class*="text-red"] { + background-color: rgb(254 226 226 / 0.6); + padding: 0.125rem 0.5rem; + border-radius: 0.375rem; + font-size: 0.75rem; +} +html.dark #nd-page span.font-mono.font-medium[class*="text-red"] { + background-color: rgb(239 68 68 / 0.15); +} + +/* Sidebar links with method badges — flex for vertical centering */ +#nd-sidebar a:has(span.font-mono.font-medium) { + display: flex !important; + align-items: center !important; + gap: 6px; +} + +/* Sidebar method badges — ensure proper inline flex display */ +#nd-sidebar a span.font-mono.font-medium { + display: inline-flex; + align-items: center; + justify-content: center; + min-width: 2.25rem; + font-size: 10px !important; + line-height: 1 !important; + padding: 2.5px 4px; + border-radius: 3px; + flex-shrink: 0; +} + +/* Sidebar GET badges */ +#nd-sidebar a span.font-mono.font-medium[class*="text-green"] { + background-color: rgb(220 252 231 / 0.6); +} +html.dark #nd-sidebar a span.font-mono.font-medium[class*="text-green"] { + background-color: rgb(34 197 94 / 0.15); +} + +/* Sidebar POST badges */ +#nd-sidebar a span.font-mono.font-medium[class*="text-blue"] { + background-color: rgb(219 234 254 / 0.6); +} +html.dark #nd-sidebar a span.font-mono.font-medium[class*="text-blue"] { + background-color: rgb(59 130 246 / 0.15); +} + +/* Sidebar PUT badges */ +#nd-sidebar a span.font-mono.font-medium[class*="text-orange"] { + background-color: rgb(255 237 213 / 0.6); +} +html.dark #nd-sidebar a span.font-mono.font-medium[class*="text-orange"] { + background-color: rgb(249 115 22 / 0.15); +} + +/* Sidebar DELETE badges */ +#nd-sidebar a span.font-mono.font-medium[class*="text-red"] { 
+ background-color: rgb(254 226 226 / 0.6); +} +html.dark #nd-sidebar a span.font-mono.font-medium[class*="text-red"] { + background-color: rgb(239 68 68 / 0.15); +} + +/* Code block containers — match regular docs styling */ +#nd-page:has(.api-page-header) figure.shiki { + border-radius: 0.75rem !important; + background-color: var(--color-fd-card) !important; +} + +/* Hide "Filter Properties" search bar everywhere — main page and popovers */ +input[placeholder="Filter Properties"] { + display: none !important; +} +div:has(> input[placeholder="Filter Properties"]) { + display: none !important; +} +/* Remove top border on first visible property after hidden Filter Properties */ +div:has(> input[placeholder="Filter Properties"]) + .text-sm.border-t { + border-top: none !important; +} + +/* Hide "TypeScript Definitions" copy panel on API pages */ +#nd-page:has(.api-page-header) div.not-prose.rounded-xl.border.p-3.mb-4 { + display: none !important; +} +#nd-page:has(.api-page-header) div.not-prose.rounded-xl.border.p-3:has(> div > p.font-medium) { + display: none !important; +} + +/* Hide info tags (Format, Default, etc.) everywhere — main page and popovers */ +div.flex.flex-row.gap-2.flex-wrap.not-prose:has(> div.bg-fd-secondary) { + display: none !important; +} +div.flex.flex-row.items-start.bg-fd-secondary.border.rounded-lg.text-xs { + display: none !important; +} + +/* Method+path bar — cleaner, lighter styling like Gumloop. + Override bg-fd-card CSS variable directly for reliability. 
*/ +#nd-page:has(.api-page-header) div.flex.flex-row.items-center.rounded-xl.border.not-prose { + --color-fd-card: rgb(249 250 251) !important; + background-color: rgb(249 250 251) !important; + border-color: rgb(229 231 235) !important; +} +html.dark + #nd-page:has(.api-page-header) + div.flex.flex-row.items-center.rounded-xl.border.not-prose { + --color-fd-card: rgb(24 24 27) !important; + background-color: rgb(24 24 27) !important; + border-color: rgb(63 63 70) !important; +} +/* Method badge inside path bar — cleaner sans-serif, softer colors */ +#nd-page:has(.api-page-header) + div.flex.flex-row.items-center.rounded-xl.border.not-prose + span.font-mono.font-medium { + font-family: var(--font-geist-sans), ui-sans-serif, system-ui, sans-serif !important; + font-weight: 600 !important; + font-size: 0.6875rem !important; + letter-spacing: 0.025em; + text-transform: uppercase; +} +/* POST — softer blue */ +#nd-page:has(.api-page-header) + div.flex.flex-row.items-center.rounded-xl.border.not-prose + span.font-mono.font-medium[class*="text-blue"] { + color: rgb(37 99 235) !important; + background-color: rgb(219 234 254 / 0.7) !important; +} +html.dark + #nd-page:has(.api-page-header) + div.flex.flex-row.items-center.rounded-xl.border.not-prose + span.font-mono.font-medium[class*="text-blue"] { + color: rgb(96 165 250) !important; + background-color: rgb(59 130 246 / 0.15) !important; +} +/* GET — softer green */ +#nd-page:has(.api-page-header) + div.flex.flex-row.items-center.rounded-xl.border.not-prose + span.font-mono.font-medium[class*="text-green"] { + color: rgb(22 163 74) !important; + background-color: rgb(220 252 231 / 0.7) !important; +} +html.dark + #nd-page:has(.api-page-header) + div.flex.flex-row.items-center.rounded-xl.border.not-prose + span.font-mono.font-medium[class*="text-green"] { + color: rgb(74 222 128) !important; + background-color: rgb(34 197 94 / 0.15) !important; +} + +/* Path text inside method+path bar — monospace, bright like Gumloop */ 
+#nd-page:has(.api-page-header) div.flex.flex-row.items-center.rounded-xl.border.not-prose code { + color: rgb(55 65 81) !important; + background: none !important; + border: none !important; + padding: 0 !important; + font-size: 0.8125rem !important; +} +html.dark + #nd-page:has(.api-page-header) + div.flex.flex-row.items-center.rounded-xl.border.not-prose + code { + color: rgb(229 231 235) !important; +} + +/* Inline code in API pages — neutral color instead of red. + Exclude code inside the method+path bar (handled above). */ +#nd-page:has(.api-page-header) .prose :not(pre) > code { + color: rgb(79 70 229) !important; +} +html.dark #nd-page:has(.api-page-header) .prose :not(pre) > code { + color: rgb(165 180 252) !important; +} + +/* Response Section — custom dropdown-based rendering (Mintlify style) */ + +/* Hide divider lines between accordion items */ +.response-section-wrapper [data-orientation="vertical"].divide-y > * { + border-top-width: 0 !important; + border-bottom-width: 0 !important; +} +.response-section-wrapper [data-orientation="vertical"].divide-y { + border-top: none !important; +} + +/* Remove content type labels inside accordion items (we show one in the header) */ +.response-section-wrapper [data-orientation="vertical"] p.not-prose:has(code.text-xs) { + display: none !important; +} + +/* Hide the top-level response description (e.g. "Execution was successfully cancelled.") + but NOT field descriptions inside Schema which also use prose-no-margin. + The response description is a direct child of AccordionContent (role=region) with mb-2. 
*/ +.response-section-wrapper [data-orientation="vertical"] [role="region"] > .prose-no-margin.mb-2, +.response-section-wrapper + [data-orientation="vertical"] + [role="region"] + > div + > .prose-no-margin.mb-2 { + display: none !important; +} + +/* Remove left padding on accordion content so it aligns with Path Parameters */ +.response-section-wrapper [data-orientation="vertical"] [role="region"] { + padding-inline-start: 0 !important; +} + +/* Response section header */ +.response-section-header { + display: flex; + align-items: center; + gap: 1rem; + margin-top: 1.75rem; + margin-bottom: 0.5rem; +} + +.response-section-title { + font-size: 1.5rem; + font-weight: 600; + margin: 0; + color: var(--color-fd-foreground); + font-family: var(--font-geist-sans), ui-sans-serif, system-ui, -apple-system, sans-serif; +} + +.response-section-meta { + display: flex; + align-items: center; + gap: 0.75rem; + margin-left: auto; +} + +/* Status code dropdown */ +.response-section-dropdown-wrapper { + position: relative; +} + +.response-section-dropdown-trigger { + display: flex; + align-items: center; + gap: 0.25rem; + padding: 0.125rem 0.25rem; + font-size: 0.875rem; + font-weight: 500; + color: var(--color-fd-muted-foreground); + background: none; + border: none; + cursor: pointer; + border-radius: 0.25rem; + transition: color 0.15s; + font-family: var(--font-geist-sans), ui-sans-serif, system-ui, sans-serif; +} +.response-section-dropdown-trigger:hover { + color: var(--color-fd-foreground); +} + +.response-section-chevron { + width: 0.75rem; + height: 0.75rem; + transition: transform 0.15s; +} +.response-section-chevron-open { + transform: rotate(180deg); +} + +.response-section-dropdown-menu { + position: absolute; + top: calc(100% + 0.25rem); + left: 0; + z-index: 50; + min-width: 5rem; + background-color: white; + border: 1px solid rgb(229 231 235); + border-radius: 0.5rem; + box-shadow: + 0 4px 6px -1px rgb(0 0 0 / 0.1), + 0 2px 4px -2px rgb(0 0 0 / 0.1); + padding: 
0.25rem; + overflow: hidden; +} +html.dark .response-section-dropdown-menu { + background-color: rgb(24 24 27); + border-color: rgb(63 63 70); + box-shadow: 0 4px 6px -1px rgb(0 0 0 / 0.3); +} + +.response-section-dropdown-item { + display: flex; + align-items: center; + justify-content: space-between; + width: 100%; + padding: 0.375rem 0.5rem; + font-size: 0.875rem; + color: var(--color-fd-muted-foreground); + background: none; + border: none; + cursor: pointer; + border-radius: 0.25rem; + transition: + background-color 0.1s, + color 0.1s; + font-family: var(--font-geist-sans), ui-sans-serif, system-ui, sans-serif; +} +.response-section-dropdown-item:hover { + background-color: rgb(243 244 246); + color: var(--color-fd-foreground); +} +html.dark .response-section-dropdown-item:hover { + background-color: rgb(39 39 42); +} +.response-section-dropdown-item-selected { + color: var(--color-fd-foreground); +} + +.response-section-check { + width: 0.875rem; + height: 0.875rem; +} + +.response-section-content-type { + font-size: 0.875rem; + color: var(--color-fd-muted-foreground); + font-family: var(--font-geist-sans), ui-sans-serif, system-ui, sans-serif; +} + +/* Response schema container — remove border to match Path Parameters style */ +.response-section-wrapper [data-orientation="vertical"] .border.px-3.py-2.rounded-lg { + border: none !important; + padding: 0 !important; + border-radius: 0 !important; + background-color: transparent; +} + +/* Property row — reorder: name (1) → type badge (2) → required badge (3) */ +#nd-page:has(.api-page-header) .flex.flex-wrap.items-center.gap-3.not-prose { + display: flex; + flex-wrap: wrap; + align-items: center; +} + +/* Name span — order 1 */ +#nd-page:has(.api-page-header) + .flex.flex-wrap.items-center.gap-3.not-prose + > span.font-medium.font-mono.text-fd-primary { + order: 1; +} + +/* Type badge — order 2, grey pill like Mintlify */ +#nd-page:has(.api-page-header) + .flex.flex-wrap.items-center.gap-3.not-prose + > 
span.text-sm.font-mono.text-fd-muted-foreground { + order: 2; + background-color: rgb(240 240 243); + color: rgb(100 100 110); + padding: 0.125rem 0.5rem; + border-radius: 0.375rem; + font-size: 0.6875rem; + line-height: 1.25rem; + font-weight: 500; + font-family: var(--font-geist-sans), ui-sans-serif, system-ui, sans-serif; +} +html.dark + #nd-page:has(.api-page-header) + .flex.flex-wrap.items-center.gap-3.not-prose + > span.text-sm.font-mono.text-fd-muted-foreground { + background-color: rgb(39 39 42); + color: rgb(212 212 216); +} + +/* Hide the "*" inside the name span — we'll add "required" as a ::after on the flex row */ +#nd-page:has(.api-page-header) span.font-medium.font-mono.text-fd-primary > span.text-red-400 { + display: none; +} + +/* Required badge — order 3, light red pill */ +#nd-page:has(.api-page-header) + .flex.flex-wrap.items-center.gap-3.not-prose:has(span.text-red-400)::after { + content: "required"; + order: 3; + display: inline-flex; + align-items: center; + background-color: rgb(254 235 235); + color: rgb(220 38 38); + padding: 0.125rem 0.5rem; + border-radius: 0.375rem; + font-size: 0.6875rem; + line-height: 1.25rem; + font-weight: 500; + font-family: var(--font-geist-sans), ui-sans-serif, system-ui, sans-serif; +} +html.dark + #nd-page:has(.api-page-header) + .flex.flex-wrap.items-center.gap-3.not-prose:has(span.text-red-400)::after { + background-color: rgb(127 29 29 / 0.2); + color: rgb(252 165 165); +} + +/* Optional "?" indicator — hide it */ +#nd-page:has(.api-page-header) + span.font-medium.font-mono.text-fd-primary + > span.text-fd-muted-foreground { + display: none; +} + +/* Hide the auth scheme type label (e.g. "apiKey") next to Authorization heading */ +#nd-page:has(.api-page-header) .flex.items-start.justify-between.gap-2 > div.not-prose { + display: none !important; +} + +/* Auth property — replace "" with "string" badge, add "header" and "required" badges. + Auth properties use my-4 (vs py-4 for regular properties). 
*/ + +/* Auth property flex row — name: order 1, type: order 2, ::before "header": order 3, ::after "required": order 4 */ +#nd-page:has(.api-page-header) + div.my-4 + > .flex.flex-wrap.items-center.gap-3.not-prose + > span.font-medium.font-mono.text-fd-primary { + order: 1; +} +#nd-page:has(.api-page-header) + div.my-4 + > .flex.flex-wrap.items-center.gap-3.not-prose + > span.text-sm.font-mono.text-fd-muted-foreground { + order: 2; + font-size: 0; + padding: 0 !important; + background: none !important; + line-height: 0; +} +#nd-page:has(.api-page-header) + div.my-4 + > .flex.flex-wrap.items-center.gap-3.not-prose + > span.text-sm.font-mono.text-fd-muted-foreground::after { + content: "string"; + font-size: 0.6875rem; + line-height: 1.25rem; + font-weight: 500; + font-family: var(--font-geist-sans), ui-sans-serif, system-ui, sans-serif; + background-color: rgb(240 240 243); + color: rgb(100 100 110); + padding: 0.125rem 0.5rem; + border-radius: 0.375rem; + display: inline-flex; + align-items: center; +} +html.dark + #nd-page:has(.api-page-header) + div.my-4 + > .flex.flex-wrap.items-center.gap-3.not-prose + > span.text-sm.font-mono.text-fd-muted-foreground::after { + background-color: rgb(39 39 42); + color: rgb(212 212 216); +} + +/* "header" badge via ::before on the auth flex row */ +#nd-page:has(.api-page-header) div.my-4 > .flex.flex-wrap.items-center.gap-3.not-prose::before { + content: "header"; + order: 3; + display: inline-flex; + align-items: center; + background-color: rgb(240 240 243); + color: rgb(100 100 110); + padding: 0.125rem 0.5rem; + border-radius: 0.375rem; + font-size: 0.6875rem; + line-height: 1.25rem; + font-weight: 500; + font-family: var(--font-geist-sans), ui-sans-serif, system-ui, sans-serif; +} +html.dark + #nd-page:has(.api-page-header) + div.my-4 + > .flex.flex-wrap.items-center.gap-3.not-prose::before { + background-color: rgb(39 39 42); + color: rgb(212 212 216); +} + +/* "required" badge via ::after on the auth flex row — light red 
pill */ +#nd-page:has(.api-page-header) div.my-4 > .flex.flex-wrap.items-center.gap-3.not-prose::after { + content: "required"; + order: 4; + display: inline-flex; + align-items: center; + background-color: rgb(254 235 235); + color: rgb(220 38 38); + padding: 0.125rem 0.5rem; + border-radius: 0.375rem; + font-size: 0.6875rem; + line-height: 1.25rem; + font-weight: 500; + font-family: var(--font-geist-sans), ui-sans-serif, system-ui, sans-serif; +} +html.dark + #nd-page:has(.api-page-header) + div.my-4 + > .flex.flex-wrap.items-center.gap-3.not-prose::after { + background-color: rgb(127 29 29 / 0.2); + color: rgb(252 165 165); +} + +/* Hide "In: header" text below auth property — redundant with the header badge */ +#nd-page:has(.api-page-header) div.my-4 .prose-no-margin p:has(> code) { + display: none !important; +} + +/* Section dividers — bottom border after Authorization and Body sections. */ +.api-section-divider { + padding-bottom: 0.5rem; + border-bottom: 1px solid rgb(229 231 235 / 0.6); +} +html.dark .api-section-divider { + border-bottom-color: rgb(255 255 255 / 0.07); +} + +/* Property rows — breathing room like Mintlify. + Regular properties use border-t py-4; auth properties use border-t my-4. 
*/ +#nd-page:has(.api-page-header) .text-sm.border-t.py-4 { + padding-top: 1.25rem !important; + padding-bottom: 1.25rem !important; +} +#nd-page:has(.api-page-header) .text-sm.border-t.my-4 { + margin-top: 1.25rem !important; + margin-bottom: 1.25rem !important; + padding-top: 1.25rem; +} + +/* Divider lines between fields — very subtle like Mintlify */ +#nd-page:has(.api-page-header) .text-sm.border-t { + border-color: rgb(229 231 235 / 0.6); +} +html.dark #nd-page:has(.api-page-header) .text-sm.border-t { + border-color: rgb(255 255 255 / 0.07); +} + +/* Body/Callback section "application/json" label — remove inline code styling */ +#nd-page:has(.api-page-header) .flex.gap-2.items-center.justify-between p.not-prose code.text-xs, +#nd-page:has(.api-page-header) .flex.justify-between.gap-2.items-end p.not-prose code.text-xs { + background: none !important; + border: none !important; + padding: 0 !important; + color: var(--color-fd-muted-foreground) !important; + font-size: 0.875rem !important; + font-family: var(--font-geist-sans), ui-sans-serif, system-ui, sans-serif !important; +} + +/* Object/array type triggers in property rows — order 2 + badge chip styling */ +#nd-page:has(.api-page-header) .flex.flex-wrap.items-center.gap-3.not-prose > button, +#nd-page:has(.api-page-header) .flex.flex-wrap.items-center.gap-3.not-prose > span:has(> button) { + order: 2; + background-color: rgb(240 240 243); + color: rgb(100 100 110); + padding: 0.125rem 0.5rem; + border-radius: 0.375rem; + font-size: 0.6875rem; + line-height: 1.25rem; + font-weight: 500; + font-family: var(--font-geist-sans), ui-sans-serif, system-ui, sans-serif; +} +html.dark #nd-page:has(.api-page-header) .flex.flex-wrap.items-center.gap-3.not-prose > button, +html.dark + #nd-page:has(.api-page-header) + .flex.flex-wrap.items-center.gap-3.not-prose + > span:has(> button) { + background-color: rgb(39 39 42); + color: rgb(212 212 216); +} + +/* Section headings (Authorization, Path Parameters, etc.) 
— consistent top spacing */ +#nd-page:has(.api-page-header) .min-w-0.flex-1 h2 { + margin-top: 1.75rem !important; + margin-bottom: 0.25rem !important; +} + +/* Code examples in right column — wrap long lines instead of horizontal scroll */ +#nd-page:has(.api-page-header) pre { + white-space: pre-wrap !important; + word-break: break-all !important; +} +#nd-page:has(.api-page-header) pre code { + width: 100% !important; + word-break: break-all !important; + overflow-wrap: break-word !important; +} + +/* API page header — constrain title/copy-page to left content column, not full width. + Only applies on OpenAPI pages (which have the two-column layout). */ +@media (min-width: 1280px) { + .api-page-header { + max-width: calc(100% - 400px - 1.5rem); + } +} + +/* Footer navigation — constrain to left content column on OpenAPI pages only. + Target pages that contain the two-column layout via :has() selector. */ +#nd-page:has(.api-page-header) > div:last-child { + max-width: calc(100% - 400px - 1.5rem); +} +@media (max-width: 1024px) { + #nd-page:has(.api-page-header) > div:last-child { + max-width: 100%; + } } /* Tailwind v4 content sources */ diff --git a/apps/docs/app/layout.config.tsx b/apps/docs/app/layout.config.tsx deleted file mode 100644 index 1998c90b8cb..00000000000 --- a/apps/docs/app/layout.config.tsx +++ /dev/null @@ -1,21 +0,0 @@ -import type { BaseLayoutProps } from 'fumadocs-ui/layouts/shared' - -/** - * Shared layout configurations - * - * you can customise layouts individually from: - * Home Layout: app/(home)/layout.tsx - * Docs Layout: app/docs/layout.tsx - */ -export const baseOptions: BaseLayoutProps = { - nav: { - title: ( - <> - - - - My App - - ), - }, -} diff --git a/apps/docs/components/docs-layout/sidebar-components.tsx b/apps/docs/components/docs-layout/sidebar-components.tsx index e6fbe18cd11..7bd6039f8d0 100644 --- a/apps/docs/components/docs-layout/sidebar-components.tsx +++ b/apps/docs/components/docs-layout/sidebar-components.tsx @@ 
-52,15 +52,26 @@ export function SidebarItem({ item }: { item: Item }) { ) } +function isApiReferenceFolder(node: Folder): boolean { + if (node.index?.url.includes('/api-reference/')) return true + for (const child of node.children) { + if (child.type === 'page' && child.url.includes('/api-reference/')) return true + if (child.type === 'folder' && isApiReferenceFolder(child)) return true + } + return false +} + export function SidebarFolder({ item, children }: { item: Folder; children: ReactNode }) { const pathname = usePathname() const hasActiveChild = checkHasActiveChild(item, pathname) + const isApiRef = isApiReferenceFolder(item) + const isOnApiRefPage = stripLangPrefix(pathname).startsWith('/api-reference') const hasChildren = item.children.length > 0 - const [open, setOpen] = useState(hasActiveChild) + const [open, setOpen] = useState(hasActiveChild || (isApiRef && isOnApiRefPage)) useEffect(() => { - setOpen(hasActiveChild) - }, [hasActiveChild]) + setOpen(hasActiveChild || (isApiRef && isOnApiRefPage)) + }, [hasActiveChild, isApiRef, isOnApiRefPage]) const active = item.index ? isActive(item.index.url, pathname, false) : false @@ -157,16 +168,18 @@ export function SidebarFolder({ item, children }: { item: Folder; children: Reac {hasChildren && (
- {/* Mobile: simple indent */} -
{children}
- {/* Desktop: styled with border */} -
    - {children} -
+
+ {/* Mobile: simple indent */} +
{children}
+ {/* Desktop: styled with border */} +
    + {children} +
+
)}
diff --git a/apps/docs/components/docs-layout/toc-footer.tsx b/apps/docs/components/docs-layout/toc-footer.tsx index eaf29088f23..3e59619e5fa 100644 --- a/apps/docs/components/docs-layout/toc-footer.tsx +++ b/apps/docs/components/docs-layout/toc-footer.tsx @@ -1,12 +1,9 @@ 'use client' -import { useState } from 'react' import { ArrowRight, ChevronRight } from 'lucide-react' import Link from 'next/link' export function TOCFooter() { - const [isHovered, setIsHovered] = useState(false) - return (
@@ -21,18 +18,19 @@ export function TOCFooter() { href='https://sim.ai/signup' target='_blank' rel='noopener noreferrer' - onMouseEnter={() => setIsHovered(true)} - onMouseLeave={() => setIsHovered(false)} className='group mt-2 inline-flex h-8 w-fit items-center justify-center gap-1 whitespace-nowrap rounded-[10px] border border-[#2AAD6C] bg-gradient-to-b from-[#3ED990] to-[#2AAD6C] px-3 pr-[10px] pl-[12px] font-medium text-sm text-white shadow-[inset_0_2px_4px_0_#5EE8A8] outline-none transition-all hover:shadow-lg focus-visible:border-ring focus-visible:ring-[3px] focus-visible:ring-ring/50' aria-label='Get started with Sim - Sign up for free' > Get started - - {isHovered ? ( -
diff --git a/apps/docs/components/navbar/navbar.tsx b/apps/docs/components/navbar/navbar.tsx index db82c690624..231a0b334e4 100644 --- a/apps/docs/components/navbar/navbar.tsx +++ b/apps/docs/components/navbar/navbar.tsx @@ -1,12 +1,17 @@ 'use client' import Link from 'next/link' +import { usePathname } from 'next/navigation' import { LanguageDropdown } from '@/components/ui/language-dropdown' import { SearchTrigger } from '@/components/ui/search-trigger' import { SimLogoFull } from '@/components/ui/sim-logo' import { ThemeToggle } from '@/components/ui/theme-toggle' +import { cn } from '@/lib/utils' export function Navbar() { + const pathname = usePathname() + const isApiReference = pathname.includes('/api-reference') + return (
{/* Right cluster aligns with TOC edge */} -
+
+ + Documentation + + + API + Platform diff --git a/apps/docs/components/structured-data.tsx b/apps/docs/components/structured-data.tsx index c3aebd10d08..5875f3d7329 100644 --- a/apps/docs/components/structured-data.tsx +++ b/apps/docs/components/structured-data.tsx @@ -25,8 +25,8 @@ export function StructuredData({ headline: title, description: description, url: url, - datePublished: dateModified || new Date().toISOString(), - dateModified: dateModified || new Date().toISOString(), + ...(dateModified && { datePublished: dateModified }), + ...(dateModified && { dateModified }), author: { '@type': 'Organization', name: 'Sim Team', @@ -91,12 +91,6 @@ export function StructuredData({ inLanguage: ['en', 'es', 'fr', 'de', 'ja', 'zh'], } - const faqStructuredData = title.toLowerCase().includes('faq') && { - '@context': 'https://schema.org', - '@type': 'FAQPage', - mainEntity: [], - } - const softwareStructuredData = { '@context': 'https://schema.org', '@type': 'SoftwareApplication', @@ -151,15 +145,6 @@ export function StructuredData({ }} /> )} - {faqStructuredData && ( - ' - const file = new File([maliciousContent], 'malicious.html', { type: 'text/html' }) + const htmlContent = '

Hello World

' + const file = new File([htmlContent], 'document.html', { type: 'text/html' }) formData.append('file', file) formData.append('context', 'workspace') formData.append('workspaceId', 'test-workspace-id') @@ -436,35 +436,14 @@ describe('File Upload Security Tests', () => { const response = await POST(req as unknown as NextRequest) - expect(response.status).toBe(400) - const data = await response.json() - expect(data.message).toContain("File type 'html' is not allowed") - }) - - it('should reject HTML files to prevent XSS', async () => { - const formData = new FormData() - const maliciousContent = '' - const file = new File([maliciousContent], 'malicious.html', { type: 'text/html' }) - formData.append('file', file) - formData.append('context', 'workspace') - formData.append('workspaceId', 'test-workspace-id') - - const req = new Request('http://localhost/api/files/upload', { - method: 'POST', - body: formData, - }) - - const response = await POST(req as unknown as NextRequest) - - expect(response.status).toBe(400) - const data = await response.json() - expect(data.message).toContain("File type 'html' is not allowed") + expect(response.status).toBe(200) }) - it('should reject SVG files to prevent XSS', async () => { + it('should accept SVG files (supported image type)', async () => { const formData = new FormData() - const maliciousSvg = '' - const file = new File([maliciousSvg], 'malicious.svg', { type: 'image/svg+xml' }) + const svgContent = + '' + const file = new File([svgContent], 'image.svg', { type: 'image/svg+xml' }) formData.append('file', file) formData.append('context', 'workspace') formData.append('workspaceId', 'test-workspace-id') @@ -476,9 +455,7 @@ describe('File Upload Security Tests', () => { const response = await POST(req as unknown as NextRequest) - expect(response.status).toBe(400) - const data = await response.json() - expect(data.message).toContain("File type 'svg' is not allowed") + expect(response.status).toBe(200) }) it('should reject 
JavaScript files', async () => { @@ -526,8 +503,8 @@ describe('File Upload Security Tests', () => { const validFile = new File(['valid content'], 'valid.pdf', { type: 'application/pdf' }) formData.append('file', validFile) - const invalidFile = new File([''], 'malicious.html', { - type: 'text/html', + const invalidFile = new File(['binary content'], 'malicious.exe', { + type: 'application/x-msdownload', }) formData.append('file', invalidFile) formData.append('context', 'workspace') @@ -542,7 +519,7 @@ describe('File Upload Security Tests', () => { expect(response.status).toBe(400) const data = await response.json() - expect(data.message).toContain("File type 'html' is not allowed") + expect(data.message).toContain("File type 'exe' is not allowed") }) }) diff --git a/apps/sim/app/api/files/upload/route.ts b/apps/sim/app/api/files/upload/route.ts index f227dd74202..cd2baf45d90 100644 --- a/apps/sim/app/api/files/upload/route.ts +++ b/apps/sim/app/api/files/upload/route.ts @@ -4,8 +4,13 @@ import { sanitizeFileName } from '@/executor/constants' import '@/lib/uploads/core/setup.server' import { getSession } from '@/lib/auth' import type { StorageContext } from '@/lib/uploads/config' -import { isImageFileType } from '@/lib/uploads/utils/file-utils' -import { validateFileType } from '@/lib/uploads/utils/validation' +import { isImageFileType, resolveFileType } from '@/lib/uploads/utils/file-utils' +import { + SUPPORTED_AUDIO_EXTENSIONS, + SUPPORTED_DOCUMENT_EXTENSIONS, + SUPPORTED_VIDEO_EXTENSIONS, + validateFileType, +} from '@/lib/uploads/utils/validation' import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils' import { createErrorResponse, @@ -13,38 +18,13 @@ import { InvalidRequestError, } from '@/app/api/files/utils' -const ALLOWED_EXTENSIONS = new Set([ - // Documents - 'pdf', - 'doc', - 'docx', - 'txt', - 'md', - 'csv', - 'xlsx', - 'xls', - 'json', - 'yaml', - 'yml', - // Images - 'png', - 'jpg', - 'jpeg', - 'gif', - // Audio - 'mp3', - 'm4a', 
- 'wav', - 'webm', - 'ogg', - 'flac', - 'aac', - 'opus', - // Video - 'mp4', - 'mov', - 'avi', - 'mkv', +const IMAGE_EXTENSIONS = ['png', 'jpg', 'jpeg', 'gif', 'webp', 'svg'] as const + +const ALLOWED_EXTENSIONS = new Set([ + ...SUPPORTED_DOCUMENT_EXTENSIONS, + ...IMAGE_EXTENSIONS, + ...SUPPORTED_AUDIO_EXTENSIONS, + ...SUPPORTED_VIDEO_EXTENSIONS, ]) function validateFileExtension(filename: string): boolean { @@ -251,9 +231,19 @@ export async function POST(request: NextRequest) { } } - // Handle image-only contexts (copilot, chat, profile-pictures) + // Handle copilot, chat, profile-pictures contexts if (context === 'copilot' || context === 'chat' || context === 'profile-pictures') { - if (!isImageFileType(file.type)) { + if (context === 'copilot') { + const { isSupportedFileType: isCopilotSupported } = await import( + '@/lib/uploads/contexts/copilot/copilot-file-manager' + ) + const resolvedType = resolveFileType(file) + if (!isImageFileType(resolvedType) && !isCopilotSupported(resolvedType)) { + throw new InvalidRequestError( + 'Unsupported file type. Allowed: images, PDF, and text files (TXT, CSV, MD, HTML, JSON, XML).' 
+ ) + } + } else if (!isImageFileType(file.type)) { throw new InvalidRequestError( `Only image files (JPEG, PNG, GIF, WebP, SVG) are allowed for ${context} uploads` ) diff --git a/apps/sim/app/api/files/utils.test.ts b/apps/sim/app/api/files/utils.test.ts index a31c7ca42fa..58d1791f922 100644 --- a/apps/sim/app/api/files/utils.test.ts +++ b/apps/sim/app/api/files/utils.test.ts @@ -170,9 +170,7 @@ describe('extractFilename', () => { 'inline; filename="safe-image.png"' ) expect(response.headers.get('X-Content-Type-Options')).toBe('nosniff') - expect(response.headers.get('Content-Security-Policy')).toBe( - "default-src 'none'; style-src 'unsafe-inline'; sandbox;" - ) + expect(response.headers.get('Content-Security-Policy')).toBeNull() }) it('should serve PDFs inline safely', () => { @@ -203,33 +201,31 @@ describe('extractFilename', () => { expect(response.headers.get('X-Content-Type-Options')).toBe('nosniff') }) - it('should force attachment for SVG files to prevent XSS', () => { + it('should serve SVG files inline with CSP sandbox protection', () => { const response = createFileResponse({ buffer: Buffer.from( '' ), contentType: 'image/svg+xml', - filename: 'malicious.svg', + filename: 'image.svg', }) expect(response.status).toBe(200) - expect(response.headers.get('Content-Type')).toBe('application/octet-stream') - expect(response.headers.get('Content-Disposition')).toBe( - 'attachment; filename="malicious.svg"' + expect(response.headers.get('Content-Type')).toBe('image/svg+xml') + expect(response.headers.get('Content-Disposition')).toBe('inline; filename="image.svg"') + expect(response.headers.get('Content-Security-Policy')).toBe( + "default-src 'none'; style-src 'unsafe-inline'; sandbox;" ) }) - it('should override dangerous content types to safe alternatives', () => { + it('should not apply CSP sandbox to non-SVG files', () => { const response = createFileResponse({ - buffer: Buffer.from('safe content'), - contentType: 'image/svg+xml', - filename: 'image.png', // 
Extension doesn't match content-type + buffer: Buffer.from('hello'), + contentType: 'text/plain', + filename: 'readme.txt', }) - expect(response.status).toBe(200) - // Should override SVG content type to plain text for safety - expect(response.headers.get('Content-Type')).toBe('text/plain') - expect(response.headers.get('Content-Disposition')).toBe('inline; filename="image.png"') + expect(response.headers.get('Content-Security-Policy')).toBeNull() }) it('should force attachment for JavaScript files', () => { @@ -302,15 +298,22 @@ describe('extractFilename', () => { }) describe('Content Security Policy', () => { - it('should include CSP header in all responses', () => { - const response = createFileResponse({ + it('should include CSP header only for SVG responses', () => { + const svgResponse = createFileResponse({ + buffer: Buffer.from(''), + contentType: 'image/svg+xml', + filename: 'icon.svg', + }) + expect(svgResponse.headers.get('Content-Security-Policy')).toBe( + "default-src 'none'; style-src 'unsafe-inline'; sandbox;" + ) + + const txtResponse = createFileResponse({ buffer: Buffer.from('test'), contentType: 'text/plain', filename: 'test.txt', }) - - const csp = response.headers.get('Content-Security-Policy') - expect(csp).toBe("default-src 'none'; style-src 'unsafe-inline'; sandbox;") + expect(txtResponse.headers.get('Content-Security-Policy')).toBeNull() }) it('should include X-Content-Type-Options header', () => { diff --git a/apps/sim/app/api/files/utils.ts b/apps/sim/app/api/files/utils.ts index 953c9b8989c..a4831cdd3b7 100644 --- a/apps/sim/app/api/files/utils.ts +++ b/apps/sim/app/api/files/utils.ts @@ -1,5 +1,5 @@ import { existsSync } from 'fs' -import { join, resolve, sep } from 'path' +import path from 'path' import { createLogger } from '@sim/logger' import { NextResponse } from 'next/server' import { UPLOAD_DIR } from '@/lib/uploads/config' @@ -21,6 +21,7 @@ export interface FileResponse { buffer: Buffer contentType: string filename: string + 
cacheControl?: string } export class FileNotFoundError extends Error { @@ -60,6 +61,8 @@ export const contentTypeMap: Record = { jpg: 'image/jpeg', jpeg: 'image/jpeg', gif: 'image/gif', + svg: 'image/svg+xml', + webp: 'image/webp', zip: 'application/zip', googleFolder: 'application/vnd.google-apps.folder', } @@ -76,6 +79,7 @@ export const binaryExtensions = [ 'jpg', 'jpeg', 'gif', + 'webp', 'pdf', ] @@ -155,7 +159,7 @@ function sanitizeFilename(filename: string): string { return sanitized }) - return sanitizedSegments.join(sep) + return sanitizedSegments.join(path.sep) } export function findLocalFile(filename: string): string | null { @@ -168,17 +172,18 @@ export function findLocalFile(filename: string): string | null { } const possiblePaths = [ - join(UPLOAD_DIR, sanitizedFilename), - join(process.cwd(), 'uploads', sanitizedFilename), + path.join(UPLOAD_DIR, sanitizedFilename), + path.join(process.cwd(), 'uploads', sanitizedFilename), ] - for (const path of possiblePaths) { - const resolvedPath = resolve(path) - const allowedDirs = [resolve(UPLOAD_DIR), resolve(process.cwd(), 'uploads')] + for (const filePath of possiblePaths) { + const resolvedPath = path.resolve(filePath) + const allowedDirs = [path.resolve(UPLOAD_DIR), path.resolve(process.cwd(), 'uploads')] // Must be within allowed directory but NOT the directory itself const isWithinAllowedDir = allowedDirs.some( - (allowedDir) => resolvedPath.startsWith(allowedDir + sep) && resolvedPath !== allowedDir + (allowedDir) => + resolvedPath.startsWith(allowedDir + path.sep) && resolvedPath !== allowedDir ) if (!isWithinAllowedDir) { @@ -202,13 +207,15 @@ const SAFE_INLINE_TYPES = new Set([ 'image/jpeg', 'image/jpg', 'image/gif', + 'image/svg+xml', + 'image/webp', 'application/pdf', 'text/plain', 'text/csv', 'application/json', ]) -const FORCE_ATTACHMENT_EXTENSIONS = new Set(['html', 'htm', 'svg', 'js', 'css', 'xml']) +const FORCE_ATTACHMENT_EXTENSIONS = new Set(['html', 'htm', 'js', 'css', 'xml']) function 
getSecureFileHeaders(filename: string, originalContentType: string) { const extension = filename.split('.').pop()?.toLowerCase() || '' @@ -222,7 +229,7 @@ function getSecureFileHeaders(filename: string, originalContentType: string) { let safeContentType = originalContentType - if (originalContentType === 'text/html' || originalContentType === 'image/svg+xml') { + if (originalContentType === 'text/html') { safeContentType = 'text/plain' } @@ -251,16 +258,18 @@ function encodeFilenameForHeader(storageKey: string): string { export function createFileResponse(file: FileResponse): NextResponse { const { contentType, disposition } = getSecureFileHeaders(file.filename, file.contentType) - return new NextResponse(file.buffer as BodyInit, { - status: 200, - headers: { - 'Content-Type': contentType, - 'Content-Disposition': `${disposition}; ${encodeFilenameForHeader(file.filename)}`, - 'Cache-Control': 'public, max-age=31536000', - 'X-Content-Type-Options': 'nosniff', - 'Content-Security-Policy': "default-src 'none'; style-src 'unsafe-inline'; sandbox;", - }, - }) + const headers: Record = { + 'Content-Type': contentType, + 'Content-Disposition': `${disposition}; ${encodeFilenameForHeader(file.filename)}`, + 'Cache-Control': file.cacheControl || 'public, max-age=31536000', + 'X-Content-Type-Options': 'nosniff', + } + + if (contentType === 'image/svg+xml') { + headers['Content-Security-Policy'] = "default-src 'none'; style-src 'unsafe-inline'; sandbox;" + } + + return new NextResponse(file.buffer as BodyInit, { status: 200, headers }) } export function createErrorResponse(error: Error, status = 500): NextResponse { diff --git a/apps/sim/app/api/folders/[id]/duplicate/route.ts b/apps/sim/app/api/folders/[id]/duplicate/route.ts index 54eafdf36ff..e59cc943d15 100644 --- a/apps/sim/app/api/folders/[id]/duplicate/route.ts +++ b/apps/sim/app/api/folders/[id]/duplicate/route.ts @@ -17,6 +17,7 @@ const DuplicateRequestSchema = z.object({ workspaceId: z.string().optional(), parentId: 
z.string().nullable().optional(), color: z.string().optional(), + newId: z.string().uuid().optional(), }) // POST /api/folders/[id]/duplicate - Duplicate a folder with all its child folders and workflows @@ -33,7 +34,13 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id: try { const body = await req.json() - const { name, workspaceId, parentId, color } = DuplicateRequestSchema.parse(body) + const { + name, + workspaceId, + parentId, + color, + newId: clientNewId, + } = DuplicateRequestSchema.parse(body) logger.info(`[${requestId}] Duplicating folder ${sourceFolderId} for user ${session.user.id}`) @@ -60,7 +67,7 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id: const targetWorkspaceId = workspaceId || sourceFolder.workspaceId const { newFolderId, folderMapping } = await db.transaction(async (tx) => { - const newFolderId = crypto.randomUUID() + const newFolderId = clientNewId || crypto.randomUUID() const now = new Date() const targetParentId = parentId ?? 
sourceFolder.parentId diff --git a/apps/sim/app/api/folders/[id]/route.ts b/apps/sim/app/api/folders/[id]/route.ts index 96ab40c2f1e..41b9a6276cb 100644 --- a/apps/sim/app/api/folders/[id]/route.ts +++ b/apps/sim/app/api/folders/[id]/route.ts @@ -1,11 +1,12 @@ import { db } from '@sim/db' import { workflow, workflowFolder } from '@sim/db/schema' import { createLogger } from '@sim/logger' -import { and, eq } from 'drizzle-orm' +import { and, eq, isNull } from 'drizzle-orm' import { type NextRequest, NextResponse } from 'next/server' import { z } from 'zod' import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log' import { getSession } from '@/lib/auth' +import { archiveWorkflowsByIdsInWorkspace } from '@/lib/workflows/lifecycle' import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils' const logger = createLogger('FoldersIDAPI') @@ -151,7 +152,7 @@ export async function DELETE( const totalWorkflowsInWorkspace = await db .select({ id: workflow.id }) .from(workflow) - .where(eq(workflow.workspaceId, existingFolder.workspaceId)) + .where(and(eq(workflow.workspaceId, existingFolder.workspaceId), isNull(workflow.archivedAt))) if (workflowsInFolder > 0 && workflowsInFolder >= totalWorkflowsInWorkspace.length) { return NextResponse.json( @@ -222,12 +223,20 @@ async function deleteFolderRecursively( const workflowsInFolder = await db .select({ id: workflow.id }) .from(workflow) - .where(and(eq(workflow.folderId, folderId), eq(workflow.workspaceId, workspaceId))) + .where( + and( + eq(workflow.folderId, folderId), + eq(workflow.workspaceId, workspaceId), + isNull(workflow.archivedAt) + ) + ) if (workflowsInFolder.length > 0) { - await db - .delete(workflow) - .where(and(eq(workflow.folderId, folderId), eq(workflow.workspaceId, workspaceId))) + await archiveWorkflowsByIdsInWorkspace( + workspaceId, + workflowsInFolder.map((entry) => entry.id), + { requestId: `folder-${folderId}` } + ) stats.workflows += workflowsInFolder.length } @@ 
-252,7 +261,13 @@ async function countWorkflowsInFolderRecursively( const workflowsInFolder = await db .select({ id: workflow.id }) .from(workflow) - .where(and(eq(workflow.folderId, folderId), eq(workflow.workspaceId, workspaceId))) + .where( + and( + eq(workflow.folderId, folderId), + eq(workflow.workspaceId, workspaceId), + isNull(workflow.archivedAt) + ) + ) count += workflowsInFolder.length diff --git a/apps/sim/app/api/folders/route.test.ts b/apps/sim/app/api/folders/route.test.ts index 5fa3a709018..b31b527a745 100644 --- a/apps/sim/app/api/folders/route.test.ts +++ b/apps/sim/app/api/folders/route.test.ts @@ -455,7 +455,7 @@ describe('Folders API Route', () => { expect(response.status).toBe(400) const data = await response.json() - expect(data).toHaveProperty('error', 'Name and workspace ID are required') + expect(data).toHaveProperty('error', 'Invalid request data') } }) diff --git a/apps/sim/app/api/folders/route.ts b/apps/sim/app/api/folders/route.ts index 835231d31f2..2ae6d1673ab 100644 --- a/apps/sim/app/api/folders/route.ts +++ b/apps/sim/app/api/folders/route.ts @@ -3,12 +3,22 @@ import { workflow, workflowFolder } from '@sim/db/schema' import { createLogger } from '@sim/logger' import { and, asc, eq, isNull, min } from 'drizzle-orm' import { type NextRequest, NextResponse } from 'next/server' +import { z } from 'zod' import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log' import { getSession } from '@/lib/auth' import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils' const logger = createLogger('FoldersAPI') +const CreateFolderSchema = z.object({ + id: z.string().uuid().optional(), + name: z.string().min(1, 'Name is required'), + workspaceId: z.string().min(1, 'Workspace ID is required'), + parentId: z.string().optional(), + color: z.string().optional(), + sortOrder: z.number().int().optional(), +}) + // GET - Fetch folders for a workspace export async function GET(request: NextRequest) { try { @@ -59,13 
+69,15 @@ export async function POST(request: NextRequest) { } const body = await request.json() - const { name, workspaceId, parentId, color, sortOrder: providedSortOrder } = body - - if (!name || !workspaceId) { - return NextResponse.json({ error: 'Name and workspace ID are required' }, { status: 400 }) - } + const { + id: clientId, + name, + workspaceId, + parentId, + color, + sortOrder: providedSortOrder, + } = CreateFolderSchema.parse(body) - // Check if user has workspace permissions (at least 'write' access to create folders) const workspacePermission = await getUserEntityPermissions( session.user.id, 'workspace', @@ -79,8 +91,7 @@ export async function POST(request: NextRequest) { ) } - // Generate a new ID - const id = crypto.randomUUID() + const id = clientId || crypto.randomUUID() const newFolder = await db.transaction(async (tx) => { let sortOrder: number @@ -150,6 +161,14 @@ export async function POST(request: NextRequest) { return NextResponse.json({ folder: newFolder }) } catch (error) { + if (error instanceof z.ZodError) { + logger.warn('Invalid folder creation data', { errors: error.errors }) + return NextResponse.json( + { error: 'Invalid request data', details: error.errors }, + { status: 400 } + ) + } + logger.error('Error creating folder:', { error }) return NextResponse.json({ error: 'Internal server error' }, { status: 500 }) } diff --git a/apps/sim/app/api/form/[identifier]/route.ts b/apps/sim/app/api/form/[identifier]/route.ts index d6d4f019e4c..986a77610d8 100644 --- a/apps/sim/app/api/form/[identifier]/route.ts +++ b/apps/sim/app/api/form/[identifier]/route.ts @@ -2,7 +2,7 @@ import { randomUUID } from 'crypto' import { db } from '@sim/db' import { form, workflow, workflowBlocks } from '@sim/db/schema' import { createLogger } from '@sim/logger' -import { eq } from 'drizzle-orm' +import { and, eq, isNull } from 'drizzle-orm' import { type NextRequest, NextResponse } from 'next/server' import { z } from 'zod' import { addCorsHeaders, 
validateAuthToken } from '@/lib/core/security/deployment' @@ -91,7 +91,7 @@ export async function POST( customizations: form.customizations, }) .from(form) - .where(eq(form.identifier, identifier)) + .where(and(eq(form.identifier, identifier), isNull(form.archivedAt))) .limit(1) if (deploymentResult.length === 0) { @@ -107,7 +107,7 @@ export async function POST( const [workflowRecord] = await db .select({ workspaceId: workflow.workspaceId }) .from(workflow) - .where(eq(workflow.id, deployment.workflowId)) + .where(and(eq(workflow.id, deployment.workflowId), isNull(workflow.archivedAt))) .limit(1) const workspaceId = workflowRecord?.workspaceId @@ -312,7 +312,7 @@ export async function GET( showBranding: form.showBranding, }) .from(form) - .where(eq(form.identifier, identifier)) + .where(and(eq(form.identifier, identifier), isNull(form.archivedAt))) .limit(1) if (deploymentResult.length === 0) { diff --git a/apps/sim/app/api/form/manage/[id]/route.ts b/apps/sim/app/api/form/manage/[id]/route.ts index e64e52fb1e6..577363b8d9c 100644 --- a/apps/sim/app/api/form/manage/[id]/route.ts +++ b/apps/sim/app/api/form/manage/[id]/route.ts @@ -1,7 +1,7 @@ import { db } from '@sim/db' import { form } from '@sim/db/schema' import { createLogger } from '@sim/logger' -import { eq } from 'drizzle-orm' +import { and, eq, isNull } from 'drizzle-orm' import type { NextRequest } from 'next/server' import { z } from 'zod' import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log' @@ -134,7 +134,7 @@ export async function PATCH(request: NextRequest, { params }: { params: Promise< const existingIdentifier = await db .select() .from(form) - .where(eq(form.identifier, identifier)) + .where(and(eq(form.identifier, identifier), isNull(form.archivedAt))) .limit(1) if (existingIdentifier.length > 0) { @@ -241,7 +241,7 @@ export async function DELETE( return createErrorResponse('Form not found or access denied', 404) } - await db.update(form).set({ isActive: false, updatedAt: 
new Date() }).where(eq(form.id, id)) + await db.delete(form).where(eq(form.id, id)) - logger.info(`Form ${id} deleted (soft delete)`) + logger.info(`Form ${id} deleted`) diff --git a/apps/sim/app/api/form/route.ts b/apps/sim/app/api/form/route.ts index 4ebb577f1e8..3becf417a44 100644 --- a/apps/sim/app/api/form/route.ts +++ b/apps/sim/app/api/form/route.ts @@ -1,7 +1,7 @@ import { db } from '@sim/db' import { form } from '@sim/db/schema' import { createLogger } from '@sim/logger' -import { eq } from 'drizzle-orm' +import { and, eq, isNull } from 'drizzle-orm' import type { NextRequest } from 'next/server' import { v4 as uuidv4 } from 'uuid' import { z } from 'zod' @@ -73,7 +73,10 @@ export async function GET(request: NextRequest) { return createErrorResponse('Unauthorized', 401) } - const deployments = await db.select().from(form).where(eq(form.userId, session.user.id)) + const deployments = await db + .select() + .from(form) + .where(and(eq(form.userId, session.user.id), isNull(form.archivedAt))) return createSuccessResponse({ deployments }) } catch (error: any) { @@ -118,21 +121,20 @@ export async function POST(request: NextRequest) { ) } - const existingIdentifier = await db - .select() - .from(form) - .where(eq(form.identifier, identifier)) - .limit(1) + // Check identifier availability and workflow access in parallel + const [existingIdentifier, { hasAccess, workflow: workflowRecord }] = await Promise.all([ + db + .select() + .from(form) + .where(and(eq(form.identifier, identifier), isNull(form.archivedAt))) + .limit(1), + checkWorkflowAccessForFormCreation(workflowId, session.user.id), + ]) if (existingIdentifier.length > 0) { return createErrorResponse('Identifier already in use', 400) } - const { hasAccess, workflow: workflowRecord } = await checkWorkflowAccessForFormCreation( - workflowId, - session.user.id - ) - if (!hasAccess || !workflowRecord) { return createErrorResponse('Workflow not found or access denied', 404) } diff --git a/apps/sim/app/api/form/utils.ts
b/apps/sim/app/api/form/utils.ts index e39d210ac30..9f4bafd05ad 100644 --- a/apps/sim/app/api/form/utils.ts +++ b/apps/sim/app/api/form/utils.ts @@ -1,7 +1,7 @@ import { db } from '@sim/db' import { form, workflow } from '@sim/db/schema' import { createLogger } from '@sim/logger' -import { eq } from 'drizzle-orm' +import { and, eq, isNull } from 'drizzle-orm' import type { NextRequest, NextResponse } from 'next/server' import { isEmailAllowed, @@ -57,7 +57,7 @@ export async function checkFormAccess( .select({ form: form, workflowWorkspaceId: workflow.workspaceId }) .from(form) .innerJoin(workflow, eq(form.workflowId, workflow.id)) - .where(eq(form.id, formId)) + .where(and(eq(form.id, formId), isNull(form.archivedAt))) .limit(1) if (formData.length === 0) { diff --git a/apps/sim/app/api/form/validate/route.ts b/apps/sim/app/api/form/validate/route.ts index 8352149fd9b..0b2b8a076e7 100644 --- a/apps/sim/app/api/form/validate/route.ts +++ b/apps/sim/app/api/form/validate/route.ts @@ -1,7 +1,7 @@ import { db } from '@sim/db' import { form } from '@sim/db/schema' import { createLogger } from '@sim/logger' -import { eq } from 'drizzle-orm' +import { and, eq, isNull } from 'drizzle-orm' import type { NextRequest } from 'next/server' import { z } from 'zod' import { getSession } from '@/lib/auth' @@ -50,7 +50,7 @@ export async function GET(request: NextRequest) { const existingForm = await db .select({ id: form.id }) .from(form) - .where(eq(form.identifier, validatedIdentifier)) + .where(and(eq(form.identifier, validatedIdentifier), isNull(form.archivedAt))) .limit(1) const isAvailable = existingForm.length === 0 diff --git a/apps/sim/app/api/function/execute/route.ts b/apps/sim/app/api/function/execute/route.ts index 441bf788d9a..24e992401b7 100644 --- a/apps/sim/app/api/function/execute/route.ts +++ b/apps/sim/app/api/function/execute/route.ts @@ -610,6 +610,7 @@ export async function POST(req: NextRequest) { workflowVariables = {}, workflowId, isCustomTool = false, + 
_sandboxFiles, } = body const executionParams = { ...params } @@ -722,6 +723,7 @@ export async function POST(req: NextRequest) { code: codeForE2B, language: CodeLanguage.JavaScript, timeoutMs: timeout, + sandboxFiles: _sandboxFiles, }) const executionTime = Date.now() - execStart stdout += e2bStdout @@ -785,6 +787,7 @@ export async function POST(req: NextRequest) { code: codeForE2B, language: CodeLanguage.Python, timeoutMs: timeout, + sandboxFiles: _sandboxFiles, }) const executionTime = Date.now() - execStart stdout += e2bStdout diff --git a/apps/sim/app/api/jobs/[jobId]/route.ts b/apps/sim/app/api/jobs/[jobId]/route.ts index 14be54facb1..cb8a43a80de 100644 --- a/apps/sim/app/api/jobs/[jobId]/route.ts +++ b/apps/sim/app/api/jobs/[jobId]/route.ts @@ -40,6 +40,14 @@ export async function GET( logger.warn(`[${requestId}] Access denied to workflow ${job.metadata.workflowId}`) return createErrorResponse('Access denied', 403) } + + if (authResult.apiKeyType === 'workspace' && authResult.workspaceId) { + const { getWorkflowById } = await import('@/lib/workflows/utils') + const workflow = await getWorkflowById(job.metadata.workflowId as string) + if (!workflow?.workspaceId || workflow.workspaceId !== authResult.workspaceId) { + return createErrorResponse('API key is not authorized for this workspace', 403) + } + } } else if (job.metadata?.userId && job.metadata.userId !== authenticatedUserId) { logger.warn(`[${requestId}] Access denied to user ${job.metadata.userId}`) return createErrorResponse('Access denied', 403) diff --git a/apps/sim/app/api/knowledge/[id]/connectors/[connectorId]/documents/route.test.ts b/apps/sim/app/api/knowledge/[id]/connectors/[connectorId]/documents/route.test.ts new file mode 100644 index 00000000000..4c34f2e5ffa --- /dev/null +++ b/apps/sim/app/api/knowledge/[id]/connectors/[connectorId]/documents/route.test.ts @@ -0,0 +1,202 @@ +/** + * @vitest-environment node + */ +import { createMockRequest } from '@sim/testing' +import { beforeEach, 
describe, expect, it, vi } from 'vitest' + +const { mockCheckSession, mockCheckAccess, mockCheckWriteAccess, mockDbChain } = vi.hoisted(() => { + const chain = { + select: vi.fn().mockReturnThis(), + from: vi.fn().mockReturnThis(), + where: vi.fn().mockReturnThis(), + orderBy: vi.fn().mockResolvedValue([]), + limit: vi.fn().mockResolvedValue([]), + update: vi.fn().mockReturnThis(), + set: vi.fn().mockReturnThis(), + returning: vi.fn().mockResolvedValue([]), + } + return { + mockCheckSession: vi.fn(), + mockCheckAccess: vi.fn(), + mockCheckWriteAccess: vi.fn(), + mockDbChain: chain, + } +}) + +vi.mock('@sim/db', () => ({ db: mockDbChain })) +vi.mock('@sim/db/schema', () => ({ + document: { + id: 'id', + connectorId: 'connectorId', + deletedAt: 'deletedAt', + filename: 'filename', + externalId: 'externalId', + sourceUrl: 'sourceUrl', + enabled: 'enabled', + userExcluded: 'userExcluded', + uploadedAt: 'uploadedAt', + processingStatus: 'processingStatus', + }, + knowledgeConnector: { + id: 'id', + knowledgeBaseId: 'knowledgeBaseId', + deletedAt: 'deletedAt', + }, +})) +vi.mock('@/app/api/knowledge/utils', () => ({ + checkKnowledgeBaseAccess: mockCheckAccess, + checkKnowledgeBaseWriteAccess: mockCheckWriteAccess, +})) +vi.mock('@/lib/auth/hybrid', () => ({ + checkSessionOrInternalAuth: mockCheckSession, +})) +vi.mock('@/lib/core/utils/request', () => ({ + generateRequestId: vi.fn().mockReturnValue('test-req-id'), +})) + +import { GET, PATCH } from '@/app/api/knowledge/[id]/connectors/[connectorId]/documents/route' + +describe('Connector Documents API Route', () => { + const mockParams = Promise.resolve({ id: 'kb-123', connectorId: 'conn-456' }) + + beforeEach(() => { + vi.clearAllMocks() + mockDbChain.select.mockReturnThis() + mockDbChain.from.mockReturnThis() + mockDbChain.where.mockReturnThis() + mockDbChain.orderBy.mockResolvedValue([]) + mockDbChain.limit.mockResolvedValue([]) + mockDbChain.update.mockReturnThis() + mockDbChain.set.mockReturnThis() + 
mockDbChain.returning.mockResolvedValue([]) + }) + + describe('GET', () => { + it('returns 401 when unauthenticated', async () => { + mockCheckSession.mockResolvedValue({ success: false, userId: null }) + + const req = createMockRequest('GET') + const response = await GET(req as never, { params: mockParams }) + + expect(response.status).toBe(401) + }) + + it('returns 404 when connector not found', async () => { + mockCheckSession.mockResolvedValue({ success: true, userId: 'user-1' }) + mockCheckAccess.mockResolvedValue({ hasAccess: true }) + mockDbChain.limit.mockResolvedValueOnce([]) + + const req = createMockRequest('GET') + const response = await GET(req as never, { params: mockParams }) + + expect(response.status).toBe(404) + }) + + it('returns documents list on success', async () => { + mockCheckSession.mockResolvedValue({ success: true, userId: 'user-1' }) + mockCheckAccess.mockResolvedValue({ hasAccess: true }) + + const doc = { id: 'doc-1', filename: 'test.txt', userExcluded: false } + mockDbChain.limit.mockResolvedValueOnce([{ id: 'conn-456' }]) + mockDbChain.orderBy.mockResolvedValueOnce([doc]) + + const url = 'http://localhost/api/knowledge/kb-123/connectors/conn-456/documents' + const req = createMockRequest('GET', undefined, undefined, url) + Object.assign(req, { nextUrl: new URL(url) }) + const response = await GET(req as never, { params: mockParams }) + const data = await response.json() + + expect(response.status).toBe(200) + expect(data.data.documents).toHaveLength(1) + expect(data.data.counts.active).toBe(1) + expect(data.data.counts.excluded).toBe(0) + }) + + it('includes excluded documents when includeExcluded=true', async () => { + mockCheckSession.mockResolvedValue({ success: true, userId: 'user-1' }) + mockCheckAccess.mockResolvedValue({ hasAccess: true }) + + mockDbChain.limit.mockResolvedValueOnce([{ id: 'conn-456' }]) + mockDbChain.orderBy + .mockResolvedValueOnce([{ id: 'doc-1', userExcluded: false }]) + .mockResolvedValueOnce([{ id: 
'doc-2', userExcluded: true }]) + + const url = + 'http://localhost/api/knowledge/kb-123/connectors/conn-456/documents?includeExcluded=true' + const req = createMockRequest('GET', undefined, undefined, url) + Object.assign(req, { nextUrl: new URL(url) }) + const response = await GET(req as never, { params: mockParams }) + const data = await response.json() + + expect(response.status).toBe(200) + expect(data.data.documents).toHaveLength(2) + expect(data.data.counts.active).toBe(1) + expect(data.data.counts.excluded).toBe(1) + }) + }) + + describe('PATCH', () => { + it('returns 401 when unauthenticated', async () => { + mockCheckSession.mockResolvedValue({ success: false, userId: null }) + + const req = createMockRequest('PATCH', { operation: 'restore', documentIds: ['doc-1'] }) + const response = await PATCH(req as never, { params: mockParams }) + + expect(response.status).toBe(401) + }) + + it('returns 400 for invalid body', async () => { + mockCheckSession.mockResolvedValue({ success: true, userId: 'user-1' }) + mockCheckWriteAccess.mockResolvedValue({ hasAccess: true }) + mockDbChain.limit.mockResolvedValueOnce([{ id: 'conn-456' }]) + + const req = createMockRequest('PATCH', { documentIds: [] }) + const response = await PATCH(req as never, { params: mockParams }) + + expect(response.status).toBe(400) + }) + + it('returns 404 when connector not found', async () => { + mockCheckSession.mockResolvedValue({ success: true, userId: 'user-1' }) + mockCheckWriteAccess.mockResolvedValue({ hasAccess: true }) + mockDbChain.limit.mockResolvedValueOnce([]) + + const req = createMockRequest('PATCH', { operation: 'restore', documentIds: ['doc-1'] }) + const response = await PATCH(req as never, { params: mockParams }) + + expect(response.status).toBe(404) + }) + + it('returns success for restore operation', async () => { + mockCheckSession.mockResolvedValue({ success: true, userId: 'user-1' }) + mockCheckWriteAccess.mockResolvedValue({ hasAccess: true }) + 
mockDbChain.limit.mockResolvedValueOnce([{ id: 'conn-456' }]) + mockDbChain.returning.mockResolvedValueOnce([{ id: 'doc-1' }]) + + const req = createMockRequest('PATCH', { operation: 'restore', documentIds: ['doc-1'] }) + const response = await PATCH(req as never, { params: mockParams }) + const data = await response.json() + + expect(response.status).toBe(200) + expect(data.data.restoredCount).toBe(1) + }) + + it('returns success for exclude operation', async () => { + mockCheckSession.mockResolvedValue({ success: true, userId: 'user-1' }) + mockCheckWriteAccess.mockResolvedValue({ hasAccess: true }) + mockDbChain.limit.mockResolvedValueOnce([{ id: 'conn-456' }]) + mockDbChain.returning.mockResolvedValueOnce([{ id: 'doc-2' }, { id: 'doc-3' }]) + + const req = createMockRequest('PATCH', { + operation: 'exclude', + documentIds: ['doc-2', 'doc-3'], + }) + const response = await PATCH(req as never, { params: mockParams }) + const data = await response.json() + + expect(response.status).toBe(200) + expect(data.data.excludedCount).toBe(2) + expect(data.data.documentIds).toEqual(['doc-2', 'doc-3']) + }) + }) +}) diff --git a/apps/sim/app/api/knowledge/[id]/connectors/[connectorId]/documents/route.ts b/apps/sim/app/api/knowledge/[id]/connectors/[connectorId]/documents/route.ts new file mode 100644 index 00000000000..0b5e64c528d --- /dev/null +++ b/apps/sim/app/api/knowledge/[id]/connectors/[connectorId]/documents/route.ts @@ -0,0 +1,217 @@ +import { db } from '@sim/db' +import { document, knowledgeConnector } from '@sim/db/schema' +import { createLogger } from '@sim/logger' +import { and, eq, inArray, isNull } from 'drizzle-orm' +import { type NextRequest, NextResponse } from 'next/server' +import { z } from 'zod' +import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid' +import { generateRequestId } from '@/lib/core/utils/request' +import { checkKnowledgeBaseAccess, checkKnowledgeBaseWriteAccess } from '@/app/api/knowledge/utils' + +const logger = 
createLogger('ConnectorDocumentsAPI') + +type RouteParams = { params: Promise<{ id: string; connectorId: string }> } + +/** + * GET /api/knowledge/[id]/connectors/[connectorId]/documents + * Returns documents for a connector, optionally including user-excluded ones. + */ +export async function GET(request: NextRequest, { params }: RouteParams) { + const requestId = generateRequestId() + const { id: knowledgeBaseId, connectorId } = await params + + try { + const auth = await checkSessionOrInternalAuth(request, { requireWorkflowId: false }) + if (!auth.success || !auth.userId) { + return NextResponse.json({ error: 'Unauthorized' }, { status: 401 }) + } + + const accessCheck = await checkKnowledgeBaseAccess(knowledgeBaseId, auth.userId) + if (!accessCheck.hasAccess) { + const status = 'notFound' in accessCheck && accessCheck.notFound ? 404 : 401 + return NextResponse.json({ error: status === 404 ? 'Not found' : 'Unauthorized' }, { status }) + } + + const connectorRows = await db + .select({ id: knowledgeConnector.id }) + .from(knowledgeConnector) + .where( + and( + eq(knowledgeConnector.id, connectorId), + eq(knowledgeConnector.knowledgeBaseId, knowledgeBaseId), + isNull(knowledgeConnector.archivedAt), + isNull(knowledgeConnector.deletedAt) + ) + ) + .limit(1) + + if (connectorRows.length === 0) { + return NextResponse.json({ error: 'Connector not found' }, { status: 404 }) + } + + const includeExcluded = request.nextUrl.searchParams.get('includeExcluded') === 'true' + + const activeDocs = await db + .select({ + id: document.id, + filename: document.filename, + externalId: document.externalId, + sourceUrl: document.sourceUrl, + enabled: document.enabled, + userExcluded: document.userExcluded, + uploadedAt: document.uploadedAt, + processingStatus: document.processingStatus, + }) + .from(document) + .where( + and( + eq(document.connectorId, connectorId), + isNull(document.archivedAt), + isNull(document.deletedAt), + eq(document.userExcluded, false) + ) + ) + 
.orderBy(document.filename) + + const excludedDocs = includeExcluded + ? await db + .select({ + id: document.id, + filename: document.filename, + externalId: document.externalId, + sourceUrl: document.sourceUrl, + enabled: document.enabled, + userExcluded: document.userExcluded, + uploadedAt: document.uploadedAt, + processingStatus: document.processingStatus, + }) + .from(document) + .where( + and( + eq(document.connectorId, connectorId), + eq(document.userExcluded, true), + isNull(document.archivedAt), + isNull(document.deletedAt) + ) + ) + .orderBy(document.filename) + : [] + + const docs = [...activeDocs, ...excludedDocs] + const activeCount = activeDocs.length + const excludedCount = excludedDocs.length + + return NextResponse.json({ + success: true, + data: { + documents: docs, + counts: { active: activeCount, excluded: excludedCount }, + }, + }) + } catch (error) { + logger.error(`[${requestId}] Error fetching connector documents`, error) + return NextResponse.json({ error: 'Internal server error' }, { status: 500 }) + } +} + +const PatchSchema = z.object({ + operation: z.enum(['restore', 'exclude']), + documentIds: z.array(z.string()).min(1), +}) + +/** + * PATCH /api/knowledge/[id]/connectors/[connectorId]/documents + * Restore or exclude connector documents. + */ +export async function PATCH(request: NextRequest, { params }: RouteParams) { + const requestId = generateRequestId() + const { id: knowledgeBaseId, connectorId } = await params + + try { + const auth = await checkSessionOrInternalAuth(request, { requireWorkflowId: false }) + if (!auth.success || !auth.userId) { + return NextResponse.json({ error: 'Unauthorized' }, { status: 401 }) + } + + const writeCheck = await checkKnowledgeBaseWriteAccess(knowledgeBaseId, auth.userId) + if (!writeCheck.hasAccess) { + const status = 'notFound' in writeCheck && writeCheck.notFound ? 404 : 401 + return NextResponse.json({ error: status === 404 ? 
'Not found' : 'Unauthorized' }, { status }) + } + + const connectorRows = await db + .select({ id: knowledgeConnector.id }) + .from(knowledgeConnector) + .where( + and( + eq(knowledgeConnector.id, connectorId), + eq(knowledgeConnector.knowledgeBaseId, knowledgeBaseId), + isNull(knowledgeConnector.archivedAt), + isNull(knowledgeConnector.deletedAt) + ) + ) + .limit(1) + + if (connectorRows.length === 0) { + return NextResponse.json({ error: 'Connector not found' }, { status: 404 }) + } + + const body = await request.json() + const parsed = PatchSchema.safeParse(body) + if (!parsed.success) { + return NextResponse.json( + { error: 'Invalid request', details: parsed.error.flatten() }, + { status: 400 } + ) + } + + const { operation, documentIds } = parsed.data + + if (operation === 'restore') { + const updated = await db + .update(document) + .set({ userExcluded: false, enabled: true }) + .where( + and( + eq(document.connectorId, connectorId), + inArray(document.id, documentIds), + eq(document.userExcluded, true), + isNull(document.archivedAt), + isNull(document.deletedAt) + ) + ) + .returning({ id: document.id }) + + logger.info(`[${requestId}] Restored ${updated.length} excluded documents`, { connectorId }) + + return NextResponse.json({ + success: true, + data: { restoredCount: updated.length, documentIds: updated.map((d) => d.id) }, + }) + } + + const updated = await db + .update(document) + .set({ userExcluded: true, enabled: false }) + .where( + and( + eq(document.connectorId, connectorId), + inArray(document.id, documentIds), + eq(document.userExcluded, false), + isNull(document.archivedAt), + isNull(document.deletedAt) + ) + ) + .returning({ id: document.id }) + + logger.info(`[${requestId}] Excluded ${updated.length} documents`, { connectorId }) + + return NextResponse.json({ + success: true, + data: { excludedCount: updated.length, documentIds: updated.map((d) => d.id) }, + }) + } catch (error) { + logger.error(`[${requestId}] Error updating connector 
documents`, error) + return NextResponse.json({ error: 'Internal server error' }, { status: 500 }) + } +} diff --git a/apps/sim/app/api/knowledge/[id]/connectors/[connectorId]/route.test.ts b/apps/sim/app/api/knowledge/[id]/connectors/[connectorId]/route.test.ts new file mode 100644 index 00000000000..c39c7866b69 --- /dev/null +++ b/apps/sim/app/api/knowledge/[id]/connectors/[connectorId]/route.test.ts @@ -0,0 +1,230 @@ +/** + * @vitest-environment node + */ +import { createMockRequest } from '@sim/testing' +import { beforeEach, describe, expect, it, vi } from 'vitest' + +const { mockCheckSession, mockCheckAccess, mockCheckWriteAccess, mockDbChain, mockValidateConfig } = + vi.hoisted(() => { + const chain = { + select: vi.fn().mockReturnThis(), + from: vi.fn().mockReturnThis(), + where: vi.fn().mockReturnThis(), + orderBy: vi.fn().mockReturnThis(), + limit: vi.fn().mockResolvedValue([]), + execute: vi.fn().mockResolvedValue(undefined), + transaction: vi.fn(), + insert: vi.fn().mockReturnThis(), + values: vi.fn().mockResolvedValue(undefined), + update: vi.fn().mockReturnThis(), + delete: vi.fn().mockReturnThis(), + set: vi.fn().mockReturnThis(), + returning: vi.fn().mockResolvedValue([]), + } + return { + mockCheckSession: vi.fn(), + mockCheckAccess: vi.fn(), + mockCheckWriteAccess: vi.fn(), + mockDbChain: chain, + mockValidateConfig: vi.fn(), + } + }) + +vi.mock('@sim/db', () => ({ db: mockDbChain })) +vi.mock('@sim/db/schema', () => ({ + document: { + id: 'id', + connectorId: 'connectorId', + fileUrl: 'fileUrl', + archivedAt: 'archivedAt', + deletedAt: 'deletedAt', + }, + embedding: { documentId: 'documentId' }, + knowledgeBase: { id: 'id', userId: 'userId' }, + knowledgeConnector: { + id: 'id', + knowledgeBaseId: 'knowledgeBaseId', + archivedAt: 'archivedAt', + deletedAt: 'deletedAt', + connectorType: 'connectorType', + credentialId: 'credentialId', + }, + knowledgeConnectorSyncLog: { connectorId: 'connectorId', startedAt: 'startedAt' }, +})) 
+vi.mock('@/app/api/knowledge/utils', () => ({ + checkKnowledgeBaseAccess: mockCheckAccess, + checkKnowledgeBaseWriteAccess: mockCheckWriteAccess, +})) +vi.mock('@/lib/auth/hybrid', () => ({ + checkSessionOrInternalAuth: mockCheckSession, +})) +vi.mock('@/lib/core/utils/request', () => ({ + generateRequestId: vi.fn().mockReturnValue('test-req-id'), +})) +vi.mock('@/app/api/auth/oauth/utils', () => ({ + refreshAccessTokenIfNeeded: vi.fn(), +})) +vi.mock('@/connectors/registry', () => ({ + CONNECTOR_REGISTRY: { + jira: { validateConfig: mockValidateConfig }, + }, +})) +vi.mock('@/lib/knowledge/tags/service', () => ({ + cleanupUnusedTagDefinitions: vi.fn().mockResolvedValue(undefined), +})) +vi.mock('@/lib/knowledge/documents/service', () => ({ + deleteDocumentStorageFiles: vi.fn().mockResolvedValue(undefined), +})) + +import { DELETE, GET, PATCH } from '@/app/api/knowledge/[id]/connectors/[connectorId]/route' + +describe('Knowledge Connector By ID API Route', () => { + const mockParams = Promise.resolve({ id: 'kb-123', connectorId: 'conn-456' }) + + beforeEach(() => { + vi.clearAllMocks() + mockDbChain.select.mockReturnThis() + mockDbChain.from.mockReturnThis() + mockDbChain.where.mockReturnThis() + mockDbChain.orderBy.mockReturnThis() + mockDbChain.limit.mockResolvedValue([]) + mockDbChain.execute.mockResolvedValue(undefined) + mockDbChain.transaction.mockImplementation( + async (callback: (tx: typeof mockDbChain) => unknown) => callback(mockDbChain) + ) + mockDbChain.update.mockReturnThis() + mockDbChain.delete.mockReturnThis() + mockDbChain.set.mockReturnThis() + mockDbChain.returning.mockResolvedValue([]) + }) + + describe('GET', () => { + it('returns 401 when unauthenticated', async () => { + mockCheckSession.mockResolvedValue({ success: false, userId: null }) + + const req = createMockRequest('GET') + const response = await GET(req, { params: mockParams }) + + expect(response.status).toBe(401) + }) + + it('returns 404 when KB not found', async () => { + 
mockCheckSession.mockResolvedValue({ success: true, userId: 'user-1' }) + mockCheckAccess.mockResolvedValue({ hasAccess: false, notFound: true }) + + const req = createMockRequest('GET') + const response = await GET(req, { params: mockParams }) + + expect(response.status).toBe(404) + }) + + it('returns 404 when connector not found', async () => { + mockCheckSession.mockResolvedValue({ success: true, userId: 'user-1' }) + mockCheckAccess.mockResolvedValue({ hasAccess: true }) + mockDbChain.limit.mockResolvedValueOnce([]) + + const req = createMockRequest('GET') + const response = await GET(req, { params: mockParams }) + + expect(response.status).toBe(404) + }) + + it('returns connector with sync logs on success', async () => { + mockCheckSession.mockResolvedValue({ success: true, userId: 'user-1' }) + mockCheckAccess.mockResolvedValue({ hasAccess: true }) + + const mockConnector = { id: 'conn-456', connectorType: 'jira', status: 'active' } + const mockLogs = [{ id: 'log-1', status: 'completed' }] + + mockDbChain.limit.mockResolvedValueOnce([mockConnector]).mockResolvedValueOnce(mockLogs) + + const req = createMockRequest('GET') + const response = await GET(req, { params: mockParams }) + const data = await response.json() + + expect(response.status).toBe(200) + expect(data.success).toBe(true) + expect(data.data.id).toBe('conn-456') + expect(data.data.syncLogs).toHaveLength(1) + }) + }) + + describe('PATCH', () => { + it('returns 401 when unauthenticated', async () => { + mockCheckSession.mockResolvedValue({ success: false, userId: null }) + + const req = createMockRequest('PATCH', { status: 'paused' }) + const response = await PATCH(req, { params: mockParams }) + + expect(response.status).toBe(401) + }) + + it('returns 400 for invalid body', async () => { + mockCheckSession.mockResolvedValue({ success: true, userId: 'user-1' }) + mockCheckWriteAccess.mockResolvedValue({ hasAccess: true }) + + const req = createMockRequest('PATCH', { syncIntervalMinutes: 'not a 
number' }) + const response = await PATCH(req, { params: mockParams }) + const data = await response.json() + + expect(response.status).toBe(400) + expect(data.error).toBe('Invalid request') + }) + + it('returns 404 when connector not found during sourceConfig validation', async () => { + mockCheckSession.mockResolvedValue({ success: true, userId: 'user-1' }) + mockCheckWriteAccess.mockResolvedValue({ hasAccess: true }) + mockDbChain.limit.mockResolvedValueOnce([]) + + const req = createMockRequest('PATCH', { sourceConfig: { project: 'NEW' } }) + const response = await PATCH(req, { params: mockParams }) + + expect(response.status).toBe(404) + }) + + it('returns 200 and updates status', async () => { + mockCheckSession.mockResolvedValue({ success: true, userId: 'user-1' }) + mockCheckWriteAccess.mockResolvedValue({ hasAccess: true }) + + const updatedConnector = { id: 'conn-456', status: 'paused', syncIntervalMinutes: 120 } + mockDbChain.limit.mockResolvedValueOnce([updatedConnector]) + + const req = createMockRequest('PATCH', { status: 'paused', syncIntervalMinutes: 120 }) + const response = await PATCH(req, { params: mockParams }) + const data = await response.json() + + expect(response.status).toBe(200) + expect(data.success).toBe(true) + expect(data.data.status).toBe('paused') + }) + }) + + describe('DELETE', () => { + it('returns 401 when unauthenticated', async () => { + mockCheckSession.mockResolvedValue({ success: false, userId: null }) + + const req = createMockRequest('DELETE') + const response = await DELETE(req, { params: mockParams }) + + expect(response.status).toBe(401) + }) + + it('returns 200 on successful hard-delete', async () => { + mockCheckSession.mockResolvedValue({ success: true, userId: 'user-1' }) + mockCheckWriteAccess.mockResolvedValue({ hasAccess: true }) + mockDbChain.where + .mockReturnValueOnce(mockDbChain) + .mockResolvedValueOnce([{ id: 'doc-1', fileUrl: '/api/uploads/test.txt' }]) + .mockReturnValueOnce(mockDbChain) + 
mockDbChain.limit.mockResolvedValueOnce([{ id: 'conn-456' }]) + mockDbChain.returning.mockResolvedValueOnce([{ id: 'conn-456' }]) + + const req = createMockRequest('DELETE') + const response = await DELETE(req, { params: mockParams }) + const data = await response.json() + + expect(response.status).toBe(200) + expect(data.success).toBe(true) + }) + }) +}) diff --git a/apps/sim/app/api/knowledge/[id]/connectors/[connectorId]/route.ts b/apps/sim/app/api/knowledge/[id]/connectors/[connectorId]/route.ts new file mode 100644 index 00000000000..cfdca60afa0 --- /dev/null +++ b/apps/sim/app/api/knowledge/[id]/connectors/[connectorId]/route.ts @@ -0,0 +1,331 @@ +import { db } from '@sim/db' +import { + document, + embedding, + knowledgeBase, + knowledgeConnector, + knowledgeConnectorSyncLog, +} from '@sim/db/schema' +import { createLogger } from '@sim/logger' +import { and, desc, eq, inArray, isNull, sql } from 'drizzle-orm' +import { type NextRequest, NextResponse } from 'next/server' +import { z } from 'zod' +import { decryptApiKey } from '@/lib/api-key/crypto' +import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid' +import { generateRequestId } from '@/lib/core/utils/request' +import { deleteDocumentStorageFiles } from '@/lib/knowledge/documents/service' +import { cleanupUnusedTagDefinitions } from '@/lib/knowledge/tags/service' +import { refreshAccessTokenIfNeeded } from '@/app/api/auth/oauth/utils' +import { checkKnowledgeBaseAccess, checkKnowledgeBaseWriteAccess } from '@/app/api/knowledge/utils' +import { CONNECTOR_REGISTRY } from '@/connectors/registry' + +const logger = createLogger('KnowledgeConnectorByIdAPI') + +type RouteParams = { params: Promise<{ id: string; connectorId: string }> } + +const UpdateConnectorSchema = z.object({ + sourceConfig: z.record(z.unknown()).optional(), + syncIntervalMinutes: z.number().int().min(0).optional(), + status: z.enum(['active', 'paused']).optional(), +}) + +/** + * GET /api/knowledge/[id]/connectors/[connectorId] - Get 
connector details with recent sync logs + */ +export async function GET(request: NextRequest, { params }: RouteParams) { + const requestId = generateRequestId() + const { id: knowledgeBaseId, connectorId } = await params + + try { + const auth = await checkSessionOrInternalAuth(request, { requireWorkflowId: false }) + if (!auth.success || !auth.userId) { + return NextResponse.json({ error: 'Unauthorized' }, { status: 401 }) + } + + const accessCheck = await checkKnowledgeBaseAccess(knowledgeBaseId, auth.userId) + if (!accessCheck.hasAccess) { + const status = 'notFound' in accessCheck && accessCheck.notFound ? 404 : 401 + return NextResponse.json({ error: status === 404 ? 'Not found' : 'Unauthorized' }, { status }) + } + + const connectorRows = await db + .select() + .from(knowledgeConnector) + .where( + and( + eq(knowledgeConnector.id, connectorId), + eq(knowledgeConnector.knowledgeBaseId, knowledgeBaseId), + isNull(knowledgeConnector.archivedAt), + isNull(knowledgeConnector.deletedAt) + ) + ) + .limit(1) + + if (connectorRows.length === 0) { + return NextResponse.json({ error: 'Connector not found' }, { status: 404 }) + } + + const syncLogs = await db + .select() + .from(knowledgeConnectorSyncLog) + .where(eq(knowledgeConnectorSyncLog.connectorId, connectorId)) + .orderBy(desc(knowledgeConnectorSyncLog.startedAt)) + .limit(10) + + const { encryptedApiKey: _, ...connectorData } = connectorRows[0] + return NextResponse.json({ + success: true, + data: { + ...connectorData, + syncLogs, + }, + }) + } catch (error) { + logger.error(`[${requestId}] Error fetching connector`, error) + return NextResponse.json({ error: 'Internal server error' }, { status: 500 }) + } +} + +/** + * PATCH /api/knowledge/[id]/connectors/[connectorId] - Update a connector + */ +export async function PATCH(request: NextRequest, { params }: RouteParams) { + const requestId = generateRequestId() + const { id: knowledgeBaseId, connectorId } = await params + + try { + const auth = await 
checkSessionOrInternalAuth(request, { requireWorkflowId: false }) + if (!auth.success || !auth.userId) { + return NextResponse.json({ error: 'Unauthorized' }, { status: 401 }) + } + + const writeCheck = await checkKnowledgeBaseWriteAccess(knowledgeBaseId, auth.userId) + if (!writeCheck.hasAccess) { + const status = 'notFound' in writeCheck && writeCheck.notFound ? 404 : 401 + return NextResponse.json({ error: status === 404 ? 'Not found' : 'Unauthorized' }, { status }) + } + + const body = await request.json() + const parsed = UpdateConnectorSchema.safeParse(body) + if (!parsed.success) { + return NextResponse.json( + { error: 'Invalid request', details: parsed.error.flatten() }, + { status: 400 } + ) + } + + if (parsed.data.sourceConfig !== undefined) { + const existingRows = await db + .select() + .from(knowledgeConnector) + .where( + and( + eq(knowledgeConnector.id, connectorId), + eq(knowledgeConnector.knowledgeBaseId, knowledgeBaseId), + isNull(knowledgeConnector.archivedAt), + isNull(knowledgeConnector.deletedAt) + ) + ) + .limit(1) + + if (existingRows.length === 0) { + return NextResponse.json({ error: 'Connector not found' }, { status: 404 }) + } + + const existing = existingRows[0] + const connectorConfig = CONNECTOR_REGISTRY[existing.connectorType] + + if (!connectorConfig) { + return NextResponse.json( + { error: `Unknown connector type: ${existing.connectorType}` }, + { status: 400 } + ) + } + + const kbRows = await db + .select({ userId: knowledgeBase.userId }) + .from(knowledgeBase) + .where(eq(knowledgeBase.id, knowledgeBaseId)) + .limit(1) + + if (kbRows.length === 0) { + return NextResponse.json({ error: 'Knowledge base not found' }, { status: 404 }) + } + + let accessToken: string | null = null + if (connectorConfig.auth.mode === 'apiKey') { + if (!existing.encryptedApiKey) { + return NextResponse.json( + { error: 'API key not found. Please reconfigure the connector.' 
}, + { status: 400 } + ) + } + accessToken = (await decryptApiKey(existing.encryptedApiKey)).decrypted + } else { + if (!existing.credentialId) { + return NextResponse.json( + { error: 'OAuth credential not found. Please reconfigure the connector.' }, + { status: 400 } + ) + } + accessToken = await refreshAccessTokenIfNeeded( + existing.credentialId, + kbRows[0].userId, + `patch-${connectorId}` + ) + } + + if (!accessToken) { + return NextResponse.json( + { error: 'Failed to refresh access token. Please reconnect your account.' }, + { status: 401 } + ) + } + + const validation = await connectorConfig.validateConfig(accessToken, parsed.data.sourceConfig) + if (!validation.valid) { + return NextResponse.json( + { error: validation.error || 'Invalid source configuration' }, + { status: 400 } + ) + } + } + + const updates: Record<string, unknown> = { updatedAt: new Date() } + if (parsed.data.sourceConfig !== undefined) { + updates.sourceConfig = parsed.data.sourceConfig + } + if (parsed.data.syncIntervalMinutes !== undefined) { + updates.syncIntervalMinutes = parsed.data.syncIntervalMinutes + if (parsed.data.syncIntervalMinutes > 0) { + updates.nextSyncAt = new Date(Date.now() + parsed.data.syncIntervalMinutes * 60 * 1000) + } else { + updates.nextSyncAt = null + } + } + if (parsed.data.status !== undefined) { + updates.status = parsed.data.status + } + + await db + .update(knowledgeConnector) + .set(updates) + .where( + and( + eq(knowledgeConnector.id, connectorId), + eq(knowledgeConnector.knowledgeBaseId, knowledgeBaseId), + isNull(knowledgeConnector.archivedAt), + isNull(knowledgeConnector.deletedAt) + ) + ) + + const updated = await db + .select() + .from(knowledgeConnector) + .where( + and( + eq(knowledgeConnector.id, connectorId), + eq(knowledgeConnector.knowledgeBaseId, knowledgeBaseId), + isNull(knowledgeConnector.archivedAt), + isNull(knowledgeConnector.deletedAt) + ) + ) + .limit(1) + + const { encryptedApiKey: __, ...updatedData } = updated[0] + return NextResponse.json({
success: true, data: updatedData }) + } catch (error) { + logger.error(`[${requestId}] Error updating connector`, error) + return NextResponse.json({ error: 'Internal server error' }, { status: 500 }) + } +} + +/** + * DELETE /api/knowledge/[id]/connectors/[connectorId] - Hard-delete a connector + */ +export async function DELETE(request: NextRequest, { params }: RouteParams) { + const requestId = generateRequestId() + const { id: knowledgeBaseId, connectorId } = await params + + try { + const auth = await checkSessionOrInternalAuth(request, { requireWorkflowId: false }) + if (!auth.success || !auth.userId) { + return NextResponse.json({ error: 'Unauthorized' }, { status: 401 }) + } + + const writeCheck = await checkKnowledgeBaseWriteAccess(knowledgeBaseId, auth.userId) + if (!writeCheck.hasAccess) { + const status = 'notFound' in writeCheck && writeCheck.notFound ? 404 : 401 + return NextResponse.json({ error: status === 404 ? 'Not found' : 'Unauthorized' }, { status }) + } + + const existingConnector = await db + .select({ id: knowledgeConnector.id }) + .from(knowledgeConnector) + .where( + and( + eq(knowledgeConnector.id, connectorId), + eq(knowledgeConnector.knowledgeBaseId, knowledgeBaseId), + isNull(knowledgeConnector.archivedAt), + isNull(knowledgeConnector.deletedAt) + ) + ) + .limit(1) + + if (existingConnector.length === 0) { + return NextResponse.json({ error: 'Connector not found' }, { status: 404 }) + } + + const connectorDocuments = await db.transaction(async (tx) => { + await tx.execute(sql`SELECT 1 FROM knowledge_connector WHERE id = ${connectorId} FOR UPDATE`) + + const docs = await tx + .select({ id: document.id, fileUrl: document.fileUrl }) + .from(document) + .where( + and( + eq(document.connectorId, connectorId), + isNull(document.archivedAt), + isNull(document.deletedAt) + ) + ) + + const documentIds = docs.map((doc) => doc.id) + if (documentIds.length > 0) { + await tx.delete(embedding).where(inArray(embedding.documentId, documentIds)) + 
await tx.delete(document).where(inArray(document.id, documentIds)) + } + + const deletedConnectors = await tx + .delete(knowledgeConnector) + .where( + and( + eq(knowledgeConnector.id, connectorId), + eq(knowledgeConnector.knowledgeBaseId, knowledgeBaseId), + isNull(knowledgeConnector.archivedAt), + isNull(knowledgeConnector.deletedAt) + ) + ) + .returning({ id: knowledgeConnector.id }) + + if (deletedConnectors.length === 0) { + throw new Error('Connector not found') + } + + return docs + }) + + await deleteDocumentStorageFiles(connectorDocuments, requestId) + + await cleanupUnusedTagDefinitions(knowledgeBaseId, requestId).catch((error) => { + logger.warn(`[${requestId}] Failed to cleanup tag definitions`, error) + }) + + logger.info(`[${requestId}] Hard-deleted connector ${connectorId} and its documents`) + + return NextResponse.json({ success: true }) + } catch (error) { + logger.error(`[${requestId}] Error deleting connector`, error) + return NextResponse.json({ error: 'Internal server error' }, { status: 500 }) + } +} diff --git a/apps/sim/app/api/knowledge/[id]/connectors/[connectorId]/sync/route.test.ts b/apps/sim/app/api/knowledge/[id]/connectors/[connectorId]/sync/route.test.ts new file mode 100644 index 00000000000..12a873a6f43 --- /dev/null +++ b/apps/sim/app/api/knowledge/[id]/connectors/[connectorId]/sync/route.test.ts @@ -0,0 +1,107 @@ +/** + * @vitest-environment node + */ +import { createMockRequest } from '@sim/testing' +import { beforeEach, describe, expect, it, vi } from 'vitest' + +const { mockCheckSession, mockCheckWriteAccess, mockDispatchSync, mockDbChain } = vi.hoisted(() => { + const chain = { + select: vi.fn().mockReturnThis(), + from: vi.fn().mockReturnThis(), + where: vi.fn().mockReturnThis(), + orderBy: vi.fn().mockResolvedValue([]), + limit: vi.fn().mockResolvedValue([]), + update: vi.fn().mockReturnThis(), + set: vi.fn().mockReturnThis(), + } + return { + mockCheckSession: vi.fn(), + mockCheckWriteAccess: vi.fn(), + mockDispatchSync: 
vi.fn().mockResolvedValue(undefined), + mockDbChain: chain, + } +}) + +vi.mock('@sim/db', () => ({ db: mockDbChain })) +vi.mock('@sim/db/schema', () => ({ + knowledgeConnector: { + id: 'id', + knowledgeBaseId: 'knowledgeBaseId', + deletedAt: 'deletedAt', + status: 'status', + }, +})) +vi.mock('@/app/api/knowledge/utils', () => ({ + checkKnowledgeBaseWriteAccess: mockCheckWriteAccess, +})) +vi.mock('@/lib/auth/hybrid', () => ({ + checkSessionOrInternalAuth: mockCheckSession, +})) +vi.mock('@/lib/core/utils/request', () => ({ + generateRequestId: vi.fn().mockReturnValue('test-req-id'), +})) +vi.mock('@/lib/knowledge/connectors/sync-engine', () => ({ + dispatchSync: mockDispatchSync, +})) + +import { POST } from '@/app/api/knowledge/[id]/connectors/[connectorId]/sync/route' + +describe('Connector Manual Sync API Route', () => { + const mockParams = Promise.resolve({ id: 'kb-123', connectorId: 'conn-456' }) + + beforeEach(() => { + vi.clearAllMocks() + mockDbChain.select.mockReturnThis() + mockDbChain.from.mockReturnThis() + mockDbChain.where.mockReturnThis() + mockDbChain.orderBy.mockResolvedValue([]) + mockDbChain.limit.mockResolvedValue([]) + mockDbChain.update.mockReturnThis() + mockDbChain.set.mockReturnThis() + }) + + it('returns 401 when unauthenticated', async () => { + mockCheckSession.mockResolvedValue({ success: false, userId: null }) + + const req = createMockRequest('POST') + const response = await POST(req as never, { params: mockParams }) + + expect(response.status).toBe(401) + }) + + it('returns 404 when connector not found', async () => { + mockCheckSession.mockResolvedValue({ success: true, userId: 'user-1' }) + mockCheckWriteAccess.mockResolvedValue({ hasAccess: true }) + mockDbChain.limit.mockResolvedValueOnce([]) + + const req = createMockRequest('POST') + const response = await POST(req as never, { params: mockParams }) + + expect(response.status).toBe(404) + }) + + it('returns 409 when connector is syncing', async () => { + 
mockCheckSession.mockResolvedValue({ success: true, userId: 'user-1' }) + mockCheckWriteAccess.mockResolvedValue({ hasAccess: true }) + mockDbChain.limit.mockResolvedValueOnce([{ id: 'conn-456', status: 'syncing' }]) + + const req = createMockRequest('POST') + const response = await POST(req as never, { params: mockParams }) + + expect(response.status).toBe(409) + }) + + it('dispatches sync on valid request', async () => { + mockCheckSession.mockResolvedValue({ success: true, userId: 'user-1' }) + mockCheckWriteAccess.mockResolvedValue({ hasAccess: true }) + mockDbChain.limit.mockResolvedValueOnce([{ id: 'conn-456', status: 'active' }]) + + const req = createMockRequest('POST') + const response = await POST(req as never, { params: mockParams }) + const data = await response.json() + + expect(response.status).toBe(200) + expect(data.success).toBe(true) + expect(mockDispatchSync).toHaveBeenCalledWith('conn-456', { requestId: 'test-req-id' }) + }) +}) diff --git a/apps/sim/app/api/knowledge/[id]/connectors/[connectorId]/sync/route.ts b/apps/sim/app/api/knowledge/[id]/connectors/[connectorId]/sync/route.ts new file mode 100644 index 00000000000..e6aae66eb27 --- /dev/null +++ b/apps/sim/app/api/knowledge/[id]/connectors/[connectorId]/sync/route.ts @@ -0,0 +1,72 @@ +import { db } from '@sim/db' +import { knowledgeConnector } from '@sim/db/schema' +import { createLogger } from '@sim/logger' +import { and, eq, isNull } from 'drizzle-orm' +import { type NextRequest, NextResponse } from 'next/server' +import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid' +import { generateRequestId } from '@/lib/core/utils/request' +import { dispatchSync } from '@/lib/knowledge/connectors/sync-engine' +import { checkKnowledgeBaseWriteAccess } from '@/app/api/knowledge/utils' + +const logger = createLogger('ConnectorManualSyncAPI') + +type RouteParams = { params: Promise<{ id: string; connectorId: string }> } + +/** + * POST /api/knowledge/[id]/connectors/[connectorId]/sync - Trigger 
a manual sync + */ +export async function POST(request: NextRequest, { params }: RouteParams) { + const requestId = generateRequestId() + const { id: knowledgeBaseId, connectorId } = await params + + try { + const auth = await checkSessionOrInternalAuth(request, { requireWorkflowId: false }) + if (!auth.success || !auth.userId) { + return NextResponse.json({ error: 'Unauthorized' }, { status: 401 }) + } + + const writeCheck = await checkKnowledgeBaseWriteAccess(knowledgeBaseId, auth.userId) + if (!writeCheck.hasAccess) { + const status = 'notFound' in writeCheck && writeCheck.notFound ? 404 : 401 + return NextResponse.json({ error: status === 404 ? 'Not found' : 'Unauthorized' }, { status }) + } + + const connectorRows = await db + .select() + .from(knowledgeConnector) + .where( + and( + eq(knowledgeConnector.id, connectorId), + eq(knowledgeConnector.knowledgeBaseId, knowledgeBaseId), + isNull(knowledgeConnector.archivedAt), + isNull(knowledgeConnector.deletedAt) + ) + ) + .limit(1) + + if (connectorRows.length === 0) { + return NextResponse.json({ error: 'Connector not found' }, { status: 404 }) + } + + if (connectorRows[0].status === 'syncing') { + return NextResponse.json({ error: 'Sync already in progress' }, { status: 409 }) + } + + logger.info(`[${requestId}] Manual sync triggered for connector ${connectorId}`) + + dispatchSync(connectorId, { requestId }).catch((error) => { + logger.error( + `[${requestId}] Failed to dispatch manual sync for connector ${connectorId}`, + error + ) + }) + + return NextResponse.json({ + success: true, + message: 'Sync triggered', + }) + } catch (error) { + logger.error(`[${requestId}] Error triggering manual sync`, error) + return NextResponse.json({ error: 'Internal server error' }, { status: 500 }) + } +} diff --git a/apps/sim/app/api/knowledge/[id]/connectors/route.ts b/apps/sim/app/api/knowledge/[id]/connectors/route.ts new file mode 100644 index 00000000000..c28cea60e46 --- /dev/null +++ 
b/apps/sim/app/api/knowledge/[id]/connectors/route.ts @@ -0,0 +1,251 @@ +import { db } from '@sim/db' +import { knowledgeBase, knowledgeBaseTagDefinitions, knowledgeConnector } from '@sim/db/schema' +import { createLogger } from '@sim/logger' +import { and, desc, eq, isNull, sql } from 'drizzle-orm' +import { type NextRequest, NextResponse } from 'next/server' +import { z } from 'zod' +import { encryptApiKey } from '@/lib/api-key/crypto' +import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid' +import { generateRequestId } from '@/lib/core/utils/request' +import { dispatchSync } from '@/lib/knowledge/connectors/sync-engine' +import { allocateTagSlots } from '@/lib/knowledge/constants' +import { createTagDefinition } from '@/lib/knowledge/tags/service' +import { getCredential } from '@/app/api/auth/oauth/utils' +import { checkKnowledgeBaseAccess, checkKnowledgeBaseWriteAccess } from '@/app/api/knowledge/utils' +import { CONNECTOR_REGISTRY } from '@/connectors/registry' + +const logger = createLogger('KnowledgeConnectorsAPI') + +const CreateConnectorSchema = z.object({ + connectorType: z.string().min(1), + credentialId: z.string().min(1).optional(), + apiKey: z.string().min(1).optional(), + sourceConfig: z.record(z.unknown()), + syncIntervalMinutes: z.number().int().min(0).default(1440), +}) + +/** + * GET /api/knowledge/[id]/connectors - List connectors for a knowledge base + */ +export async function GET(request: NextRequest, { params }: { params: Promise<{ id: string }> }) { + const requestId = generateRequestId() + const { id: knowledgeBaseId } = await params + + try { + const auth = await checkSessionOrInternalAuth(request, { requireWorkflowId: false }) + if (!auth.success || !auth.userId) { + return NextResponse.json({ error: 'Unauthorized' }, { status: 401 }) + } + + const accessCheck = await checkKnowledgeBaseAccess(knowledgeBaseId, auth.userId) + if (!accessCheck.hasAccess) { + const status = 'notFound' in accessCheck && accessCheck.notFound ? 
404 : 401 + return NextResponse.json({ error: status === 404 ? 'Not found' : 'Unauthorized' }, { status }) + } + + const connectors = await db + .select() + .from(knowledgeConnector) + .where( + and( + eq(knowledgeConnector.knowledgeBaseId, knowledgeBaseId), + isNull(knowledgeConnector.archivedAt), + isNull(knowledgeConnector.deletedAt) + ) + ) + .orderBy(desc(knowledgeConnector.createdAt)) + + return NextResponse.json({ + success: true, + data: connectors.map(({ encryptedApiKey: _, ...rest }) => rest), + }) + } catch (error) { + logger.error(`[${requestId}] Error listing connectors`, error) + return NextResponse.json({ error: 'Internal server error' }, { status: 500 }) + } +} + +/** + * POST /api/knowledge/[id]/connectors - Create a new connector + */ +export async function POST(request: NextRequest, { params }: { params: Promise<{ id: string }> }) { + const requestId = generateRequestId() + const { id: knowledgeBaseId } = await params + + try { + const auth = await checkSessionOrInternalAuth(request, { requireWorkflowId: false }) + if (!auth.success || !auth.userId) { + return NextResponse.json({ error: 'Unauthorized' }, { status: 401 }) + } + + const writeCheck = await checkKnowledgeBaseWriteAccess(knowledgeBaseId, auth.userId) + if (!writeCheck.hasAccess) { + const status = 'notFound' in writeCheck && writeCheck.notFound ? 404 : 401 + return NextResponse.json({ error: status === 404 ? 
'Not found' : 'Unauthorized' }, { status }) + } + + const body = await request.json() + const parsed = CreateConnectorSchema.safeParse(body) + if (!parsed.success) { + return NextResponse.json( + { error: 'Invalid request', details: parsed.error.flatten() }, + { status: 400 } + ) + } + + const { connectorType, credentialId, apiKey, sourceConfig, syncIntervalMinutes } = parsed.data + + const connectorConfig = CONNECTOR_REGISTRY[connectorType] + if (!connectorConfig) { + return NextResponse.json( + { error: `Unknown connector type: ${connectorType}` }, + { status: 400 } + ) + } + + let resolvedCredentialId: string | null = null + let resolvedEncryptedApiKey: string | null = null + let accessToken: string + + if (connectorConfig.auth.mode === 'apiKey') { + if (!apiKey) { + return NextResponse.json({ error: 'API key is required' }, { status: 400 }) + } + accessToken = apiKey + } else { + if (!credentialId) { + return NextResponse.json({ error: 'Credential is required' }, { status: 400 }) + } + + const credential = await getCredential(requestId, credentialId, auth.userId) + if (!credential) { + return NextResponse.json({ error: 'Credential not found' }, { status: 400 }) + } + + if (!credential.accessToken) { + return NextResponse.json( + { error: 'Credential has no access token. Please reconnect your account.' 
}, + { status: 400 } + ) + } + + accessToken = credential.accessToken + resolvedCredentialId = credentialId + } + + const validation = await connectorConfig.validateConfig(accessToken, sourceConfig) + if (!validation.valid) { + return NextResponse.json( + { error: validation.error || 'Invalid source configuration' }, + { status: 400 } + ) + } + + let finalSourceConfig: Record = { ...sourceConfig } + + if (connectorConfig.auth.mode === 'apiKey' && apiKey) { + const { encrypted } = await encryptApiKey(apiKey) + resolvedEncryptedApiKey = encrypted + } + + const tagSlotMapping: Record = {} + + if (connectorConfig.tagDefinitions?.length) { + const disabledIds = new Set((sourceConfig.disabledTagIds as string[] | undefined) ?? []) + const enabledDefs = connectorConfig.tagDefinitions.filter((td) => !disabledIds.has(td.id)) + + const existingDefs = await db + .select({ tagSlot: knowledgeBaseTagDefinitions.tagSlot }) + .from(knowledgeBaseTagDefinitions) + .where(eq(knowledgeBaseTagDefinitions.knowledgeBaseId, knowledgeBaseId)) + + const usedSlots = new Set(existingDefs.map((d) => d.tagSlot)) + const { mapping, skipped: skippedTags } = allocateTagSlots(enabledDefs, usedSlots) + Object.assign(tagSlotMapping, mapping) + + for (const name of skippedTags) { + logger.warn(`[${requestId}] No available slots for "${name}"`) + } + + if (skippedTags.length > 0 && Object.keys(tagSlotMapping).length === 0) { + return NextResponse.json( + { error: `No available tag slots. Could not assign: ${skippedTags.join(', ')}` }, + { status: 422 } + ) + } + + finalSourceConfig = { ...finalSourceConfig, tagSlotMapping } + } + + const now = new Date() + const connectorId = crypto.randomUUID() + const nextSyncAt = + syncIntervalMinutes > 0 ? 
new Date(now.getTime() + syncIntervalMinutes * 60 * 1000) : null + + await db.transaction(async (tx) => { + await tx.execute(sql`SELECT 1 FROM knowledge_base WHERE id = ${knowledgeBaseId} FOR UPDATE`) + + const activeKb = await tx + .select({ id: knowledgeBase.id }) + .from(knowledgeBase) + .where(and(eq(knowledgeBase.id, knowledgeBaseId), isNull(knowledgeBase.deletedAt))) + .limit(1) + + if (activeKb.length === 0) { + throw new Error('Knowledge base not found') + } + + for (const [semanticId, slot] of Object.entries(tagSlotMapping)) { + const td = connectorConfig.tagDefinitions!.find((d) => d.id === semanticId)! + await createTagDefinition( + { + knowledgeBaseId, + tagSlot: slot, + displayName: td.displayName, + fieldType: td.fieldType, + }, + requestId, + tx + ) + } + + await tx.insert(knowledgeConnector).values({ + id: connectorId, + knowledgeBaseId, + connectorType, + credentialId: resolvedCredentialId, + encryptedApiKey: resolvedEncryptedApiKey, + sourceConfig: finalSourceConfig, + syncIntervalMinutes, + status: 'active', + nextSyncAt, + createdAt: now, + updatedAt: now, + }) + }) + + logger.info(`[${requestId}] Created connector ${connectorId} for KB ${knowledgeBaseId}`) + + dispatchSync(connectorId, { requestId }).catch((error) => { + logger.error( + `[${requestId}] Failed to dispatch initial sync for connector ${connectorId}`, + error + ) + }) + + const created = await db + .select() + .from(knowledgeConnector) + .where(eq(knowledgeConnector.id, connectorId)) + .limit(1) + + const { encryptedApiKey: _, ...createdData } = created[0] + return NextResponse.json({ success: true, data: createdData }, { status: 201 }) + } catch (error) { + if (error instanceof Error && error.message === 'Knowledge base not found') { + return NextResponse.json({ error: 'Not found' }, { status: 404 }) + } + logger.error(`[${requestId}] Error creating connector`, error) + return NextResponse.json({ error: 'Internal server error' }, { status: 500 }) + } +} diff --git 
a/apps/sim/app/api/knowledge/[id]/documents/[documentId]/chunks/[chunkId]/route.ts b/apps/sim/app/api/knowledge/[id]/documents/[documentId]/chunks/[chunkId]/route.ts index 08c02d508b6..aae50974041 100644 --- a/apps/sim/app/api/knowledge/[id]/documents/[documentId]/chunks/[chunkId]/route.ts +++ b/apps/sim/app/api/knowledge/[id]/documents/[documentId]/chunks/[chunkId]/route.ts @@ -95,6 +95,16 @@ export async function PUT( return NextResponse.json({ error: 'Unauthorized' }, { status: 401 }) } + if (accessCheck.document?.connectorId) { + logger.warn( + `[${requestId}] User ${session.user.id} attempted to update chunk on connector-synced document: Doc=${documentId}` + ) + return NextResponse.json( + { error: 'Chunks from connector-synced documents are read-only' }, + { status: 403 } + ) + } + const body = await req.json() try { @@ -167,6 +177,16 @@ export async function DELETE( return NextResponse.json({ error: 'Unauthorized' }, { status: 401 }) } + if (accessCheck.document?.connectorId) { + logger.warn( + `[${requestId}] User ${session.user.id} attempted to delete chunk on connector-synced document: Doc=${documentId}` + ) + return NextResponse.json( + { error: 'Chunks from connector-synced documents are read-only' }, + { status: 403 } + ) + } + await deleteChunk(chunkId, documentId, requestId) logger.info( diff --git a/apps/sim/app/api/knowledge/[id]/documents/[documentId]/chunks/route.ts b/apps/sim/app/api/knowledge/[id]/documents/[documentId]/chunks/route.ts index c7979d41b00..762f9be66cf 100644 --- a/apps/sim/app/api/knowledge/[id]/documents/[documentId]/chunks/route.ts +++ b/apps/sim/app/api/knowledge/[id]/documents/[documentId]/chunks/route.ts @@ -158,6 +158,16 @@ export async function POST( return NextResponse.json({ error: 'Document not found' }, { status: 404 }) } + if (doc.connectorId) { + logger.warn( + `[${requestId}] User ${userId} attempted to create chunk on connector-synced document: Doc=${documentId}` + ) + return NextResponse.json( + { error: 'Chunks 
from connector-synced documents are read-only' }, + { status: 403 } + ) + } + // Allow manual chunk creation even if document is not fully processed // but it should exist and not be in failed state if (doc.processingStatus === 'failed') { @@ -283,6 +293,16 @@ export async function PATCH( return NextResponse.json({ error: 'Unauthorized' }, { status: 401 }) } + if (accessCheck.document?.connectorId) { + logger.warn( + `[${requestId}] User ${userId} attempted batch chunk operation on connector-synced document: Doc=${documentId}` + ) + return NextResponse.json( + { error: 'Chunks from connector-synced documents are read-only' }, + { status: 403 } + ) + } + const body = await req.json() try { diff --git a/apps/sim/app/api/knowledge/[id]/documents/route.ts b/apps/sim/app/api/knowledge/[id]/documents/route.ts index d8ac7324cff..18f7af35ac2 100644 --- a/apps/sim/app/api/knowledge/[id]/documents/route.ts +++ b/apps/sim/app/api/knowledge/[id]/documents/route.ts @@ -13,6 +13,7 @@ import { getDocuments, getProcessingConfig, processDocumentsWithQueue, + type TagFilterCondition, } from '@/lib/knowledge/documents/service' import type { DocumentSortField, SortOrder } from '@/lib/knowledge/documents/types' import { authorizeWorkflowByWorkspacePermission } from '@/lib/workflows/utils' @@ -131,6 +132,21 @@ export async function GET(req: NextRequest, { params }: { params: Promise<{ id: ? 
(sortOrderParam as SortOrder) : undefined + let tagFilters: TagFilterCondition[] | undefined + const tagFiltersParam = url.searchParams.get('tagFilters') + if (tagFiltersParam) { + try { + const parsed = JSON.parse(tagFiltersParam) + if (Array.isArray(parsed)) { + tagFilters = parsed.filter( + (f: TagFilterCondition) => f.tagSlot && f.operator && f.value !== undefined + ) + } + } catch { + logger.warn(`[${requestId}] Invalid tagFilters param`) + } + } + const result = await getDocuments( knowledgeBaseId, { @@ -140,6 +156,7 @@ export async function GET(req: NextRequest, { params }: { params: Promise<{ id: offset, ...(sortBy && { sortBy }), ...(sortOrder && { sortOrder }), + tagFilters, }, requestId ) @@ -351,8 +368,12 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id: const errorMessage = error instanceof Error ? error.message : 'Failed to create document' const isStorageLimitError = errorMessage.includes('Storage limit exceeded') || errorMessage.includes('storage limit') + const isMissingKnowledgeBase = errorMessage === 'Knowledge base not found' - return NextResponse.json({ error: errorMessage }, { status: isStorageLimitError ? 413 : 500 }) + return NextResponse.json( + { error: errorMessage }, + { status: isMissingKnowledgeBase ? 404 : isStorageLimitError ? 413 : 500 } + ) } } diff --git a/apps/sim/app/api/knowledge/[id]/next-available-slot/route.ts b/apps/sim/app/api/knowledge/[id]/next-available-slot/route.ts index b328b7d5b63..54318d6f600 100644 --- a/apps/sim/app/api/knowledge/[id]/next-available-slot/route.ts +++ b/apps/sim/app/api/knowledge/[id]/next-available-slot/route.ts @@ -30,7 +30,10 @@ export async function GET(req: NextRequest, { params }: { params: Promise<{ id: const accessCheck = await checkKnowledgeBaseAccess(knowledgeBaseId, session.user.id) if (!accessCheck.hasAccess) { - return NextResponse.json({ error: 'Forbidden' }, { status: 403 }) + return NextResponse.json( + { error: accessCheck.notFound ? 
'Not found' : 'Forbidden' }, + { status: accessCheck.notFound ? 404 : 403 } + ) } // Get existing definitions once and reuse diff --git a/apps/sim/app/api/knowledge/[id]/restore/route.ts b/apps/sim/app/api/knowledge/[id]/restore/route.ts new file mode 100644 index 00000000000..d2021685320 --- /dev/null +++ b/apps/sim/app/api/knowledge/[id]/restore/route.ts @@ -0,0 +1,58 @@ +import { db } from '@sim/db' +import { knowledgeBase } from '@sim/db/schema' +import { createLogger } from '@sim/logger' +import { eq } from 'drizzle-orm' +import { type NextRequest, NextResponse } from 'next/server' +import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid' +import { generateRequestId } from '@/lib/core/utils/request' +import { restoreKnowledgeBase } from '@/lib/knowledge/service' +import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils' + +const logger = createLogger('RestoreKnowledgeBaseAPI') + +export async function POST(request: NextRequest, { params }: { params: Promise<{ id: string }> }) { + const requestId = generateRequestId() + const { id } = await params + + try { + const auth = await checkSessionOrInternalAuth(request, { requireWorkflowId: false }) + if (!auth.success || !auth.userId) { + return NextResponse.json({ error: 'Unauthorized' }, { status: 401 }) + } + + const [kb] = await db + .select({ + id: knowledgeBase.id, + workspaceId: knowledgeBase.workspaceId, + userId: knowledgeBase.userId, + }) + .from(knowledgeBase) + .where(eq(knowledgeBase.id, id)) + .limit(1) + + if (!kb) { + return NextResponse.json({ error: 'Knowledge base not found' }, { status: 404 }) + } + + if (kb.workspaceId) { + const permission = await getUserEntityPermissions(auth.userId, 'workspace', kb.workspaceId) + if (permission !== 'admin' && permission !== 'write') { + return NextResponse.json({ error: 'Insufficient permissions' }, { status: 403 }) + } + } else if (kb.userId !== auth.userId) { + return NextResponse.json({ error: 'Unauthorized' }, { status: 401 }) + } 
+ + await restoreKnowledgeBase(id, requestId) + + logger.info(`[${requestId}] Restored knowledge base ${id}`) + + return NextResponse.json({ success: true }) + } catch (error) { + logger.error(`[${requestId}] Error restoring knowledge base ${id}`, error) + return NextResponse.json( + { error: error instanceof Error ? error.message : 'Internal server error' }, + { status: 500 } + ) + } +} diff --git a/apps/sim/app/api/knowledge/[id]/tag-definitions/[tagId]/route.ts b/apps/sim/app/api/knowledge/[id]/tag-definitions/[tagId]/route.ts index a141461ec09..08b56be3e24 100644 --- a/apps/sim/app/api/knowledge/[id]/tag-definitions/[tagId]/route.ts +++ b/apps/sim/app/api/knowledge/[id]/tag-definitions/[tagId]/route.ts @@ -1,9 +1,9 @@ import { randomUUID } from 'crypto' import { createLogger } from '@sim/logger' import { type NextRequest, NextResponse } from 'next/server' -import { getSession } from '@/lib/auth' +import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid' import { deleteTagDefinition } from '@/lib/knowledge/tags/service' -import { checkKnowledgeBaseAccess } from '@/app/api/knowledge/utils' +import { checkKnowledgeBaseWriteAccess } from '@/app/api/knowledge/utils' export const dynamic = 'force-dynamic' @@ -22,17 +22,20 @@ export async function DELETE( `[${requestId}] Deleting tag definition ${tagId} from knowledge base ${knowledgeBaseId}` ) - const session = await getSession() - if (!session?.user?.id) { + const auth = await checkSessionOrInternalAuth(req, { requireWorkflowId: false }) + if (!auth.success || !auth.userId) { return NextResponse.json({ error: 'Unauthorized' }, { status: 401 }) } - const accessCheck = await checkKnowledgeBaseAccess(knowledgeBaseId, session.user.id) + const accessCheck = await checkKnowledgeBaseWriteAccess(knowledgeBaseId, auth.userId) if (!accessCheck.hasAccess) { - return NextResponse.json({ error: 'Forbidden' }, { status: 403 }) + return NextResponse.json( + { error: accessCheck.notFound ? 
'Not found' : 'Forbidden' }, + { status: accessCheck.notFound ? 404 : 403 } + ) } - const deletedTag = await deleteTagDefinition(tagId, requestId) + const deletedTag = await deleteTagDefinition(knowledgeBaseId, tagId, requestId) return NextResponse.json({ success: true, diff --git a/apps/sim/app/api/knowledge/[id]/tag-definitions/route.ts b/apps/sim/app/api/knowledge/[id]/tag-definitions/route.ts index ad1c7f4dddd..f4e75b0f13c 100644 --- a/apps/sim/app/api/knowledge/[id]/tag-definitions/route.ts +++ b/apps/sim/app/api/knowledge/[id]/tag-definitions/route.ts @@ -5,7 +5,7 @@ import { z } from 'zod' import { AuthType, checkSessionOrInternalAuth } from '@/lib/auth/hybrid' import { SUPPORTED_FIELD_TYPES } from '@/lib/knowledge/constants' import { createTagDefinition, getTagDefinitions } from '@/lib/knowledge/tags/service' -import { checkKnowledgeBaseAccess } from '@/app/api/knowledge/utils' +import { checkKnowledgeBaseWriteAccess } from '@/app/api/knowledge/utils' export const dynamic = 'force-dynamic' @@ -26,9 +26,12 @@ export async function GET(req: NextRequest, { params }: { params: Promise<{ id: // For session auth, verify KB access. Internal JWT is trusted. if (auth.authType === AuthType.SESSION && auth.userId) { - const accessCheck = await checkKnowledgeBaseAccess(knowledgeBaseId, auth.userId) + const accessCheck = await checkKnowledgeBaseWriteAccess(knowledgeBaseId, auth.userId) if (!accessCheck.hasAccess) { - return NextResponse.json({ error: 'Forbidden' }, { status: 403 }) + return NextResponse.json( + { error: accessCheck.notFound ? 'Not found' : 'Forbidden' }, + { status: accessCheck.notFound ? 404 : 403 } + ) } } @@ -63,9 +66,12 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id: // For session auth, verify KB access. Internal JWT is trusted. 
if (auth.authType === AuthType.SESSION && auth.userId) { - const accessCheck = await checkKnowledgeBaseAccess(knowledgeBaseId, auth.userId) + const accessCheck = await checkKnowledgeBaseWriteAccess(knowledgeBaseId, auth.userId) if (!accessCheck.hasAccess) { - return NextResponse.json({ error: 'Forbidden' }, { status: 403 }) + return NextResponse.json( + { error: accessCheck.notFound ? 'Not found' : 'Forbidden' }, + { status: accessCheck.notFound ? 404 : 403 } + ) } } diff --git a/apps/sim/app/api/knowledge/[id]/tag-usage/route.ts b/apps/sim/app/api/knowledge/[id]/tag-usage/route.ts index 788ae897583..8b311143ffb 100644 --- a/apps/sim/app/api/knowledge/[id]/tag-usage/route.ts +++ b/apps/sim/app/api/knowledge/[id]/tag-usage/route.ts @@ -24,7 +24,10 @@ export async function GET(req: NextRequest, { params }: { params: Promise<{ id: const accessCheck = await checkKnowledgeBaseAccess(knowledgeBaseId, session.user.id) if (!accessCheck.hasAccess) { - return NextResponse.json({ error: 'Forbidden' }, { status: 403 }) + return NextResponse.json( + { error: accessCheck.notFound ? 'Not found' : 'Forbidden' }, + { status: accessCheck.notFound ? 
404 : 403 } + ) } const usageStats = await getTagUsage(knowledgeBaseId, requestId) diff --git a/apps/sim/app/api/knowledge/connectors/sync/route.ts b/apps/sim/app/api/knowledge/connectors/sync/route.ts new file mode 100644 index 00000000000..dfddc72e445 --- /dev/null +++ b/apps/sim/app/api/knowledge/connectors/sync/route.ts @@ -0,0 +1,71 @@ +import { db } from '@sim/db' +import { knowledgeBase, knowledgeConnector } from '@sim/db/schema' +import { createLogger } from '@sim/logger' +import { and, eq, inArray, isNull, lte } from 'drizzle-orm' +import { type NextRequest, NextResponse } from 'next/server' +import { verifyCronAuth } from '@/lib/auth/internal' +import { generateRequestId } from '@/lib/core/utils/request' +import { dispatchSync } from '@/lib/knowledge/connectors/sync-engine' + +export const dynamic = 'force-dynamic' + +const logger = createLogger('ConnectorSyncSchedulerAPI') + +/** + * Cron endpoint that checks for connectors due for sync and dispatches sync jobs. + * Should be called every 5 minutes by an external cron service. 
+ */ +export async function GET(request: NextRequest) { + const requestId = generateRequestId() + logger.info(`[${requestId}] Connector sync scheduler triggered`) + + const authError = verifyCronAuth(request, 'Connector sync scheduler') + if (authError) { + return authError + } + + try { + const now = new Date() + + const dueConnectors = await db + .select({ + id: knowledgeConnector.id, + }) + .from(knowledgeConnector) + .innerJoin(knowledgeBase, eq(knowledgeConnector.knowledgeBaseId, knowledgeBase.id)) + .where( + and( + inArray(knowledgeConnector.status, ['active', 'error']), + lte(knowledgeConnector.nextSyncAt, now), + isNull(knowledgeConnector.archivedAt), + isNull(knowledgeConnector.deletedAt), + isNull(knowledgeBase.deletedAt) + ) + ) + + logger.info(`[${requestId}] Found ${dueConnectors.length} connectors due for sync`) + + if (dueConnectors.length === 0) { + return NextResponse.json({ + success: true, + message: 'No connectors due for sync', + count: 0, + }) + } + + for (const connector of dueConnectors) { + dispatchSync(connector.id, { requestId }).catch((error) => { + logger.error(`[${requestId}] Failed to dispatch sync for connector ${connector.id}`, error) + }) + } + + return NextResponse.json({ + success: true, + message: `Dispatched ${dueConnectors.length} connector sync(s)`, + count: dueConnectors.length, + }) + } catch (error) { + logger.error(`[${requestId}] Connector sync scheduler error`, error) + return NextResponse.json({ error: 'Internal server error' }, { status: 500 }) + } +} diff --git a/apps/sim/app/api/knowledge/route.ts b/apps/sim/app/api/knowledge/route.ts index f266d90d8da..d6a80bab115 100644 --- a/apps/sim/app/api/knowledge/route.ts +++ b/apps/sim/app/api/knowledge/route.ts @@ -5,7 +5,11 @@ import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log' import { getSession } from '@/lib/auth' import { PlatformEvents } from '@/lib/core/telemetry' import { generateRequestId } from '@/lib/core/utils/request' -import { 
createKnowledgeBase, getKnowledgeBases } from '@/lib/knowledge/service' +import { + createKnowledgeBase, + getKnowledgeBases, + type KnowledgeBaseScope, +} from '@/lib/knowledge/service' const logger = createLogger('KnowledgeBaseAPI') @@ -61,8 +65,12 @@ export async function GET(req: NextRequest) { const { searchParams } = new URL(req.url) const workspaceId = searchParams.get('workspaceId') + const scope = (searchParams.get('scope') ?? 'active') as KnowledgeBaseScope + if (!['active', 'archived', 'all'].includes(scope)) { + return NextResponse.json({ error: 'Invalid scope' }, { status: 400 }) + } - const knowledgeBasesWithCounts = await getKnowledgeBases(session.user.id, workspaceId) + const knowledgeBasesWithCounts = await getKnowledgeBases(session.user.id, workspaceId, scope) return NextResponse.json({ success: true, diff --git a/apps/sim/app/api/knowledge/search/route.test.ts b/apps/sim/app/api/knowledge/search/route.test.ts index d736edc44e9..30027bca10b 100644 --- a/apps/sim/app/api/knowledge/search/route.test.ts +++ b/apps/sim/app/api/knowledge/search/route.test.ts @@ -5,12 +5,7 @@ * * @vitest-environment node */ -import { - createEnvMock, - createMockRequest, - mockKnowledgeSchemas, - requestUtilsMock, -} from '@sim/testing' +import { createEnvMock, createMockRequest, requestUtilsMock } from '@sim/testing' import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest' const { @@ -61,7 +56,74 @@ vi.mock('drizzle-orm', () => ({ })), })) -mockKnowledgeSchemas() +vi.mock('@sim/db/schema', () => ({ + knowledgeBase: { + id: 'kb_id', + userId: 'user_id', + name: 'kb_name', + description: 'description', + tokenCount: 'token_count', + embeddingModel: 'embedding_model', + embeddingDimension: 'embedding_dimension', + chunkingConfig: 'chunking_config', + workspaceId: 'workspace_id', + createdAt: 'created_at', + updatedAt: 'updated_at', + deletedAt: 'deleted_at', + }, + document: { + id: 'doc_id', + knowledgeBaseId: 'kb_id', + filename: 'filename', + fileUrl: 
'file_url', + fileSize: 'file_size', + mimeType: 'mime_type', + chunkCount: 'chunk_count', + tokenCount: 'token_count', + characterCount: 'character_count', + processingStatus: 'processing_status', + processingStartedAt: 'processing_started_at', + processingCompletedAt: 'processing_completed_at', + processingError: 'processing_error', + enabled: 'enabled', + tag1: 'tag1', + tag2: 'tag2', + tag3: 'tag3', + tag4: 'tag4', + tag5: 'tag5', + tag6: 'tag6', + tag7: 'tag7', + uploadedAt: 'uploaded_at', + deletedAt: 'deleted_at', + }, + embedding: { + id: 'embedding_id', + documentId: 'doc_id', + knowledgeBaseId: 'kb_id', + chunkIndex: 'chunk_index', + content: 'content', + embedding: 'embedding', + tokenCount: 'token_count', + characterCount: 'character_count', + tag1: 'tag1', + tag2: 'tag2', + tag3: 'tag3', + tag4: 'tag4', + tag5: 'tag5', + tag6: 'tag6', + tag7: 'tag7', + createdAt: 'created_at', + }, + permissions: { + id: 'permission_id', + userId: 'user_id', + entityType: 'entity_type', + entityId: 'entity_id', + permissionType: 'permission_type', + createdAt: 'created_at', + updatedAt: 'updated_at', + }, +})) vi.mock('@sim/db', () => ({ db: mockDbChain, diff --git a/apps/sim/app/api/knowledge/search/route.ts b/apps/sim/app/api/knowledge/search/route.ts index 686f7c19cc1..348a60ec71d 100644 --- a/apps/sim/app/api/knowledge/search/route.ts +++ b/apps/sim/app/api/knowledge/search/route.ts @@ -28,7 +28,7 @@ const logger = createLogger('VectorSearchAPI') const StructuredTagFilterSchema = z.object({ tagName: z.string(), tagSlot: z.string().optional(), - fieldType: z.enum(['text', 'number', 'date', 'boolean']).default('text'), + fieldType: z.enum(['text', 'number', 'date', 'boolean']).optional(), operator: z.string().default('eq'), value: z.union([z.string(), z.number(), z.boolean()]), valueTo: z.union([z.string(), z.number()]).optional(), @@ -117,17 +117,56 @@ export async function POST(request: NextRequest) { // Handle tag filters if (validatedData.tagFilters && 
accessibleKbIds.length > 0) { - const kbId = accessibleKbIds[0] - const tagDefs = await getDocumentTagDefinitions(kbId) + const kbTagDefs = await Promise.all( + accessibleKbIds.map(async (kbId) => ({ + kbId, + tagDefs: await getDocumentTagDefinitions(kbId), + })) + ) - // Create mapping from display name to tag slot and fieldType const displayNameToTagDef: Record = {} - tagDefs.forEach((def) => { - displayNameToTagDef[def.displayName] = { - tagSlot: def.tagSlot, - fieldType: def.fieldType, + for (const { kbId, tagDefs } of kbTagDefs) { + const perKbMap = new Map( + tagDefs.map((def) => [ + def.displayName, + { tagSlot: def.tagSlot, fieldType: def.fieldType }, + ]) + ) + + for (const filter of validatedData.tagFilters) { + const current = perKbMap.get(filter.tagName) + if (!current) { + if (accessibleKbIds.length > 1) { + return NextResponse.json( + { + error: `Tag "${filter.tagName}" does not exist in all selected knowledge bases. Search those knowledge bases separately.`, + }, + { status: 400 } + ) + } + continue + } + + const existing = displayNameToTagDef[filter.tagName] + if ( + existing && + (existing.tagSlot !== current.tagSlot || existing.fieldType !== current.fieldType) + ) { + return NextResponse.json( + { + error: `Tag "${filter.tagName}" is not mapped consistently across the selected knowledge bases. Search those knowledge bases separately.`, + }, + { status: 400 } + ) + } + + displayNameToTagDef[filter.tagName] = current } - }) + + logger.debug(`[${requestId}] Loaded tag definitions for KB ${kbId}`, { + tagCount: tagDefs.length, + }) + } // Validate all tag filters first const undefinedTags: string[] = [] @@ -171,8 +210,8 @@ export async function POST(request: NextRequest) { // Build structured filters with validated data structuredFilters = validatedData.tagFilters.map((filter) => { const tagDef = displayNameToTagDef[filter.tagName]! 
- const tagSlot = filter.tagSlot || tagDef.tagSlot - const fieldType = filter.fieldType || tagDef.fieldType + const tagSlot = tagDef.tagSlot + const fieldType = tagDef.fieldType logger.debug( `[${requestId}] Structured filter: ${filter.tagName} -> ${tagSlot} (${fieldType}) ${filter.operator} ${filter.value}` @@ -212,6 +251,28 @@ export async function POST(request: NextRequest) { ) } + if (workflowId) { + const authorization = await authorizeWorkflowByWorkspacePermission({ + workflowId, + userId, + action: 'read', + }) + const workflowWorkspaceId = authorization.workflow?.workspaceId ?? null + if ( + workflowWorkspaceId && + accessChecks.some( + (accessCheck) => + accessCheck?.hasAccess && + accessCheck.knowledgeBase?.workspaceId !== workflowWorkspaceId + ) + ) { + return NextResponse.json( + { error: 'Knowledge base does not belong to the workflow workspace' }, + { status: 400 } + ) + } + } + let results: SearchResult[] const hasFilters = structuredFilters && structuredFilters.length > 0 diff --git a/apps/sim/app/api/knowledge/search/utils.ts b/apps/sim/app/api/knowledge/search/utils.ts index dc112fe24ab..8ca7e7c438a 100644 --- a/apps/sim/app/api/knowledge/search/utils.ts +++ b/apps/sim/app/api/knowledge/search/utils.ts @@ -17,7 +17,14 @@ export async function getDocumentNamesByIds( filename: document.filename, }) .from(document) - .where(and(inArray(document.id, uniqueIds), isNull(document.deletedAt))) + .where( + and( + inArray(document.id, uniqueIds), + eq(document.userExcluded, false), + isNull(document.archivedAt), + isNull(document.deletedAt) + ) + ) const documentNameMap: Record = {} documents.forEach((doc) => { @@ -313,6 +320,10 @@ async function executeTagFilterQuery( and( eq(embedding.knowledgeBaseId, knowledgeBaseIds[0]), eq(embedding.enabled, true), + eq(document.enabled, true), + eq(document.processingStatus, 'completed'), + eq(document.userExcluded, false), + isNull(document.archivedAt), isNull(document.deletedAt), ...tagFilterConditions ) @@ -326,6 
+337,10 @@ async function executeTagFilterQuery( and( inArray(embedding.knowledgeBaseId, knowledgeBaseIds), eq(embedding.enabled, true), + eq(document.enabled, true), + eq(document.processingStatus, 'completed'), + eq(document.userExcluded, false), + isNull(document.archivedAt), isNull(document.deletedAt), ...tagFilterConditions ) @@ -353,6 +368,10 @@ async function executeVectorSearchOnIds( .where( and( inArray(embedding.id, embeddingIds), + eq(document.enabled, true), + eq(document.processingStatus, 'completed'), + eq(document.userExcluded, false), + isNull(document.archivedAt), isNull(document.deletedAt), sql`${embedding.embedding} <=> ${queryVector}::vector < ${distanceThreshold}` ) @@ -384,6 +403,10 @@ export async function handleTagOnlySearch(params: SearchParams): Promise ${queryVector}::vector < ${distanceThreshold}` ) @@ -455,6 +486,10 @@ export async function handleVectorOnlySearch(params: SearchParams): Promise ${queryVector}::vector < ${distanceThreshold}` ) diff --git a/apps/sim/app/api/knowledge/utils.test.ts b/apps/sim/app/api/knowledge/utils.test.ts index 0e8debe701a..7e87035a08f 100644 --- a/apps/sim/app/api/knowledge/utils.test.ts +++ b/apps/sim/app/api/knowledge/utils.test.ts @@ -144,6 +144,18 @@ vi.mock('@sim/db', () => { }), transaction: vi.fn(async (fn: any) => { await fn({ + select: () => ({ + from: () => ({ + innerJoin: () => ({ + where: () => ({ + limit: () => Promise.resolve([{ id: 'doc1' }]), + }), + }), + where: () => ({ + limit: () => Promise.resolve([{}]), + }), + }), + }), delete: () => ({ where: () => Promise.resolve(), }), diff --git a/apps/sim/app/api/knowledge/utils.ts b/apps/sim/app/api/knowledge/utils.ts index 7a2f82d071c..60042ccccf1 100644 --- a/apps/sim/app/api/knowledge/utils.ts +++ b/apps/sim/app/api/knowledge/utils.ts @@ -56,6 +56,10 @@ export interface DocumentData { boolean1?: boolean | null boolean2?: boolean | null boolean3?: boolean | null + // Connector fields + connectorId?: string | null + sourceUrl?: string | null 
+ externalId?: string | null } export interface EmbeddingData { @@ -283,9 +287,21 @@ export async function checkDocumentWriteAccess( boolean1: document.boolean1, boolean2: document.boolean2, boolean3: document.boolean3, + // Connector fields + connectorId: document.connectorId, + sourceUrl: document.sourceUrl, + externalId: document.externalId, }) .from(document) - .where(and(eq(document.id, documentId), isNull(document.deletedAt))) + .where( + and( + eq(document.id, documentId), + eq(document.knowledgeBaseId, knowledgeBaseId), + eq(document.userExcluded, false), + isNull(document.archivedAt), + isNull(document.deletedAt) + ) + ) .limit(1) if (doc.length === 0) { @@ -325,6 +341,8 @@ export async function checkDocumentAccess( and( eq(document.id, documentId), eq(document.knowledgeBaseId, knowledgeBaseId), + eq(document.userExcluded, false), + isNull(document.archivedAt), isNull(document.deletedAt) ) ) @@ -368,6 +386,8 @@ export async function checkChunkAccess( and( eq(document.id, documentId), eq(document.knowledgeBaseId, knowledgeBaseId), + eq(document.userExcluded, false), + isNull(document.archivedAt), isNull(document.deletedAt) ) ) diff --git a/apps/sim/app/api/logs/[id]/route.ts b/apps/sim/app/api/logs/[id]/route.ts index 179655ae53f..494c2504157 100644 --- a/apps/sim/app/api/logs/[id]/route.ts +++ b/apps/sim/app/api/logs/[id]/route.ts @@ -1,5 +1,6 @@ import { db } from '@sim/db' import { + jobExecutionLogs, permissions, workflow, workflowDeploymentVersion, @@ -74,8 +75,64 @@ export async function GET(_request: NextRequest, { params }: { params: Promise<{ .limit(1) const log = rows[0] + + // Fallback: check job_execution_logs if (!log) { - return NextResponse.json({ error: 'Not found' }, { status: 404 }) + const jobRows = await db + .select({ + id: jobExecutionLogs.id, + executionId: jobExecutionLogs.executionId, + level: jobExecutionLogs.level, + status: jobExecutionLogs.status, + trigger: jobExecutionLogs.trigger, + startedAt: jobExecutionLogs.startedAt, + 
endedAt: jobExecutionLogs.endedAt, + totalDurationMs: jobExecutionLogs.totalDurationMs, + executionData: jobExecutionLogs.executionData, + cost: jobExecutionLogs.cost, + createdAt: jobExecutionLogs.createdAt, + }) + .from(jobExecutionLogs) + .innerJoin( + permissions, + and( + eq(permissions.entityType, 'workspace'), + eq(permissions.entityId, jobExecutionLogs.workspaceId), + eq(permissions.userId, userId) + ) + ) + .where(eq(jobExecutionLogs.id, id)) + .limit(1) + + const jobLog = jobRows[0] + if (!jobLog) { + return NextResponse.json({ error: 'Not found' }, { status: 404 }) + } + + const execData = jobLog.executionData as Record | null + const response = { + id: jobLog.id, + workflowId: null, + executionId: jobLog.executionId, + deploymentVersionId: null, + deploymentVersion: null, + deploymentVersionName: null, + level: jobLog.level, + status: jobLog.status, + duration: jobLog.totalDurationMs ? `${jobLog.totalDurationMs}ms` : null, + trigger: jobLog.trigger, + createdAt: jobLog.startedAt.toISOString(), + workflow: null, + jobTitle: (execData?.trigger?.source as string) || null, + executionData: { + totalDuration: jobLog.totalDurationMs, + ...execData, + enhanced: true, + }, + cost: jobLog.cost as any, + } + + return NextResponse.json({ data: response }) } const workflowSummary = log.workflowId diff --git a/apps/sim/app/api/logs/execution/[executionId]/route.ts b/apps/sim/app/api/logs/execution/[executionId]/route.ts index 90e0747b001..4e6495b4df9 100644 --- a/apps/sim/app/api/logs/execution/[executionId]/route.ts +++ b/apps/sim/app/api/logs/execution/[executionId]/route.ts @@ -1,5 +1,6 @@ import { db } from '@sim/db' import { + jobExecutionLogs, permissions, workflow, workflowExecutionLogs, @@ -60,9 +61,49 @@ export async function GET( .where(eq(workflowExecutionLogs.executionId, executionId)) .limit(1) + // Fallback: check job_execution_logs if (!workflowLog) { - logger.warn(`[${requestId}] Execution not found or access denied: ${executionId}`) - return 
NextResponse.json({ error: 'Workflow execution not found' }, { status: 404 }) + const [jobLog] = await db + .select({ + id: jobExecutionLogs.id, + executionId: jobExecutionLogs.executionId, + trigger: jobExecutionLogs.trigger, + startedAt: jobExecutionLogs.startedAt, + endedAt: jobExecutionLogs.endedAt, + totalDurationMs: jobExecutionLogs.totalDurationMs, + cost: jobExecutionLogs.cost, + executionData: jobExecutionLogs.executionData, + }) + .from(jobExecutionLogs) + .innerJoin( + permissions, + and( + eq(permissions.entityType, 'workspace'), + eq(permissions.entityId, jobExecutionLogs.workspaceId), + eq(permissions.userId, authenticatedUserId) + ) + ) + .where(eq(jobExecutionLogs.executionId, executionId)) + .limit(1) + + if (!jobLog) { + logger.warn(`[${requestId}] Execution not found or access denied: ${executionId}`) + return NextResponse.json({ error: 'Workflow execution not found' }, { status: 404 }) + } + + return NextResponse.json({ + executionId, + workflowId: null, + workflowState: null, + childWorkflowSnapshots: {}, + executionMetadata: { + trigger: jobLog.trigger, + startedAt: jobLog.startedAt.toISOString(), + endedAt: jobLog.endedAt?.toISOString(), + totalDurationMs: jobLog.totalDurationMs, + cost: jobLog.cost || null, + }, + }) } const [snapshot] = await db diff --git a/apps/sim/app/api/logs/route.ts b/apps/sim/app/api/logs/route.ts index e7080c9877e..f6f631415fb 100644 --- a/apps/sim/app/api/logs/route.ts +++ b/apps/sim/app/api/logs/route.ts @@ -1,5 +1,6 @@ import { db } from '@sim/db' import { + jobExecutionLogs, pausedExecutions, permissions, workflow, @@ -7,7 +8,22 @@ import { workflowExecutionLogs, } from '@sim/db/schema' import { createLogger } from '@sim/logger' -import { and, desc, eq, isNotNull, isNull, or, type SQL, sql } from 'drizzle-orm' +import { + and, + desc, + eq, + gt, + gte, + inArray, + isNotNull, + isNull, + lt, + lte, + ne, + or, + type SQL, + sql, +} from 'drizzle-orm' import { type NextRequest, NextResponse } from 'next/server' 
import { z } from 'zod' import { getSession } from '@/lib/auth' @@ -177,13 +193,29 @@ export async function GET(request: NextRequest) { conditions = and(conditions, commonFilters) } - const logs = await baseQuery + // Workflow-specific filters exclude job logs entirely + const hasWorkflowSpecificFilters = !!( + params.workflowIds || + params.folderIds || + params.workflowName || + params.folderName + ) + // If triggers filter is set and doesn't include 'mothership', skip job logs + const triggersList = params.triggers?.split(',').filter(Boolean) || [] + const triggersExcludeJobs = + triggersList.length > 0 && + !triggersList.includes('all') && + !triggersList.includes('mothership') + const includeJobLogs = !hasWorkflowSpecificFilters && !triggersExcludeJobs + + const fetchSize = params.limit + params.offset + + const workflowLogs = await baseQuery .where(and(workspaceFilter, conditions)) .orderBy(desc(workflowExecutionLogs.startedAt)) - .limit(params.limit) - .offset(params.offset) + .limit(fetchSize) - const countQuery = db + const workflowCountQuery = db .select({ count: sql`count(*)` }) .from(workflowExecutionLogs) .leftJoin( @@ -201,10 +233,141 @@ export async function GET(request: NextRequest) { ) .where(and(eq(workflowExecutionLogs.workspaceId, params.workspaceId), conditions)) - const countResult = await countQuery + // Build job log filters (subset of filters that apply to job logs) + let jobLogs: Array<{ + id: string + executionId: string + level: string + status: string + trigger: string + startedAt: Date + endedAt: Date | null + totalDurationMs: number | null + executionData: unknown + cost: unknown + createdAt: Date + jobTitle: string | null + }> = [] + let jobCount = 0 + + if (includeJobLogs) { + const jobConditions: SQL[] = [eq(jobExecutionLogs.workspaceId, params.workspaceId)] + + // Permission check + jobConditions.push( + sql`EXISTS (SELECT 1 FROM ${permissions} WHERE ${permissions.entityType} = 'workspace' AND ${permissions.entityId} = 
${jobExecutionLogs.workspaceId} AND ${permissions.userId} = ${userId})` + ) - const count = countResult[0]?.count || 0 + // Level filter + if (params.level && params.level !== 'all') { + const levels = params.level.split(',').filter(Boolean) + const jobLevelConditions: SQL[] = [] + for (const level of levels) { + if (level === 'error') { + jobLevelConditions.push(eq(jobExecutionLogs.level, 'error')) + } else if (level === 'info') { + const c = and(eq(jobExecutionLogs.level, 'info'), isNotNull(jobExecutionLogs.endedAt)) + if (c) jobLevelConditions.push(c) + } + // 'running' and 'pending' don't apply to job logs (they complete synchronously) + } + if (jobLevelConditions.length > 0) { + jobConditions.push( + jobLevelConditions.length === 1 ? jobLevelConditions[0] : or(...jobLevelConditions)! + ) + } + } + // Trigger filter + if (triggersList.length > 0 && !triggersList.includes('all')) { + jobConditions.push(inArray(jobExecutionLogs.trigger, triggersList)) + } + + // Date filters + if (params.startDate) { + jobConditions.push(gte(jobExecutionLogs.startedAt, new Date(params.startDate))) + } + if (params.endDate) { + jobConditions.push(lte(jobExecutionLogs.startedAt, new Date(params.endDate))) + } + + // Search by executionId + if (params.search) { + jobConditions.push(sql`${jobExecutionLogs.executionId} ILIKE ${`%${params.search}%`}`) + } + if (params.executionId) { + jobConditions.push(eq(jobExecutionLogs.executionId, params.executionId)) + } + + // Cost filter + if (params.costOperator && params.costValue !== undefined) { + const costField = sql`(${jobExecutionLogs.cost}->>'total')::numeric` + const ops = { + '=': sql`=`, + '>': sql`>`, + '<': sql`<`, + '>=': sql`>=`, + '<=': sql`<=`, + '!=': sql`!=`, + } as const + jobConditions.push(sql`${costField} ${ops[params.costOperator]} ${params.costValue}`) + } + + // Duration filter + if (params.durationOperator && params.durationValue !== undefined) { + const durationOps: Record< + string, + (field: typeof 
jobExecutionLogs.totalDurationMs, val: number) => SQL | undefined + > = { + '=': (f, v) => eq(f, v), + '>': (f, v) => gt(f, v), + '<': (f, v) => lt(f, v), + '>=': (f, v) => gte(f, v), + '<=': (f, v) => lte(f, v), + '!=': (f, v) => ne(f, v), + } + const durationCond = durationOps[params.durationOperator]?.( + jobExecutionLogs.totalDurationMs, + params.durationValue + ) + if (durationCond) jobConditions.push(durationCond) + } + + const jobWhere = and(...jobConditions) + + const [jobLogResults, jobCountResult] = await Promise.all([ + db + .select({ + id: jobExecutionLogs.id, + executionId: jobExecutionLogs.executionId, + level: jobExecutionLogs.level, + status: jobExecutionLogs.status, + trigger: jobExecutionLogs.trigger, + startedAt: jobExecutionLogs.startedAt, + endedAt: jobExecutionLogs.endedAt, + totalDurationMs: jobExecutionLogs.totalDurationMs, + executionData: + params.details === 'full' ? jobExecutionLogs.executionData : sql`NULL`, + cost: jobExecutionLogs.cost, + createdAt: jobExecutionLogs.createdAt, + jobTitle: sql`${jobExecutionLogs.executionData}->'trigger'->>'source'`, + }) + .from(jobExecutionLogs) + .where(jobWhere) + .orderBy(desc(jobExecutionLogs.startedAt)) + .limit(fetchSize), + db.select({ count: sql`count(*)` }).from(jobExecutionLogs).where(jobWhere), + ]) + + jobLogs = jobLogResults as typeof jobLogs + jobCount = Number(jobCountResult[0]?.count || 0) + } + + const workflowCountResult = await workflowCountQuery + const workflowCount = Number(workflowCountResult[0]?.count || 0) + const totalCount = workflowCount + jobCount + + // Transform workflow logs to the unified shape const blockExecutionsByExecution: Record = {} const createTraceSpans = (blockExecutions: any[]) => { @@ -289,7 +452,7 @@ export async function GET(request: NextRequest) { } } - const enhancedLogs = logs.map((log) => { + const transformedWorkflowLogs = workflowLogs.map((log) => { const blockExecutions = blockExecutionsByExecution[log.executionId] || [] let traceSpans = [] @@ 
-367,13 +530,60 @@ export async function GET(request: NextRequest) { (log.pausedStatus && log.pausedStatus !== 'fully_resumed'), } }) + + // Transform job logs to the same shape + const transformedJobLogs = jobLogs.map((log) => { + const execData = log.executionData as any + const costSummary = (log.cost as any) || { total: 0 } + + return { + id: log.id, + workflowId: null as string | null, + executionId: log.executionId, + deploymentVersionId: null as string | null, + deploymentVersion: null as number | null, + deploymentVersionName: null as string | null, + level: log.level, + status: log.status, + duration: log.totalDurationMs ? `${log.totalDurationMs}ms` : null, + trigger: log.trigger, + createdAt: log.startedAt.toISOString(), + files: undefined as any, + workflow: null as any, + jobTitle: log.jobTitle, + pauseSummary: { + status: null as string | null, + total: 0, + resumed: 0, + }, + executionData: + params.details === 'full' && execData + ? { + totalDuration: log.totalDurationMs, + traceSpans: execData.traceSpans || [], + blockExecutions: [], + finalOutput: execData.finalOutput, + enhanced: true, + trigger: execData.trigger, + } + : undefined, + cost: params.details === 'full' ? 
costSummary : { total: costSummary?.total || 0 }, + hasPendingPause: false, + } + }) + + // Merge, sort by createdAt (which is startedAt ISO string) desc, paginate + const allLogs = [...transformedWorkflowLogs, ...transformedJobLogs] + .sort((a, b) => new Date(b.createdAt).getTime() - new Date(a.createdAt).getTime()) + .slice(params.offset, params.offset + params.limit) + return NextResponse.json( { - data: enhancedLogs, - total: Number(count), + data: allLogs, + total: totalCount, page: Math.floor(params.offset / params.limit) + 1, pageSize: params.limit, - totalPages: Math.ceil(Number(count) / params.limit), + totalPages: Math.ceil(totalCount / params.limit), }, { status: 200 } ) diff --git a/apps/sim/app/api/mcp/copilot/route.ts b/apps/sim/app/api/mcp/copilot/route.ts index 8fdb166ae45..f3c9551aa39 100644 --- a/apps/sim/app/api/mcp/copilot/route.ts +++ b/apps/sim/app/api/mcp/copilot/route.ts @@ -18,12 +18,7 @@ import { eq, sql } from 'drizzle-orm' import { type NextRequest, NextResponse } from 'next/server' import { validateOAuthAccessToken } from '@/lib/auth/oauth-token' import { getHighestPrioritySubscription } from '@/lib/billing/core/subscription' -import { - ORCHESTRATION_TIMEOUT_MS, - SIM_AGENT_API_URL, - SIM_AGENT_VERSION, -} from '@/lib/copilot/constants' -import { orchestrateCopilotStream } from '@/lib/copilot/orchestrator' +import { ORCHESTRATION_TIMEOUT_MS, SIM_AGENT_API_URL } from '@/lib/copilot/constants' import { orchestrateSubagentStream } from '@/lib/copilot/orchestrator/subagent' import { executeToolServerSide, @@ -33,10 +28,6 @@ import { DIRECT_TOOL_DEFS, SUBAGENT_TOOL_DEFS } from '@/lib/copilot/tools/mcp/de import { env } from '@/lib/core/config/env' import { RateLimiter } from '@/lib/core/rate-limiter' import { getBaseUrl } from '@/lib/core/utils/urls' -import { - authorizeWorkflowByWorkspacePermission, - resolveWorkflowIdForUser, -} from '@/lib/workflows/utils' const logger = createLogger('CopilotMcpAPI') const mcpRateLimiter = new 
RateLimiter() @@ -669,112 +660,12 @@ async function handleDirectToolCall( } } -/** - * Build mode uses the main chat orchestrator with the 'fast' command instead of - * the subagent endpoint. In Go, 'build' is not a registered subagent — it's a mode - * (ModeFast) on the main chat processor that bypasses subagent orchestration and - * executes all tools directly. - */ -async function handleBuildToolCall( - args: Record, - userId: string, - abortSignal?: AbortSignal -): Promise { - try { - const requestText = (args.request as string) || JSON.stringify(args) - const workflowId = args.workflowId as string | undefined - - const resolved = workflowId - ? await (async () => { - const authorization = await authorizeWorkflowByWorkspacePermission({ - workflowId, - userId, - action: 'read', - }) - return authorization.allowed ? { workflowId } : null - })() - : await resolveWorkflowIdForUser(userId) - - if (!resolved?.workflowId) { - return { - content: [ - { - type: 'text', - text: JSON.stringify( - { - success: false, - error: 'workflowId is required for build. 
Call create_workflow first.', - }, - null, - 2 - ), - }, - ], - isError: true, - } - } - - const chatId = randomUUID() - - const requestPayload = { - message: requestText, - workflowId: resolved.workflowId, - userId, - model: DEFAULT_COPILOT_MODEL, - mode: 'agent', - commands: ['fast'], - messageId: randomUUID(), - version: SIM_AGENT_VERSION, - headless: true, - chatId, - source: 'mcp', - } - - const result = await orchestrateCopilotStream(requestPayload, { - userId, - workflowId: resolved.workflowId, - chatId, - autoExecuteTools: true, - timeout: 300000, - interactive: false, - abortSignal, - }) - - const responseData = { - success: result.success, - content: result.content, - toolCalls: result.toolCalls, - error: result.error, - } - - return { - content: [{ type: 'text', text: JSON.stringify(responseData, null, 2) }], - isError: !result.success, - } - } catch (error) { - logger.error('Build tool call failed', { error }) - return { - content: [ - { - type: 'text', - text: `Build failed: ${error instanceof Error ? 
error.message : String(error)}`, - }, - ], - isError: true, - } - } -} - async function handleSubagentToolCall( toolDef: (typeof SUBAGENT_TOOL_DEFS)[number], args: Record, userId: string, abortSignal?: AbortSignal ): Promise { - if (toolDef.agentId === 'build') { - return handleBuildToolCall(args, userId, abortSignal) - } - try { const requestText = (args.request as string) || diff --git a/apps/sim/app/api/mcp/discover/route.ts b/apps/sim/app/api/mcp/discover/route.ts index 600e9362f6a..c386c304cc7 100644 --- a/apps/sim/app/api/mcp/discover/route.ts +++ b/apps/sim/app/api/mcp/discover/route.ts @@ -1,7 +1,7 @@ import { db } from '@sim/db' import { permissions, workflowMcpServer, workspace } from '@sim/db/schema' import { createLogger } from '@sim/logger' -import { and, eq, sql } from 'drizzle-orm' +import { and, eq, isNull, sql } from 'drizzle-orm' import { type NextRequest, NextResponse } from 'next/server' import { checkHybridAuth } from '@/lib/auth/hybrid' import { getBaseUrl } from '@/lib/core/utils/urls' @@ -26,12 +26,31 @@ export async function GET(request: NextRequest) { const userId = auth.userId + if (auth.apiKeyType === 'workspace' && !auth.workspaceId) { + return NextResponse.json( + { success: false, error: 'Workspace API key missing workspace scope' }, + { status: 403 } + ) + } + const userWorkspacePermissions = await db .select({ entityId: permissions.entityId }) .from(permissions) - .where(and(eq(permissions.userId, userId), eq(permissions.entityType, 'workspace'))) + .innerJoin(workspace, eq(permissions.entityId, workspace.id)) + .where( + and( + eq(permissions.userId, userId), + eq(permissions.entityType, 'workspace'), + isNull(workspace.archivedAt) + ) + ) - const workspaceIds = userWorkspacePermissions.map((w) => w.entityId) + const workspaceIds = + auth.apiKeyType === 'workspace' && auth.workspaceId + ? 
userWorkspacePermissions + .map((w) => w.entityId) + .filter((workspaceId) => workspaceId === auth.workspaceId) + : userWorkspacePermissions.map((w) => w.entityId) if (workspaceIds.length === 0) { return NextResponse.json({ success: true, servers: [] }) @@ -49,11 +68,18 @@ export async function GET(request: NextRequest) { SELECT COUNT(*)::int FROM "workflow_mcp_tool" WHERE "workflow_mcp_tool"."server_id" = "workflow_mcp_server"."id" + AND "workflow_mcp_tool"."archived_at" IS NULL )`.as('tool_count'), }) .from(workflowMcpServer) .leftJoin(workspace, eq(workflowMcpServer.workspaceId, workspace.id)) - .where(sql`${workflowMcpServer.workspaceId} IN ${workspaceIds}`) + .where( + and( + sql`${workflowMcpServer.workspaceId} IN ${workspaceIds}`, + isNull(workflowMcpServer.deletedAt), + isNull(workspace.archivedAt) + ) + ) .orderBy(workflowMcpServer.name) const baseUrl = getBaseUrl() diff --git a/apps/sim/app/api/mcp/events/route.test.ts b/apps/sim/app/api/mcp/events/route.test.ts index 2d5fd7bdedd..ca9be354ce7 100644 --- a/apps/sim/app/api/mcp/events/route.test.ts +++ b/apps/sim/app/api/mcp/events/route.test.ts @@ -19,6 +19,37 @@ vi.mock('@/lib/workspaces/permissions/utils', () => ({ getUserEntityPermissions: mockGetUserEntityPermissions, })) +vi.mock('@/lib/events/sse-endpoint', () => ({ + createWorkspaceSSE: (_config: any) => { + return async (request: any) => { + const session = await mockGetSession() + if (!session?.user?.id) { + return new Response('Unauthorized', { status: 401 }) + } + const url = new URL(request.url) + const workspaceId = url.searchParams.get('workspaceId') + if (!workspaceId) { + return new Response('Missing workspaceId query parameter', { status: 400 }) + } + const permissions = await mockGetUserEntityPermissions( + session.user.id, + 'workspace', + workspaceId + ) + if (!permissions) { + return new Response('Access denied to workspace', { status: 403 }) + } + return new Response(new ReadableStream({ start() {} }), { + headers: { + 'Content-Type': 
'text/event-stream', + 'Cache-Control': 'no-cache', + Connection: 'keep-alive', + }, + }) + } + }, +})) + vi.mock('@/lib/mcp/connection-manager', () => ({ mcpConnectionManager: null, })) diff --git a/apps/sim/app/api/mcp/events/route.ts b/apps/sim/app/api/mcp/events/route.ts index fee4ca65fb5..61c0f4c82a0 100644 --- a/apps/sim/app/api/mcp/events/route.ts +++ b/apps/sim/app/api/mcp/events/route.ts @@ -8,66 +8,19 @@ * Auth is handled via session cookies (EventSource sends cookies automatically). */ -import { createLogger } from '@sim/logger' -import type { NextRequest } from 'next/server' -import { getSession } from '@/lib/auth' -import { SSE_HEADERS } from '@/lib/core/utils/sse' +import { createWorkspaceSSE } from '@/lib/events/sse-endpoint' import { mcpConnectionManager } from '@/lib/mcp/connection-manager' import { mcpPubSub } from '@/lib/mcp/pubsub' -import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils' - -const logger = createLogger('McpEventsSSE') export const dynamic = 'force-dynamic' -const HEARTBEAT_INTERVAL_MS = 30_000 - -export async function GET(request: NextRequest) { - const session = await getSession() - if (!session?.user?.id) { - return new Response('Unauthorized', { status: 401 }) - } - - const { searchParams } = new URL(request.url) - const workspaceId = searchParams.get('workspaceId') - if (!workspaceId) { - return new Response('Missing workspaceId query parameter', { status: 400 }) - } - - const permissions = await getUserEntityPermissions(session.user.id, 'workspace', workspaceId) - if (!permissions) { - return new Response('Access denied to workspace', { status: 403 }) - } - - const encoder = new TextEncoder() - const unsubscribers: Array<() => void> = [] - let cleaned = false - - const cleanup = () => { - if (cleaned) return - cleaned = true - for (const unsub of unsubscribers) { - unsub() - } - logger.info(`SSE connection closed for workspace ${workspaceId}`) - } - - const stream = new ReadableStream({ - 
start(controller) { - const send = (eventName: string, data: Record) => { - if (cleaned) return - try { - controller.enqueue( - encoder.encode(`event: ${eventName}\ndata: ${JSON.stringify(data)}\n\n`) - ) - } catch { - // Stream already closed - } - } - - // Subscribe to external MCP server tool changes - if (mcpConnectionManager) { - const unsub = mcpConnectionManager.subscribe((event) => { +export const GET = createWorkspaceSSE({ + label: 'mcp-events', + subscriptions: [ + { + subscribe: (workspaceId, send) => { + if (!mcpConnectionManager) return () => {} + return mcpConnectionManager.subscribe((event) => { if (event.workspaceId !== workspaceId) return send('tools_changed', { source: 'external', @@ -75,12 +28,12 @@ export async function GET(request: NextRequest) { timestamp: event.timestamp, }) }) - unsubscribers.push(unsub) - } - - // Subscribe to workflow CRUD tool changes - if (mcpPubSub) { - const unsub = mcpPubSub.onWorkflowToolsChanged((event) => { + }, + }, + { + subscribe: (workspaceId, send) => { + if (!mcpPubSub) return () => {} + return mcpPubSub.onWorkflowToolsChanged((event) => { if (event.workspaceId !== workspaceId) return send('tools_changed', { source: 'workflow', @@ -88,43 +41,7 @@ export async function GET(request: NextRequest) { timestamp: Date.now(), }) }) - unsubscribers.push(unsub) - } - - // Heartbeat to keep the connection alive - const heartbeat = setInterval(() => { - if (cleaned) { - clearInterval(heartbeat) - return - } - try { - controller.enqueue(encoder.encode(': heartbeat\n\n')) - } catch { - clearInterval(heartbeat) - } - }, HEARTBEAT_INTERVAL_MS) - unsubscribers.push(() => clearInterval(heartbeat)) - - // Cleanup when client disconnects - request.signal.addEventListener( - 'abort', - () => { - cleanup() - try { - controller.close() - } catch { - // Already closed - } - }, - { once: true } - ) - - logger.info(`SSE connection opened for workspace ${workspaceId}`) - }, - cancel() { - cleanup() + }, }, - }) - - return new 
Response(stream, { headers: SSE_HEADERS }) -} + ], +}) diff --git a/apps/sim/app/api/mcp/serve/[serverId]/route.test.ts b/apps/sim/app/api/mcp/serve/[serverId]/route.test.ts index 77dd1adebf6..6113799bda7 100644 --- a/apps/sim/app/api/mcp/serve/[serverId]/route.test.ts +++ b/apps/sim/app/api/mcp/serve/[serverId]/route.test.ts @@ -12,6 +12,7 @@ const { mockGenerateInternalToken, mockDbSelect, mockDbFrom, + mockDbInnerJoin, mockDbWhere, mockDbLimit, fetchMock, @@ -21,6 +22,7 @@ const { mockGenerateInternalToken: vi.fn(), mockDbSelect: vi.fn(), mockDbFrom: vi.fn(), + mockDbInnerJoin: vi.fn(), mockDbWhere: vi.fn(), mockDbLimit: vi.fn(), fetchMock: vi.fn(), @@ -29,6 +31,7 @@ const { vi.mock('drizzle-orm', () => ({ and: vi.fn(), eq: vi.fn(), + isNull: vi.fn(), })) vi.mock('@sim/db', () => ({ @@ -44,6 +47,7 @@ vi.mock('@sim/db/schema', () => ({ workspaceId: 'workspaceId', isPublic: 'isPublic', createdBy: 'createdBy', + deletedAt: 'deletedAt', }, workflowMcpTool: { serverId: 'serverId', @@ -51,10 +55,16 @@ vi.mock('@sim/db/schema', () => ({ toolDescription: 'toolDescription', parameterSchema: 'parameterSchema', workflowId: 'workflowId', + archivedAt: 'archivedAt', }, workflow: { id: 'id', isDeployed: 'isDeployed', + archivedAt: 'archivedAt', + }, + workspace: { + id: 'id', + archivedAt: 'archivedAt', }, })) @@ -89,7 +99,8 @@ describe('MCP Serve Route', () => { vi.clearAllMocks() mockDbSelect.mockReturnValue({ from: mockDbFrom }) - mockDbFrom.mockReturnValue({ where: mockDbWhere }) + mockDbFrom.mockReturnValue({ innerJoin: mockDbInnerJoin, where: mockDbWhere }) + mockDbInnerJoin.mockReturnValue({ where: mockDbWhere }) mockDbWhere.mockReturnValue({ limit: mockDbLimit }) vi.stubGlobal('fetch', fetchMock) diff --git a/apps/sim/app/api/mcp/serve/[serverId]/route.ts b/apps/sim/app/api/mcp/serve/[serverId]/route.ts index 29c70120978..0be8778bc53 100644 --- a/apps/sim/app/api/mcp/serve/[serverId]/route.ts +++ b/apps/sim/app/api/mcp/serve/[serverId]/route.ts @@ -15,9 +15,9 @@ 
import { type RequestId, } from '@modelcontextprotocol/sdk/types.js' import { db } from '@sim/db' -import { workflow, workflowMcpServer, workflowMcpTool } from '@sim/db/schema' +import { workflow, workflowMcpServer, workflowMcpTool, workspace } from '@sim/db/schema' import { createLogger } from '@sim/logger' -import { and, eq } from 'drizzle-orm' +import { and, eq, isNull } from 'drizzle-orm' import { type NextRequest, NextResponse } from 'next/server' import { type AuthResult, AuthType, checkHybridAuth } from '@/lib/auth/hybrid' import { generateInternalToken } from '@/lib/auth/internal' @@ -66,7 +66,14 @@ async function getServer(serverId: string) { createdBy: workflowMcpServer.createdBy, }) .from(workflowMcpServer) - .where(eq(workflowMcpServer.id, serverId)) + .innerJoin(workspace, eq(workflowMcpServer.workspaceId, workspace.id)) + .where( + and( + eq(workflowMcpServer.id, serverId), + isNull(workflowMcpServer.deletedAt), + isNull(workspace.archivedAt) + ) + ) .limit(1) return server @@ -87,6 +94,10 @@ export async function GET(request: NextRequest, { params }: { params: Promise { @@ -262,7 +277,13 @@ async function handleToolsCall( workflowId: workflowMcpTool.workflowId, }) .from(workflowMcpTool) - .where(and(eq(workflowMcpTool.serverId, serverId), eq(workflowMcpTool.toolName, params.name))) + .where( + and( + eq(workflowMcpTool.serverId, serverId), + eq(workflowMcpTool.toolName, params.name), + isNull(workflowMcpTool.archivedAt) + ) + ) .limit(1) if (!tool) { return NextResponse.json( @@ -276,7 +297,7 @@ async function handleToolsCall( const [wf] = await db .select({ isDeployed: workflow.isDeployed }) .from(workflow) - .where(eq(workflow.id, tool.workflowId)) + .where(and(eq(workflow.id, tool.workflowId), isNull(workflow.archivedAt))) .limit(1) if (!wf?.isDeployed) { diff --git a/apps/sim/app/api/mcp/servers/[id]/route.ts b/apps/sim/app/api/mcp/servers/[id]/route.ts index 19c2609ab5f..597244a9703 100644 --- a/apps/sim/app/api/mcp/servers/[id]/route.ts +++ 
b/apps/sim/app/api/mcp/servers/[id]/route.ts @@ -82,11 +82,16 @@ export const PATCH = withMcpAuth<{ id: string }>('write')( ) } - // Only clear cache if URL changed (requires re-discovery) - const urlChanged = body.url && currentServer?.url !== body.url - if (urlChanged) { + const shouldClearCache = + (body.url !== undefined && currentServer?.url !== body.url) || + body.enabled !== undefined || + body.headers !== undefined || + body.timeout !== undefined || + body.retries !== undefined + + if (shouldClearCache) { await mcpService.clearCache(workspaceId) - logger.info(`[${requestId}] Cleared cache due to URL change`) + logger.info(`[${requestId}] Cleared MCP cache after server lifecycle update`) } logger.info(`[${requestId}] Successfully updated MCP server: ${serverId}`) diff --git a/apps/sim/app/api/mcp/workflow-servers/[id]/route.ts b/apps/sim/app/api/mcp/workflow-servers/[id]/route.ts index 4890dbc8f48..f5ed5371e19 100644 --- a/apps/sim/app/api/mcp/workflow-servers/[id]/route.ts +++ b/apps/sim/app/api/mcp/workflow-servers/[id]/route.ts @@ -1,7 +1,7 @@ import { db } from '@sim/db' import { workflowMcpServer, workflowMcpTool } from '@sim/db/schema' import { createLogger } from '@sim/logger' -import { and, eq } from 'drizzle-orm' +import { and, eq, isNull } from 'drizzle-orm' import type { NextRequest } from 'next/server' import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log' import { getParsedBody, withMcpAuth } from '@/lib/mcp/middleware' @@ -39,7 +39,11 @@ export const GET = withMcpAuth('read')( }) .from(workflowMcpServer) .where( - and(eq(workflowMcpServer.id, serverId), eq(workflowMcpServer.workspaceId, workspaceId)) + and( + eq(workflowMcpServer.id, serverId), + eq(workflowMcpServer.workspaceId, workspaceId), + isNull(workflowMcpServer.deletedAt) + ) ) .limit(1) @@ -50,7 +54,7 @@ export const GET = withMcpAuth('read')( const tools = await db .select() .from(workflowMcpTool) - .where(eq(workflowMcpTool.serverId, serverId)) + 
.where(and(eq(workflowMcpTool.serverId, serverId), isNull(workflowMcpTool.archivedAt))) logger.info( `[${requestId}] Found workflow MCP server: ${server.name} with ${tools.length} tools` @@ -87,7 +91,11 @@ export const PATCH = withMcpAuth('write')( .select({ id: workflowMcpServer.id }) .from(workflowMcpServer) .where( - and(eq(workflowMcpServer.id, serverId), eq(workflowMcpServer.workspaceId, workspaceId)) + and( + eq(workflowMcpServer.id, serverId), + eq(workflowMcpServer.workspaceId, workspaceId), + isNull(workflowMcpServer.deletedAt) + ) ) .limit(1) @@ -112,7 +120,7 @@ export const PATCH = withMcpAuth('write')( const [updatedServer] = await db .update(workflowMcpServer) .set(updateData) - .where(eq(workflowMcpServer.id, serverId)) + .where(and(eq(workflowMcpServer.id, serverId), isNull(workflowMcpServer.deletedAt))) .returning() logger.info(`[${requestId}] Successfully updated workflow MCP server: ${serverId}`) diff --git a/apps/sim/app/api/mcp/workflow-servers/[id]/tools/[toolId]/route.ts b/apps/sim/app/api/mcp/workflow-servers/[id]/tools/[toolId]/route.ts index 9a2d374ed83..f54caf4703e 100644 --- a/apps/sim/app/api/mcp/workflow-servers/[id]/tools/[toolId]/route.ts +++ b/apps/sim/app/api/mcp/workflow-servers/[id]/tools/[toolId]/route.ts @@ -1,7 +1,7 @@ import { db } from '@sim/db' import { workflowMcpServer, workflowMcpTool } from '@sim/db/schema' import { createLogger } from '@sim/logger' -import { and, eq } from 'drizzle-orm' +import { and, eq, isNull } from 'drizzle-orm' import type { NextRequest } from 'next/server' import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log' import { getParsedBody, withMcpAuth } from '@/lib/mcp/middleware' @@ -32,7 +32,11 @@ export const GET = withMcpAuth('read')( .select({ id: workflowMcpServer.id }) .from(workflowMcpServer) .where( - and(eq(workflowMcpServer.id, serverId), eq(workflowMcpServer.workspaceId, workspaceId)) + and( + eq(workflowMcpServer.id, serverId), + eq(workflowMcpServer.workspaceId, 
workspaceId), + isNull(workflowMcpServer.deletedAt) + ) ) .limit(1) @@ -43,7 +47,13 @@ export const GET = withMcpAuth('read')( const [tool] = await db .select() .from(workflowMcpTool) - .where(and(eq(workflowMcpTool.id, toolId), eq(workflowMcpTool.serverId, serverId))) + .where( + and( + eq(workflowMcpTool.id, toolId), + eq(workflowMcpTool.serverId, serverId), + isNull(workflowMcpTool.archivedAt) + ) + ) .limit(1) if (!tool) { @@ -81,7 +91,11 @@ export const PATCH = withMcpAuth('write')( .select({ id: workflowMcpServer.id }) .from(workflowMcpServer) .where( - and(eq(workflowMcpServer.id, serverId), eq(workflowMcpServer.workspaceId, workspaceId)) + and( + eq(workflowMcpServer.id, serverId), + eq(workflowMcpServer.workspaceId, workspaceId), + isNull(workflowMcpServer.deletedAt) + ) ) .limit(1) @@ -92,7 +106,13 @@ export const PATCH = withMcpAuth('write')( const [existingTool] = await db .select({ id: workflowMcpTool.id }) .from(workflowMcpTool) - .where(and(eq(workflowMcpTool.id, toolId), eq(workflowMcpTool.serverId, serverId))) + .where( + and( + eq(workflowMcpTool.id, toolId), + eq(workflowMcpTool.serverId, serverId), + isNull(workflowMcpTool.archivedAt) + ) + ) .limit(1) if (!existingTool) { @@ -166,7 +186,11 @@ export const DELETE = withMcpAuth('write')( .select({ id: workflowMcpServer.id }) .from(workflowMcpServer) .where( - and(eq(workflowMcpServer.id, serverId), eq(workflowMcpServer.workspaceId, workspaceId)) + and( + eq(workflowMcpServer.id, serverId), + eq(workflowMcpServer.workspaceId, workspaceId), + isNull(workflowMcpServer.deletedAt) + ) ) .limit(1) diff --git a/apps/sim/app/api/mcp/workflow-servers/[id]/tools/route.ts b/apps/sim/app/api/mcp/workflow-servers/[id]/tools/route.ts index bdd9139f937..b0887aef1f8 100644 --- a/apps/sim/app/api/mcp/workflow-servers/[id]/tools/route.ts +++ b/apps/sim/app/api/mcp/workflow-servers/[id]/tools/route.ts @@ -1,7 +1,7 @@ import { db } from '@sim/db' import { workflow, workflowMcpServer, workflowMcpTool } from 
'@sim/db/schema' import { createLogger } from '@sim/logger' -import { and, eq } from 'drizzle-orm' +import { and, eq, isNull } from 'drizzle-orm' import type { NextRequest } from 'next/server' import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log' import { getParsedBody, withMcpAuth } from '@/lib/mcp/middleware' @@ -33,7 +33,11 @@ export const GET = withMcpAuth('read')( .select({ id: workflowMcpServer.id }) .from(workflowMcpServer) .where( - and(eq(workflowMcpServer.id, serverId), eq(workflowMcpServer.workspaceId, workspaceId)) + and( + eq(workflowMcpServer.id, serverId), + eq(workflowMcpServer.workspaceId, workspaceId), + isNull(workflowMcpServer.deletedAt) + ) ) .limit(1) @@ -56,8 +60,11 @@ export const GET = withMcpAuth('read')( isDeployed: workflow.isDeployed, }) .from(workflowMcpTool) - .leftJoin(workflow, eq(workflowMcpTool.workflowId, workflow.id)) - .where(eq(workflowMcpTool.serverId, serverId)) + .leftJoin( + workflow, + and(eq(workflowMcpTool.workflowId, workflow.id), isNull(workflow.archivedAt)) + ) + .where(and(eq(workflowMcpTool.serverId, serverId), isNull(workflowMcpTool.archivedAt))) logger.info(`[${requestId}] Found ${tools.length} tools for server ${serverId}`) @@ -102,7 +109,11 @@ export const POST = withMcpAuth('write')( .select({ id: workflowMcpServer.id }) .from(workflowMcpServer) .where( - and(eq(workflowMcpServer.id, serverId), eq(workflowMcpServer.workspaceId, workspaceId)) + and( + eq(workflowMcpServer.id, serverId), + eq(workflowMcpServer.workspaceId, workspaceId), + isNull(workflowMcpServer.deletedAt) + ) ) .limit(1) @@ -119,7 +130,7 @@ export const POST = withMcpAuth('write')( workspaceId: workflow.workspaceId, }) .from(workflow) - .where(eq(workflow.id, body.workflowId)) + .where(and(eq(workflow.id, body.workflowId), isNull(workflow.archivedAt))) .limit(1) if (!workflowRecord) { @@ -157,7 +168,8 @@ export const POST = withMcpAuth('write')( .where( and( eq(workflowMcpTool.serverId, serverId), - 
eq(workflowMcpTool.workflowId, body.workflowId) + eq(workflowMcpTool.workflowId, body.workflowId), + isNull(workflowMcpTool.archivedAt) ) ) .limit(1) diff --git a/apps/sim/app/api/mcp/workflow-servers/route.ts b/apps/sim/app/api/mcp/workflow-servers/route.ts index 27515941323..185c551b271 100644 --- a/apps/sim/app/api/mcp/workflow-servers/route.ts +++ b/apps/sim/app/api/mcp/workflow-servers/route.ts @@ -1,7 +1,7 @@ import { db } from '@sim/db' import { workflow, workflowMcpServer, workflowMcpTool } from '@sim/db/schema' import { createLogger } from '@sim/logger' -import { eq, inArray, sql } from 'drizzle-orm' +import { and, eq, inArray, isNull, sql } from 'drizzle-orm' import type { NextRequest } from 'next/server' import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log' import { getParsedBody, withMcpAuth } from '@/lib/mcp/middleware' @@ -37,10 +37,13 @@ export const GET = withMcpAuth('read')( SELECT COUNT(*)::int FROM "workflow_mcp_tool" WHERE "workflow_mcp_tool"."server_id" = "workflow_mcp_server"."id" + AND "workflow_mcp_tool"."archived_at" IS NULL )`.as('tool_count'), }) .from(workflowMcpServer) - .where(eq(workflowMcpServer.workspaceId, workspaceId)) + .where( + and(eq(workflowMcpServer.workspaceId, workspaceId), isNull(workflowMcpServer.deletedAt)) + ) const serverIds = servers.map((s) => s.id) const tools = @@ -51,7 +54,12 @@ export const GET = withMcpAuth('read')( toolName: workflowMcpTool.toolName, }) .from(workflowMcpTool) - .where(inArray(workflowMcpTool.serverId, serverIds)) + .where( + and( + inArray(workflowMcpTool.serverId, serverIds), + isNull(workflowMcpTool.archivedAt) + ) + ) : [] const toolNamesByServer: Record = {} @@ -133,7 +141,7 @@ export const POST = withMcpAuth('write')( workspaceId: workflow.workspaceId, }) .from(workflow) - .where(inArray(workflow.id, workflowIds)) + .where(and(inArray(workflow.id, workflowIds), isNull(workflow.archivedAt))) for (const workflowRecord of workflows) { if (workflowRecord.workspaceId 
!== workspaceId) { diff --git a/apps/sim/app/api/mothership/chat/route.ts b/apps/sim/app/api/mothership/chat/route.ts new file mode 100644 index 00000000000..b351ddfb731 --- /dev/null +++ b/apps/sim/app/api/mothership/chat/route.ts @@ -0,0 +1,345 @@ +import { db } from '@sim/db' +import { copilotChats } from '@sim/db/schema' +import { createLogger } from '@sim/logger' +import { eq, sql } from 'drizzle-orm' +import { type NextRequest, NextResponse } from 'next/server' +import { z } from 'zod' +import { getSession } from '@/lib/auth' +import { resolveOrCreateChat } from '@/lib/copilot/chat-lifecycle' +import { buildCopilotRequestPayload } from '@/lib/copilot/chat-payload' +import { createSSEStream, SSE_RESPONSE_HEADERS } from '@/lib/copilot/chat-streaming' +import type { OrchestratorResult } from '@/lib/copilot/orchestrator/types' +import { processContextsServer, resolveActiveResourceContext } from '@/lib/copilot/process-contents' +import { createRequestTracker, createUnauthorizedResponse } from '@/lib/copilot/request-helpers' +import { taskPubSub } from '@/lib/copilot/task-events' +import { generateWorkspaceContext } from '@/lib/copilot/workspace-context' +import { + assertActiveWorkspaceAccess, + getUserEntityPermissions, +} from '@/lib/workspaces/permissions/utils' + +const logger = createLogger('MothershipChatAPI') + +const FileAttachmentSchema = z.object({ + id: z.string(), + key: z.string(), + filename: z.string(), + media_type: z.string(), + size: z.number(), +}) + +const ResourceAttachmentSchema = z.object({ + type: z.enum(['workflow', 'table', 'file', 'knowledgebase']), + id: z.string().min(1), +}) + +const MothershipMessageSchema = z.object({ + message: z.string().min(1, 'Message is required'), + workspaceId: z.string().min(1, 'workspaceId is required'), + userMessageId: z.string().optional(), + chatId: z.string().optional(), + createNewChat: z.boolean().optional().default(false), + fileAttachments: z.array(FileAttachmentSchema).optional(), + userTimezone: 
z.string().optional(), + resourceAttachments: z.array(ResourceAttachmentSchema).optional(), + contexts: z + .array( + z.object({ + kind: z.enum([ + 'past_chat', + 'workflow', + 'current_workflow', + 'blocks', + 'logs', + 'workflow_block', + 'knowledge', + 'templates', + 'docs', + 'table', + 'file', + ]), + label: z.string(), + chatId: z.string().optional(), + workflowId: z.string().optional(), + knowledgeId: z.string().optional(), + blockId: z.string().optional(), + blockIds: z.array(z.string()).optional(), + templateId: z.string().optional(), + executionId: z.string().optional(), + tableId: z.string().optional(), + fileId: z.string().optional(), + }) + ) + .optional(), +}) + +/** + * POST /api/mothership/chat + * Workspace-scoped chat — no workflowId, proxies to Go /api/mothership. + */ +export async function POST(req: NextRequest) { + const tracker = createRequestTracker() + + try { + const session = await getSession() + if (!session?.user?.id) { + return createUnauthorizedResponse() + } + + const authenticatedUserId = session.user.id + const body = await req.json() + const { + message, + workspaceId, + userMessageId: providedMessageId, + chatId, + createNewChat, + fileAttachments, + contexts, + resourceAttachments, + userTimezone, + } = MothershipMessageSchema.parse(body) + + const userMessageId = providedMessageId || crypto.randomUUID() + + try { + await assertActiveWorkspaceAccess(workspaceId, authenticatedUserId) + } catch { + return NextResponse.json({ error: 'Workspace not found or access denied' }, { status: 403 }) + } + + let agentContexts: Array<{ type: string; content: string }> = [] + if (Array.isArray(contexts) && contexts.length > 0) { + try { + agentContexts = await processContextsServer( + contexts as any, + authenticatedUserId, + message, + workspaceId + ) + } catch (e) { + logger.error(`[${tracker.requestId}] Failed to process contexts`, e) + } + } + + if (Array.isArray(resourceAttachments) && resourceAttachments.length > 0) { + const results = 
await Promise.allSettled( + resourceAttachments.map((r) => + resolveActiveResourceContext(r.type, r.id, workspaceId, authenticatedUserId) + ) + ) + for (const result of results) { + if (result.status === 'fulfilled' && result.value) { + agentContexts.push(result.value) + } else if (result.status === 'rejected') { + logger.error( + `[${tracker.requestId}] Failed to resolve resource attachment`, + result.reason + ) + } + } + } + + let currentChat: any = null + let conversationHistory: any[] = [] + let actualChatId = chatId + + if (chatId || createNewChat) { + const chatResult = await resolveOrCreateChat({ + chatId, + userId: authenticatedUserId, + workspaceId, + model: 'claude-opus-4-5', + type: 'mothership', + }) + currentChat = chatResult.chat + actualChatId = chatResult.chatId || chatId + conversationHistory = Array.isArray(chatResult.conversationHistory) + ? chatResult.conversationHistory + : [] + + if (chatId && !currentChat) { + return NextResponse.json({ error: 'Chat not found' }, { status: 404 }) + } + } + + if (actualChatId) { + const userMsg = { + id: userMessageId, + role: 'user' as const, + content: message, + timestamp: new Date().toISOString(), + ...(fileAttachments && + fileAttachments.length > 0 && { + fileAttachments: fileAttachments.map((f) => ({ + id: f.id, + key: f.key, + filename: f.filename, + media_type: f.media_type, + size: f.size, + })), + }), + ...(contexts && + contexts.length > 0 && { + contexts: contexts.map((c) => ({ + kind: c.kind, + label: c.label, + ...(c.workflowId && { workflowId: c.workflowId }), + ...(c.knowledgeId && { knowledgeId: c.knowledgeId }), + ...(c.tableId && { tableId: c.tableId }), + ...(c.fileId && { fileId: c.fileId }), + })), + }), + } + + const [updated] = await db + .update(copilotChats) + .set({ + messages: sql`${copilotChats.messages} || ${JSON.stringify([userMsg])}::jsonb`, + conversationId: userMessageId, + updatedAt: new Date(), + }) + .where(eq(copilotChats.id, actualChatId)) + .returning({ messages: 
copilotChats.messages }) + + if (updated) { + const freshMessages: any[] = Array.isArray(updated.messages) ? updated.messages : [] + conversationHistory = freshMessages.filter((m: any) => m.id !== userMessageId) + taskPubSub?.publishStatusChanged({ workspaceId, chatId: actualChatId, type: 'started' }) + } + } + + const [workspaceContext, userPermission] = await Promise.all([ + generateWorkspaceContext(workspaceId, authenticatedUserId), + getUserEntityPermissions(authenticatedUserId, 'workspace', workspaceId).catch(() => null), + ]) + + const requestPayload = await buildCopilotRequestPayload( + { + message, + workspaceId, + userId: authenticatedUserId, + userMessageId, + mode: 'agent', + model: '', + contexts: agentContexts, + fileAttachments, + chatId: actualChatId, + userPermission: userPermission ?? undefined, + workspaceContext, + userTimezone, + }, + { selectedModel: '' } + ) + + const stream = createSSEStream({ + requestPayload, + userId: authenticatedUserId, + streamId: userMessageId, + chatId: actualChatId, + currentChat, + isNewChat: conversationHistory.length === 0, + message, + titleModel: 'claude-opus-4-5', + requestId: tracker.requestId, + workspaceId, + orchestrateOptions: { + userId: authenticatedUserId, + workspaceId, + chatId: actualChatId, + goRoute: '/api/mothership', + autoExecuteTools: true, + interactive: false, + onComplete: async (result: OrchestratorResult) => { + if (!actualChatId) return + + const assistantMessage: Record = { + id: crypto.randomUUID(), + role: 'assistant' as const, + content: result.content, + timestamp: new Date().toISOString(), + } + if (result.toolCalls.length > 0) { + assistantMessage.toolCalls = result.toolCalls + } + if (result.contentBlocks.length > 0) { + assistantMessage.contentBlocks = result.contentBlocks.map((block) => { + const stored: Record = { type: block.type } + if (block.content) stored.content = block.content + if (block.type === 'tool_call' && block.toolCall) { + stored.toolCall = { + id: 
block.toolCall.id, + name: block.toolCall.name, + state: + block.toolCall.result?.success !== undefined + ? block.toolCall.result.success + ? 'success' + : 'error' + : block.toolCall.status, + result: block.toolCall.result, + ...(block.calledBy ? { calledBy: block.calledBy } : {}), + } + } + return stored + }) + } + + try { + const [row] = await db + .select({ messages: copilotChats.messages }) + .from(copilotChats) + .where(eq(copilotChats.id, actualChatId)) + .limit(1) + + const msgs: any[] = Array.isArray(row?.messages) ? row.messages : [] + const userIdx = msgs.findIndex((m: any) => m.id === userMessageId) + const alreadyHasResponse = + userIdx >= 0 && + userIdx + 1 < msgs.length && + (msgs[userIdx + 1] as any)?.role === 'assistant' + + if (!alreadyHasResponse) { + await db + .update(copilotChats) + .set({ + messages: sql`${copilotChats.messages} || ${JSON.stringify([assistantMessage])}::jsonb`, + conversationId: sql`CASE WHEN ${copilotChats.conversationId} = ${userMessageId} THEN NULL ELSE ${copilotChats.conversationId} END`, + updatedAt: new Date(), + }) + .where(eq(copilotChats.id, actualChatId)) + + taskPubSub?.publishStatusChanged({ + workspaceId, + chatId: actualChatId, + type: 'completed', + }) + } + } catch (error) { + logger.error(`[${tracker.requestId}] Failed to persist chat messages`, { + chatId: actualChatId, + error: error instanceof Error ? error.message : 'Unknown error', + }) + } + }, + }, + }) + + return new Response(stream, { headers: SSE_RESPONSE_HEADERS }) + } catch (error) { + if (error instanceof z.ZodError) { + return NextResponse.json( + { error: 'Invalid request data', details: error.errors }, + { status: 400 } + ) + } + + logger.error(`[${tracker.requestId}] Error handling mothership chat:`, { + error: error instanceof Error ? error.message : 'Unknown error', + }) + + return NextResponse.json( + { error: error instanceof Error ? 
error.message : 'Internal server error' }, + { status: 500 } + ) + } +} diff --git a/apps/sim/app/api/mothership/chat/stop/route.ts b/apps/sim/app/api/mothership/chat/stop/route.ts new file mode 100644 index 00000000000..763ff9b2cfc --- /dev/null +++ b/apps/sim/app/api/mothership/chat/stop/route.ts @@ -0,0 +1,110 @@ +import { db } from '@sim/db' +import { copilotChats } from '@sim/db/schema' +import { createLogger } from '@sim/logger' +import { and, eq, sql } from 'drizzle-orm' +import { type NextRequest, NextResponse } from 'next/server' +import { z } from 'zod' +import { getSession } from '@/lib/auth' +import { taskPubSub } from '@/lib/copilot/task-events' + +const logger = createLogger('MothershipChatStopAPI') + +const StoredToolCallSchema = z + .object({ + id: z.string().optional(), + name: z.string().optional(), + state: z.string().optional(), + params: z.record(z.unknown()).optional(), + result: z + .object({ + success: z.boolean(), + output: z.unknown().optional(), + error: z.string().optional(), + }) + .optional(), + display: z + .object({ + text: z.string().optional(), + }) + .optional(), + calledBy: z.string().optional(), + }) + .nullable() + +const ContentBlockSchema = z.object({ + type: z.string(), + content: z.string().optional(), + toolCall: StoredToolCallSchema.optional(), +}) + +const StopSchema = z.object({ + chatId: z.string(), + streamId: z.string(), + content: z.string(), + contentBlocks: z.array(ContentBlockSchema).optional(), +}) + +/** + * POST /api/mothership/chat/stop + * Persists partial assistant content when the user stops a stream mid-response. + * Clears conversationId so the server-side onComplete won't duplicate the message. 
+ */ +export async function POST(req: NextRequest) { + try { + const session = await getSession() + if (!session?.user?.id) { + return NextResponse.json({ error: 'Unauthorized' }, { status: 401 }) + } + + const { chatId, streamId, content, contentBlocks } = StopSchema.parse(await req.json()) + + const setClause: Record = { + conversationId: null, + updatedAt: new Date(), + } + + const hasContent = content.trim().length > 0 + const hasBlocks = Array.isArray(contentBlocks) && contentBlocks.length > 0 + + if (hasContent || hasBlocks) { + const assistantMessage: Record = { + id: crypto.randomUUID(), + role: 'assistant' as const, + content, + timestamp: new Date().toISOString(), + } + if (hasBlocks) { + assistantMessage.contentBlocks = contentBlocks + } + setClause.messages = sql`${copilotChats.messages} || ${JSON.stringify([assistantMessage])}::jsonb` + } + + const [updated] = await db + .update(copilotChats) + .set(setClause) + .where( + and( + eq(copilotChats.id, chatId), + eq(copilotChats.userId, session.user.id), + eq(copilotChats.conversationId, streamId) + ) + ) + .returning({ workspaceId: copilotChats.workspaceId }) + + if (updated?.workspaceId) { + taskPubSub?.publishStatusChanged({ + workspaceId: updated.workspaceId, + chatId, + type: 'completed', + }) + } + + return NextResponse.json({ success: true }) + } catch (error) { + if (error instanceof z.ZodError) { + return NextResponse.json({ error: 'Invalid request' }, { status: 400 }) + } + logger.error('Error stopping chat stream:', error) + return NextResponse.json({ error: 'Internal server error' }, { status: 500 }) + } +} diff --git a/apps/sim/app/api/mothership/chats/read/route.ts b/apps/sim/app/api/mothership/chats/read/route.ts new file mode 100644 index 00000000000..e75ffd28d36 --- /dev/null +++ b/apps/sim/app/api/mothership/chats/read/route.ts @@ -0,0 +1,43 @@ +import { db } from '@sim/db' +import { copilotChats } from '@sim/db/schema' +import { createLogger } from '@sim/logger' +import { and, eq, sql } 
from 'drizzle-orm' +import { type NextRequest, NextResponse } from 'next/server' +import { z } from 'zod' +import { + authenticateCopilotRequestSessionOnly, + createBadRequestResponse, + createInternalServerErrorResponse, + createUnauthorizedResponse, +} from '@/lib/copilot/request-helpers' + +const logger = createLogger('MarkTaskReadAPI') + +const MarkReadSchema = z.object({ + chatId: z.string().min(1), +}) + +export async function POST(request: NextRequest) { + try { + const { userId, isAuthenticated } = await authenticateCopilotRequestSessionOnly() + if (!isAuthenticated || !userId) { + return createUnauthorizedResponse() + } + + const body = await request.json() + const { chatId } = MarkReadSchema.parse(body) + + await db + .update(copilotChats) + .set({ lastSeenAt: sql`GREATEST(${copilotChats.updatedAt}, NOW())` }) + .where(and(eq(copilotChats.id, chatId), eq(copilotChats.userId, userId))) + + return NextResponse.json({ success: true }) + } catch (error) { + if (error instanceof z.ZodError) { + return createBadRequestResponse('chatId is required') + } + logger.error('Error marking task as read:', error) + return createInternalServerErrorResponse('Failed to mark task as read') + } +} diff --git a/apps/sim/app/api/mothership/chats/route.ts b/apps/sim/app/api/mothership/chats/route.ts new file mode 100644 index 00000000000..f9b4e1748c2 --- /dev/null +++ b/apps/sim/app/api/mothership/chats/route.ts @@ -0,0 +1,106 @@ +import { db } from '@sim/db' +import { copilotChats } from '@sim/db/schema' +import { createLogger } from '@sim/logger' +import { and, desc, eq } from 'drizzle-orm' +import { type NextRequest, NextResponse } from 'next/server' +import { z } from 'zod' +import { + authenticateCopilotRequestSessionOnly, + createBadRequestResponse, + createInternalServerErrorResponse, + createUnauthorizedResponse, +} from '@/lib/copilot/request-helpers' +import { taskPubSub } from '@/lib/copilot/task-events' +import { assertActiveWorkspaceAccess } from 
'@/lib/workspaces/permissions/utils' + +const logger = createLogger('MothershipChatsAPI') + +/** + * GET /api/mothership/chats?workspaceId=xxx + * Returns mothership (home) chats for the authenticated user in the given workspace. + */ +export async function GET(request: NextRequest) { + try { + const { userId, isAuthenticated } = await authenticateCopilotRequestSessionOnly() + if (!isAuthenticated || !userId) { + return createUnauthorizedResponse() + } + + const workspaceId = request.nextUrl.searchParams.get('workspaceId') + if (!workspaceId) { + return createBadRequestResponse('workspaceId is required') + } + + await assertActiveWorkspaceAccess(workspaceId, userId) + + const chats = await db + .select({ + id: copilotChats.id, + title: copilotChats.title, + updatedAt: copilotChats.updatedAt, + conversationId: copilotChats.conversationId, + lastSeenAt: copilotChats.lastSeenAt, + }) + .from(copilotChats) + .where( + and( + eq(copilotChats.userId, userId), + eq(copilotChats.workspaceId, workspaceId), + eq(copilotChats.type, 'mothership') + ) + ) + .orderBy(desc(copilotChats.updatedAt)) + + return NextResponse.json({ success: true, data: chats }) + } catch (error) { + logger.error('Error fetching mothership chats:', error) + return createInternalServerErrorResponse('Failed to fetch chats') + } +} + +const CreateChatSchema = z.object({ + workspaceId: z.string().min(1), +}) + +/** + * POST /api/mothership/chats + * Creates an empty mothership chat and returns its ID. 
+ */ +export async function POST(request: NextRequest) { + try { + const { userId, isAuthenticated } = await authenticateCopilotRequestSessionOnly() + if (!isAuthenticated || !userId) { + return createUnauthorizedResponse() + } + + const body = await request.json() + const { workspaceId } = CreateChatSchema.parse(body) + + await assertActiveWorkspaceAccess(workspaceId, userId) + + const now = new Date() + const [chat] = await db + .insert(copilotChats) + .values({ + userId, + workspaceId, + type: 'mothership', + title: null, + model: 'claude-opus-4-5', + messages: [], + updatedAt: now, + lastSeenAt: now, + }) + .returning({ id: copilotChats.id }) + + taskPubSub?.publishStatusChanged({ workspaceId, chatId: chat.id, type: 'created' }) + + return NextResponse.json({ success: true, id: chat.id }) + } catch (error) { + if (error instanceof z.ZodError) { + return createBadRequestResponse('workspaceId is required') + } + logger.error('Error creating mothership chat:', error) + return createInternalServerErrorResponse('Failed to create chat') + } +} diff --git a/apps/sim/app/api/mothership/events/route.ts b/apps/sim/app/api/mothership/events/route.ts new file mode 100644 index 00000000000..38abba7b33f --- /dev/null +++ b/apps/sim/app/api/mothership/events/route.ts @@ -0,0 +1,32 @@ +/** + * SSE endpoint for task status events. + * + * Pushes `task_status` events to the browser when tasks are + * started, completed, created, deleted, or renamed. + * + * Auth is handled via session cookies (EventSource sends cookies automatically). 
+ */ + +import { taskPubSub } from '@/lib/copilot/task-events' +import { createWorkspaceSSE } from '@/lib/events/sse-endpoint' + +export const dynamic = 'force-dynamic' + +export const GET = createWorkspaceSSE({ + label: 'mothership-events', + subscriptions: [ + { + subscribe: (workspaceId, send) => { + if (!taskPubSub) return () => {} + return taskPubSub.onStatusChanged((event) => { + if (event.workspaceId !== workspaceId) return + send('task_status', { + chatId: event.chatId, + type: event.type, + timestamp: Date.now(), + }) + }) + }, + }, + ], +}) diff --git a/apps/sim/app/api/mothership/execute/route.ts b/apps/sim/app/api/mothership/execute/route.ts new file mode 100644 index 00000000000..f7f2e72d71d --- /dev/null +++ b/apps/sim/app/api/mothership/execute/route.ts @@ -0,0 +1,126 @@ +import { createLogger } from '@sim/logger' +import { type NextRequest, NextResponse } from 'next/server' +import { z } from 'zod' +import { checkInternalAuth } from '@/lib/auth/hybrid' +import { buildIntegrationToolSchemas } from '@/lib/copilot/chat-payload' +import { orchestrateCopilotStream } from '@/lib/copilot/orchestrator' +import { generateWorkspaceContext } from '@/lib/copilot/workspace-context' +import { + assertActiveWorkspaceAccess, + getUserEntityPermissions, +} from '@/lib/workspaces/permissions/utils' + +const logger = createLogger('MothershipExecuteAPI') + +const MessageSchema = z.object({ + role: z.enum(['system', 'user', 'assistant']), + content: z.string(), +}) + +const ExecuteRequestSchema = z.object({ + messages: z.array(MessageSchema).min(1, 'At least one message is required'), + responseFormat: z.any().optional(), + workspaceId: z.string().min(1, 'workspaceId is required'), + userId: z.string().min(1, 'userId is required'), + chatId: z.string().optional(), +}) + +/** + * POST /api/mothership/execute + * + * Non-streaming endpoint for Mothership block execution within workflows. + * Called by the executor via internal JWT auth, not by the browser directly. 
+ * Consumes the Go SSE stream internally and returns a single JSON response. + */ +export async function POST(req: NextRequest) { + try { + const auth = await checkInternalAuth(req, { requireWorkflowId: false }) + if (!auth.success) { + return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 }) + } + + const body = await req.json() + const { messages, responseFormat, workspaceId, userId, chatId } = + ExecuteRequestSchema.parse(body) + + await assertActiveWorkspaceAccess(workspaceId, userId) + + const effectiveChatId = chatId || crypto.randomUUID() + const [workspaceContext, integrationTools, userPermission] = await Promise.all([ + generateWorkspaceContext(workspaceId, userId), + buildIntegrationToolSchemas(userId), + getUserEntityPermissions(userId, 'workspace', workspaceId).catch(() => null), + ]) + + const requestPayload: Record = { + messages, + responseFormat, + userId, + chatId: effectiveChatId, + mode: 'agent', + messageId: crypto.randomUUID(), + isHosted: true, + workspaceContext, + ...(integrationTools.length > 0 ? { integrationTools } : {}), + ...(userPermission ? { userPermission } : {}), + } + + const result = await orchestrateCopilotStream(requestPayload, { + userId, + workspaceId, + chatId: effectiveChatId, + goRoute: '/api/mothership/execute', + autoExecuteTools: true, + interactive: false, + }) + + if (!result.success) { + logger.error('Mothership execute failed', { + error: result.error, + errors: result.errors, + }) + return NextResponse.json( + { + error: result.error || 'Mothership execution failed', + content: result.content || '', + }, + { status: 500 } + ) + } + + const clientToolNames = new Set(integrationTools.map((t) => t.name)) + const clientToolCalls = (result.toolCalls || []).filter( + (tc: { name: string }) => clientToolNames.has(tc.name) || tc.name.startsWith('mcp-') + ) + + return NextResponse.json({ + content: result.content, + model: 'mothership', + tokens: result.usage + ? 
{ + prompt: result.usage.prompt, + completion: result.usage.completion, + total: (result.usage.prompt || 0) + (result.usage.completion || 0), + } + : {}, + cost: result.cost || undefined, + toolCalls: clientToolCalls, + }) + } catch (error) { + if (error instanceof z.ZodError) { + return NextResponse.json( + { error: 'Invalid request data', details: error.errors }, + { status: 400 } + ) + } + + logger.error('Mothership execute error', { + error: error instanceof Error ? error.message : 'Unknown error', + }) + + return NextResponse.json( + { error: error instanceof Error ? error.message : 'Internal server error' }, + { status: 500 } + ) + } +} diff --git a/apps/sim/app/api/organizations/[id]/invitations/[invitationId]/route.ts b/apps/sim/app/api/organizations/[id]/invitations/[invitationId]/route.ts index 3532981267f..d7b1df2a776 100644 --- a/apps/sim/app/api/organizations/[id]/invitations/[invitationId]/route.ts +++ b/apps/sim/app/api/organizations/[id]/invitations/[invitationId]/route.ts @@ -23,6 +23,7 @@ import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log' import { getSession } from '@/lib/auth' import { hasAccessControlAccess } from '@/lib/billing' import { syncUsageLimitsFromSubscription } from '@/lib/billing/core/usage' +import { isOrgPlan } from '@/lib/billing/plan-helpers' import { requireStripeClient } from '@/lib/billing/stripe-client' import { getBaseUrl } from '@/lib/core/utils/urls' import { syncWorkspaceEnvCredentials } from '@/lib/credentials/environment' @@ -325,7 +326,7 @@ export async function PUT( .limit(1) const orgSub = orgSubs[0] - const orgIsPaid = orgSub && (orgSub.plan === 'team' || orgSub.plan === 'enterprise') + const orgIsPaid = orgSub && isOrgPlan(orgSub.plan) if (orgIsPaid) { const userId = session.user.id diff --git a/apps/sim/app/api/organizations/[id]/seats/route.ts b/apps/sim/app/api/organizations/[id]/seats/route.ts index eaadf5717ae..f8ccc35221e 100644 --- 
a/apps/sim/app/api/organizations/[id]/seats/route.ts +++ b/apps/sim/app/api/organizations/[id]/seats/route.ts @@ -6,6 +6,7 @@ import { type NextRequest, NextResponse } from 'next/server' import { z } from 'zod' import { getSession } from '@/lib/auth' import { getPlanPricing } from '@/lib/billing/core/billing' +import { isTeam } from '@/lib/billing/plan-helpers' import { requireStripeClient } from '@/lib/billing/stripe-client' import { isBillingEnabled } from '@/lib/core/config/feature-flags' @@ -75,7 +76,7 @@ export async function PUT(request: NextRequest, { params }: { params: Promise<{ const orgSubscription = subscriptionRecord[0] // Only team plans support seat changes (not enterprise - those are handled manually) - if (orgSubscription.plan !== 'team') { + if (!isTeam(orgSubscription.plan)) { return NextResponse.json( { error: 'Seat changes are only available for Team plans' }, { status: 400 } @@ -174,7 +175,7 @@ export async function PUT(request: NextRequest, { params }: { params: Promise<{ .where(eq(subscription.id, orgSubscription.id)) // Update orgUsageLimit to reflect new seat count (seats × basePrice as minimum) - const { basePrice } = getPlanPricing('team') + const { basePrice } = getPlanPricing(orgSubscription.plan) const newMinimumLimit = newSeatCount * basePrice const orgData = await db diff --git a/apps/sim/app/api/referral-code/redeem/route.ts b/apps/sim/app/api/referral-code/redeem/route.ts index be3cbac9026..89b109dc6e6 100644 --- a/apps/sim/app/api/referral-code/redeem/route.ts +++ b/apps/sim/app/api/referral-code/redeem/route.ts @@ -24,6 +24,7 @@ import { z } from 'zod' import { getSession } from '@/lib/auth' import { getHighestPrioritySubscription } from '@/lib/billing/core/subscription' import { applyBonusCredits } from '@/lib/billing/credits/bonus' +import { isEnterprise, isTeam } from '@/lib/billing/plan-helpers' const logger = createLogger('ReferralCodeRedemption') @@ -43,15 +44,15 @@ export async function POST(request: Request) { const 
subscription = await getHighestPrioritySubscription(session.user.id) - if (subscription?.plan === 'enterprise') { + if (isEnterprise(subscription?.plan)) { return NextResponse.json({ redeemed: false, error: 'Enterprise accounts cannot redeem referral codes', }) } - const isTeam = subscription?.plan === 'team' - const orgId = isTeam ? subscription.referenceId : null + const isTeamSub = isTeam(subscription?.plan) + const orgId = isTeamSub ? subscription!.referenceId : null const normalizedCode = code.trim().toUpperCase() diff --git a/apps/sim/app/api/schedules/[id]/route.test.ts b/apps/sim/app/api/schedules/[id]/route.test.ts index ca0e723be5c..c970e41714e 100644 --- a/apps/sim/app/api/schedules/[id]/route.test.ts +++ b/apps/sim/app/api/schedules/[id]/route.test.ts @@ -24,11 +24,22 @@ vi.mock('@sim/db', () => databaseMock) vi.mock('@sim/db/schema', () => ({ workflow: { id: 'id', userId: 'userId', workspaceId: 'workspaceId' }, - workflowSchedule: { id: 'id', workflowId: 'workflowId', status: 'status' }, + workflowSchedule: { + id: 'id', + workflowId: 'workflowId', + status: 'status', + cronExpression: 'cronExpression', + timezone: 'timezone', + sourceType: 'sourceType', + sourceWorkspaceId: 'sourceWorkspaceId', + archivedAt: 'archivedAt', + }, })) vi.mock('drizzle-orm', () => ({ + and: vi.fn(), eq: vi.fn(), + isNull: vi.fn(), })) vi.mock('@/lib/core/utils/request', () => requestUtilsMock) @@ -100,13 +111,13 @@ describe('Schedule PUT API (Reactivate)', () => { }) describe('Request Validation', () => { - it('returns 400 when action is not reactivate', async () => { + it('returns 400 when action is not a valid enum value', async () => { mockDbChain([ [{ id: 'sched-1', workflowId: 'wf-1', status: 'disabled' }], [{ userId: 'user-1', workspaceId: null }], ]) - const res = await PUT(createRequest({ action: 'disable' }), createParams('sched-1')) + const res = await PUT(createRequest({ action: 'invalid-action' }), createParams('sched-1')) expect(res.status).toBe(400) const 
data = await res.json() diff --git a/apps/sim/app/api/schedules/[id]/route.ts b/apps/sim/app/api/schedules/[id]/route.ts index eb65f07b535..901e91392e8 100644 --- a/apps/sim/app/api/schedules/[id]/route.ts +++ b/apps/sim/app/api/schedules/[id]/route.ts @@ -1,7 +1,7 @@ import { db } from '@sim/db' import { workflowSchedule } from '@sim/db/schema' import { createLogger } from '@sim/logger' -import { eq } from 'drizzle-orm' +import { and, eq, isNull } from 'drizzle-orm' import { type NextRequest, NextResponse } from 'next/server' import { z } from 'zod' import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log' @@ -9,18 +9,100 @@ import { getSession } from '@/lib/auth' import { generateRequestId } from '@/lib/core/utils/request' import { validateCronExpression } from '@/lib/workflows/schedules/utils' import { authorizeWorkflowByWorkspacePermission } from '@/lib/workflows/utils' +import { verifyWorkspaceMembership } from '@/app/api/workflows/utils' const logger = createLogger('ScheduleAPI') export const dynamic = 'force-dynamic' -const scheduleUpdateSchema = z.object({ - action: z.literal('reactivate'), -}) +const scheduleUpdateSchema = z.discriminatedUnion('action', [ + z.object({ action: z.literal('reactivate') }), + z.object({ action: z.literal('disable') }), + z.object({ + action: z.literal('update'), + title: z.string().min(1).optional(), + prompt: z.string().min(1).optional(), + cronExpression: z.string().optional(), + timezone: z.string().optional(), + lifecycle: z.enum(['persistent', 'until_complete']).optional(), + maxRuns: z.number().nullable().optional(), + }), +]) + +type ScheduleRow = { + id: string + workflowId: string | null + status: string + cronExpression: string | null + timezone: string | null + sourceType: string | null + sourceWorkspaceId: string | null +} + +async function fetchAndAuthorize( + requestId: string, + scheduleId: string, + userId: string, + action: 'read' | 'write' +): Promise<{ schedule: ScheduleRow; workspaceId: 
string | null } | NextResponse> { + const [schedule] = await db + .select({ + id: workflowSchedule.id, + workflowId: workflowSchedule.workflowId, + status: workflowSchedule.status, + cronExpression: workflowSchedule.cronExpression, + timezone: workflowSchedule.timezone, + sourceType: workflowSchedule.sourceType, + sourceWorkspaceId: workflowSchedule.sourceWorkspaceId, + }) + .from(workflowSchedule) + .where(and(eq(workflowSchedule.id, scheduleId), isNull(workflowSchedule.archivedAt))) + .limit(1) + + if (!schedule) { + logger.warn(`[${requestId}] Schedule not found: ${scheduleId}`) + return NextResponse.json({ error: 'Schedule not found' }, { status: 404 }) + } + + if (schedule.sourceType === 'job') { + if (!schedule.sourceWorkspaceId) { + return NextResponse.json({ error: 'Job has no workspace' }, { status: 400 }) + } + const permission = await verifyWorkspaceMembership(userId, schedule.sourceWorkspaceId) + const canWrite = permission === 'admin' || permission === 'write' + if (!permission || (action === 'write' && !canWrite)) { + return NextResponse.json({ error: 'Not authorized' }, { status: 403 }) + } + return { schedule, workspaceId: schedule.sourceWorkspaceId } + } + + if (!schedule.workflowId) { + logger.warn(`[${requestId}] Schedule has no workflow: ${scheduleId}`) + return NextResponse.json({ error: 'Schedule has no associated workflow' }, { status: 400 }) + } + + const authorization = await authorizeWorkflowByWorkspacePermission({ + workflowId: schedule.workflowId, + userId, + action, + }) + + if (!authorization.workflow) { + logger.warn(`[${requestId}] Workflow not found for schedule: ${scheduleId}`) + return NextResponse.json({ error: 'Workflow not found' }, { status: 404 }) + } + + if (!authorization.allowed) { + logger.warn(`[${requestId}] User not authorized to modify schedule: ${scheduleId}`) + return NextResponse.json( + { error: authorization.message || 'Not authorized to modify this schedule' }, + { status: authorization.status } + ) + } + + 
return { schedule, workspaceId: authorization.workflow.workspaceId ?? null } +} -/** - * Reactivate a disabled schedule - */ export async function PUT(request: NextRequest, { params }: { params: Promise<{ id: string }> }) { const requestId = generateRequestId() @@ -40,44 +122,103 @@ export async function PUT(request: NextRequest, { params }: { params: Promise<{ return NextResponse.json({ error: 'Invalid request body' }, { status: 400 }) } - const [schedule] = await db - .select({ - id: workflowSchedule.id, - workflowId: workflowSchedule.workflowId, - status: workflowSchedule.status, - cronExpression: workflowSchedule.cronExpression, - timezone: workflowSchedule.timezone, + const result = await fetchAndAuthorize(requestId, scheduleId, session.user.id, 'write') + if (result instanceof NextResponse) return result + const { schedule, workspaceId } = result + + const { action } = validation.data + + if (action === 'disable') { + if (schedule.status === 'disabled') { + return NextResponse.json({ message: 'Schedule is already disabled' }) + } + + await db + .update(workflowSchedule) + .set({ status: 'disabled', nextRunAt: null, updatedAt: new Date() }) + .where(and(eq(workflowSchedule.id, scheduleId), isNull(workflowSchedule.archivedAt))) + + logger.info(`[${requestId}] Disabled schedule: ${scheduleId}`) + + recordAudit({ + workspaceId, + actorId: session.user.id, + action: AuditAction.SCHEDULE_UPDATED, + resourceType: AuditResourceType.SCHEDULE, + resourceId: scheduleId, + actorName: session.user.name ?? undefined, + actorEmail: session.user.email ?? 
undefined, + description: `Disabled schedule ${scheduleId}`, + metadata: {}, + request, }) - .from(workflowSchedule) - .where(eq(workflowSchedule.id, scheduleId)) - .limit(1) - if (!schedule) { - logger.warn(`[${requestId}] Schedule not found: ${scheduleId}`) - return NextResponse.json({ error: 'Schedule not found' }, { status: 404 }) + return NextResponse.json({ message: 'Schedule disabled successfully' }) } - const authorization = await authorizeWorkflowByWorkspacePermission({ - workflowId: schedule.workflowId, - userId: session.user.id, - action: 'write', - }) + if (action === 'update') { + if (schedule.sourceType !== 'job') { + return NextResponse.json( + { error: 'Only standalone job schedules can be edited' }, + { status: 400 } + ) + } - if (!authorization.workflow) { - logger.warn(`[${requestId}] Workflow not found for schedule: ${scheduleId}`) - return NextResponse.json({ error: 'Workflow not found' }, { status: 404 }) - } + const updates = validation.data + const setFields: Record = { updatedAt: new Date() } + + if (updates.title !== undefined) setFields.jobTitle = updates.title.trim() + if (updates.prompt !== undefined) setFields.prompt = updates.prompt.trim() + if (updates.timezone !== undefined) setFields.timezone = updates.timezone + if (updates.lifecycle !== undefined) { + setFields.lifecycle = updates.lifecycle + if (updates.lifecycle === 'persistent') { + setFields.maxRuns = null + } + } + if (updates.maxRuns !== undefined) setFields.maxRuns = updates.maxRuns + + if (updates.cronExpression !== undefined) { + const tz = updates.timezone ?? schedule.timezone ?? 
'UTC' + const cronResult = validateCronExpression(updates.cronExpression, tz) + if (!cronResult.isValid) { + return NextResponse.json( + { error: cronResult.error || 'Invalid cron expression' }, + { status: 400 } + ) + } + setFields.cronExpression = updates.cronExpression + if (schedule.status === 'active' && cronResult.nextRun) { + setFields.nextRunAt = cronResult.nextRun + } + } - if (!authorization.allowed) { - logger.warn(`[${requestId}] User not authorized to modify this schedule: ${scheduleId}`) - return NextResponse.json( - { error: authorization.message || 'Not authorized to modify this schedule' }, - { status: authorization.status } - ) + await db + .update(workflowSchedule) + .set(setFields) + .where(and(eq(workflowSchedule.id, scheduleId), isNull(workflowSchedule.archivedAt))) + + logger.info(`[${requestId}] Updated job schedule: ${scheduleId}`) + + recordAudit({ + workspaceId, + actorId: session.user.id, + action: AuditAction.SCHEDULE_UPDATED, + resourceType: AuditResourceType.SCHEDULE, + resourceId: scheduleId, + actorName: session.user.name ?? undefined, + actorEmail: session.user.email ?? 
undefined, + description: `Updated job schedule ${scheduleId}`, + metadata: {}, + request, + }) + + return NextResponse.json({ message: 'Schedule updated successfully' }) } + // reactivate if (schedule.status === 'active') { - return NextResponse.json({ message: 'Schedule is already active' }, { status: 200 }) + return NextResponse.json({ message: 'Schedule is already active' }) } if (!schedule.cronExpression) { @@ -96,35 +237,70 @@ export async function PUT(request: NextRequest, { params }: { params: Promise<{ await db .update(workflowSchedule) - .set({ - status: 'active', - failedCount: 0, - updatedAt: now, - nextRunAt, - }) - .where(eq(workflowSchedule.id, scheduleId)) + .set({ status: 'active', failedCount: 0, updatedAt: now, nextRunAt }) + .where(and(eq(workflowSchedule.id, scheduleId), isNull(workflowSchedule.archivedAt))) logger.info(`[${requestId}] Reactivated schedule: ${scheduleId}`) recordAudit({ - workspaceId: authorization.workflow.workspaceId ?? null, + workspaceId, actorId: session.user.id, action: AuditAction.SCHEDULE_UPDATED, resourceType: AuditResourceType.SCHEDULE, resourceId: scheduleId, actorName: session.user.name ?? undefined, actorEmail: session.user.email ?? 
undefined, - description: `Reactivated schedule for workflow ${schedule.workflowId}`, + description: `Reactivated schedule ${scheduleId}`, metadata: { cronExpression: schedule.cronExpression, timezone: schedule.timezone }, request, }) - return NextResponse.json({ - message: 'Schedule activated successfully', - nextRunAt, - }) + return NextResponse.json({ message: 'Schedule activated successfully', nextRunAt }) } catch (error) { logger.error(`[${requestId}] Error updating schedule`, error) return NextResponse.json({ error: 'Failed to update schedule' }, { status: 500 }) } } + +export async function DELETE( + request: NextRequest, + { params }: { params: Promise<{ id: string }> } +) { + const requestId = generateRequestId() + + try { + const { id: scheduleId } = await params + + const session = await getSession() + if (!session?.user?.id) { + logger.warn(`[${requestId}] Unauthorized schedule delete attempt`) + return NextResponse.json({ error: 'Unauthorized' }, { status: 401 }) + } + + const result = await fetchAndAuthorize(requestId, scheduleId, session.user.id, 'write') + if (result instanceof NextResponse) return result + const { schedule, workspaceId } = result + + await db.delete(workflowSchedule).where(eq(workflowSchedule.id, scheduleId)) + + logger.info(`[${requestId}] Deleted schedule: ${scheduleId}`) + + recordAudit({ + workspaceId, + actorId: session.user.id, + action: AuditAction.SCHEDULE_UPDATED, + resourceType: AuditResourceType.SCHEDULE, + resourceId: scheduleId, + actorName: session.user.name ?? undefined, + actorEmail: session.user.email ?? undefined, + description: `Deleted ${schedule.sourceType === 'job' ? 
'job' : 'schedule'} ${scheduleId}`, + metadata: {}, + request, + }) + + return NextResponse.json({ message: 'Schedule deleted successfully' }) + } catch (error) { + logger.error(`[${requestId}] Error deleting schedule`, error) + return NextResponse.json({ error: 'Failed to delete schedule' }, { status: 500 }) + } +} diff --git a/apps/sim/app/api/schedules/execute/route.test.ts b/apps/sim/app/api/schedules/execute/route.test.ts index c4bb5415333..cfdf6c3877b 100644 --- a/apps/sim/app/api/schedules/execute/route.test.ts +++ b/apps/sim/app/api/schedules/execute/route.test.ts @@ -71,6 +71,7 @@ vi.mock('@/lib/core/async-jobs', () => ({ vi.mock('drizzle-orm', () => ({ and: vi.fn((...conditions: unknown[]) => ({ type: 'and', conditions })), eq: vi.fn((field: unknown, value: unknown) => ({ field, value, type: 'eq' })), + ne: vi.fn((field: unknown, value: unknown) => ({ field, value, type: 'ne' })), lte: vi.fn((field: unknown, value: unknown) => ({ field, value, type: 'lte' })), lt: vi.fn((field: unknown, value: unknown) => ({ field, value, type: 'lt' })), not: vi.fn((condition: unknown) => ({ type: 'not', condition })), @@ -94,6 +95,7 @@ vi.mock('@sim/db', () => ({ nextRunAt: 'nextRunAt', lastQueuedAt: 'lastQueuedAt', deploymentVersionId: 'deploymentVersionId', + sourceType: 'sourceType', }, workflowDeploymentVersion: { id: 'id', @@ -165,7 +167,7 @@ describe('Scheduled Workflow Execution API Route', () => { }) it('should execute scheduled workflows with Trigger.dev disabled', async () => { - mockDbReturning.mockReturnValue(SINGLE_SCHEDULE) + mockDbReturning.mockReturnValueOnce(SINGLE_SCHEDULE).mockReturnValueOnce([]) const response = await GET(createMockRequest()) @@ -178,7 +180,7 @@ describe('Scheduled Workflow Execution API Route', () => { it('should queue schedules to Trigger.dev when enabled', async () => { mockFeatureFlags.isTriggerDevEnabled = true - mockDbReturning.mockReturnValue(SINGLE_SCHEDULE) + 
mockDbReturning.mockReturnValueOnce(SINGLE_SCHEDULE).mockReturnValueOnce([]) const response = await GET(createMockRequest()) @@ -189,7 +191,7 @@ describe('Scheduled Workflow Execution API Route', () => { }) it('should handle case with no due schedules', async () => { - mockDbReturning.mockReturnValue([]) + mockDbReturning.mockReturnValueOnce([]).mockReturnValueOnce([]) const response = await GET(createMockRequest()) @@ -200,7 +202,7 @@ describe('Scheduled Workflow Execution API Route', () => { }) it('should execute multiple schedules in parallel', async () => { - mockDbReturning.mockReturnValue(MULTIPLE_SCHEDULES) + mockDbReturning.mockReturnValueOnce(MULTIPLE_SCHEDULES).mockReturnValueOnce([]) const response = await GET(createMockRequest()) diff --git a/apps/sim/app/api/schedules/execute/route.ts b/apps/sim/app/api/schedules/execute/route.ts index fc87b07833a..cef36bfb25b 100644 --- a/apps/sim/app/api/schedules/execute/route.ts +++ b/apps/sim/app/api/schedules/execute/route.ts @@ -1,17 +1,33 @@ import { db, workflowDeploymentVersion, workflowSchedule } from '@sim/db' import { createLogger } from '@sim/logger' -import { and, eq, isNull, lt, lte, not, or, sql } from 'drizzle-orm' +import { and, eq, isNull, lt, lte, ne, not, or, sql } from 'drizzle-orm' import { type NextRequest, NextResponse } from 'next/server' import { v4 as uuidv4 } from 'uuid' import { verifyCronAuth } from '@/lib/auth/internal' import { getJobQueue, shouldExecuteInline } from '@/lib/core/async-jobs' import { generateRequestId } from '@/lib/core/utils/request' -import { executeScheduleJob } from '@/background/schedule-execution' +import { + executeJobInline, + executeScheduleJob, + releaseScheduleLock, +} from '@/background/schedule-execution' export const dynamic = 'force-dynamic' const logger = createLogger('ScheduledExecuteAPI') +const dueFilter = (queuedAt: Date) => + and( + isNull(workflowSchedule.archivedAt), + lte(workflowSchedule.nextRunAt, queuedAt), + not(eq(workflowSchedule.status, 
'disabled')), + ne(workflowSchedule.status, 'completed'), + or( + isNull(workflowSchedule.lastQueuedAt), + lt(workflowSchedule.lastQueuedAt, workflowSchedule.nextRunAt) + ) + ) + export async function GET(request: NextRequest) { const requestId = generateRequestId() logger.info(`[${requestId}] Scheduled execution triggered at ${new Date().toISOString()}`) @@ -24,20 +40,14 @@ export async function GET(request: NextRequest) { const queuedAt = new Date() try { + // Workflow schedules (require active deployment) const dueSchedules = await db .update(workflowSchedule) - .set({ - lastQueuedAt: queuedAt, - updatedAt: queuedAt, - }) + .set({ lastQueuedAt: queuedAt, updatedAt: queuedAt }) .where( and( - lte(workflowSchedule.nextRunAt, queuedAt), - not(eq(workflowSchedule.status, 'disabled')), - or( - isNull(workflowSchedule.lastQueuedAt), - lt(workflowSchedule.lastQueuedAt, workflowSchedule.nextRunAt) - ), + dueFilter(queuedAt), + or(eq(workflowSchedule.sourceType, 'workflow'), isNull(workflowSchedule.sourceType)), sql`${workflowSchedule.deploymentVersionId} = (select ${workflowDeploymentVersion.id} from ${workflowDeploymentVersion} where ${workflowDeploymentVersion.workflowId} = ${workflowSchedule.workflowId} and ${workflowDeploymentVersion.isActive} = true)` ) ) @@ -50,20 +60,37 @@ export async function GET(request: NextRequest) { failedCount: workflowSchedule.failedCount, nextRunAt: workflowSchedule.nextRunAt, lastQueuedAt: workflowSchedule.lastQueuedAt, + sourceType: workflowSchedule.sourceType, + }) + + // Jobs (no deployment, dispatch inline) + const dueJobs = await db + .update(workflowSchedule) + .set({ lastQueuedAt: queuedAt, updatedAt: queuedAt }) + .where(and(dueFilter(queuedAt), eq(workflowSchedule.sourceType, 'job'))) + .returning({ + id: workflowSchedule.id, + cronExpression: workflowSchedule.cronExpression, + failedCount: workflowSchedule.failedCount, + lastQueuedAt: workflowSchedule.lastQueuedAt, + sourceType: workflowSchedule.sourceType, }) - 
logger.info(`[${requestId}] Processing ${dueSchedules.length} due scheduled workflows`) + const totalCount = dueSchedules.length + dueJobs.length + logger.info( + `[${requestId}] Processing ${totalCount} due items (${dueSchedules.length} schedules, ${dueJobs.length} jobs)` + ) const jobQueue = await getJobQueue() - const queuePromises = dueSchedules.map(async (schedule) => { + const schedulePromises = dueSchedules.map(async (schedule) => { const queueTime = schedule.lastQueuedAt ?? queuedAt const executionId = uuidv4() const correlation = { executionId, requestId, source: 'schedule' as const, - workflowId: schedule.workflowId, + workflowId: schedule.workflowId!, scheduleId: schedule.id, triggerType: 'schedule', scheduledFor: schedule.nextRunAt?.toISOString(), @@ -71,7 +98,7 @@ export async function GET(request: NextRequest) { const payload = { scheduleId: schedule.id, - workflowId: schedule.workflowId, + workflowId: schedule.workflowId!, executionId, requestId, correlation, @@ -85,53 +112,91 @@ export async function GET(request: NextRequest) { try { const jobId = await jobQueue.enqueue('schedule-execution', payload, { - metadata: { workflowId: schedule.workflowId, correlation }, + metadata: { workflowId: schedule.workflowId ?? undefined, correlation }, }) logger.info( `[${requestId}] Queued schedule execution task ${jobId} for workflow ${schedule.workflowId}` ) if (shouldExecuteInline()) { - void (async () => { - try { - await jobQueue.startJob(jobId) - const output = await executeScheduleJob(payload) - await jobQueue.completeJob(jobId, output) - } catch (error) { - const errorMessage = error instanceof Error ? 
error.message : String(error) - logger.error( - `[${requestId}] Schedule execution failed for workflow ${schedule.workflowId}`, - { jobId, error: errorMessage } - ) - try { - await jobQueue.markJobFailed(jobId, errorMessage) - } catch (markFailedError) { - logger.error(`[${requestId}] Failed to mark job as failed`, { - jobId, - error: - markFailedError instanceof Error - ? markFailedError.message - : String(markFailedError), - }) + try { + await jobQueue.startJob(jobId) + const output = await executeScheduleJob(payload) + await jobQueue.completeJob(jobId, output) + } catch (error) { + const errorMessage = error instanceof Error ? error.message : String(error) + logger.error( + `[${requestId}] Schedule execution failed for workflow ${schedule.workflowId}`, + { + jobId, + error: errorMessage, } + ) + try { + await jobQueue.markJobFailed(jobId, errorMessage) + } catch (markFailedError) { + logger.error(`[${requestId}] Failed to mark job as failed`, { + jobId, + error: + markFailedError instanceof Error + ? markFailedError.message + : String(markFailedError), + }) } - })() + await releaseScheduleLock( + schedule.id, + requestId, + queuedAt, + `Failed to release lock for schedule ${schedule.id} after inline execution failure` + ) + } } } catch (error) { logger.error( `[${requestId}] Failed to queue schedule execution for workflow ${schedule.workflowId}`, error ) + await releaseScheduleLock( + schedule.id, + requestId, + queuedAt, + `Failed to release lock for schedule ${schedule.id} after queue failure` + ) + } + }) + + // Jobs always execute inline (no TriggerDev) + const jobPromises = dueJobs.map(async (job) => { + const queueTime = job.lastQueuedAt ?? 
queuedAt + const payload = { + scheduleId: job.id, + cronExpression: job.cronExpression || undefined, + failedCount: job.failedCount || 0, + now: queueTime.toISOString(), + } + + try { + await executeJobInline(payload) + } catch (error) { + logger.error(`[${requestId}] Job execution failed for ${job.id}`, { + error: error instanceof Error ? error.message : String(error), + }) + await releaseScheduleLock( + job.id, + requestId, + queuedAt, + `Failed to release lock for job ${job.id}` + ) } }) - await Promise.allSettled(queuePromises) + await Promise.allSettled([...schedulePromises, ...jobPromises]) - logger.info(`[${requestId}] Queued ${dueSchedules.length} schedule executions`) + logger.info(`[${requestId}] Processed ${totalCount} items`) return NextResponse.json({ message: 'Scheduled workflow executions processed', - executedCount: dueSchedules.length, + executedCount: totalCount, }) } catch (error: any) { logger.error(`[${requestId}] Error in scheduled execution handler`, error) diff --git a/apps/sim/app/api/schedules/route.ts b/apps/sim/app/api/schedules/route.ts index 575acd45f9e..9c91530b985 100644 --- a/apps/sim/app/api/schedules/route.ts +++ b/apps/sim/app/api/schedules/route.ts @@ -1,21 +1,28 @@ import { db } from '@sim/db' -import { workflowDeploymentVersion, workflowSchedule } from '@sim/db/schema' +import { workflow, workflowDeploymentVersion, workflowSchedule } from '@sim/db/schema' import { createLogger } from '@sim/logger' import { and, eq, isNull, or } from 'drizzle-orm' import { type NextRequest, NextResponse } from 'next/server' import { getSession } from '@/lib/auth' import { generateRequestId } from '@/lib/core/utils/request' +import { validateCronExpression } from '@/lib/workflows/schedules/utils' import { authorizeWorkflowByWorkspacePermission } from '@/lib/workflows/utils' +import { verifyWorkspaceMembership } from '@/app/api/workflows/utils' const logger = createLogger('ScheduledAPI') /** - * Get schedule information for a workflow + * Get 
schedule information for a workflow, or all schedules for a workspace. + * + * Query params (choose one): + * - workflowId + optional blockId → single schedule for one workflow + * - workspaceId → all schedules across the workspace */ export async function GET(req: NextRequest) { const requestId = generateRequestId() const url = new URL(req.url) const workflowId = url.searchParams.get('workflowId') + const workspaceId = url.searchParams.get('workspaceId') const blockId = url.searchParams.get('blockId') try { @@ -25,8 +32,15 @@ export async function GET(req: NextRequest) { return NextResponse.json({ error: 'Unauthorized' }, { status: 401 }) } + if (workspaceId) { + return handleWorkspaceSchedules(requestId, session.user.id, workspaceId) + } + if (!workflowId) { - return NextResponse.json({ error: 'Missing workflowId parameter' }, { status: 400 }) + return NextResponse.json( + { error: 'Missing workflowId or workspaceId parameter' }, + { status: 400 } + ) } const authorization = await authorizeWorkflowByWorkspacePermission({ @@ -66,6 +80,7 @@ export async function GET(req: NextRequest) { .where( and( ...conditions, + isNull(workflowSchedule.archivedAt), or( eq(workflowSchedule.deploymentVersionId, workflowDeploymentVersion.id), and(isNull(workflowDeploymentVersion.id), isNull(workflowSchedule.deploymentVersionId)) @@ -99,3 +114,175 @@ export async function GET(req: NextRequest) { return NextResponse.json({ error: 'Failed to retrieve workflow schedule' }, { status: 500 }) } } + +async function handleWorkspaceSchedules(requestId: string, userId: string, workspaceId: string) { + const hasPermission = await verifyWorkspaceMembership(userId, workspaceId) + if (!hasPermission) { + return NextResponse.json({ error: 'Not authorized' }, { status: 403 }) + } + + logger.info(`[${requestId}] Getting all schedules for workspace ${workspaceId}`) + + const [workflowRows, jobRows] = await Promise.all([ + db + .select({ + schedule: workflowSchedule, + workflowName: workflow.name, + 
workflowColor: workflow.color, + }) + .from(workflowSchedule) + .innerJoin(workflow, eq(workflow.id, workflowSchedule.workflowId)) + .leftJoin( + workflowDeploymentVersion, + and( + eq(workflowDeploymentVersion.workflowId, workflowSchedule.workflowId), + eq(workflowDeploymentVersion.isActive, true) + ) + ) + .where( + and( + eq(workflow.workspaceId, workspaceId), + isNull(workflow.archivedAt), + eq(workflowSchedule.triggerType, 'schedule'), + isNull(workflowSchedule.archivedAt), + or(eq(workflowSchedule.sourceType, 'workflow'), isNull(workflowSchedule.sourceType)), + or( + eq(workflowSchedule.deploymentVersionId, workflowDeploymentVersion.id), + and(isNull(workflowDeploymentVersion.id), isNull(workflowSchedule.deploymentVersionId)) + ) + ) + ), + db + .select({ schedule: workflowSchedule }) + .from(workflowSchedule) + .where( + and( + eq(workflowSchedule.sourceWorkspaceId, workspaceId), + eq(workflowSchedule.sourceType, 'job'), + isNull(workflowSchedule.archivedAt) + ) + ), + ]) + + const headers = new Headers() + headers.set('Cache-Control', 'no-store, max-age=0') + + const schedules = [ + ...workflowRows.map((r) => ({ + ...r.schedule, + workflowName: r.workflowName, + workflowColor: r.workflowColor, + })), + ...jobRows.map((r) => ({ + ...r.schedule, + workflowName: null, + workflowColor: null, + })), + ] + + return NextResponse.json({ schedules }, { headers }) +} + +/** + * Create a standalone scheduled job. + * + * Body: { workspaceId, title, prompt, cronExpression, timezone, lifecycle?, maxRuns?, startDate? 
} + */ +export async function POST(req: NextRequest) { + const requestId = generateRequestId() + + try { + const session = await getSession() + if (!session?.user?.id) { + logger.warn(`[${requestId}] Unauthorized schedule creation attempt`) + return NextResponse.json({ error: 'Unauthorized' }, { status: 401 }) + } + + const body = await req.json() + const { + workspaceId, + title, + prompt, + cronExpression, + timezone = 'UTC', + lifecycle = 'persistent', + maxRuns, + startDate, + } = body as { + workspaceId: string + title: string + prompt: string + cronExpression: string + timezone?: string + lifecycle?: 'persistent' | 'until_complete' + maxRuns?: number + startDate?: string + } + + if (!workspaceId || !title?.trim() || !prompt?.trim() || !cronExpression?.trim()) { + return NextResponse.json( + { error: 'Missing required fields: workspaceId, title, prompt, cronExpression' }, + { status: 400 } + ) + } + + const hasPermission = await verifyWorkspaceMembership(session.user.id, workspaceId) + if (!hasPermission) { + return NextResponse.json({ error: 'Not authorized' }, { status: 403 }) + } + + const validation = validateCronExpression(cronExpression, timezone) + if (!validation.isValid) { + return NextResponse.json( + { error: validation.error || 'Invalid cron expression' }, + { status: 400 } + ) + } + + let nextRunAt = validation.nextRun! + if (startDate) { + const start = new Date(startDate) + if (start > new Date()) { + nextRunAt = start + } + } + + const now = new Date() + const id = crypto.randomUUID() + + await db.insert(workflowSchedule).values({ + id, + cronExpression, + triggerType: 'schedule', + sourceType: 'job', + status: 'active', + timezone, + nextRunAt, + createdAt: now, + updatedAt: now, + failedCount: 0, + jobTitle: title.trim(), + prompt: prompt.trim(), + lifecycle, + maxRuns: maxRuns ?? 
null, + runCount: 0, + sourceWorkspaceId: workspaceId, + sourceUserId: session.user.id, + }) + + logger.info(`[${requestId}] Created job schedule ${id}`, { + title, + cronExpression, + timezone, + lifecycle, + }) + + return NextResponse.json( + { schedule: { id, status: 'active', cronExpression, nextRunAt } }, + { status: 201 } + ) + } catch (error) { + logger.error(`[${requestId}] Error creating schedule`, error) + return NextResponse.json({ error: 'Failed to create schedule' }, { status: 500 }) + } +} diff --git a/apps/sim/app/api/skills/route.ts b/apps/sim/app/api/skills/route.ts index cf0b76c84dc..224edf44dee 100644 --- a/apps/sim/app/api/skills/route.ts +++ b/apps/sim/app/api/skills/route.ts @@ -1,12 +1,9 @@ -import { db } from '@sim/db' -import { skill } from '@sim/db/schema' import { createLogger } from '@sim/logger' -import { and, desc, eq } from 'drizzle-orm' import { type NextRequest, NextResponse } from 'next/server' import { z } from 'zod' import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid' import { generateRequestId } from '@/lib/core/utils/request' -import { upsertSkills } from '@/lib/workflows/skills/operations' +import { deleteSkill, listSkills, upsertSkills } from '@/lib/workflows/skills/operations' import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils' const logger = createLogger('SkillsAPI') @@ -53,11 +50,7 @@ export async function GET(request: NextRequest) { return NextResponse.json({ error: 'Access denied' }, { status: 403 }) } - const result = await db - .select() - .from(skill) - .where(eq(skill.workspaceId, workspaceId)) - .orderBy(desc(skill.createdAt)) + const result = await listSkills({ workspaceId }) return NextResponse.json({ data: result }, { status: 200 }) } catch (error) { @@ -159,20 +152,12 @@ export async function DELETE(request: NextRequest) { return NextResponse.json({ error: 'Write permission required' }, { status: 403 }) } - const existingSkill = await db.select().from(skill).where(eq(skill.id, 
skillId)).limit(1) - - if (existingSkill.length === 0) { + const deleted = await deleteSkill({ skillId, workspaceId }) + if (!deleted) { logger.warn(`[${requestId}] Skill not found: ${skillId}`) return NextResponse.json({ error: 'Skill not found' }, { status: 404 }) } - if (existingSkill[0].workspaceId !== workspaceId) { - logger.warn(`[${requestId}] Skill ${skillId} does not belong to workspace ${workspaceId}`) - return NextResponse.json({ error: 'Skill not found' }, { status: 404 }) - } - - await db.delete(skill).where(and(eq(skill.id, skillId), eq(skill.workspaceId, workspaceId))) - logger.info(`[${requestId}] Deleted skill: ${skillId}`) return NextResponse.json({ success: true }) } catch (error) { diff --git a/apps/sim/app/api/superuser/import-workflow/route.ts b/apps/sim/app/api/superuser/import-workflow/route.ts index 3998792993d..e9e4f5bbe5e 100644 --- a/apps/sim/app/api/superuser/import-workflow/route.ts +++ b/apps/sim/app/api/superuser/import-workflow/route.ts @@ -1,7 +1,7 @@ import { db } from '@sim/db' import { copilotChats, workflow, workspace } from '@sim/db/schema' import { createLogger } from '@sim/logger' -import { eq } from 'drizzle-orm' +import { and, eq, isNull } from 'drizzle-orm' import { type NextRequest, NextResponse } from 'next/server' import { getSession } from '@/lib/auth' import { verifyEffectiveSuperUser } from '@/lib/templates/permissions' @@ -11,6 +11,7 @@ import { saveWorkflowToNormalizedTables, } from '@/lib/workflows/persistence/utils' import { sanitizeForExport } from '@/lib/workflows/sanitization/json-sanitizer' +import { deduplicateWorkflowName } from '@/lib/workflows/utils' const logger = createLogger('SuperUserImportWorkflow') @@ -63,7 +64,7 @@ export async function POST(request: NextRequest) { const [targetWorkspace] = await db .select({ id: workspace.id, ownerId: workspace.ownerId }) .from(workspace) - .where(eq(workspace.id, targetWorkspaceId)) + .where(and(eq(workspace.id, targetWorkspaceId), isNull(workspace.archivedAt))) 
.limit(1) if (!targetWorkspace) { @@ -119,13 +120,18 @@ export async function POST(request: NextRequest) { // Create new workflow record const newWorkflowId = crypto.randomUUID() const now = new Date() + const dedupedName = await deduplicateWorkflowName( + `[Debug Import] ${sourceWorkflow.name}`, + targetWorkspaceId, + null + ) await db.insert(workflow).values({ id: newWorkflowId, userId: session.user.id, workspaceId: targetWorkspaceId, - folderId: null, // Don't copy folder association - name: `[Debug Import] ${sourceWorkflow.name}`, + folderId: null, + name: dedupedName, description: sourceWorkflow.description, color: sourceWorkflow.color, lastSynced: now, diff --git a/apps/sim/app/api/table/[tableId]/columns/route.ts b/apps/sim/app/api/table/[tableId]/columns/route.ts new file mode 100644 index 00000000000..de69649bf0a --- /dev/null +++ b/apps/sim/app/api/table/[tableId]/columns/route.ts @@ -0,0 +1,231 @@ +import { createLogger } from '@sim/logger' +import { type NextRequest, NextResponse } from 'next/server' +import { z } from 'zod' +import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid' +import { generateRequestId } from '@/lib/core/utils/request' +import { + addTableColumn, + deleteColumn, + renameColumn, + updateColumnConstraints, + updateColumnType, +} from '@/lib/table' +import { + accessError, + CreateColumnSchema, + checkAccess, + DeleteColumnSchema, + normalizeColumn, + UpdateColumnSchema, +} from '@/app/api/table/utils' + +const logger = createLogger('TableColumnsAPI') + +interface ColumnsRouteParams { + params: Promise<{ tableId: string }> +} + +/** POST /api/table/[tableId]/columns - Adds a column to the table schema. 
*/ +export async function POST(request: NextRequest, { params }: ColumnsRouteParams) { + const requestId = generateRequestId() + const { tableId } = await params + + try { + const authResult = await checkSessionOrInternalAuth(request, { requireWorkflowId: false }) + if (!authResult.success || !authResult.userId) { + logger.warn(`[${requestId}] Unauthorized column creation attempt`) + return NextResponse.json({ error: 'Authentication required' }, { status: 401 }) + } + + const body = await request.json() + const validated = CreateColumnSchema.parse(body) + + const result = await checkAccess(tableId, authResult.userId, 'write') + if (!result.ok) return accessError(result, requestId, tableId) + + const { table } = result + + if (table.workspaceId !== validated.workspaceId) { + return NextResponse.json({ error: 'Invalid workspace ID' }, { status: 400 }) + } + + const updatedTable = await addTableColumn(tableId, validated.column, requestId) + + return NextResponse.json({ + success: true, + data: { + columns: updatedTable.schema.columns.map(normalizeColumn), + }, + }) + } catch (error) { + if (error instanceof z.ZodError) { + return NextResponse.json( + { error: 'Invalid request data', details: error.errors }, + { status: 400 } + ) + } + + if (error instanceof Error) { + if (error.message.includes('already exists') || error.message.includes('maximum column')) { + return NextResponse.json({ error: error.message }, { status: 400 }) + } + if (error.message === 'Table not found') { + return NextResponse.json({ error: error.message }, { status: 404 }) + } + } + + logger.error(`[${requestId}] Error adding column to table ${tableId}:`, error) + return NextResponse.json({ error: 'Failed to add column' }, { status: 500 }) + } +} + +/** PATCH /api/table/[tableId]/columns - Updates a column (rename, type change, constraints). 
*/ +export async function PATCH(request: NextRequest, { params }: ColumnsRouteParams) { + const requestId = generateRequestId() + const { tableId } = await params + + try { + const authResult = await checkSessionOrInternalAuth(request, { requireWorkflowId: false }) + if (!authResult.success || !authResult.userId) { + logger.warn(`[${requestId}] Unauthorized column update attempt`) + return NextResponse.json({ error: 'Authentication required' }, { status: 401 }) + } + + const body = await request.json() + const validated = UpdateColumnSchema.parse(body) + + const result = await checkAccess(tableId, authResult.userId, 'write') + if (!result.ok) return accessError(result, requestId, tableId) + + const { table } = result + + if (table.workspaceId !== validated.workspaceId) { + return NextResponse.json({ error: 'Invalid workspace ID' }, { status: 400 }) + } + + const { updates } = validated + let updatedTable = null + + if (updates.name) { + updatedTable = await renameColumn( + { tableId, oldName: validated.columnName, newName: updates.name }, + requestId + ) + } + + if (updates.type) { + updatedTable = await updateColumnType( + { tableId, columnName: updates.name ?? validated.columnName, newType: updates.type }, + requestId + ) + } + + if (updates.required !== undefined || updates.unique !== undefined) { + updatedTable = await updateColumnConstraints( + { + tableId, + columnName: updates.name ?? validated.columnName, + ...(updates.required !== undefined ? { required: updates.required } : {}), + ...(updates.unique !== undefined ? 
{ unique: updates.unique } : {}), + }, + requestId + ) + } + + if (!updatedTable) { + return NextResponse.json({ error: 'No updates specified' }, { status: 400 }) + } + + return NextResponse.json({ + success: true, + data: { + columns: updatedTable.schema.columns.map(normalizeColumn), + }, + }) + } catch (error) { + if (error instanceof z.ZodError) { + return NextResponse.json( + { error: 'Invalid request data', details: error.errors }, + { status: 400 } + ) + } + + if (error instanceof Error) { + const msg = error.message + if (msg.includes('not found') || msg.includes('Table not found')) { + return NextResponse.json({ error: msg }, { status: 404 }) + } + if ( + msg.includes('already exists') || + msg.includes('Cannot delete the last column') || + msg.includes('Cannot set column') || + msg.includes('Invalid column') || + msg.includes('exceeds maximum') || + msg.includes('incompatible') || + msg.includes('duplicate') + ) { + return NextResponse.json({ error: msg }, { status: 400 }) + } + } + + logger.error(`[${requestId}] Error updating column in table ${tableId}:`, error) + return NextResponse.json({ error: 'Failed to update column' }, { status: 500 }) + } +} + +/** DELETE /api/table/[tableId]/columns - Deletes a column from the table schema. 
*/ +export async function DELETE(request: NextRequest, { params }: ColumnsRouteParams) { + const requestId = generateRequestId() + const { tableId } = await params + + try { + const authResult = await checkSessionOrInternalAuth(request, { requireWorkflowId: false }) + if (!authResult.success || !authResult.userId) { + logger.warn(`[${requestId}] Unauthorized column deletion attempt`) + return NextResponse.json({ error: 'Authentication required' }, { status: 401 }) + } + + const body = await request.json() + const validated = DeleteColumnSchema.parse(body) + + const result = await checkAccess(tableId, authResult.userId, 'write') + if (!result.ok) return accessError(result, requestId, tableId) + + const { table } = result + + if (table.workspaceId !== validated.workspaceId) { + return NextResponse.json({ error: 'Invalid workspace ID' }, { status: 400 }) + } + + const updatedTable = await deleteColumn( + { tableId, columnName: validated.columnName }, + requestId + ) + + return NextResponse.json({ + success: true, + data: { + columns: updatedTable.schema.columns.map(normalizeColumn), + }, + }) + } catch (error) { + if (error instanceof z.ZodError) { + return NextResponse.json( + { error: 'Invalid request data', details: error.errors }, + { status: 400 } + ) + } + + if (error instanceof Error) { + if (error.message.includes('not found') || error.message === 'Table not found') { + return NextResponse.json({ error: error.message }, { status: 404 }) + } + if (error.message.includes('Cannot delete') || error.message.includes('last column')) { + return NextResponse.json({ error: error.message }, { status: 400 }) + } + } + + logger.error(`[${requestId}] Error deleting column from table ${tableId}:`, error) + return NextResponse.json({ error: 'Failed to delete column' }, { status: 500 }) + } +} diff --git a/apps/sim/app/api/table/[tableId]/metadata/route.ts b/apps/sim/app/api/table/[tableId]/metadata/route.ts new file mode 100644 index 00000000000..5ae158e3347 --- /dev/null 
+++ b/apps/sim/app/api/table/[tableId]/metadata/route.ts @@ -0,0 +1,65 @@ +import { createLogger } from '@sim/logger' +import { type NextRequest, NextResponse } from 'next/server' +import { z } from 'zod' +import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid' +import { generateRequestId } from '@/lib/core/utils/request' +import type { TableMetadata } from '@/lib/table' +import { updateTableMetadata } from '@/lib/table' +import { accessError, checkAccess } from '@/app/api/table/utils' + +const logger = createLogger('TableMetadataAPI') + +const MetadataSchema = z.object({ + workspaceId: z.string().min(1, 'Workspace ID is required'), + metadata: z.object({ + columnWidths: z.record(z.number().positive()).optional(), + }), +}) + +interface TableRouteParams { + params: Promise<{ tableId: string }> +} + +/** PUT /api/table/[tableId]/metadata - Update table UI metadata (column widths, etc.) */ +export async function PUT(request: NextRequest, { params }: TableRouteParams) { + const requestId = generateRequestId() + const { tableId } = await params + + try { + const authResult = await checkSessionOrInternalAuth(request, { requireWorkflowId: false }) + if (!authResult.success || !authResult.userId) { + logger.warn(`[${requestId}] Unauthorized metadata update attempt`) + return NextResponse.json({ error: 'Authentication required' }, { status: 401 }) + } + + const body = await request.json() + const validated = MetadataSchema.parse(body) + + const result = await checkAccess(tableId, authResult.userId, 'write') + if (!result.ok) return accessError(result, requestId, tableId) + + const { table } = result + + if (table.workspaceId !== validated.workspaceId) { + return NextResponse.json({ error: 'Invalid workspace ID' }, { status: 400 }) + } + + const updated = await updateTableMetadata( + tableId, + validated.metadata, + table.metadata as TableMetadata | null + ) + + return NextResponse.json({ success: true, data: { metadata: updated } }) + } catch (error) { + if (error 
instanceof z.ZodError) { + return NextResponse.json( + { error: 'Validation error', details: error.errors }, + { status: 400 } + ) + } + + logger.error(`[${requestId}] Error updating table metadata:`, error) + return NextResponse.json({ error: 'Failed to update metadata' }, { status: 500 }) + } +} diff --git a/apps/sim/app/api/table/[tableId]/restore/route.ts b/apps/sim/app/api/table/[tableId]/restore/route.ts new file mode 100644 index 00000000000..8622f849f1d --- /dev/null +++ b/apps/sim/app/api/table/[tableId]/restore/route.ts @@ -0,0 +1,45 @@ +import { createLogger } from '@sim/logger' +import { type NextRequest, NextResponse } from 'next/server' +import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid' +import { generateRequestId } from '@/lib/core/utils/request' +import { getTableById, restoreTable } from '@/lib/table' +import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils' + +const logger = createLogger('RestoreTableAPI') + +export async function POST( + request: NextRequest, + { params }: { params: Promise<{ tableId: string }> } +) { + const requestId = generateRequestId() + const { tableId } = await params + + try { + const auth = await checkSessionOrInternalAuth(request, { requireWorkflowId: false }) + if (!auth.success || !auth.userId) { + return NextResponse.json({ error: 'Authentication required' }, { status: 401 }) + } + + const table = await getTableById(tableId, { includeArchived: true }) + if (!table) { + return NextResponse.json({ error: 'Table not found' }, { status: 404 }) + } + + const permission = await getUserEntityPermissions(auth.userId, 'workspace', table.workspaceId) + if (permission !== 'admin' && permission !== 'write') { + return NextResponse.json({ error: 'Insufficient permissions' }, { status: 403 }) + } + + await restoreTable(tableId, requestId) + + logger.info(`[${requestId}] Restored table ${tableId}`) + + return NextResponse.json({ success: true }) + } catch (error) { + logger.error(`[${requestId}] 
Error restoring table ${tableId}`, error) + return NextResponse.json( + { error: error instanceof Error ? error.message : 'Internal server error' }, + { status: 500 } + ) + } +} diff --git a/apps/sim/app/api/table/[tableId]/route.ts b/apps/sim/app/api/table/[tableId]/route.ts index 8f8e7f0df91..2341c9f8ad1 100644 --- a/apps/sim/app/api/table/[tableId]/route.ts +++ b/apps/sim/app/api/table/[tableId]/route.ts @@ -3,8 +3,8 @@ import { type NextRequest, NextResponse } from 'next/server' import { z } from 'zod' import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid' import { generateRequestId } from '@/lib/core/utils/request' -import { deleteTable, type TableSchema } from '@/lib/table' -import { accessError, checkAccess, normalizeColumn, verifyTableWorkspace } from '../utils' +import { deleteTable, NAME_PATTERN, renameTable, TABLE_LIMITS, type TableSchema } from '@/lib/table' +import { accessError, checkAccess, normalizeColumn } from '@/app/api/table/utils' const logger = createLogger('TableDetailAPI') @@ -38,11 +38,7 @@ export async function GET(request: NextRequest, { params }: TableRouteParams) { const { table } = result - const isValidWorkspace = await verifyTableWorkspace(tableId, validated.workspaceId) - if (!isValidWorkspace) { - logger.warn( - `[${requestId}] Workspace ID mismatch for table ${tableId}. Provided: ${validated.workspaceId}, Actual: ${table.workspaceId}` - ) + if (table.workspaceId !== validated.workspaceId) { return NextResponse.json({ error: 'Invalid workspace ID' }, { status: 400 }) } @@ -60,6 +56,7 @@ export async function GET(request: NextRequest, { params }: TableRouteParams) { schema: { columns: schemaData.columns.map(normalizeColumn), }, + metadata: table.metadata ?? null, rowCount: table.rowCount, maxRows: table.maxRows, createdAt: @@ -86,7 +83,68 @@ export async function GET(request: NextRequest, { params }: TableRouteParams) { } } -/** DELETE /api/table/[tableId] - Deletes a table and all its rows. 
*/ +const PatchTableSchema = z.object({ + workspaceId: z.string().min(1, 'Workspace ID is required'), + name: z + .string() + .min(1, 'Name is required') + .max( + TABLE_LIMITS.MAX_TABLE_NAME_LENGTH, + `Name must be at most ${TABLE_LIMITS.MAX_TABLE_NAME_LENGTH} characters` + ) + .regex( + NAME_PATTERN, + 'Name must start with letter or underscore, followed by alphanumeric or underscore' + ), +}) + +/** PATCH /api/table/[tableId] - Renames a table. */ +export async function PATCH(request: NextRequest, { params }: TableRouteParams) { + const requestId = generateRequestId() + const { tableId } = await params + + try { + const authResult = await checkSessionOrInternalAuth(request, { requireWorkflowId: false }) + if (!authResult.success || !authResult.userId) { + logger.warn(`[${requestId}] Unauthorized table rename attempt`) + return NextResponse.json({ error: 'Authentication required' }, { status: 401 }) + } + + const body = await request.json() + const validated = PatchTableSchema.parse(body) + + const result = await checkAccess(tableId, authResult.userId, 'write') + if (!result.ok) return accessError(result, requestId, tableId) + + const { table } = result + + if (table.workspaceId !== validated.workspaceId) { + return NextResponse.json({ error: 'Invalid workspace ID' }, { status: 400 }) + } + + const updated = await renameTable(tableId, validated.name, requestId) + + return NextResponse.json({ + success: true, + data: { table: updated }, + }) + } catch (error) { + if (error instanceof z.ZodError) { + return NextResponse.json( + { error: 'Validation error', details: error.errors }, + { status: 400 } + ) + } + + logger.error(`[${requestId}] Error renaming table:`, error) + return NextResponse.json( + { error: error instanceof Error ? error.message : 'Failed to rename table' }, + { status: 500 } + ) + } +} + +/** DELETE /api/table/[tableId] - Archives a table. 
*/ export async function DELETE(request: NextRequest, { params }: TableRouteParams) { const requestId = generateRequestId() const { tableId } = await params @@ -108,11 +166,7 @@ export async function DELETE(request: NextRequest, { params }: TableRouteParams) const { table } = result - const isValidWorkspace = await verifyTableWorkspace(tableId, validated.workspaceId) - if (!isValidWorkspace) { - logger.warn( - `[${requestId}] Workspace ID mismatch for table ${tableId}. Provided: ${validated.workspaceId}, Actual: ${table.workspaceId}` - ) + if (table.workspaceId !== validated.workspaceId) { return NextResponse.json({ error: 'Invalid workspace ID' }, { status: 400 }) } @@ -121,7 +175,7 @@ export async function DELETE(request: NextRequest, { params }: TableRouteParams) return NextResponse.json({ success: true, data: { - message: 'Table deleted successfully', + message: 'Table archived successfully', }, }) } catch (error) { diff --git a/apps/sim/app/api/table/[tableId]/rows/[rowId]/route.ts b/apps/sim/app/api/table/[tableId]/rows/[rowId]/route.ts index 15a4473283c..12326141c7d 100644 --- a/apps/sim/app/api/table/[tableId]/rows/[rowId]/route.ts +++ b/apps/sim/app/api/table/[tableId]/rows/[rowId]/route.ts @@ -6,9 +6,9 @@ import { type NextRequest, NextResponse } from 'next/server' import { z } from 'zod' import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid' import { generateRequestId } from '@/lib/core/utils/request' -import type { RowData, TableSchema } from '@/lib/table' -import { validateRowData } from '@/lib/table' -import { accessError, checkAccess, verifyTableWorkspace } from '../../../utils' +import type { RowData } from '@/lib/table' +import { deleteRow, updateRow } from '@/lib/table' +import { accessError, checkAccess } from '@/app/api/table/utils' const logger = createLogger('TableRowAPI') @@ -50,11 +50,7 @@ export async function GET(request: NextRequest, { params }: RowRouteParams) { const { table } = result - const isValidWorkspace = await 
verifyTableWorkspace(tableId, validated.workspaceId) - if (!isValidWorkspace) { - logger.warn( - `[${requestId}] Workspace ID mismatch for table ${tableId}. Provided: ${validated.workspaceId}, Actual: ${table.workspaceId}` - ) + if (table.workspaceId !== validated.workspaceId) { return NextResponse.json({ error: 'Invalid workspace ID' }, { status: 400 }) } @@ -62,6 +58,7 @@ export async function GET(request: NextRequest, { params }: RowRouteParams) { .select({ id: userTableRows.id, data: userTableRows.data, + position: userTableRows.position, createdAt: userTableRows.createdAt, updatedAt: userTableRows.updatedAt, }) @@ -87,8 +84,11 @@ export async function GET(request: NextRequest, { params }: RowRouteParams) { row: { id: row.id, data: row.data, - createdAt: row.createdAt.toISOString(), - updatedAt: row.updatedAt.toISOString(), + position: row.position, + createdAt: + row.createdAt instanceof Date ? row.createdAt.toISOString() : String(row.createdAt), + updatedAt: + row.updatedAt instanceof Date ? row.updatedAt.toISOString() : String(row.updatedAt), }, }, }) @@ -116,7 +116,13 @@ export async function PATCH(request: NextRequest, { params }: RowRouteParams) { return NextResponse.json({ error: 'Authentication required' }, { status: 401 }) } - const body: unknown = await request.json() + let body: unknown + try { + body = await request.json() + } catch { + return NextResponse.json({ error: 'Request body must be valid JSON' }, { status: 400 }) + } + const validated = UpdateRowSchema.parse(body) const result = await checkAccess(tableId, authResult.userId, 'write') @@ -124,15 +130,10 @@ export async function PATCH(request: NextRequest, { params }: RowRouteParams) { const { table } = result - const isValidWorkspace = await verifyTableWorkspace(tableId, validated.workspaceId) - if (!isValidWorkspace) { - logger.warn( - `[${requestId}] Workspace ID mismatch for table ${tableId}. 
Provided: ${validated.workspaceId}, Actual: ${table.workspaceId}` - ) + if (table.workspaceId !== validated.workspaceId) { return NextResponse.json({ error: 'Invalid workspace ID' }, { status: 400 }) } - // Fetch existing row to support partial updates const [existingRow] = await db .select({ data: userTableRows.data }) .from(userTableRows) @@ -149,42 +150,21 @@ export async function PATCH(request: NextRequest, { params }: RowRouteParams) { return NextResponse.json({ error: 'Row not found' }, { status: 404 }) } - // Merge existing data with incoming partial data (incoming takes precedence) const mergedData = { ...(existingRow.data as RowData), ...(validated.data as RowData), } - const validation = await validateRowData({ - rowData: mergedData, - schema: table.schema as TableSchema, - tableId, - excludeRowId: rowId, - }) - if (!validation.valid) return validation.response - - const now = new Date() - - const [updatedRow] = await db - .update(userTableRows) - .set({ + const updatedRow = await updateRow( + { + tableId, + rowId, data: mergedData, - updatedAt: now, - }) - .where( - and( - eq(userTableRows.id, rowId), - eq(userTableRows.tableId, tableId), - eq(userTableRows.workspaceId, validated.workspaceId) - ) - ) - .returning() - - if (!updatedRow) { - return NextResponse.json({ error: 'Row not found' }, { status: 404 }) - } - - logger.info(`[${requestId}] Updated row ${rowId} in table ${tableId}`) + workspaceId: validated.workspaceId, + }, + table, + requestId + ) return NextResponse.json({ success: true, @@ -192,8 +172,15 @@ export async function PATCH(request: NextRequest, { params }: RowRouteParams) { row: { id: updatedRow.id, data: updatedRow.data, - createdAt: updatedRow.createdAt.toISOString(), - updatedAt: updatedRow.updatedAt.toISOString(), + position: updatedRow.position, + createdAt: + updatedRow.createdAt instanceof Date + ? updatedRow.createdAt.toISOString() + : updatedRow.createdAt, + updatedAt: + updatedRow.updatedAt instanceof Date + ? 
updatedRow.updatedAt.toISOString() + : updatedRow.updatedAt, }, message: 'Row updated successfully', }, @@ -206,6 +193,22 @@ export async function PATCH(request: NextRequest, { params }: RowRouteParams) { ) } + const errorMessage = error instanceof Error ? error.message : String(error) + + if (errorMessage === 'Row not found') { + return NextResponse.json({ error: errorMessage }, { status: 404 }) + } + + if ( + errorMessage.includes('Row size exceeds') || + errorMessage.includes('Schema validation') || + errorMessage.includes('must be unique') || + errorMessage.includes('Unique constraint violation') || + errorMessage.includes('Cannot set unique column') + ) { + return NextResponse.json({ error: errorMessage }, { status: 400 }) + } + logger.error(`[${requestId}] Error updating row:`, error) return NextResponse.json({ error: 'Failed to update row' }, { status: 500 }) } @@ -222,7 +225,13 @@ export async function DELETE(request: NextRequest, { params }: RowRouteParams) { return NextResponse.json({ error: 'Authentication required' }, { status: 401 }) } - const body: unknown = await request.json() + let body: unknown + try { + body = await request.json() + } catch { + return NextResponse.json({ error: 'Request body must be valid JSON' }, { status: 400 }) + } + const validated = DeleteRowSchema.parse(body) const result = await checkAccess(tableId, authResult.userId, 'write') @@ -230,30 +239,11 @@ export async function DELETE(request: NextRequest, { params }: RowRouteParams) { const { table } = result - const isValidWorkspace = await verifyTableWorkspace(tableId, validated.workspaceId) - if (!isValidWorkspace) { - logger.warn( - `[${requestId}] Workspace ID mismatch for table ${tableId}. 
Provided: ${validated.workspaceId}, Actual: ${table.workspaceId}` - ) + if (table.workspaceId !== validated.workspaceId) { return NextResponse.json({ error: 'Invalid workspace ID' }, { status: 400 }) } - const [deletedRow] = await db - .delete(userTableRows) - .where( - and( - eq(userTableRows.id, rowId), - eq(userTableRows.tableId, tableId), - eq(userTableRows.workspaceId, validated.workspaceId) - ) - ) - .returning() - - if (!deletedRow) { - return NextResponse.json({ error: 'Row not found' }, { status: 404 }) - } - - logger.info(`[${requestId}] Deleted row ${rowId} from table ${tableId}`) + await deleteRow(tableId, rowId, validated.workspaceId, requestId) return NextResponse.json({ success: true, @@ -270,6 +260,12 @@ export async function DELETE(request: NextRequest, { params }: RowRouteParams) { ) } + const errorMessage = error instanceof Error ? error.message : String(error) + + if (errorMessage === 'Row not found') { + return NextResponse.json({ error: errorMessage }, { status: 404 }) + } + logger.error(`[${requestId}] Error deleting row:`, error) return NextResponse.json({ error: 'Failed to delete row' }, { status: 500 }) } diff --git a/apps/sim/app/api/table/[tableId]/rows/route.ts b/apps/sim/app/api/table/[tableId]/rows/route.ts index 47bd0fe1a5f..305a55a8855 100644 --- a/apps/sim/app/api/table/[tableId]/rows/route.ts +++ b/apps/sim/app/api/table/[tableId]/rows/route.ts @@ -8,32 +8,44 @@ import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid' import { generateRequestId } from '@/lib/core/utils/request' import type { Filter, RowData, Sort, TableSchema } from '@/lib/table' import { - checkUniqueConstraintsDb, - getUniqueColumns, + batchInsertRows, + batchUpdateRows, + deleteRowsByFilter, + deleteRowsByIds, + insertRow, TABLE_LIMITS, USER_TABLE_ROWS_SQL_NAME, + updateRowsByFilter, validateBatchRows, - validateRowAgainstSchema, validateRowData, validateRowSize, } from '@/lib/table' import { buildFilterClause, buildSortClause } from '@/lib/table/sql' 
-import { accessError, checkAccess } from '../../utils' +import { accessError, checkAccess } from '@/app/api/table/utils' const logger = createLogger('TableRowsAPI') const InsertRowSchema = z.object({ workspaceId: z.string().min(1, 'Workspace ID is required'), data: z.record(z.unknown(), { required_error: 'Row data is required' }), + position: z.number().int().min(0).optional(), }) -const BatchInsertRowsSchema = z.object({ - workspaceId: z.string().min(1, 'Workspace ID is required'), - rows: z - .array(z.record(z.unknown()), { required_error: 'Rows array is required' }) - .min(1, 'At least one row is required') - .max(1000, 'Cannot insert more than 1000 rows per batch'), -}) +const BatchInsertRowsSchema = z + .object({ + workspaceId: z.string().min(1, 'Workspace ID is required'), + rows: z + .array(z.record(z.unknown()), { required_error: 'Rows array is required' }) + .min(1, 'At least one row is required') + .max(1000, 'Cannot insert more than 1000 rows per batch'), + positions: z.array(z.number().int().min(0)).max(1000).optional(), + }) + .refine((d) => !d.positions || d.positions.length === d.rows.length, { + message: 'positions array length must match rows array length', + }) + .refine((d) => !d.positions || new Set(d.positions).size === d.positions.length, { + message: 'positions must not contain duplicates', + }) const QueryRowsSchema = z.object({ workspaceId: z.string().min(1, 'Workspace ID is required'), @@ -54,27 +66,32 @@ const QueryRowsSchema = z.object({ .default(0), }) +const nonEmptyFilter = z + .record(z.unknown(), { required_error: 'Filter criteria is required' }) + .refine((f) => Object.keys(f).length > 0, { message: 'Filter must not be empty' }) + +const optionalPositiveLimit = (max: number, label: string) => + z.preprocess( + (val) => (val === null || val === undefined || val === '' ? 
undefined : Number(val)), + z + .number() + .int(`${label} must be an integer`) + .min(1, `${label} must be at least 1`) + .max(max, `Cannot ${label.toLowerCase()} more than ${max} rows per operation`) + .optional() + ) + const UpdateRowsByFilterSchema = z.object({ workspaceId: z.string().min(1, 'Workspace ID is required'), - filter: z.record(z.unknown(), { required_error: 'Filter criteria is required' }), + filter: nonEmptyFilter, data: z.record(z.unknown(), { required_error: 'Update data is required' }), - limit: z.coerce - .number({ required_error: 'Limit must be a number' }) - .int('Limit must be an integer') - .min(1, 'Limit must be at least 1') - .max(1000, 'Cannot update more than 1000 rows per operation') - .optional(), + limit: optionalPositiveLimit(1000, 'Limit'), }) const DeleteRowsByFilterSchema = z.object({ workspaceId: z.string().min(1, 'Workspace ID is required'), - filter: z.record(z.unknown(), { required_error: 'Filter criteria is required' }), - limit: z.coerce - .number({ required_error: 'Limit must be a number' }) - .int('Limit must be an integer') - .min(1, 'Limit must be at least 1') - .max(1000, 'Cannot delete more than 1000 rows per operation') - .optional(), + filter: nonEmptyFilter, + limit: optionalPositiveLimit(1000, 'Limit'), }) const DeleteRowsByIdsSchema = z.object({ @@ -87,6 +104,22 @@ const DeleteRowsByIdsSchema = z.object({ const DeleteRowsRequestSchema = z.union([DeleteRowsByFilterSchema, DeleteRowsByIdsSchema]) +const BatchUpdateByIdsSchema = z.object({ + workspaceId: z.string().min(1, 'Workspace ID is required'), + updates: z + .array( + z.object({ + rowId: z.string().min(1), + data: z.record(z.unknown()), + }) + ) + .min(1, 'At least one update is required') + .max(1000, 'Cannot update more than 1000 rows per batch') + .refine((d) => new Set(d.map((u) => u.rowId)).size === d.length, { + message: 'updates must not contain duplicate rowId values', + }), +}) + interface TableRowsRouteParams { params: Promise<{ tableId: string }> } 
@@ -111,18 +144,8 @@ async function handleBatchInsert( return NextResponse.json({ error: 'Invalid workspace ID' }, { status: 400 }) } - const workspaceId = validated.workspaceId - - const remainingCapacity = table.maxRows - table.rowCount - if (remainingCapacity < validated.rows.length) { - return NextResponse.json( - { - error: `Insufficient capacity. Can only insert ${remainingCapacity} more rows (table has ${table.rowCount}/${table.maxRows} rows)`, - }, - { status: 400 } - ) - } - + // Validate rows before calling service (service also validates, but route-level + // validation returns structured HTTP responses) const validation = await validateBatchRows({ rows: validated.rows as RowData[], schema: table.schema as TableSchema, @@ -130,34 +153,50 @@ async function handleBatchInsert( }) if (!validation.valid) return validation.response - const now = new Date() - const rowsToInsert = validated.rows.map((data) => ({ - id: `row_${crypto.randomUUID().replace(/-/g, '')}`, - tableId, - workspaceId, - data, - createdAt: now, - updatedAt: now, - createdBy: userId, - })) - - const insertedRows = await db.insert(userTableRows).values(rowsToInsert).returning() - - logger.info(`[${requestId}] Batch inserted ${insertedRows.length} rows into table ${tableId}`) - - return NextResponse.json({ - success: true, - data: { - rows: insertedRows.map((r) => ({ - id: r.id, - data: r.data, - createdAt: r.createdAt.toISOString(), - updatedAt: r.updatedAt.toISOString(), - })), - insertedCount: insertedRows.length, - message: `Successfully inserted ${insertedRows.length} rows`, - }, - }) + try { + const insertedRows = await batchInsertRows( + { + tableId, + rows: validated.rows as RowData[], + workspaceId: validated.workspaceId, + userId, + positions: validated.positions, + }, + table, + requestId + ) + + return NextResponse.json({ + success: true, + data: { + rows: insertedRows.map((r) => ({ + id: r.id, + data: r.data, + position: r.position, + createdAt: r.createdAt instanceof Date ? 
r.createdAt.toISOString() : r.createdAt, + updatedAt: r.updatedAt instanceof Date ? r.updatedAt.toISOString() : r.updatedAt, + })), + insertedCount: insertedRows.length, + message: `Successfully inserted ${insertedRows.length} rows`, + }, + }) + } catch (error) { + const errorMessage = error instanceof Error ? error.message : String(error) + + if ( + errorMessage.includes('row limit') || + errorMessage.includes('Insufficient capacity') || + errorMessage.includes('Schema validation') || + errorMessage.includes('must be unique') || + errorMessage.includes('Row size exceeds') || + errorMessage.match(/^Row \d+:/) + ) { + return NextResponse.json({ error: errorMessage }, { status: 400 }) + } + + logger.error(`[${requestId}] Error batch inserting rows:`, error) + return NextResponse.json({ error: 'Failed to insert rows' }, { status: 500 }) + } } /** POST /api/table/[tableId]/rows - Inserts row(s). Supports single or batch insert. */ @@ -171,7 +210,12 @@ export async function POST(request: NextRequest, { params }: TableRowsRouteParam return NextResponse.json({ error: 'Authentication required' }, { status: 401 }) } - const body: unknown = await request.json() + let body: unknown + try { + body = await request.json() + } catch { + return NextResponse.json({ error: 'Request body must be valid JSON' }, { status: 400 }) + } if ( typeof body === 'object' && @@ -201,9 +245,9 @@ export async function POST(request: NextRequest, { params }: TableRowsRouteParam return NextResponse.json({ error: 'Invalid workspace ID' }, { status: 400 }) } - const workspaceId = validated.workspaceId const rowData = validated.data as RowData + // Validate at route level for structured HTTP error responses const validation = await validateRowData({ rowData, schema: table.schema as TableSchema, @@ -211,30 +255,18 @@ export async function POST(request: NextRequest, { params }: TableRowsRouteParam }) if (!validation.valid) return validation.response - if (table.rowCount >= table.maxRows) { - return 
NextResponse.json( - { error: `Table row limit reached (${table.maxRows} rows max)` }, - { status: 400 } - ) - } - - const rowId = `row_${crypto.randomUUID().replace(/-/g, '')}` - const now = new Date() - - const [row] = await db - .insert(userTableRows) - .values({ - id: rowId, + // Service handles atomic capacity check + insert in a transaction + const row = await insertRow( + { tableId, - workspaceId, - data: validated.data, - createdAt: now, - updatedAt: now, - createdBy: authResult.userId, - }) - .returning() - - logger.info(`[${requestId}] Inserted row ${rowId} into table ${tableId}`) + data: rowData, + workspaceId: validated.workspaceId, + userId: authResult.userId, + position: validated.position, + }, + table, + requestId + ) return NextResponse.json({ success: true, @@ -242,8 +274,9 @@ export async function POST(request: NextRequest, { params }: TableRowsRouteParam row: { id: row.id, data: row.data, - createdAt: row.createdAt.toISOString(), - updatedAt: row.updatedAt.toISOString(), + position: row.position, + createdAt: row.createdAt instanceof Date ? row.createdAt.toISOString() : row.createdAt, + updatedAt: row.updatedAt instanceof Date ? row.updatedAt.toISOString() : row.updatedAt, }, message: 'Row inserted successfully', }, @@ -256,6 +289,18 @@ export async function POST(request: NextRequest, { params }: TableRowsRouteParam ) } + const errorMessage = error instanceof Error ? 
error.message : String(error) + + if ( + errorMessage.includes('row limit') || + errorMessage.includes('Insufficient capacity') || + errorMessage.includes('Schema validation') || + errorMessage.includes('must be unique') || + errorMessage.includes('Row size exceeds') + ) { + return NextResponse.json({ error: errorMessage }, { status: 400 }) + } + logger.error(`[${requestId}] Error inserting row:`, error) return NextResponse.json({ error: 'Failed to insert row' }, { status: 500 }) } @@ -329,6 +374,7 @@ export async function GET(request: NextRequest, { params }: TableRowsRouteParams .select({ id: userTableRows.id, data: userTableRows.data, + position: userTableRows.position, createdAt: userTableRows.createdAt, updatedAt: userTableRows.updatedAt, }) @@ -340,9 +386,11 @@ export async function GET(request: NextRequest, { params }: TableRowsRouteParams const sortClause = buildSortClause(validated.sort, USER_TABLE_ROWS_SQL_NAME, schema.columns) if (sortClause) { query = query.orderBy(sortClause) as typeof query + } else { + query = query.orderBy(userTableRows.position) as typeof query } } else { - query = query.orderBy(userTableRows.createdAt) as typeof query + query = query.orderBy(userTableRows.position) as typeof query } const countQuery = db @@ -364,8 +412,9 @@ export async function GET(request: NextRequest, { params }: TableRowsRouteParams rows: rows.map((r) => ({ id: r.id, data: r.data, - createdAt: r.createdAt.toISOString(), - updatedAt: r.updatedAt.toISOString(), + position: r.position, + createdAt: r.createdAt instanceof Date ? r.createdAt.toISOString() : String(r.createdAt), + updatedAt: r.updatedAt instanceof Date ? 
r.updatedAt.toISOString() : String(r.updatedAt), })), rowCount: rows.length, totalCount: Number(totalCount), @@ -397,7 +446,13 @@ export async function PUT(request: NextRequest, { params }: TableRowsRouteParams return NextResponse.json({ error: 'Authentication required' }, { status: 401 }) } - const body: unknown = await request.json() + let body: unknown + try { + body = await request.json() + } catch { + return NextResponse.json({ error: 'Request body must be valid JSON' }, { status: 400 }) + } + const validated = UpdateRowsByFilterSchema.parse(body) const accessResult = await checkAccess(tableId, authResult.userId, 'write') @@ -412,9 +467,7 @@ export async function PUT(request: NextRequest, { params }: TableRowsRouteParams return NextResponse.json({ error: 'Invalid workspace ID' }, { status: 400 }) } - const updateData = validated.data as RowData - - const sizeValidation = validateRowSize(updateData) + const sizeValidation = validateRowSize(validated.data as RowData) if (!sizeValidation.valid) { return NextResponse.json( { error: 'Invalid row data', details: sizeValidation.errors }, @@ -422,31 +475,19 @@ export async function PUT(request: NextRequest, { params }: TableRowsRouteParams ) } - const baseConditions = [ - eq(userTableRows.tableId, tableId), - eq(userTableRows.workspaceId, validated.workspaceId), - ] - - const filterClause = buildFilterClause(validated.filter as Filter, USER_TABLE_ROWS_SQL_NAME) - if (filterClause) { - baseConditions.push(filterClause) - } - - let matchingRowsQuery = db - .select({ - id: userTableRows.id, - data: userTableRows.data, - }) - .from(userTableRows) - .where(and(...baseConditions)) - - if (validated.limit) { - matchingRowsQuery = matchingRowsQuery.limit(validated.limit) as typeof matchingRowsQuery - } - - const matchingRows = await matchingRowsQuery + const result = await updateRowsByFilter( + { + tableId, + filter: validated.filter as Filter, + data: validated.data as RowData, + limit: validated.limit, + workspaceId: 
validated.workspaceId, + }, + table, + requestId + ) - if (matchingRows.length === 0) { + if (result.affectedCount === 0) { return NextResponse.json( { success: true, @@ -459,103 +500,12 @@ export async function PUT(request: NextRequest, { params }: TableRowsRouteParams ) } - if (matchingRows.length > TABLE_LIMITS.MAX_BULK_OPERATION_SIZE) { - logger.warn(`[${requestId}] Updating ${matchingRows.length} rows. This may take some time.`) - } - - for (const row of matchingRows) { - const existingData = row.data as RowData - const mergedData = { ...existingData, ...updateData } - const rowValidation = validateRowAgainstSchema(mergedData, table.schema as TableSchema) - if (!rowValidation.valid) { - return NextResponse.json( - { - error: 'Updated data does not match schema', - details: rowValidation.errors, - affectedRowId: row.id, - }, - { status: 400 } - ) - } - } - - const uniqueColumns = getUniqueColumns(table.schema as TableSchema) - if (uniqueColumns.length > 0) { - // If updating multiple rows, check that updateData doesn't set any unique column - // (would cause all rows to have the same value, violating uniqueness) - if (matchingRows.length > 1) { - const uniqueColumnsInUpdate = uniqueColumns.filter((col) => col.name in updateData) - if (uniqueColumnsInUpdate.length > 0) { - return NextResponse.json( - { - error: 'Cannot set unique column values when updating multiple rows', - details: [ - `Columns with unique constraint: ${uniqueColumnsInUpdate.map((c) => c.name).join(', ')}. 
` + - `Updating ${matchingRows.length} rows with the same value would violate uniqueness.`, - ], - }, - { status: 400 } - ) - } - } - - // Check unique constraints against database for each row - for (const row of matchingRows) { - const existingData = row.data as RowData - const mergedData = { ...existingData, ...updateData } - const uniqueValidation = await checkUniqueConstraintsDb( - tableId, - mergedData, - table.schema as TableSchema, - row.id - ) - - if (!uniqueValidation.valid) { - return NextResponse.json( - { - error: 'Unique constraint violation', - details: uniqueValidation.errors, - affectedRowId: row.id, - }, - { status: 400 } - ) - } - } - } - - const now = new Date() - - await db.transaction(async (trx) => { - let totalUpdated = 0 - - for (let i = 0; i < matchingRows.length; i += TABLE_LIMITS.UPDATE_BATCH_SIZE) { - const batch = matchingRows.slice(i, i + TABLE_LIMITS.UPDATE_BATCH_SIZE) - const updatePromises = batch.map((row) => { - const existingData = row.data as RowData - return trx - .update(userTableRows) - .set({ - data: { ...existingData, ...updateData }, - updatedAt: now, - }) - .where(eq(userTableRows.id, row.id)) - }) - await Promise.all(updatePromises) - totalUpdated += batch.length - logger.info( - `[${requestId}] Updated batch ${Math.floor(i / TABLE_LIMITS.UPDATE_BATCH_SIZE) + 1} (${totalUpdated}/${matchingRows.length} rows)` - ) - } - }) - - logger.info(`[${requestId}] Updated ${matchingRows.length} rows in table ${tableId}`) - return NextResponse.json({ success: true, data: { message: 'Rows updated successfully', - updatedCount: matchingRows.length, - updatedRowIds: matchingRows.map((r) => r.id), + updatedCount: result.affectedCount, + updatedRowIds: result.affectedRowIds, }, }) } catch (error) { @@ -566,16 +516,25 @@ export async function PUT(request: NextRequest, { params }: TableRowsRouteParams ) } - logger.error(`[${requestId}] Error updating rows by filter:`, error) - const errorMessage = error instanceof Error ? 
error.message : String(error) - const detailedError = `Failed to update rows: ${errorMessage}` - return NextResponse.json({ error: detailedError }, { status: 500 }) + if ( + errorMessage.includes('Row size exceeds') || + errorMessage.includes('Schema validation') || + errorMessage.includes('must be unique') || + errorMessage.includes('Unique constraint violation') || + errorMessage.includes('Cannot set unique column') || + errorMessage.includes('Filter is required') + ) { + return NextResponse.json({ error: errorMessage }, { status: 400 }) + } + + logger.error(`[${requestId}] Error updating rows by filter:`, error) + return NextResponse.json({ error: 'Failed to update rows' }, { status: 500 }) } } -/** DELETE /api/table/[tableId]/rows - Deletes rows matching filter criteria. */ +/** DELETE /api/table/[tableId]/rows - Deletes rows matching filter criteria or by IDs. */ export async function DELETE(request: NextRequest, { params }: TableRowsRouteParams) { const requestId = generateRequestId() const { tableId } = await params @@ -586,7 +545,13 @@ export async function DELETE(request: NextRequest, { params }: TableRowsRoutePar return NextResponse.json({ error: 'Authentication required' }, { status: 401 }) } - const body: unknown = await request.json() + let body: unknown + try { + body = await request.json() + } catch { + return NextResponse.json({ error: 'Request body must be valid JSON' }, { status: 400 }) + } + const validated = DeleteRowsRequestSchema.parse(body) const accessResult = await checkAccess(tableId, authResult.userId, 'write') @@ -601,110 +566,115 @@ export async function DELETE(request: NextRequest, { params }: TableRowsRoutePar return NextResponse.json({ error: 'Invalid workspace ID' }, { status: 400 }) } - const baseConditions = [ - eq(userTableRows.tableId, tableId), - eq(userTableRows.workspaceId, validated.workspaceId), - ] + if ('rowIds' in validated) { + const result = await deleteRowsByIds( + { tableId, rowIds: validated.rowIds, workspaceId: 
validated.workspaceId }, + requestId + ) - let rowIds: string[] = [] - let missingRowIds: string[] | undefined - let requestedCount: number | undefined + return NextResponse.json({ + success: true, + data: { + message: + result.deletedCount === 0 + ? 'No matching rows found for the provided IDs' + : 'Rows deleted successfully', + deletedCount: result.deletedCount, + deletedRowIds: result.deletedRowIds, + requestedCount: result.requestedCount, + ...(result.missingRowIds.length > 0 ? { missingRowIds: result.missingRowIds } : {}), + }, + }) + } - if ('rowIds' in validated) { - const uniqueRequestedRowIds = Array.from(new Set(validated.rowIds)) - requestedCount = uniqueRequestedRowIds.length - - const matchingRows = await db - .select({ id: userTableRows.id }) - .from(userTableRows) - .where( - and( - ...baseConditions, - sql`${userTableRows.id} = ANY(ARRAY[${sql.join( - uniqueRequestedRowIds.map((id) => sql`${id}`), - sql`, ` - )}])` - ) - ) - - const matchedRowIds = matchingRows.map((r) => r.id) - const matchedIdSet = new Set(matchedRowIds) - missingRowIds = uniqueRequestedRowIds.filter((id) => !matchedIdSet.has(id)) - rowIds = matchedRowIds - } else { - const filterClause = buildFilterClause(validated.filter as Filter, USER_TABLE_ROWS_SQL_NAME) - if (filterClause) { - baseConditions.push(filterClause) - } + const result = await deleteRowsByFilter( + { + tableId, + filter: validated.filter as Filter, + limit: validated.limit, + workspaceId: validated.workspaceId, + }, + requestId + ) - let matchingRowsQuery = db - .select({ id: userTableRows.id }) - .from(userTableRows) - .where(and(...baseConditions)) + return NextResponse.json({ + success: true, + data: { + message: + result.affectedCount === 0 + ? 
'No rows matched the filter criteria' + : 'Rows deleted successfully', + deletedCount: result.affectedCount, + deletedRowIds: result.affectedRowIds, + }, + }) + } catch (error) { + if (error instanceof z.ZodError) { + return NextResponse.json( + { error: 'Validation error', details: error.errors }, + { status: 400 } + ) + } - if (validated.limit) { - matchingRowsQuery = matchingRowsQuery.limit(validated.limit) as typeof matchingRowsQuery - } + const errorMessage = error instanceof Error ? error.message : String(error) - const matchingRows = await matchingRowsQuery - rowIds = matchingRows.map((r) => r.id) + if (errorMessage.includes('Filter is required')) { + return NextResponse.json({ error: errorMessage }, { status: 400 }) } - if (rowIds.length === 0) { - return NextResponse.json( - { - success: true, - data: { - message: - 'rowIds' in validated - ? 'No matching rows found for the provided IDs' - : 'No rows matched the filter criteria', - deletedCount: 0, - deletedRowIds: [], - ...(requestedCount !== undefined ? { requestedCount } : {}), - ...(missingRowIds ? { missingRowIds } : {}), - }, - }, - { status: 200 } - ) + logger.error(`[${requestId}] Error deleting rows:`, error) + return NextResponse.json({ error: 'Failed to delete rows' }, { status: 500 }) + } +} + +/** PATCH /api/table/[tableId]/rows - Batch updates rows by ID. */ +export async function PATCH(request: NextRequest, { params }: TableRowsRouteParams) { + const requestId = generateRequestId() + const { tableId } = await params + + try { + const authResult = await checkSessionOrInternalAuth(request, { requireWorkflowId: false }) + if (!authResult.success || !authResult.userId) { + return NextResponse.json({ error: 'Authentication required' }, { status: 401 }) } - if (rowIds.length > TABLE_LIMITS.DELETE_BATCH_SIZE) { - logger.warn(`[${requestId}] Deleting ${rowIds.length} rows. 
This may take some time.`) + let body: unknown + try { + body = await request.json() + } catch { + return NextResponse.json({ error: 'Request body must be valid JSON' }, { status: 400 }) } - await db.transaction(async (trx) => { - let totalDeleted = 0 - - for (let i = 0; i < rowIds.length; i += TABLE_LIMITS.DELETE_BATCH_SIZE) { - const batch = rowIds.slice(i, i + TABLE_LIMITS.DELETE_BATCH_SIZE) - await trx.delete(userTableRows).where( - and( - eq(userTableRows.tableId, tableId), - eq(userTableRows.workspaceId, validated.workspaceId), - sql`${userTableRows.id} = ANY(ARRAY[${sql.join( - batch.map((id) => sql`${id}`), - sql`, ` - )}])` - ) - ) - totalDeleted += batch.length - logger.info( - `[${requestId}] Deleted batch ${Math.floor(i / TABLE_LIMITS.DELETE_BATCH_SIZE) + 1} (${totalDeleted}/${rowIds.length} rows)` - ) - } - }) + const validated = BatchUpdateByIdsSchema.parse(body) + + const accessResult = await checkAccess(tableId, authResult.userId, 'write') + if (!accessResult.ok) return accessError(accessResult, requestId, tableId) + + const { table } = accessResult + + if (validated.workspaceId !== table.workspaceId) { + logger.warn( + `[${requestId}] Workspace ID mismatch for table ${tableId}. Provided: ${validated.workspaceId}, Actual: ${table.workspaceId}` + ) + return NextResponse.json({ error: 'Invalid workspace ID' }, { status: 400 }) + } - logger.info(`[${requestId}] Deleted ${rowIds.length} rows from table ${tableId}`) + const result = await batchUpdateRows( + { + tableId, + updates: validated.updates as Array<{ rowId: string; data: RowData }>, + workspaceId: validated.workspaceId, + }, + table, + requestId + ) return NextResponse.json({ success: true, data: { - message: 'Rows deleted successfully', - deletedCount: rowIds.length, - deletedRowIds: rowIds, - ...(requestedCount !== undefined ? { requestedCount } : {}), - ...(missingRowIds ? 
{ missingRowIds } : {}), + message: 'Rows updated successfully', + updatedCount: result.affectedCount, + updatedRowIds: result.affectedRowIds, }, }) } catch (error) { @@ -715,11 +685,24 @@ export async function DELETE(request: NextRequest, { params }: TableRowsRoutePar ) } - logger.error(`[${requestId}] Error deleting rows by filter:`, error) - const errorMessage = error instanceof Error ? error.message : String(error) - const detailedError = `Failed to delete rows: ${errorMessage}` - return NextResponse.json({ error: detailedError }, { status: 500 }) + if ( + errorMessage.includes('Row size exceeds') || + errorMessage.includes('Schema validation') || + errorMessage.includes('must be valid') || + errorMessage.includes('must be string') || + errorMessage.includes('must be number') || + errorMessage.includes('must be boolean') || + errorMessage.includes('must be unique') || + errorMessage.includes('Unique constraint violation') || + errorMessage.includes('Cannot set unique column') || + errorMessage.includes('Rows not found') + ) { + return NextResponse.json({ error: errorMessage }, { status: 400 }) + } + + logger.error(`[${requestId}] Error batch updating rows:`, error) + return NextResponse.json({ error: 'Failed to update rows' }, { status: 500 }) } } diff --git a/apps/sim/app/api/table/[tableId]/rows/upsert/route.ts b/apps/sim/app/api/table/[tableId]/rows/upsert/route.ts index a7b9e814683..f78c90b2e0c 100644 --- a/apps/sim/app/api/table/[tableId]/rows/upsert/route.ts +++ b/apps/sim/app/api/table/[tableId]/rows/upsert/route.ts @@ -1,20 +1,18 @@ -import { db } from '@sim/db' -import { userTableRows } from '@sim/db/schema' import { createLogger } from '@sim/logger' -import { and, eq, or, sql } from 'drizzle-orm' import { type NextRequest, NextResponse } from 'next/server' import { z } from 'zod' import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid' import { generateRequestId } from '@/lib/core/utils/request' -import type { RowData, TableSchema } from 
'@/lib/table' -import { getUniqueColumns, validateRowData } from '@/lib/table' -import { accessError, checkAccess, verifyTableWorkspace } from '../../../utils' +import type { RowData } from '@/lib/table' +import { upsertRow } from '@/lib/table' +import { accessError, checkAccess } from '@/app/api/table/utils' const logger = createLogger('TableUpsertAPI') const UpsertRowSchema = z.object({ workspaceId: z.string().min(1, 'Workspace ID is required'), data: z.record(z.unknown(), { required_error: 'Row data is required' }), + conflictTarget: z.string().optional(), }) interface UpsertRouteParams { @@ -32,7 +30,13 @@ export async function POST(request: NextRequest, { params }: UpsertRouteParams) return NextResponse.json({ error: 'Authentication required' }, { status: 401 }) } - const body: unknown = await request.json() + let body: unknown + try { + body = await request.json() + } catch { + return NextResponse.json({ error: 'Request body must be valid JSON' }, { status: 400 }) + } + const validated = UpsertRowSchema.parse(body) const result = await checkAccess(tableId, authResult.userId, 'write') @@ -40,115 +44,20 @@ export async function POST(request: NextRequest, { params }: UpsertRouteParams) const { table } = result - const isValidWorkspace = await verifyTableWorkspace(tableId, validated.workspaceId) - if (!isValidWorkspace) { - logger.warn( - `[${requestId}] Workspace ID mismatch for table ${tableId}. 
Provided: ${validated.workspaceId}, Actual: ${table.workspaceId}` - ) + if (table.workspaceId !== validated.workspaceId) { return NextResponse.json({ error: 'Invalid workspace ID' }, { status: 400 }) } - const schema = table.schema as TableSchema - const rowData = validated.data as RowData - - const validation = await validateRowData({ - rowData, - schema, - tableId, - checkUnique: false, - }) - if (!validation.valid) return validation.response - - const uniqueColumns = getUniqueColumns(schema) - - if (uniqueColumns.length === 0) { - return NextResponse.json( - { - error: - 'Upsert requires at least one unique column in the schema. Please add a unique constraint to a column or use insert instead.', - }, - { status: 400 } - ) - } - - const uniqueFilters = uniqueColumns.map((col) => { - const value = rowData[col.name] - if (value === undefined || value === null) { - return null - } - return sql`${userTableRows.data}->>${col.name} = ${String(value)}` - }) - - const validUniqueFilters = uniqueFilters.filter((f): f is Exclude => f !== null) - - if (validUniqueFilters.length === 0) { - return NextResponse.json( - { - error: `Upsert requires values for at least one unique field: ${uniqueColumns.map((c) => c.name).join(', ')}`, - }, - { status: 400 } - ) - } - - const [existingRow] = await db - .select() - .from(userTableRows) - .where( - and( - eq(userTableRows.tableId, tableId), - eq(userTableRows.workspaceId, validated.workspaceId), - or(...validUniqueFilters) - ) - ) - .limit(1) - - const now = new Date() - - if (!existingRow && table.rowCount >= table.maxRows) { - return NextResponse.json( - { error: `Table row limit reached (${table.maxRows} rows max)` }, - { status: 400 } - ) - } - - const upsertResult = await db.transaction(async (trx) => { - if (existingRow) { - const [updatedRow] = await trx - .update(userTableRows) - .set({ - data: validated.data, - updatedAt: now, - }) - .where(eq(userTableRows.id, existingRow.id)) - .returning() - - return { - row: updatedRow, 
- operation: 'update' as const, - } - } - - const [insertedRow] = await trx - .insert(userTableRows) - .values({ - id: `row_${crypto.randomUUID().replace(/-/g, '')}`, - tableId, - workspaceId: validated.workspaceId, - data: validated.data, - createdAt: now, - updatedAt: now, - createdBy: authResult.userId, - }) - .returning() - - return { - row: insertedRow, - operation: 'insert' as const, - } - }) - - logger.info( - `[${requestId}] Upserted (${upsertResult.operation}) row ${upsertResult.row.id} in table ${tableId}` + const upsertResult = await upsertRow( + { + tableId, + workspaceId: validated.workspaceId, + data: validated.data as RowData, + userId: authResult.userId, + conflictTarget: validated.conflictTarget, + }, + table, + requestId ) return NextResponse.json({ @@ -157,8 +66,14 @@ export async function POST(request: NextRequest, { params }: UpsertRouteParams) row: { id: upsertResult.row.id, data: upsertResult.row.data, - createdAt: upsertResult.row.createdAt.toISOString(), - updatedAt: upsertResult.row.updatedAt.toISOString(), + createdAt: + upsertResult.row.createdAt instanceof Date + ? upsertResult.row.createdAt.toISOString() + : upsertResult.row.createdAt, + updatedAt: + upsertResult.row.updatedAt instanceof Date + ? upsertResult.row.updatedAt.toISOString() + : upsertResult.row.updatedAt, }, operation: upsertResult.operation, message: `Row ${upsertResult.operation === 'update' ? 'updated' : 'inserted'} successfully`, @@ -172,11 +87,22 @@ export async function POST(request: NextRequest, { params }: UpsertRouteParams) ) } - logger.error(`[${requestId}] Error upserting row:`, error) - const errorMessage = error instanceof Error ? 
error.message : String(error) - const detailedError = `Failed to upsert row: ${errorMessage}` - return NextResponse.json({ error: detailedError }, { status: 500 }) + // Service layer throws descriptive errors for validation/capacity issues + if ( + errorMessage.includes('unique column') || + errorMessage.includes('Unique constraint violation') || + errorMessage.includes('conflictTarget') || + errorMessage.includes('row limit') || + errorMessage.includes('Schema validation') || + errorMessage.includes('Upsert requires') || + errorMessage.includes('Row size exceeds') + ) { + return NextResponse.json({ error: errorMessage }, { status: 400 }) + } + + logger.error(`[${requestId}] Error upserting row:`, error) + return NextResponse.json({ error: 'Failed to upsert row' }, { status: 500 }) } } diff --git a/apps/sim/app/api/table/route.ts b/apps/sim/app/api/table/route.ts index cd9aa7ff3e4..18387ea80d8 100644 --- a/apps/sim/app/api/table/route.ts +++ b/apps/sim/app/api/table/route.ts @@ -4,15 +4,15 @@ import { z } from 'zod' import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid' import { generateRequestId } from '@/lib/core/utils/request' import { - canCreateTable, createTable, getWorkspaceTableLimits, listTables, TABLE_LIMITS, type TableSchema, + type TableScope, } from '@/lib/table' import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils' -import { normalizeColumn } from './utils' +import { normalizeColumn } from '@/app/api/table/utils' const logger = createLogger('TableAPI') @@ -66,10 +66,12 @@ const CreateTableSchema = z.object({ ), }), workspaceId: z.string().min(1, 'Workspace ID is required'), + initialRowCount: z.number().int().min(0).max(100).optional(), }) const ListTablesSchema = z.object({ workspaceId: z.string().min(1, 'Workspace ID is required'), + scope: z.enum(['active', 'archived', 'all']).optional().default('active'), }) interface WorkspaceAccessResult { @@ -101,7 +103,13 @@ export async function POST(request: NextRequest) { 
return NextResponse.json({ error: 'Authentication required' }, { status: 401 }) } - const body: unknown = await request.json() + let body: unknown + try { + body = await request.json() + } catch { + return NextResponse.json({ error: 'Request body must be valid JSON' }, { status: 400 }) + } + const params = CreateTableSchema.parse(body) const { hasAccess, canWrite } = await checkWorkspaceAccess( @@ -113,22 +121,7 @@ export async function POST(request: NextRequest) { return NextResponse.json({ error: 'Access denied' }, { status: 403 }) } - // Check billing plan limits - const existingTables = await listTables(params.workspaceId) - const { canCreate, maxTables } = await canCreateTable(params.workspaceId, existingTables.length) - - if (!canCreate) { - return NextResponse.json( - { - error: `Workspace has reached the maximum table limit (${maxTables}) for your plan. Please upgrade to create more tables.`, - }, - { status: 403 } - ) - } - - // Get plan-based row limits const planLimits = await getWorkspaceTableLimits(params.workspaceId) - const maxRowsPerTable = planLimits.maxRowsPerTable const normalizedSchema: TableSchema = { columns: params.schema.columns.map(normalizeColumn), @@ -141,7 +134,9 @@ export async function POST(request: NextRequest) { schema: normalizedSchema, workspaceId: params.workspaceId, userId: authResult.userId, - maxRows: maxRowsPerTable, + maxRows: planLimits.maxRowsPerTable, + maxTables: planLimits.maxTables, + initialRowCount: params.initialRowCount, }, requestId ) @@ -153,7 +148,9 @@ export async function POST(request: NextRequest) { id: table.id, name: table.name, description: table.description, - schema: table.schema, + schema: { + columns: (table.schema as TableSchema).columns.map(normalizeColumn), + }, rowCount: table.rowCount, maxRows: table.maxRows, createdAt: @@ -177,11 +174,13 @@ export async function POST(request: NextRequest) { } if (error instanceof Error) { + if (error.message.includes('maximum table limit')) { + return 
NextResponse.json({ error: error.message }, { status: 403 }) + } if ( error.message.includes('Invalid table name') || error.message.includes('Invalid schema') || - error.message.includes('already exists') || - error.message.includes('maximum table limit') + error.message.includes('already exists') ) { return NextResponse.json({ error: error.message }, { status: 400 }) } @@ -204,8 +203,9 @@ export async function GET(request: NextRequest) { const { searchParams } = new URL(request.url) const workspaceId = searchParams.get('workspaceId') + const scope = searchParams.get('scope') - const validation = ListTablesSchema.safeParse({ workspaceId }) + const validation = ListTablesSchema.safeParse({ workspaceId, scope }) if (!validation.success) { return NextResponse.json( { error: 'Validation error', details: validation.error.errors }, @@ -221,7 +221,7 @@ export async function GET(request: NextRequest) { return NextResponse.json({ error: 'Access denied' }, { status: 403 }) } - const tables = await listTables(params.workspaceId) + const tables = await listTables(params.workspaceId, { scope: params.scope as TableScope }) logger.info(`[${requestId}] Listed ${tables.length} tables in workspace ${params.workspaceId}`) @@ -231,10 +231,15 @@ export async function GET(request: NextRequest) { tables: tables.map((t) => { const schemaData = t.schema as TableSchema return { - ...t, + id: t.id, + name: t.name, + description: t.description, schema: { columns: schemaData.columns.map(normalizeColumn), }, + rowCount: t.rowCount, + maxRows: t.maxRows, + createdBy: t.createdBy, createdAt: t.createdAt instanceof Date ? 
t.createdAt.toISOString() : String(t.createdAt), updatedAt: diff --git a/apps/sim/app/api/table/utils.ts b/apps/sim/app/api/table/utils.ts index 5aee66315a3..091fc9f8985 100644 --- a/apps/sim/app/api/table/utils.ts +++ b/apps/sim/app/api/table/utils.ts @@ -1,7 +1,8 @@ import { createLogger } from '@sim/logger' import { NextResponse } from 'next/server' +import { z } from 'zod' import type { ColumnDefinition, TableDefinition } from '@/lib/table' -import { getTableById } from '@/lib/table' +import { COLUMN_TYPES, getTableById } from '@/lib/table' import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils' const logger = createLogger('TableUtils') @@ -154,6 +155,37 @@ export function serverErrorResponse(message = 'Internal server error') { return errorResponse(message, 500) } +const columnTypeEnum = z.enum( + COLUMN_TYPES as unknown as [(typeof COLUMN_TYPES)[number], ...(typeof COLUMN_TYPES)[number][]] +) + +export const CreateColumnSchema = z.object({ + workspaceId: z.string().min(1, 'Workspace ID is required'), + column: z.object({ + name: z.string().min(1, 'Column name is required'), + type: columnTypeEnum, + required: z.boolean().optional(), + unique: z.boolean().optional(), + position: z.number().int().min(0).optional(), + }), +}) + +export const UpdateColumnSchema = z.object({ + workspaceId: z.string().min(1, 'Workspace ID is required'), + columnName: z.string().min(1, 'Column name is required'), + updates: z.object({ + name: z.string().min(1).optional(), + type: columnTypeEnum.optional(), + required: z.boolean().optional(), + unique: z.boolean().optional(), + }), +}) + +export const DeleteColumnSchema = z.object({ + workspaceId: z.string().min(1, 'Workspace ID is required'), + columnName: z.string().min(1, 'Column name is required'), +}) + export function normalizeColumn(col: ColumnDefinition): ColumnDefinition { return { name: col.name, diff --git a/apps/sim/app/api/templates/[id]/route.ts b/apps/sim/app/api/templates/[id]/route.ts index 
2ea215566e2..260b64f582b 100644 --- a/apps/sim/app/api/templates/[id]/route.ts +++ b/apps/sim/app/api/templates/[id]/route.ts @@ -7,6 +7,7 @@ import { z } from 'zod' import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log' import { getSession } from '@/lib/auth' import { generateRequestId } from '@/lib/core/utils/request' +import { canAccessTemplate } from '@/lib/templates/permissions' import { extractRequiredCredentials, sanitizeCredentials, @@ -24,6 +25,12 @@ export async function GET(request: NextRequest, { params }: { params: Promise<{ try { const session = await getSession() + const access = await canAccessTemplate(id, session?.user?.id) + if (!access.allowed || !access.template) { + logger.warn(`[${requestId}] Template not found: ${id}`) + return NextResponse.json({ error: 'Template not found' }, { status: 404 }) + } + const result = await db .select({ template: templates, @@ -34,21 +41,12 @@ export async function GET(request: NextRequest, { params }: { params: Promise<{ .where(eq(templates.id, id)) .limit(1) - if (result.length === 0) { - logger.warn(`[${requestId}] Template not found: ${id}`) - return NextResponse.json({ error: 'Template not found' }, { status: 404 }) - } - const { template, creator } = result[0] const templateWithCreator = { ...template, creator: creator || undefined, } - if (!session?.user?.id && template.status !== 'approved') { - return NextResponse.json({ error: 'Template not found' }, { status: 404 }) - } - let isStarred = false if (session?.user?.id) { const { templateStars } = await import('@sim/db/schema') diff --git a/apps/sim/app/api/templates/[id]/use/route.ts b/apps/sim/app/api/templates/[id]/use/route.ts index b08d6dfb8fd..ecbbb850dd3 100644 --- a/apps/sim/app/api/templates/[id]/use/route.ts +++ b/apps/sim/app/api/templates/[id]/use/route.ts @@ -7,10 +7,13 @@ import { v4 as uuidv4 } from 'uuid' import { getSession } from '@/lib/auth' import { generateRequestId } from '@/lib/core/utils/request' import { 
getInternalApiBaseUrl } from '@/lib/core/utils/urls' +import { canAccessTemplate, verifyTemplateOwnership } from '@/lib/templates/permissions' import { type RegenerateStateInput, regenerateWorkflowStateIds, } from '@/lib/workflows/persistence/utils' +import { deduplicateWorkflowName } from '@/lib/workflows/utils' +import { getUserEntityPermissions, getWorkspaceById } from '@/lib/workspaces/permissions/utils' const logger = createLogger('TemplateUseAPI') @@ -44,11 +47,37 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{ return NextResponse.json({ error: 'Workspace ID is required' }, { status: 400 }) } + const workspace = await getWorkspaceById(workspaceId) + if (!workspace) { + return NextResponse.json({ error: 'Workspace not found' }, { status: 404 }) + } + + const permission = await getUserEntityPermissions(session.user.id, 'workspace', workspaceId) + if (permission !== 'admin' && permission !== 'write') { + return NextResponse.json({ error: 'Forbidden' }, { status: 403 }) + } + logger.debug( `[${requestId}] Using template: ${id}, user: ${session.user.id}, workspace: ${workspaceId}, connect: ${connectToTemplate}` ) // Get the template + const templateAccess = await canAccessTemplate(id, session.user.id) + if (!templateAccess.allowed) { + logger.warn(`[${requestId}] Template not found: ${id}`) + return NextResponse.json({ error: 'Template not found' }, { status: 404 }) + } + + if (connectToTemplate) { + const ownership = await verifyTemplateOwnership(id, session.user.id, 'admin') + if (!ownership.authorized) { + return NextResponse.json( + { error: ownership.error || 'Access denied' }, + { status: ownership.status || 403 } + ) + } + } + const template = await db .select({ id: templates.id, @@ -61,11 +90,6 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{ .where(eq(templates.id, id)) .limit(1) - if (template.length === 0) { - logger.warn(`[${requestId}] Template not found: ${id}`) - return 
NextResponse.json({ error: 'Template not found' }, { status: 404 }) - } - const templateData = template[0] // Create a new workflow ID @@ -86,14 +110,16 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{ return mapped })() - // Step 1: Create the workflow record (like imports do) + const rawName = + connectToTemplate && !templateData.workflowId + ? templateData.name + : `${templateData.name} (copy)` + const dedupedName = await deduplicateWorkflowName(rawName, workspaceId, null) + await db.insert(workflow).values({ id: newWorkflowId, workspaceId: workspaceId, - name: - connectToTemplate && !templateData.workflowId - ? templateData.name - : `${templateData.name} (copy)`, + name: dedupedName, description: (templateData.details as TemplateDetails | null)?.tagline || null, userId: session.user.id, variables: remappedVariables, // Remap variable IDs and workflowId for the new workflow diff --git a/apps/sim/app/api/templates/route.ts b/apps/sim/app/api/templates/route.ts index 55628bfc7cf..e12cc47ad9a 100644 --- a/apps/sim/app/api/templates/route.ts +++ b/apps/sim/app/api/templates/route.ts @@ -14,11 +14,12 @@ import { z } from 'zod' import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log' import { getSession } from '@/lib/auth' import { generateRequestId } from '@/lib/core/utils/request' -import { verifyEffectiveSuperUser } from '@/lib/templates/permissions' +import { canAccessTemplate, verifyEffectiveSuperUser } from '@/lib/templates/permissions' import { extractRequiredCredentials, sanitizeCredentials, } from '@/lib/workflows/credentials/credential-extractor' +import { authorizeWorkflowByWorkspacePermission } from '@/lib/workflows/utils' const logger = createLogger('TemplatesAPI') @@ -79,12 +80,45 @@ export async function GET(request: NextRequest) { // When fetching by workflowId, we want to get the template regardless of status // This is used by the deploy modal to check if a template exists if (params.workflowId) 
{ + const authorization = await authorizeWorkflowByWorkspacePermission({ + workflowId: params.workflowId, + userId: session.user.id, + action: 'write', + }) + if (!authorization.allowed) { + return NextResponse.json( + { + data: [], + pagination: { + total: 0, + limit: params.limit, + offset: params.offset, + page: 1, + totalPages: 0, + }, + }, + { status: 200 } + ) + } conditions.push(eq(templates.workflowId, params.workflowId)) - // Don't apply status filter when fetching by workflowId - we want to show - // the template to its owner even if it's pending } else { // Apply status filter - only approved templates for non-super users if (params.status) { + if (!isSuperUser && params.status !== 'approved') { + return NextResponse.json( + { + data: [], + pagination: { + total: 0, + limit: params.limit, + offset: params.offset, + page: 1, + totalPages: 0, + }, + }, + { status: 200 } + ) + } conditions.push(eq(templates.status, params.status)) } else if (!isSuperUser || !params.includeAllStatuses) { // Non-super users and super users without includeAllStatuses flag see only approved templates @@ -145,16 +179,33 @@ export async function GET(request: NextRequest) { const total = totalCount[0]?.count || 0 - logger.info(`[${requestId}] Successfully retrieved ${results.length} templates`) + const visibleResults = + params.workflowId && !isSuperUser + ? ( + await Promise.all( + results.map(async (template) => { + if (template.status === 'approved') { + return template + } + const access = await canAccessTemplate(template.id, session.user.id) + return access.allowed ? template : null + }) + ) + ).filter((template): template is (typeof results)[number] => template !== null) + : results + + logger.info(`[${requestId}] Successfully retrieved ${visibleResults.length} templates`) return NextResponse.json({ - data: results, + data: visibleResults, pagination: { - total, + total: params.workflowId && !isSuperUser ? 
visibleResults.length : total, limit: params.limit, offset: params.offset, page: Math.floor(params.offset / params.limit) + 1, - totalPages: Math.ceil(total / params.limit), + totalPages: Math.ceil( + (params.workflowId && !isSuperUser ? visibleResults.length : total) / params.limit + ), }, }) } catch (error: any) { @@ -185,18 +236,25 @@ export async function POST(request: NextRequest) { const body = await request.json() const data = CreateTemplateSchema.parse(body) - // Verify the workflow exists and belongs to the user - const workflowExists = await db - .select({ id: workflow.id }) - .from(workflow) - .where(eq(workflow.id, data.workflowId)) - .limit(1) + const workflowAuthorization = await authorizeWorkflowByWorkspacePermission({ + workflowId: data.workflowId, + userId: session.user.id, + action: 'write', + }) - if (workflowExists.length === 0) { + if (!workflowAuthorization.workflow) { logger.warn(`[${requestId}] Workflow not found: ${data.workflowId}`) return NextResponse.json({ error: 'Workflow not found' }, { status: 404 }) } + if (!workflowAuthorization.allowed) { + logger.warn(`[${requestId}] User denied permission to template workflow ${data.workflowId}`) + return NextResponse.json( + { error: workflowAuthorization.message || 'Access denied' }, + { status: workflowAuthorization.status || 403 } + ) + } + const { verifyCreatorPermission } = await import('@/lib/templates/permissions') const { hasPermission, error: permissionError } = await verifyCreatorPermission( session.user.id, diff --git a/apps/sim/app/api/users/me/api-keys/[id]/route.ts b/apps/sim/app/api/users/me/api-keys/[id]/route.ts index e9344b86dd2..d5a88315442 100644 --- a/apps/sim/app/api/users/me/api-keys/[id]/route.ts +++ b/apps/sim/app/api/users/me/api-keys/[id]/route.ts @@ -33,7 +33,7 @@ export async function DELETE( // Delete the API key, ensuring it belongs to the current user const result = await db .delete(apiKey) - .where(and(eq(apiKey.id, keyId), eq(apiKey.userId, userId))) + 
.where(and(eq(apiKey.id, keyId), eq(apiKey.userId, userId), eq(apiKey.type, 'personal'))) .returning({ id: apiKey.id, name: apiKey.name }) if (!result.length) { diff --git a/apps/sim/app/api/users/me/settings/route.ts b/apps/sim/app/api/users/me/settings/route.ts index c8de2b05685..78db186d483 100644 --- a/apps/sim/app/api/users/me/settings/route.ts +++ b/apps/sim/app/api/users/me/settings/route.ts @@ -31,7 +31,7 @@ const SettingsSchema = z.object({ }) const defaultSettings = { - theme: 'dark', + theme: 'system', autoConnect: true, telemetryEnabled: true, emailPreferences: {}, diff --git a/apps/sim/app/api/users/me/usage-logs/route.ts b/apps/sim/app/api/users/me/usage-logs/route.ts index 038cf2ece37..e95b6fc03ae 100644 --- a/apps/sim/app/api/users/me/usage-logs/route.ts +++ b/apps/sim/app/api/users/me/usage-logs/route.ts @@ -3,6 +3,7 @@ import { type NextRequest, NextResponse } from 'next/server' import { z } from 'zod' import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid' import { getUserUsageLogs, type UsageLogSource } from '@/lib/billing/core/usage-log' +import { dollarsToCredits } from '@/lib/billing/credits/conversion' const logger = createLogger('UsageLogsAPI') @@ -78,6 +79,16 @@ export async function GET(req: NextRequest) { cursor, }) + const logsWithCredits = result.logs.map((log) => ({ + ...log, + creditCost: dollarsToCredits(log.cost), + })) + + const bySourceCredits: Record = {} + for (const [src, cost] of Object.entries(result.summary.bySource)) { + bySourceCredits[src] = dollarsToCredits(cost) + } + logger.debug('Retrieved usage logs', { userId, source, @@ -88,7 +99,13 @@ export async function GET(req: NextRequest) { return NextResponse.json({ success: true, - ...result, + logs: logsWithCredits, + summary: { + ...result.summary, + totalCostCredits: dollarsToCredits(result.summary.totalCost), + bySourceCredits, + }, + pagination: result.pagination, }) } catch (error) { logger.error('Failed to get usage logs', { diff --git 
a/apps/sim/app/api/v1/admin/workflows/[id]/deploy/route.ts b/apps/sim/app/api/v1/admin/workflows/[id]/deploy/route.ts index 3eab0374dbf..9a6eeba491b 100644 --- a/apps/sim/app/api/v1/admin/workflows/[id]/deploy/route.ts +++ b/apps/sim/app/api/v1/admin/workflows/[id]/deploy/route.ts @@ -1,4 +1,4 @@ -import { db, workflow, workflowDeploymentVersion } from '@sim/db' +import { db, workflowDeploymentVersion } from '@sim/db' import { createLogger } from '@sim/logger' import { and, eq } from 'drizzle-orm' import { generateRequestId } from '@/lib/core/utils/request' @@ -8,7 +8,9 @@ import { restorePreviousVersionWebhooks, saveTriggerWebhooksForDeploy, } from '@/lib/webhooks/deploy' +import { getActiveWorkflowRecord } from '@/lib/workflows/active-context' import { + activateWorkflowVersionById, deployWorkflow, loadWorkflowFromNormalizedTables, undeployWorkflow, @@ -40,11 +42,7 @@ export const POST = withAdminAuthParams(async (request, context) => const requestId = generateRequestId() try { - const [workflowRecord] = await db - .select() - .from(workflow) - .where(eq(workflow.id, workflowId)) - .limit(1) + const workflowRecord = await getActiveWorkflowRecord(workflowId) if (!workflowRecord) { return notFoundResponse('Workflow') @@ -72,6 +70,27 @@ export const POST = withAdminAuthParams(async (request, context) => .limit(1) const previousVersionId = currentActiveVersion?.id + const rollbackDeployment = async () => { + if (previousVersionId) { + await restorePreviousVersionWebhooks({ + request, + workflow: workflowData, + userId: workflowRecord.userId, + previousVersionId, + requestId, + }) + const reactivateResult = await activateWorkflowVersionById({ + workflowId, + deploymentVersionId: previousVersionId, + }) + if (reactivateResult.success) { + return + } + } + + await undeployWorkflow({ workflowId }) + } + const deployResult = await deployWorkflow({ workflowId, deployedBy: ADMIN_ACTOR_ID, @@ -107,7 +126,7 @@ export const POST = withAdminAuthParams(async (request, context) 
=> requestId, deploymentVersionId: deployResult.deploymentVersionId, }) - await undeployWorkflow({ workflowId }) + await rollbackDeployment() return internalErrorResponse( triggerSaveResult.error?.message || 'Failed to sync trigger configuration' ) @@ -129,16 +148,7 @@ export const POST = withAdminAuthParams(async (request, context) => requestId, deploymentVersionId: deployResult.deploymentVersionId, }) - if (previousVersionId) { - await restorePreviousVersionWebhooks({ - request, - workflow: workflowData, - userId: workflowRecord.userId, - previousVersionId, - requestId, - }) - } - await undeployWorkflow({ workflowId }) + await rollbackDeployment() return internalErrorResponse(scheduleResult.error || 'Failed to create schedule') } @@ -186,27 +196,23 @@ export const DELETE = withAdminAuthParams(async (request, context) const requestId = generateRequestId() try { - const [workflowRecord] = await db - .select() - .from(workflow) - .where(eq(workflow.id, workflowId)) - .limit(1) + const workflowRecord = await getActiveWorkflowRecord(workflowId) if (!workflowRecord) { return notFoundResponse('Workflow') } + const result = await undeployWorkflow({ workflowId }) + if (!result.success) { + return internalErrorResponse(result.error || 'Failed to undeploy workflow') + } + await cleanupWebhooksForWorkflow( workflowId, workflowRecord as Record, requestId ) - const result = await undeployWorkflow({ workflowId }) - if (!result.success) { - return internalErrorResponse(result.error || 'Failed to undeploy workflow') - } - await removeMcpToolsForWorkflow(workflowId, requestId) logger.info(`Admin API: Undeployed workflow ${workflowId}`) diff --git a/apps/sim/app/api/v1/admin/workflows/[id]/route.ts b/apps/sim/app/api/v1/admin/workflows/[id]/route.ts index ca596d6afd8..ad8644aa49a 100644 --- a/apps/sim/app/api/v1/admin/workflows/[id]/route.ts +++ b/apps/sim/app/api/v1/admin/workflows/[id]/route.ts @@ -13,10 +13,12 @@ */ import { db } from '@sim/db' -import { workflow, 
workflowBlocks, workflowEdges, workflowSchedule } from '@sim/db/schema' +import { templates, workflowBlocks, workflowEdges } from '@sim/db/schema' import { createLogger } from '@sim/logger' import { count, eq } from 'drizzle-orm' import { NextResponse } from 'next/server' +import { getActiveWorkflowRecord } from '@/lib/workflows/active-context' +import { archiveWorkflow } from '@/lib/workflows/lifecycle' import { withAdminAuthParams } from '@/app/api/v1/admin/middleware' import { internalErrorResponse, @@ -35,11 +37,7 @@ export const GET = withAdminAuthParams(async (request, context) => const { id: workflowId } = await context.params try { - const [workflowData] = await db - .select() - .from(workflow) - .where(eq(workflow.id, workflowId)) - .limit(1) + const workflowData = await getActiveWorkflowRecord(workflowId) if (!workflowData) { return notFoundResponse('Workflow') @@ -75,24 +73,16 @@ export const DELETE = withAdminAuthParams(async (request, context) const { id: workflowId } = await context.params try { - const [workflowData] = await db - .select({ id: workflow.id, name: workflow.name }) - .from(workflow) - .where(eq(workflow.id, workflowId)) - .limit(1) + const workflowData = await getActiveWorkflowRecord(workflowId) if (!workflowData) { return notFoundResponse('Workflow') } - await db.transaction(async (tx) => { - await Promise.all([ - tx.delete(workflowBlocks).where(eq(workflowBlocks.workflowId, workflowId)), - tx.delete(workflowEdges).where(eq(workflowEdges.workflowId, workflowId)), - tx.delete(workflowSchedule).where(eq(workflowSchedule.workflowId, workflowId)), - ]) + await db.update(templates).set({ workflowId: null }).where(eq(templates.workflowId, workflowId)) - await tx.delete(workflow).where(eq(workflow.id, workflowId)) + await archiveWorkflow(workflowId, { + requestId: `admin-workflow-${workflowId}`, }) logger.info(`Admin API: Deleted workflow ${workflowId} (${workflowData.name})`) diff --git 
a/apps/sim/app/api/v1/admin/workflows/[id]/versions/[versionId]/activate/route.ts b/apps/sim/app/api/v1/admin/workflows/[id]/versions/[versionId]/activate/route.ts index a1406ca8300..1824c6508f4 100644 --- a/apps/sim/app/api/v1/admin/workflows/[id]/versions/[versionId]/activate/route.ts +++ b/apps/sim/app/api/v1/admin/workflows/[id]/versions/[versionId]/activate/route.ts @@ -1,9 +1,10 @@ -import { db, workflow, workflowDeploymentVersion } from '@sim/db' +import { db, workflowDeploymentVersion } from '@sim/db' import { createLogger } from '@sim/logger' import { and, eq } from 'drizzle-orm' import { generateRequestId } from '@/lib/core/utils/request' import { syncMcpToolsForWorkflow } from '@/lib/mcp/workflow-mcp-sync' import { restorePreviousVersionWebhooks, saveTriggerWebhooksForDeploy } from '@/lib/webhooks/deploy' +import { getActiveWorkflowRecord } from '@/lib/workflows/active-context' import { activateWorkflowVersion } from '@/lib/workflows/persistence/utils' import { cleanupDeploymentVersion, @@ -31,11 +32,7 @@ export const POST = withAdminAuthParams(async (request, context) => const { id: workflowId, versionId } = await context.params try { - const [workflowRecord] = await db - .select() - .from(workflow) - .where(eq(workflow.id, workflowId)) - .limit(1) + const workflowRecord = await getActiveWorkflowRecord(workflowId) if (!workflowRecord) { return notFoundResponse('Workflow') diff --git a/apps/sim/app/api/v1/admin/workflows/[id]/versions/route.ts b/apps/sim/app/api/v1/admin/workflows/[id]/versions/route.ts index 004e4c15b07..846f4c7f48f 100644 --- a/apps/sim/app/api/v1/admin/workflows/[id]/versions/route.ts +++ b/apps/sim/app/api/v1/admin/workflows/[id]/versions/route.ts @@ -1,6 +1,5 @@ -import { db, workflow } from '@sim/db' import { createLogger } from '@sim/logger' -import { eq } from 'drizzle-orm' +import { getActiveWorkflowRecord } from '@/lib/workflows/active-context' import { listWorkflowVersions } from '@/lib/workflows/persistence/utils' import { 
withAdminAuthParams } from '@/app/api/v1/admin/middleware' import { @@ -20,11 +19,7 @@ export const GET = withAdminAuthParams(async (request, context) => const { id: workflowId } = await context.params try { - const [workflowRecord] = await db - .select({ id: workflow.id }) - .from(workflow) - .where(eq(workflow.id, workflowId)) - .limit(1) + const workflowRecord = await getActiveWorkflowRecord(workflowId) if (!workflowRecord) { return notFoundResponse('Workflow') diff --git a/apps/sim/app/api/v1/admin/workflows/import/route.ts b/apps/sim/app/api/v1/admin/workflows/import/route.ts index 7c3dd58ad69..d5907ce3995 100644 --- a/apps/sim/app/api/v1/admin/workflows/import/route.ts +++ b/apps/sim/app/api/v1/admin/workflows/import/route.ts @@ -17,10 +17,11 @@ import { db } from '@sim/db' import { workflow, workspace } from '@sim/db/schema' import { createLogger } from '@sim/logger' -import { eq } from 'drizzle-orm' +import { and, eq, isNull } from 'drizzle-orm' import { NextResponse } from 'next/server' import { parseWorkflowJson } from '@/lib/workflows/operations/import-export' import { saveWorkflowToNormalizedTables } from '@/lib/workflows/persistence/utils' +import { deduplicateWorkflowName } from '@/lib/workflows/utils' import { withAdminAuth } from '@/app/api/v1/admin/middleware' import { badRequestResponse, @@ -58,7 +59,7 @@ export const POST = withAdminAuth(async (request) => { const [workspaceData] = await db .select({ id: workspace.id, ownerId: workspace.ownerId }) .from(workspace) - .where(eq(workspace.id, workspaceId)) + .where(and(eq(workspace.id, workspaceId), isNull(workspace.archivedAt))) .limit(1) if (!workspaceData) { @@ -93,13 +94,14 @@ export const POST = withAdminAuth(async (request) => { const workflowId = crypto.randomUUID() const now = new Date() + const dedupedName = await deduplicateWorkflowName(workflowName, workspaceId, folderId || null) await db.insert(workflow).values({ id: workflowId, userId: workspaceData.ownerId, workspaceId, folderId: 
folderId || null, - name: workflowName, + name: dedupedName, description: workflowDescription, color: workflowColor, lastSynced: now, @@ -136,12 +138,12 @@ export const POST = withAdminAuth(async (request) => { } logger.info( - `Admin API: Imported workflow ${workflowId} (${workflowName}) into workspace ${workspaceId}` + `Admin API: Imported workflow ${workflowId} (${dedupedName}) into workspace ${workspaceId}` ) const response: ImportSuccessResponse = { workflowId, - name: workflowName, + name: dedupedName, success: true, } diff --git a/apps/sim/app/api/v1/admin/workspaces/[id]/import/route.ts b/apps/sim/app/api/v1/admin/workspaces/[id]/import/route.ts index 6bb6a4db66c..15830d2e6dd 100644 --- a/apps/sim/app/api/v1/admin/workspaces/[id]/import/route.ts +++ b/apps/sim/app/api/v1/admin/workspaces/[id]/import/route.ts @@ -24,7 +24,7 @@ */ import { db } from '@sim/db' -import { workflow, workflowFolder, workspace } from '@sim/db/schema' +import { workflow, workflowFolder } from '@sim/db/schema' import { createLogger } from '@sim/logger' import { eq } from 'drizzle-orm' import { NextResponse } from 'next/server' @@ -34,6 +34,8 @@ import { parseWorkflowJson, } from '@/lib/workflows/operations/import-export' import { saveWorkflowToNormalizedTables } from '@/lib/workflows/persistence/utils' +import { deduplicateWorkflowName } from '@/lib/workflows/utils' +import { getWorkspaceWithOwner } from '@/lib/workspaces/permissions/utils' import { withAdminAuthParams } from '@/app/api/v1/admin/middleware' import { badRequestResponse, @@ -67,11 +69,7 @@ export const POST = withAdminAuthParams(async (request, context) => const rootFolderName = url.searchParams.get('rootFolderName') try { - const [workspaceData] = await db - .select({ id: workspace.id, ownerId: workspace.ownerId }) - .from(workspace) - .where(eq(workspace.id, workspaceId)) - .limit(1) + const workspaceData = await getWorkspaceWithOwner(workspaceId) if (!workspaceData) { return notFoundResponse('Workspace') @@ -238,13 
+236,14 @@ async function importSingleWorkflow( const { color: workflowColor } = extractWorkflowMetadata(parsedContent) const workflowId = crypto.randomUUID() const now = new Date() + const dedupedName = await deduplicateWorkflowName(workflowName, workspaceId, targetFolderId) await db.insert(workflow).values({ id: workflowId, userId: ownerId, workspaceId, folderId: targetFolderId, - name: workflowName, + name: dedupedName, description: workflowData.metadata?.description || 'Imported via Admin API', color: workflowColor, lastSynced: now, @@ -261,7 +260,7 @@ async function importSingleWorkflow( await db.delete(workflow).where(eq(workflow.id, workflowId)) return { workflowId: '', - name: workflowName, + name: dedupedName, success: false, error: `Failed to save state: ${saveResult.error}`, } @@ -287,7 +286,7 @@ async function importSingleWorkflow( return { workflowId, - name: workflowName, + name: dedupedName, success: true, } } catch (error) { diff --git a/apps/sim/app/api/v1/admin/workspaces/[id]/members/[memberId]/route.ts b/apps/sim/app/api/v1/admin/workspaces/[id]/members/[memberId]/route.ts index 30afdda571d..07da5734245 100644 --- a/apps/sim/app/api/v1/admin/workspaces/[id]/members/[memberId]/route.ts +++ b/apps/sim/app/api/v1/admin/workspaces/[id]/members/[memberId]/route.ts @@ -22,10 +22,11 @@ */ import { db } from '@sim/db' -import { permissions, user, workspace } from '@sim/db/schema' +import { permissions, user } from '@sim/db/schema' import { createLogger } from '@sim/logger' import { and, eq } from 'drizzle-orm' import { revokeWorkspaceCredentialMemberships } from '@/lib/credentials/access' +import { getWorkspaceById } from '@/lib/workspaces/permissions/utils' import { withAdminAuthParams } from '@/app/api/v1/admin/middleware' import { badRequestResponse, @@ -46,11 +47,7 @@ export const GET = withAdminAuthParams(async (_, context) => { const { id: workspaceId, memberId } = await context.params try { - const [workspaceData] = await db - .select({ id: 
workspace.id }) - .from(workspace) - .where(eq(workspace.id, workspaceId)) - .limit(1) + const workspaceData = await getWorkspaceById(workspaceId) if (!workspaceData) { return notFoundResponse('Workspace') @@ -113,11 +110,7 @@ export const PATCH = withAdminAuthParams(async (request, context) = return badRequestResponse('permissions must be "admin", "write", or "read"') } - const [workspaceData] = await db - .select({ id: workspace.id }) - .from(workspace) - .where(eq(workspace.id, workspaceId)) - .limit(1) + const workspaceData = await getWorkspaceById(workspaceId) if (!workspaceData) { return notFoundResponse('Workspace') @@ -185,11 +178,7 @@ export const DELETE = withAdminAuthParams(async (_, context) => { const { id: workspaceId, memberId } = await context.params try { - const [workspaceData] = await db - .select({ id: workspace.id }) - .from(workspace) - .where(eq(workspace.id, workspaceId)) - .limit(1) + const workspaceData = await getWorkspaceById(workspaceId) if (!workspaceData) { return notFoundResponse('Workspace') diff --git a/apps/sim/app/api/v1/admin/workspaces/[id]/members/route.ts b/apps/sim/app/api/v1/admin/workspaces/[id]/members/route.ts index 78298feb490..78b70b7d52e 100644 --- a/apps/sim/app/api/v1/admin/workspaces/[id]/members/route.ts +++ b/apps/sim/app/api/v1/admin/workspaces/[id]/members/route.ts @@ -32,10 +32,11 @@ import crypto from 'crypto' import { db } from '@sim/db' -import { permissions, user, workspace, workspaceEnvironment } from '@sim/db/schema' +import { permissions, user, workspaceEnvironment } from '@sim/db/schema' import { createLogger } from '@sim/logger' import { and, count, eq } from 'drizzle-orm' import { syncWorkspaceEnvCredentials } from '@/lib/credentials/environment' +import { getWorkspaceById } from '@/lib/workspaces/permissions/utils' import { withAdminAuthParams } from '@/app/api/v1/admin/middleware' import { badRequestResponse, @@ -62,11 +63,7 @@ export const GET = withAdminAuthParams(async (request, context) => 
const { limit, offset } = parsePaginationParams(url) try { - const [workspaceData] = await db - .select({ id: workspace.id }) - .from(workspace) - .where(eq(workspace.id, workspaceId)) - .limit(1) + const workspaceData = await getWorkspaceById(workspaceId) if (!workspaceData) { return notFoundResponse('Workspace') @@ -134,11 +131,7 @@ export const POST = withAdminAuthParams(async (request, context) => return badRequestResponse('permissions must be "admin", "write", or "read"') } - const [workspaceData] = await db - .select({ id: workspace.id, name: workspace.name }) - .from(workspace) - .where(eq(workspace.id, workspaceId)) - .limit(1) + const workspaceData = await getWorkspaceById(workspaceId) if (!workspaceData) { return notFoundResponse('Workspace') @@ -275,11 +268,7 @@ export const DELETE = withAdminAuthParams(async (request, context) return badRequestResponse('userId query parameter is required') } - const [workspaceData] = await db - .select({ id: workspace.id }) - .from(workspace) - .where(eq(workspace.id, workspaceId)) - .limit(1) + const workspaceData = await getWorkspaceById(workspaceId) if (!workspaceData) { return notFoundResponse('Workspace') diff --git a/apps/sim/app/api/v1/admin/workspaces/[id]/workflows/route.ts b/apps/sim/app/api/v1/admin/workspaces/[id]/workflows/route.ts index ea1ab87fc50..896af40d6a7 100644 --- a/apps/sim/app/api/v1/admin/workspaces/[id]/workflows/route.ts +++ b/apps/sim/app/api/v1/admin/workspaces/[id]/workflows/route.ts @@ -17,16 +17,11 @@ */ import { db } from '@sim/db' -import { - workflow, - workflowBlocks, - workflowEdges, - workflowSchedule, - workspace, -} from '@sim/db/schema' +import { workflow, workspace } from '@sim/db/schema' import { createLogger } from '@sim/logger' -import { count, eq, inArray } from 'drizzle-orm' +import { and, count, eq, isNull } from 'drizzle-orm' import { NextResponse } from 'next/server' +import { archiveWorkflowsForWorkspace } from '@/lib/workflows/lifecycle' import { withAdminAuthParams } 
from '@/app/api/v1/admin/middleware' import { internalErrorResponse, listResponse, notFoundResponse } from '@/app/api/v1/admin/responses' import { @@ -51,7 +46,7 @@ export const GET = withAdminAuthParams(async (request, context) => const [workspaceData] = await db .select({ id: workspace.id }) .from(workspace) - .where(eq(workspace.id, workspaceId)) + .where(and(eq(workspace.id, workspaceId), isNull(workspace.archivedAt))) .limit(1) if (!workspaceData) { @@ -59,11 +54,14 @@ export const GET = withAdminAuthParams(async (request, context) => } const [countResult, workflows] = await Promise.all([ - db.select({ total: count() }).from(workflow).where(eq(workflow.workspaceId, workspaceId)), + db + .select({ total: count() }) + .from(workflow) + .where(and(eq(workflow.workspaceId, workspaceId), isNull(workflow.archivedAt))), db .select() .from(workflow) - .where(eq(workflow.workspaceId, workspaceId)) + .where(and(eq(workflow.workspaceId, workspaceId), isNull(workflow.archivedAt))) .orderBy(workflow.name) .limit(limit) .offset(offset), @@ -91,7 +89,7 @@ export const DELETE = withAdminAuthParams(async (request, context) const [workspaceData] = await db .select({ id: workspace.id }) .from(workspace) - .where(eq(workspace.id, workspaceId)) + .where(and(eq(workspace.id, workspaceId), isNull(workspace.archivedAt))) .limit(1) if (!workspaceData) { @@ -101,27 +99,19 @@ export const DELETE = withAdminAuthParams(async (request, context) const workflowsToDelete = await db .select({ id: workflow.id }) .from(workflow) - .where(eq(workflow.workspaceId, workspaceId)) + .where(and(eq(workflow.workspaceId, workspaceId), isNull(workflow.archivedAt))) if (workflowsToDelete.length === 0) { return NextResponse.json({ success: true, deleted: 0 }) } - const workflowIds = workflowsToDelete.map((w) => w.id) - - await db.transaction(async (tx) => { - await Promise.all([ - tx.delete(workflowBlocks).where(inArray(workflowBlocks.workflowId, workflowIds)), - 
tx.delete(workflowEdges).where(inArray(workflowEdges.workflowId, workflowIds)), - tx.delete(workflowSchedule).where(inArray(workflowSchedule.workflowId, workflowIds)), - ]) - - await tx.delete(workflow).where(eq(workflow.workspaceId, workspaceId)) + const deletedCount = await archiveWorkflowsForWorkspace(workspaceId, { + requestId: `admin-workspace-${workspaceId}`, }) - logger.info(`Admin API: Deleted ${workflowIds.length} workflows from workspace ${workspaceId}`) + logger.info(`Admin API: Deleted ${deletedCount} workflows from workspace ${workspaceId}`) - return NextResponse.json({ success: true, deleted: workflowIds.length }) + return NextResponse.json({ success: true, deleted: deletedCount }) } catch (error) { logger.error('Admin API: Failed to delete workspace workflows', { error, workspaceId }) return internalErrorResponse('Failed to delete workflows') diff --git a/apps/sim/app/api/v1/copilot/chat/route.ts b/apps/sim/app/api/v1/copilot/chat/route.ts index 6a3817385be..e3acb50a10a 100644 --- a/apps/sim/app/api/v1/copilot/chat/route.ts +++ b/apps/sim/app/api/v1/copilot/chat/route.ts @@ -1,10 +1,9 @@ import { createLogger } from '@sim/logger' import { type NextRequest, NextResponse } from 'next/server' import { z } from 'zod' -import { SIM_AGENT_VERSION } from '@/lib/copilot/constants' import { COPILOT_REQUEST_MODES } from '@/lib/copilot/models' import { orchestrateCopilotStream } from '@/lib/copilot/orchestrator' -import { resolveWorkflowIdForUser } from '@/lib/workflows/utils' +import { getWorkflowById, resolveWorkflowIdForUser } from '@/lib/workflows/utils' import { authenticateV1Request } from '@/app/api/v1/auth' const logger = createLogger('CopilotHeadlessAPI') @@ -48,7 +47,8 @@ export async function POST(req: NextRequest) { const resolved = await resolveWorkflowIdForUser( auth.userId, parsed.workflowId, - parsed.workflowName + parsed.workflowName, + auth.keyType === 'workspace' ? 
auth.workspaceId : undefined ) if (!resolved) { return NextResponse.json( @@ -60,6 +60,16 @@ export async function POST(req: NextRequest) { ) } + if (auth.keyType === 'workspace' && auth.workspaceId) { + const workflow = await getWorkflowById(resolved.workflowId) + if (!workflow?.workspaceId || workflow.workspaceId !== auth.workspaceId) { + return NextResponse.json( + { success: false, error: 'API key is not authorized for this workspace' }, + { status: 403 } + ) + } + } + // Transform mode to transport mode (same as client API) // build and agent both map to 'agent' on the backend const effectiveMode = parsed.mode === 'agent' ? 'build' : parsed.mode @@ -75,8 +85,6 @@ export async function POST(req: NextRequest) { model: selectedModel, mode: transportMode, messageId: crypto.randomUUID(), - version: SIM_AGENT_VERSION, - headless: true, chatId, } @@ -84,6 +92,7 @@ export async function POST(req: NextRequest) { userId: auth.userId, workflowId: resolved.workflowId, chatId, + goRoute: '/api/mcp', autoExecuteTools: parsed.autoExecuteTools, timeout: parsed.timeout, interactive: false, @@ -93,8 +102,7 @@ export async function POST(req: NextRequest) { success: result.success, content: result.content, toolCalls: result.toolCalls, - chatId: result.chatId || chatId, // Return the chatId for conversation continuity - conversationId: result.conversationId, + chatId: result.chatId || chatId, error: result.error, }) } catch (error) { diff --git a/apps/sim/app/api/v1/files/[fileId]/route.ts b/apps/sim/app/api/v1/files/[fileId]/route.ts new file mode 100644 index 00000000000..7007053681b --- /dev/null +++ b/apps/sim/app/api/v1/files/[fileId]/route.ts @@ -0,0 +1,158 @@ +import { createLogger } from '@sim/logger' +import { type NextRequest, NextResponse } from 'next/server' +import { z } from 'zod' +import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log' +import { generateRequestId } from '@/lib/core/utils/request' +import { + deleteWorkspaceFile, + 
downloadWorkspaceFile, + getWorkspaceFile, +} from '@/lib/uploads/contexts/workspace' +import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils' +import { + checkRateLimit, + checkWorkspaceScope, + createRateLimitResponse, +} from '@/app/api/v1/middleware' + +const logger = createLogger('V1FileDetailAPI') + +export const dynamic = 'force-dynamic' +export const revalidate = 0 + +const WorkspaceIdSchema = z.object({ + workspaceId: z.string().min(1, 'workspaceId query parameter is required'), +}) + +interface FileRouteParams { + params: Promise<{ fileId: string }> +} + +/** GET /api/v1/files/[fileId] — Download file content. */ +export async function GET(request: NextRequest, { params }: FileRouteParams) { + const requestId = generateRequestId() + + try { + const rateLimit = await checkRateLimit(request, 'file-detail') + if (!rateLimit.allowed) { + return createRateLimitResponse(rateLimit) + } + + const userId = rateLimit.userId! + const { fileId } = await params + const { searchParams } = new URL(request.url) + + const validation = WorkspaceIdSchema.safeParse({ + workspaceId: searchParams.get('workspaceId'), + }) + if (!validation.success) { + return NextResponse.json( + { error: 'Validation error', details: validation.error.errors }, + { status: 400 } + ) + } + + const { workspaceId } = validation.data + + const scopeError = checkWorkspaceScope(rateLimit, workspaceId) + if (scopeError) return scopeError + + const permission = await getUserEntityPermissions(userId, 'workspace', workspaceId) + if (permission === null) { + return NextResponse.json({ error: 'Access denied' }, { status: 403 }) + } + + const fileRecord = await getWorkspaceFile(workspaceId, fileId) + if (!fileRecord) { + return NextResponse.json({ error: 'File not found' }, { status: 404 }) + } + + const buffer = await downloadWorkspaceFile(fileRecord) + + return new Response(new Uint8Array(buffer), { + status: 200, + headers: { + 'Content-Type': fileRecord.type || 
'application/octet-stream', + 'Content-Disposition': `attachment; filename="${fileRecord.name.replace(/[^\w.-]/g, '_')}"; filename*=UTF-8''${encodeURIComponent(fileRecord.name)}`, + 'Content-Length': String(buffer.length), + 'X-File-Id': fileRecord.id, + 'X-File-Name': encodeURIComponent(fileRecord.name), + 'X-Uploaded-At': + fileRecord.uploadedAt instanceof Date + ? fileRecord.uploadedAt.toISOString() + : String(fileRecord.uploadedAt), + }, + }) + } catch (error) { + logger.error(`[${requestId}] Error downloading file:`, error) + return NextResponse.json({ error: 'Failed to download file' }, { status: 500 }) + } +} + +/** DELETE /api/v1/files/[fileId] — Archive a file. */ +export async function DELETE(request: NextRequest, { params }: FileRouteParams) { + const requestId = generateRequestId() + + try { + const rateLimit = await checkRateLimit(request, 'file-detail') + if (!rateLimit.allowed) { + return createRateLimitResponse(rateLimit) + } + + const userId = rateLimit.userId! + const { fileId } = await params + const { searchParams } = new URL(request.url) + + const validation = WorkspaceIdSchema.safeParse({ + workspaceId: searchParams.get('workspaceId'), + }) + if (!validation.success) { + return NextResponse.json( + { error: 'Validation error', details: validation.error.errors }, + { status: 400 } + ) + } + + const { workspaceId } = validation.data + + const scopeError = checkWorkspaceScope(rateLimit, workspaceId) + if (scopeError) return scopeError + + const permission = await getUserEntityPermissions(userId, 'workspace', workspaceId) + if (permission === null || permission === 'read') { + return NextResponse.json({ error: 'Access denied' }, { status: 403 }) + } + + const fileRecord = await getWorkspaceFile(workspaceId, fileId) + if (!fileRecord) { + return NextResponse.json({ error: 'File not found' }, { status: 404 }) + } + + await deleteWorkspaceFile(workspaceId, fileId) + + logger.info( + `[${requestId}] Archived file: ${fileRecord.name} (${fileId}) from 
workspace ${workspaceId}` + ) + + recordAudit({ + workspaceId, + actorId: userId, + action: AuditAction.FILE_DELETED, + resourceType: AuditResourceType.FILE, + resourceId: fileId, + resourceName: fileRecord.name, + description: `Archived file "${fileRecord.name}" via API`, + request, + }) + + return NextResponse.json({ + success: true, + data: { + message: 'File archived successfully', + }, + }) + } catch (error) { + logger.error(`[${requestId}] Error deleting file:`, error) + return NextResponse.json({ error: 'Failed to delete file' }, { status: 500 }) + } +} diff --git a/apps/sim/app/api/v1/files/route.ts b/apps/sim/app/api/v1/files/route.ts new file mode 100644 index 00000000000..f9d8228e4f1 --- /dev/null +++ b/apps/sim/app/api/v1/files/route.ts @@ -0,0 +1,194 @@ +import { createLogger } from '@sim/logger' +import { type NextRequest, NextResponse } from 'next/server' +import { z } from 'zod' +import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log' +import { generateRequestId } from '@/lib/core/utils/request' +import { + getWorkspaceFile, + listWorkspaceFiles, + uploadWorkspaceFile, +} from '@/lib/uploads/contexts/workspace' +import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils' +import { + checkRateLimit, + checkWorkspaceScope, + createRateLimitResponse, +} from '@/app/api/v1/middleware' + +const logger = createLogger('V1FilesAPI') + +export const dynamic = 'force-dynamic' +export const revalidate = 0 + +const MAX_FILE_SIZE = 100 * 1024 * 1024 // 100MB + +const ListFilesSchema = z.object({ + workspaceId: z.string().min(1, 'workspaceId query parameter is required'), +}) + +/** GET /api/v1/files — List all files in a workspace. */ +export async function GET(request: NextRequest) { + const requestId = generateRequestId() + + try { + const rateLimit = await checkRateLimit(request, 'files') + if (!rateLimit.allowed) { + return createRateLimitResponse(rateLimit) + } + + const userId = rateLimit.userId! 
+ const { searchParams } = new URL(request.url) + + const validation = ListFilesSchema.safeParse({ + workspaceId: searchParams.get('workspaceId'), + }) + if (!validation.success) { + return NextResponse.json( + { error: 'Validation error', details: validation.error.errors }, + { status: 400 } + ) + } + + const { workspaceId } = validation.data + + const scopeError = checkWorkspaceScope(rateLimit, workspaceId) + if (scopeError) return scopeError + + const permission = await getUserEntityPermissions(userId, 'workspace', workspaceId) + if (permission === null) { + return NextResponse.json({ error: 'Access denied' }, { status: 403 }) + } + + const files = await listWorkspaceFiles(workspaceId) + + return NextResponse.json({ + success: true, + data: { + files: files.map((f) => ({ + id: f.id, + name: f.name, + size: f.size, + type: f.type, + key: f.key, + uploadedBy: f.uploadedBy, + uploadedAt: + f.uploadedAt instanceof Date ? f.uploadedAt.toISOString() : String(f.uploadedAt), + })), + totalCount: files.length, + }, + }) + } catch (error) { + logger.error(`[${requestId}] Error listing files:`, error) + return NextResponse.json({ error: 'Failed to list files' }, { status: 500 }) + } +} + +/** POST /api/v1/files — Upload a file to a workspace. */ +export async function POST(request: NextRequest) { + const requestId = generateRequestId() + + try { + const rateLimit = await checkRateLimit(request, 'files') + if (!rateLimit.allowed) { + return createRateLimitResponse(rateLimit) + } + + const userId = rateLimit.userId! + + let formData: FormData + try { + formData = await request.formData() + } catch { + return NextResponse.json( + { error: 'Request body must be valid multipart form data' }, + { status: 400 } + ) + } + const rawFile = formData.get('file') + const file = rawFile instanceof File ? rawFile : null + const rawWorkspaceId = formData.get('workspaceId') + const workspaceId = typeof rawWorkspaceId === 'string' ? 
rawWorkspaceId : null + + if (!workspaceId) { + return NextResponse.json({ error: 'workspaceId form field is required' }, { status: 400 }) + } + + const scopeError = checkWorkspaceScope(rateLimit, workspaceId) + if (scopeError) return scopeError + + if (!file) { + return NextResponse.json({ error: 'file form field is required' }, { status: 400 }) + } + + if (file.size > MAX_FILE_SIZE) { + return NextResponse.json( + { + error: `File size exceeds 100MB limit (${(file.size / (1024 * 1024)).toFixed(2)}MB)`, + }, + { status: 400 } + ) + } + + const permission = await getUserEntityPermissions(userId, 'workspace', workspaceId) + if (permission === null || permission === 'read') { + return NextResponse.json({ error: 'Access denied' }, { status: 403 }) + } + + const buffer = Buffer.from(await file.arrayBuffer()) + + const userFile = await uploadWorkspaceFile( + workspaceId, + userId, + buffer, + file.name, + file.type || 'application/octet-stream' + ) + + logger.info(`[${requestId}] Uploaded file: ${file.name} to workspace ${workspaceId}`) + + recordAudit({ + workspaceId, + actorId: userId, + action: AuditAction.FILE_UPLOADED, + resourceType: AuditResourceType.FILE, + resourceId: userFile.id, + resourceName: file.name, + description: `Uploaded file "${file.name}" via API`, + request, + }) + + const fileRecord = await getWorkspaceFile(workspaceId, userFile.id) + const uploadedAt = + fileRecord?.uploadedAt instanceof Date + ? fileRecord.uploadedAt.toISOString() + : fileRecord?.uploadedAt + ? String(fileRecord.uploadedAt) + : new Date().toISOString() + + return NextResponse.json({ + success: true, + data: { + file: { + id: userFile.id, + name: userFile.name, + size: userFile.size, + type: userFile.type, + key: userFile.key, + uploadedBy: userId, + uploadedAt, + }, + message: 'File uploaded successfully', + }, + }) + } catch (error) { + const errorMessage = error instanceof Error ? 
error.message : 'Failed to upload file' + const isDuplicate = errorMessage.includes('already exists') + + if (isDuplicate) { + return NextResponse.json({ error: errorMessage }, { status: 409 }) + } + + logger.error(`[${requestId}] Error uploading file:`, error) + return NextResponse.json({ error: 'Failed to upload file' }, { status: 500 }) + } +} diff --git a/apps/sim/app/api/v1/knowledge/[id]/documents/[documentId]/route.ts b/apps/sim/app/api/v1/knowledge/[id]/documents/[documentId]/route.ts new file mode 100644 index 00000000000..b69721329a4 --- /dev/null +++ b/apps/sim/app/api/v1/knowledge/[id]/documents/[documentId]/route.ts @@ -0,0 +1,182 @@ +import { db } from '@sim/db' +import { document, knowledgeConnector } from '@sim/db/schema' +import { and, eq, isNull } from 'drizzle-orm' +import { type NextRequest, NextResponse } from 'next/server' +import { z } from 'zod' +import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log' +import { deleteDocument } from '@/lib/knowledge/documents/service' +import { + authenticateRequest, + handleError, + resolveKnowledgeBase, + serializeDate, + validateSchema, +} from '@/app/api/v1/knowledge/utils' + +export const dynamic = 'force-dynamic' +export const revalidate = 0 + +interface DocumentDetailRouteParams { + params: Promise<{ id: string; documentId: string }> +} + +const WorkspaceIdSchema = z.object({ + workspaceId: z.string().min(1, 'workspaceId query parameter is required'), +}) + +/** GET /api/v1/knowledge/[id]/documents/[documentId] — Get document details. 
*/ +export async function GET(request: NextRequest, { params }: DocumentDetailRouteParams) { + const auth = await authenticateRequest(request, 'knowledge-detail') + if (auth instanceof NextResponse) return auth + const { requestId, userId, rateLimit } = auth + + try { + const { id: knowledgeBaseId, documentId } = await params + const { searchParams } = new URL(request.url) + + const validation = validateSchema(WorkspaceIdSchema, { + workspaceId: searchParams.get('workspaceId'), + }) + if (!validation.success) return validation.response + + const result = await resolveKnowledgeBase( + knowledgeBaseId, + validation.data.workspaceId, + userId, + rateLimit + ) + if (result instanceof NextResponse) return result + + const docs = await db + .select({ + id: document.id, + knowledgeBaseId: document.knowledgeBaseId, + filename: document.filename, + fileSize: document.fileSize, + mimeType: document.mimeType, + processingStatus: document.processingStatus, + processingError: document.processingError, + processingStartedAt: document.processingStartedAt, + processingCompletedAt: document.processingCompletedAt, + chunkCount: document.chunkCount, + tokenCount: document.tokenCount, + characterCount: document.characterCount, + enabled: document.enabled, + uploadedAt: document.uploadedAt, + connectorId: document.connectorId, + connectorType: knowledgeConnector.connectorType, + sourceUrl: document.sourceUrl, + }) + .from(document) + .leftJoin(knowledgeConnector, eq(document.connectorId, knowledgeConnector.id)) + .where( + and( + eq(document.id, documentId), + eq(document.knowledgeBaseId, knowledgeBaseId), + eq(document.userExcluded, false), + isNull(document.archivedAt), + isNull(document.deletedAt) + ) + ) + .limit(1) + + if (docs.length === 0) { + return NextResponse.json({ error: 'Document not found' }, { status: 404 }) + } + + const doc = docs[0] + + return NextResponse.json({ + success: true, + data: { + document: { + id: doc.id, + knowledgeBaseId: doc.knowledgeBaseId, + 
filename: doc.filename, + fileSize: doc.fileSize, + mimeType: doc.mimeType, + processingStatus: doc.processingStatus, + processingError: doc.processingError, + processingStartedAt: serializeDate(doc.processingStartedAt), + processingCompletedAt: serializeDate(doc.processingCompletedAt), + chunkCount: doc.chunkCount, + tokenCount: doc.tokenCount, + characterCount: doc.characterCount, + enabled: doc.enabled, + connectorId: doc.connectorId, + connectorType: doc.connectorType, + sourceUrl: doc.sourceUrl, + createdAt: serializeDate(doc.uploadedAt), + }, + }, + }) + } catch (error) { + return handleError(requestId, error, 'Failed to get document') + } +} + +/** DELETE /api/v1/knowledge/[id]/documents/[documentId] — Delete a document. */ +export async function DELETE(request: NextRequest, { params }: DocumentDetailRouteParams) { + const auth = await authenticateRequest(request, 'knowledge-detail') + if (auth instanceof NextResponse) return auth + const { requestId, userId, rateLimit } = auth + + try { + const { id: knowledgeBaseId, documentId } = await params + const { searchParams } = new URL(request.url) + + const validation = validateSchema(WorkspaceIdSchema, { + workspaceId: searchParams.get('workspaceId'), + }) + if (!validation.success) return validation.response + + const result = await resolveKnowledgeBase( + knowledgeBaseId, + validation.data.workspaceId, + userId, + rateLimit, + 'write' + ) + if (result instanceof NextResponse) return result + + const docs = await db + .select({ id: document.id, filename: document.filename }) + .from(document) + .where( + and( + eq(document.id, documentId), + eq(document.knowledgeBaseId, knowledgeBaseId), + eq(document.userExcluded, false), + isNull(document.archivedAt), + isNull(document.deletedAt) + ) + ) + .limit(1) + + if (docs.length === 0) { + return NextResponse.json({ error: 'Document not found' }, { status: 404 }) + } + + await deleteDocument(documentId, requestId) + + recordAudit({ + workspaceId: 
validation.data.workspaceId, + actorId: userId, + action: AuditAction.DOCUMENT_DELETED, + resourceType: AuditResourceType.DOCUMENT, + resourceId: documentId, + resourceName: docs[0].filename, + description: `Deleted document "${docs[0].filename}" from knowledge base via API`, + request, + }) + + return NextResponse.json({ + success: true, + data: { + message: 'Document deleted successfully', + }, + }) + } catch (error) { + return handleError(requestId, error, 'Failed to delete document') + } +} diff --git a/apps/sim/app/api/v1/knowledge/[id]/documents/route.ts b/apps/sim/app/api/v1/knowledge/[id]/documents/route.ts new file mode 100644 index 00000000000..193111ee20c --- /dev/null +++ b/apps/sim/app/api/v1/knowledge/[id]/documents/route.ts @@ -0,0 +1,248 @@ +import { type NextRequest, NextResponse } from 'next/server' +import { z } from 'zod' +import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log' +import { + createSingleDocument, + type DocumentData, + getDocuments, + processDocumentsWithQueue, +} from '@/lib/knowledge/documents/service' +import type { DocumentSortField, SortOrder } from '@/lib/knowledge/documents/types' +import { uploadWorkspaceFile } from '@/lib/uploads/contexts/workspace' +import { validateFileType } from '@/lib/uploads/utils/validation' +import { + authenticateRequest, + handleError, + resolveKnowledgeBase, + serializeDate, + validateSchema, +} from '@/app/api/v1/knowledge/utils' + +export const dynamic = 'force-dynamic' +export const revalidate = 0 + +const MAX_FILE_SIZE = 100 * 1024 * 1024 // 100MB + +interface DocumentsRouteParams { + params: Promise<{ id: string }> +} + +const ListDocumentsSchema = z.object({ + workspaceId: z.string().min(1, 'workspaceId query parameter is required'), + limit: z.coerce.number().int().min(1).max(100).default(50), + offset: z.coerce.number().int().min(0).default(0), + search: z.string().optional(), + enabledFilter: z.enum(['all', 'enabled', 'disabled']).default('all'), + sortBy: z + 
.enum([ + 'filename', + 'fileSize', + 'tokenCount', + 'chunkCount', + 'uploadedAt', + 'processingStatus', + 'enabled', + ]) + .default('uploadedAt'), + sortOrder: z.enum(['asc', 'desc']).default('desc'), +}) + +/** GET /api/v1/knowledge/[id]/documents — List documents in a knowledge base. */ +export async function GET(request: NextRequest, { params }: DocumentsRouteParams) { + const auth = await authenticateRequest(request, 'knowledge-detail') + if (auth instanceof NextResponse) return auth + const { requestId, userId, rateLimit } = auth + + try { + const { id: knowledgeBaseId } = await params + const { searchParams } = new URL(request.url) + + const validation = validateSchema(ListDocumentsSchema, { + workspaceId: searchParams.get('workspaceId'), + limit: searchParams.get('limit') ?? undefined, + offset: searchParams.get('offset') ?? undefined, + search: searchParams.get('search') ?? undefined, + enabledFilter: searchParams.get('enabledFilter') ?? undefined, + sortBy: searchParams.get('sortBy') ?? undefined, + sortOrder: searchParams.get('sortOrder') ?? undefined, + }) + if (!validation.success) return validation.response + + const { workspaceId, limit, offset, search, enabledFilter, sortBy, sortOrder } = validation.data + + const result = await resolveKnowledgeBase(knowledgeBaseId, workspaceId, userId, rateLimit) + if (result instanceof NextResponse) return result + + const documentsResult = await getDocuments( + knowledgeBaseId, + { + enabledFilter: enabledFilter === 'all' ? 
undefined : enabledFilter, + search, + limit, + offset, + sortBy: sortBy as DocumentSortField, + sortOrder: sortOrder as SortOrder, + }, + requestId + ) + + return NextResponse.json({ + success: true, + data: { + documents: documentsResult.documents.map((doc) => ({ + id: doc.id, + knowledgeBaseId, + filename: doc.filename, + fileSize: doc.fileSize, + mimeType: doc.mimeType, + processingStatus: doc.processingStatus, + chunkCount: doc.chunkCount, + tokenCount: doc.tokenCount, + characterCount: doc.characterCount, + enabled: doc.enabled, + createdAt: serializeDate(doc.uploadedAt), + })), + pagination: documentsResult.pagination, + }, + }) + } catch (error) { + return handleError(requestId, error, 'Failed to list documents') + } +} + +/** POST /api/v1/knowledge/[id]/documents — Upload a document to a knowledge base. */ +export async function POST(request: NextRequest, { params }: DocumentsRouteParams) { + const auth = await authenticateRequest(request, 'knowledge-detail') + if (auth instanceof NextResponse) return auth + const { requestId, userId, rateLimit } = auth + + try { + const { id: knowledgeBaseId } = await params + + let formData: FormData + try { + formData = await request.formData() + } catch { + return NextResponse.json( + { error: 'Request body must be valid multipart form data' }, + { status: 400 } + ) + } + + const rawFile = formData.get('file') + const file = rawFile instanceof File ? rawFile : null + const rawWorkspaceId = formData.get('workspaceId') + const workspaceId = typeof rawWorkspaceId === 'string' ? 
rawWorkspaceId : null + + if (!workspaceId) { + return NextResponse.json({ error: 'workspaceId form field is required' }, { status: 400 }) + } + + if (!file) { + return NextResponse.json({ error: 'file form field is required' }, { status: 400 }) + } + + if (file.size > MAX_FILE_SIZE) { + return NextResponse.json( + { + error: `File size exceeds 100MB limit (${(file.size / (1024 * 1024)).toFixed(2)}MB)`, + }, + { status: 413 } + ) + } + + const fileTypeError = validateFileType(file.name, file.type || '') + if (fileTypeError) { + return NextResponse.json({ error: fileTypeError.message }, { status: 415 }) + } + + const result = await resolveKnowledgeBase( + knowledgeBaseId, + workspaceId, + userId, + rateLimit, + 'write' + ) + if (result instanceof NextResponse) return result + + const buffer = Buffer.from(await file.arrayBuffer()) + const contentType = file.type || 'application/octet-stream' + + const uploadedFile = await uploadWorkspaceFile( + workspaceId, + userId, + buffer, + file.name, + contentType + ) + + const newDocument = await createSingleDocument( + { + filename: file.name, + fileUrl: uploadedFile.url, + fileSize: file.size, + mimeType: contentType, + }, + knowledgeBaseId, + requestId + ) + + const chunkingConfig = result.kb.chunkingConfig ?? 
{ maxSize: 1024, minSize: 100, overlap: 200 } + + const documentData: DocumentData = { + documentId: newDocument.id, + filename: file.name, + fileUrl: uploadedFile.url, + fileSize: file.size, + mimeType: contentType, + } + + processDocumentsWithQueue( + [documentData], + knowledgeBaseId, + { + chunkSize: chunkingConfig.maxSize, + minCharactersPerChunk: chunkingConfig.minSize, + chunkOverlap: chunkingConfig.overlap, + recipe: 'default', + lang: 'en', + }, + requestId + ).catch(() => { + // Processing errors are logged internally + }) + + recordAudit({ + workspaceId, + actorId: userId, + action: AuditAction.DOCUMENT_UPLOADED, + resourceType: AuditResourceType.DOCUMENT, + resourceId: newDocument.id, + resourceName: file.name, + description: `Uploaded document "${file.name}" to knowledge base via API`, + request, + }) + + return NextResponse.json({ + success: true, + data: { + document: { + id: newDocument.id, + knowledgeBaseId, + filename: newDocument.filename, + fileSize: newDocument.fileSize, + mimeType: newDocument.mimeType, + processingStatus: 'pending', + chunkCount: 0, + tokenCount: 0, + characterCount: 0, + enabled: newDocument.enabled, + createdAt: serializeDate(newDocument.uploadedAt), + }, + message: 'Document uploaded successfully. 
Processing will begin shortly.', + }, + }) + } catch (error) { + return handleError(requestId, error, 'Failed to upload document') + } +} diff --git a/apps/sim/app/api/v1/knowledge/[id]/route.ts b/apps/sim/app/api/v1/knowledge/[id]/route.ts new file mode 100644 index 00000000000..0b7012c8770 --- /dev/null +++ b/apps/sim/app/api/v1/knowledge/[id]/route.ts @@ -0,0 +1,175 @@ +import { type NextRequest, NextResponse } from 'next/server' +import { z } from 'zod' +import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log' +import { deleteKnowledgeBase, updateKnowledgeBase } from '@/lib/knowledge/service' +import { + authenticateRequest, + formatKnowledgeBase, + handleError, + parseJsonBody, + resolveKnowledgeBase, + validateSchema, +} from '@/app/api/v1/knowledge/utils' + +export const dynamic = 'force-dynamic' +export const revalidate = 0 + +interface KnowledgeRouteParams { + params: Promise<{ id: string }> +} + +const WorkspaceIdSchema = z.object({ + workspaceId: z.string().min(1, 'workspaceId query parameter is required'), +}) + +const UpdateKBSchema = z + .object({ + workspaceId: z.string().min(1, 'Workspace ID is required'), + name: z.string().min(1).max(255, 'Name must be 255 characters or less').optional(), + description: z.string().max(1000, 'Description must be 1000 characters or less').optional(), + chunkingConfig: z + .object({ + maxSize: z.number().min(100).max(4000), + minSize: z.number().min(1).max(2000), + overlap: z.number().min(0).max(500), + }) + .optional(), + }) + .refine( + (data) => + data.name !== undefined || + data.description !== undefined || + data.chunkingConfig !== undefined, + { message: 'At least one of name, description, or chunkingConfig must be provided' } + ) + +/** GET /api/v1/knowledge/[id] — Get knowledge base details. 
*/ +export async function GET(request: NextRequest, { params }: KnowledgeRouteParams) { + const auth = await authenticateRequest(request, 'knowledge-detail') + if (auth instanceof NextResponse) return auth + const { requestId, userId, rateLimit } = auth + + try { + const { id } = await params + const { searchParams } = new URL(request.url) + + const validation = validateSchema(WorkspaceIdSchema, { + workspaceId: searchParams.get('workspaceId'), + }) + if (!validation.success) return validation.response + + const result = await resolveKnowledgeBase(id, validation.data.workspaceId, userId, rateLimit) + if (result instanceof NextResponse) return result + + return NextResponse.json({ + success: true, + data: { + knowledgeBase: formatKnowledgeBase(result.kb), + }, + }) + } catch (error) { + return handleError(requestId, error, 'Failed to get knowledge base') + } +} + +/** PUT /api/v1/knowledge/[id] — Update a knowledge base. */ +export async function PUT(request: NextRequest, { params }: KnowledgeRouteParams) { + const auth = await authenticateRequest(request, 'knowledge-detail') + if (auth instanceof NextResponse) return auth + const { requestId, userId, rateLimit } = auth + + try { + const { id } = await params + + const body = await parseJsonBody(request) + if (!body.success) return body.response + + const validation = validateSchema(UpdateKBSchema, body.data) + if (!validation.success) return validation.response + + const { workspaceId, name, description, chunkingConfig } = validation.data + + const result = await resolveKnowledgeBase(id, workspaceId, userId, rateLimit, 'write') + if (result instanceof NextResponse) return result + + const updates: { + name?: string + description?: string + chunkingConfig?: { maxSize: number; minSize: number; overlap: number } + } = {} + if (name !== undefined) updates.name = name + if (description !== undefined) updates.description = description + if (chunkingConfig !== undefined) updates.chunkingConfig = chunkingConfig + + const 
updatedKb = await updateKnowledgeBase(id, updates, requestId) + + recordAudit({ + workspaceId, + actorId: userId, + action: AuditAction.KNOWLEDGE_BASE_UPDATED, + resourceType: AuditResourceType.KNOWLEDGE_BASE, + resourceId: id, + resourceName: updatedKb.name, + description: `Updated knowledge base "${updatedKb.name}" via API`, + request, + }) + + return NextResponse.json({ + success: true, + data: { + knowledgeBase: formatKnowledgeBase(updatedKb), + message: 'Knowledge base updated successfully', + }, + }) + } catch (error) { + return handleError(requestId, error, 'Failed to update knowledge base') + } +} + +/** DELETE /api/v1/knowledge/[id] — Delete a knowledge base. */ +export async function DELETE(request: NextRequest, { params }: KnowledgeRouteParams) { + const auth = await authenticateRequest(request, 'knowledge-detail') + if (auth instanceof NextResponse) return auth + const { requestId, userId, rateLimit } = auth + + try { + const { id } = await params + const { searchParams } = new URL(request.url) + + const validation = validateSchema(WorkspaceIdSchema, { + workspaceId: searchParams.get('workspaceId'), + }) + if (!validation.success) return validation.response + + const result = await resolveKnowledgeBase( + id, + validation.data.workspaceId, + userId, + rateLimit, + 'write' + ) + if (result instanceof NextResponse) return result + + await deleteKnowledgeBase(id, requestId) + + recordAudit({ + workspaceId: validation.data.workspaceId, + actorId: userId, + action: AuditAction.KNOWLEDGE_BASE_DELETED, + resourceType: AuditResourceType.KNOWLEDGE_BASE, + resourceId: id, + resourceName: result.kb.name, + description: `Deleted knowledge base "${result.kb.name}" via API`, + request, + }) + + return NextResponse.json({ + success: true, + data: { + message: 'Knowledge base deleted successfully', + }, + }) + } catch (error) { + return handleError(requestId, error, 'Failed to delete knowledge base') + } +} diff --git a/apps/sim/app/api/v1/knowledge/route.ts 
b/apps/sim/app/api/v1/knowledge/route.ts new file mode 100644 index 00000000000..9d45e677bd3 --- /dev/null +++ b/apps/sim/app/api/v1/knowledge/route.ts @@ -0,0 +1,122 @@ +import { type NextRequest, NextResponse } from 'next/server' +import { z } from 'zod' +import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log' +import { createKnowledgeBase, getKnowledgeBases } from '@/lib/knowledge/service' +import { + authenticateRequest, + formatKnowledgeBase, + handleError, + parseJsonBody, + validateSchema, + validateWorkspaceAccess, +} from '@/app/api/v1/knowledge/utils' + +export const dynamic = 'force-dynamic' +export const revalidate = 0 + +const ListKBSchema = z.object({ + workspaceId: z.string().min(1, 'workspaceId query parameter is required'), +}) + +const ChunkingConfigSchema = z.object({ + maxSize: z.number().min(100).max(4000).default(1024), + minSize: z.number().min(1).max(2000).default(100), + overlap: z.number().min(0).max(500).default(200), +}) + +const CreateKBSchema = z.object({ + workspaceId: z.string().min(1, 'Workspace ID is required'), + name: z.string().min(1, 'Name is required').max(255, 'Name must be 255 characters or less'), + description: z.string().max(1000, 'Description must be 1000 characters or less').optional(), + chunkingConfig: ChunkingConfigSchema.optional().default({ + maxSize: 1024, + minSize: 100, + overlap: 200, + }), +}) + +/** GET /api/v1/knowledge — List knowledge bases in a workspace. 
*/ +export async function GET(request: NextRequest) { + const auth = await authenticateRequest(request, 'knowledge') + if (auth instanceof NextResponse) return auth + const { requestId, userId, rateLimit } = auth + + try { + const { searchParams } = new URL(request.url) + const validation = validateSchema(ListKBSchema, { + workspaceId: searchParams.get('workspaceId'), + }) + if (!validation.success) return validation.response + + const { workspaceId } = validation.data + + const accessError = await validateWorkspaceAccess(rateLimit, userId, workspaceId) + if (accessError) return accessError + + const knowledgeBases = await getKnowledgeBases(userId, workspaceId) + + return NextResponse.json({ + success: true, + data: { + knowledgeBases: knowledgeBases.map(formatKnowledgeBase), + totalCount: knowledgeBases.length, + }, + }) + } catch (error) { + return handleError(requestId, error, 'Failed to list knowledge bases') + } +} + +/** POST /api/v1/knowledge — Create a new knowledge base. */ +export async function POST(request: NextRequest) { + const auth = await authenticateRequest(request, 'knowledge') + if (auth instanceof NextResponse) return auth + const { requestId, userId, rateLimit } = auth + + try { + const body = await parseJsonBody(request) + if (!body.success) return body.response + + const validation = validateSchema(CreateKBSchema, body.data) + if (!validation.success) return validation.response + + const { workspaceId, name, description, chunkingConfig } = validation.data + + const accessError = await validateWorkspaceAccess(rateLimit, userId, workspaceId, 'write') + if (accessError) return accessError + + const kb = await createKnowledgeBase( + { + name, + description, + workspaceId, + userId, + embeddingModel: 'text-embedding-3-small', + embeddingDimension: 1536, + chunkingConfig: chunkingConfig ?? 
{ maxSize: 1024, minSize: 100, overlap: 200 }, + }, + requestId + ) + + recordAudit({ + workspaceId, + actorId: userId, + action: AuditAction.KNOWLEDGE_BASE_CREATED, + resourceType: AuditResourceType.KNOWLEDGE_BASE, + resourceId: kb.id, + resourceName: kb.name, + description: `Created knowledge base "${kb.name}" via API`, + request, + }) + + return NextResponse.json({ + success: true, + data: { + knowledgeBase: formatKnowledgeBase(kb), + message: 'Knowledge base created successfully', + }, + }) + } catch (error) { + return handleError(requestId, error, 'Failed to create knowledge base') + } +} diff --git a/apps/sim/app/api/v1/knowledge/search/route.ts b/apps/sim/app/api/v1/knowledge/search/route.ts new file mode 100644 index 00000000000..1b50d5d8af4 --- /dev/null +++ b/apps/sim/app/api/v1/knowledge/search/route.ts @@ -0,0 +1,268 @@ +import { type NextRequest, NextResponse } from 'next/server' +import { z } from 'zod' +import { ALL_TAG_SLOTS } from '@/lib/knowledge/constants' +import { getDocumentTagDefinitions } from '@/lib/knowledge/tags/service' +import { buildUndefinedTagsError, validateTagValue } from '@/lib/knowledge/tags/utils' +import type { StructuredFilter } from '@/lib/knowledge/types' +import { + generateSearchEmbedding, + getDocumentNamesByIds, + getQueryStrategy, + handleTagAndVectorSearch, + handleTagOnlySearch, + handleVectorOnlySearch, + type SearchResult, +} from '@/app/api/knowledge/search/utils' +import { checkKnowledgeBaseAccess } from '@/app/api/knowledge/utils' +import { + authenticateRequest, + handleError, + parseJsonBody, + validateSchema, + validateWorkspaceAccess, +} from '@/app/api/v1/knowledge/utils' + +export const dynamic = 'force-dynamic' +export const revalidate = 0 + +const StructuredTagFilterSchema = z.object({ + tagName: z.string(), + fieldType: z.enum(['text', 'number', 'date', 'boolean']).optional(), + operator: z.string().default('eq'), + value: z.union([z.string(), z.number(), z.boolean()]), + valueTo: z.union([z.string(), 
z.number()]).optional(), +}) + +const SearchSchema = z + .object({ + workspaceId: z.string().min(1, 'Workspace ID is required'), + knowledgeBaseIds: z.union([ + z.string().min(1, 'Knowledge base ID is required'), + z + .array(z.string().min(1)) + .min(1, 'At least one knowledge base ID is required') + .max(20, 'Maximum 20 knowledge base IDs allowed'), + ]), + query: z.string().optional(), + topK: z.number().min(1).max(100).default(10), + tagFilters: z.array(StructuredTagFilterSchema).optional(), + }) + .refine( + (data) => { + const hasQuery = data.query && data.query.trim().length > 0 + const hasTagFilters = data.tagFilters && data.tagFilters.length > 0 + return hasQuery || hasTagFilters + }, + { + message: 'Either query or tagFilters must be provided', + } + ) + +/** POST /api/v1/knowledge/search — Vector search across knowledge bases. */ +export async function POST(request: NextRequest) { + const auth = await authenticateRequest(request, 'knowledge-search') + if (auth instanceof NextResponse) return auth + const { requestId, userId, rateLimit } = auth + + try { + const body = await parseJsonBody(request) + if (!body.success) return body.response + + const validation = validateSchema(SearchSchema, body.data) + if (!validation.success) return validation.response + + const { workspaceId, topK, query, tagFilters } = validation.data + + const accessError = await validateWorkspaceAccess(rateLimit, userId, workspaceId) + if (accessError) return accessError + + const knowledgeBaseIds = Array.isArray(validation.data.knowledgeBaseIds) + ? 
validation.data.knowledgeBaseIds + : [validation.data.knowledgeBaseIds] + + const accessChecks = await Promise.all( + knowledgeBaseIds.map((kbId) => checkKnowledgeBaseAccess(kbId, userId)) + ) + const accessibleKbIds = knowledgeBaseIds.filter( + (_, idx) => + accessChecks[idx]?.hasAccess && + accessChecks[idx]?.knowledgeBase?.workspaceId === workspaceId + ) + + if (accessibleKbIds.length === 0) { + return NextResponse.json( + { error: 'Knowledge base not found or access denied' }, + { status: 404 } + ) + } + + const inaccessibleKbIds = knowledgeBaseIds.filter((id) => !accessibleKbIds.includes(id)) + if (inaccessibleKbIds.length > 0) { + return NextResponse.json( + { error: `Knowledge bases not found or access denied: ${inaccessibleKbIds.join(', ')}` }, + { status: 404 } + ) + } + + let structuredFilters: StructuredFilter[] = [] + const tagDefsCache = new Map>>() + + if (tagFilters && tagFilters.length > 0 && accessibleKbIds.length > 1) { + return NextResponse.json( + { error: 'Tag filters are only supported when searching a single knowledge base' }, + { status: 400 } + ) + } + + if (tagFilters && tagFilters.length > 0 && accessibleKbIds.length > 0) { + const kbId = accessibleKbIds[0] + const tagDefs = await getDocumentTagDefinitions(kbId) + tagDefsCache.set(kbId, tagDefs) + + const displayNameToTagDef: Record = {} + tagDefs.forEach((def) => { + displayNameToTagDef[def.displayName] = { + tagSlot: def.tagSlot, + fieldType: def.fieldType, + } + }) + + const undefinedTags: string[] = [] + const typeErrors: string[] = [] + + for (const filter of tagFilters) { + const tagDef = displayNameToTagDef[filter.tagName] + if (!tagDef) { + undefinedTags.push(filter.tagName) + continue + } + const validationError = validateTagValue( + filter.tagName, + String(filter.value), + tagDef.fieldType + ) + if (validationError) { + typeErrors.push(validationError) + } + } + + if (undefinedTags.length > 0 || typeErrors.length > 0) { + const errorParts: string[] = [] + if 
(undefinedTags.length > 0) { + errorParts.push(buildUndefinedTagsError(undefinedTags)) + } + if (typeErrors.length > 0) { + errorParts.push(...typeErrors) + } + return NextResponse.json({ error: errorParts.join('\n') }, { status: 400 }) + } + + structuredFilters = tagFilters.map((filter) => { + const tagDef = displayNameToTagDef[filter.tagName]! + return { + tagSlot: tagDef.tagSlot, + fieldType: tagDef.fieldType, + operator: filter.operator, + value: filter.value, + valueTo: filter.valueTo, + } + }) + } + + const hasQuery = query && query.trim().length > 0 + const hasFilters = structuredFilters.length > 0 + + let results: SearchResult[] + + if (!hasQuery && hasFilters) { + results = await handleTagOnlySearch({ + knowledgeBaseIds: accessibleKbIds, + topK, + structuredFilters, + }) + } else if (hasQuery && hasFilters) { + const strategy = getQueryStrategy(accessibleKbIds.length, topK) + const queryVector = JSON.stringify( + await generateSearchEmbedding(query!, undefined, workspaceId) + ) + results = await handleTagAndVectorSearch({ + knowledgeBaseIds: accessibleKbIds, + topK, + structuredFilters, + queryVector, + distanceThreshold: strategy.distanceThreshold, + }) + } else if (hasQuery) { + const strategy = getQueryStrategy(accessibleKbIds.length, topK) + const queryVector = JSON.stringify( + await generateSearchEmbedding(query!, undefined, workspaceId) + ) + results = await handleVectorOnlySearch({ + knowledgeBaseIds: accessibleKbIds, + topK, + queryVector, + distanceThreshold: strategy.distanceThreshold, + }) + } else { + return NextResponse.json( + { error: 'Either query or tagFilters must be provided' }, + { status: 400 } + ) + } + + const tagDefsResults = await Promise.all( + accessibleKbIds.map(async (kbId) => { + try { + const tagDefs = tagDefsCache.get(kbId) ?? 
(await getDocumentTagDefinitions(kbId)) + const map: Record = {} + tagDefs.forEach((def) => { + map[def.tagSlot] = def.displayName + }) + return { kbId, map } + } catch { + return { kbId, map: {} as Record } + } + }) + ) + const tagDefinitionsMap: Record> = {} + tagDefsResults.forEach(({ kbId, map }) => { + tagDefinitionsMap[kbId] = map + }) + + const documentIds = results.map((r) => r.documentId) + const documentNameMap = await getDocumentNamesByIds(documentIds) + + return NextResponse.json({ + success: true, + data: { + results: results.map((result) => { + const kbTagMap = tagDefinitionsMap[result.knowledgeBaseId] || {} + const tags: Record = {} + + ALL_TAG_SLOTS.forEach((slot) => { + const tagValue = result[slot as keyof SearchResult] + if (tagValue !== null && tagValue !== undefined) { + const displayName = kbTagMap[slot] || slot + tags[displayName] = tagValue as string | number | boolean | Date | null + } + }) + + return { + documentId: result.documentId, + documentName: documentNameMap[result.documentId] || undefined, + content: result.content, + chunkIndex: result.chunkIndex, + metadata: tags, + similarity: hasQuery ? 
1 - result.distance : 1, + } + }), + query: query || '', + knowledgeBaseIds: accessibleKbIds, + topK, + totalResults: results.length, + }, + }) + } catch (error) { + return handleError(requestId, error, 'Failed to perform search') + } +} diff --git a/apps/sim/app/api/v1/knowledge/utils.ts b/apps/sim/app/api/v1/knowledge/utils.ts new file mode 100644 index 00000000000..9908457054d --- /dev/null +++ b/apps/sim/app/api/v1/knowledge/utils.ts @@ -0,0 +1,187 @@ +import { createLogger } from '@sim/logger' +import { type NextRequest, NextResponse } from 'next/server' +import { z } from 'zod' +import { generateRequestId } from '@/lib/core/utils/request' +import { getKnowledgeBaseById } from '@/lib/knowledge/service' +import type { KnowledgeBaseWithCounts } from '@/lib/knowledge/types' +import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils' +import { + checkRateLimit, + checkWorkspaceScope, + createRateLimitResponse, + type RateLimitResult, +} from '@/app/api/v1/middleware' + +const logger = createLogger('V1KnowledgeAPI') + +type EndpointKey = 'knowledge' | 'knowledge-detail' | 'knowledge-search' + +/** + * Successful authentication result with request context + */ +export interface AuthorizedRequest { + requestId: string + userId: string + rateLimit: RateLimitResult +} + +/** + * Authenticates and rate-limits a v1 knowledge API request. + * Returns NextResponse on failure, AuthorizedRequest on success. + */ +export async function authenticateRequest( + request: NextRequest, + endpoint: EndpointKey +): Promise { + const requestId = generateRequestId() + const rateLimit = await checkRateLimit(request, endpoint) + if (!rateLimit.allowed) { + return createRateLimitResponse(rateLimit) + } + return { requestId, userId: rateLimit.userId!, rateLimit } +} + +/** + * Validates workspace scope and user permission level. + * Returns null on success, NextResponse on failure. 
+ */ +export async function validateWorkspaceAccess( + rateLimit: RateLimitResult, + userId: string, + workspaceId: string, + level: 'read' | 'write' = 'read' +): Promise { + const scopeError = checkWorkspaceScope(rateLimit, workspaceId) + if (scopeError) return scopeError + + const permission = await getUserEntityPermissions(userId, 'workspace', workspaceId) + if (permission === null) { + return NextResponse.json({ error: 'Access denied' }, { status: 403 }) + } + if (level === 'write' && permission === 'read') { + return NextResponse.json({ error: 'Access denied' }, { status: 403 }) + } + return null +} + +/** + * Fetches a KB by ID, validates it exists, belongs to the workspace, + * and the user has permission. Returns the KB or a NextResponse error. + */ +export async function resolveKnowledgeBase( + id: string, + workspaceId: string, + userId: string, + rateLimit: RateLimitResult, + level: 'read' | 'write' = 'read' +): Promise<{ kb: KnowledgeBaseWithCounts } | NextResponse> { + const accessError = await validateWorkspaceAccess(rateLimit, userId, workspaceId, level) + if (accessError) return accessError + + const kb = await getKnowledgeBaseById(id) + if (!kb) { + return NextResponse.json({ error: 'Knowledge base not found' }, { status: 404 }) + } + if (kb.workspaceId !== workspaceId) { + return NextResponse.json({ error: 'Knowledge base not found' }, { status: 404 }) + } + return { kb } +} + +/** + * Validates data against a Zod schema with consistent error response. + */ +export function validateSchema( + schema: S, + data: unknown +): { success: true; data: z.output } | { success: false; response: NextResponse } { + const result = schema.safeParse(data) + if (!result.success) { + return { + success: false, + response: NextResponse.json( + { error: 'Validation error', details: result.error.errors }, + { status: 400 } + ), + } + } + return { success: true, data: result.data } +} + +/** + * Safely parses a JSON request body with consistent error response. 
+ */ +export async function parseJsonBody( + request: NextRequest +): Promise<{ success: true; data: unknown } | { success: false; response: NextResponse }> { + try { + const data = await request.json() + return { success: true, data } + } catch { + return { + success: false, + response: NextResponse.json({ error: 'Request body must be valid JSON' }, { status: 400 }), + } + } +} + +/** + * Serializes a date value for JSON responses. + */ +export function serializeDate(date: Date | string | null | undefined): string | null { + if (date === null || date === undefined) return null + if (date instanceof Date) return date.toISOString() + return String(date) +} + +/** + * Formats a KnowledgeBaseWithCounts into the API response shape. + */ +export function formatKnowledgeBase(kb: KnowledgeBaseWithCounts) { + return { + id: kb.id, + name: kb.name, + description: kb.description, + tokenCount: kb.tokenCount, + embeddingModel: kb.embeddingModel, + embeddingDimension: kb.embeddingDimension, + chunkingConfig: kb.chunkingConfig, + docCount: kb.docCount, + connectorTypes: kb.connectorTypes, + createdAt: serializeDate(kb.createdAt), + updatedAt: serializeDate(kb.updatedAt), + } +} + +/** + * Handles unexpected errors with consistent logging and response. 
+ */ +export function handleError( + requestId: string, + error: unknown, + defaultMessage: string +): NextResponse { + if (error instanceof z.ZodError) { + return NextResponse.json({ error: 'Validation error', details: error.errors }, { status: 400 }) + } + + if (error instanceof Error) { + if (error.message.includes('does not have permission')) { + return NextResponse.json({ error: 'Access denied' }, { status: 403 }) + } + + const isStorageLimitError = + error.message.includes('Storage limit exceeded') || error.message.includes('storage limit') + if (isStorageLimitError) { + return NextResponse.json({ error: 'Storage limit exceeded' }, { status: 413 }) + } + + const isDuplicate = error.message.includes('already exists') + if (isDuplicate) { + return NextResponse.json({ error: 'Resource already exists' }, { status: 409 }) + } + } + + logger.error(`[${requestId}] ${defaultMessage}:`, error) + return NextResponse.json({ error: defaultMessage }, { status: 500 }) +} diff --git a/apps/sim/app/api/v1/logs/[id]/route.ts b/apps/sim/app/api/v1/logs/[id]/route.ts index b1d8f89ff36..6e5176c5760 100644 --- a/apps/sim/app/api/v1/logs/[id]/route.ts +++ b/apps/sim/app/api/v1/logs/[id]/route.ts @@ -47,12 +47,12 @@ export async function GET(request: NextRequest, { params }: { params: Promise<{ workflowUpdatedAt: workflow.updatedAt, }) .from(workflowExecutionLogs) - .innerJoin(workflow, eq(workflowExecutionLogs.workflowId, workflow.id)) + .leftJoin(workflow, eq(workflowExecutionLogs.workflowId, workflow.id)) .innerJoin( permissions, and( eq(permissions.entityType, 'workspace'), - eq(permissions.entityId, workflow.workspaceId), + eq(permissions.entityId, workflowExecutionLogs.workspaceId), eq(permissions.userId, userId) ) ) @@ -66,7 +66,7 @@ export async function GET(request: NextRequest, { params }: { params: Promise<{ const workflowSummary = { id: log.workflowId, - name: log.workflowName, + name: log.workflowName || 'Deleted Workflow', description: log.workflowDescription, color: 
log.workflowColor, folderId: log.workflowFolderId, @@ -74,6 +74,7 @@ export async function GET(request: NextRequest, { params }: { params: Promise<{ workspaceId: log.workflowWorkspaceId, createdAt: log.workflowCreatedAt, updatedAt: log.workflowUpdatedAt, + deleted: !log.workflowName, } const response = { diff --git a/apps/sim/app/api/v1/logs/executions/[executionId]/route.ts b/apps/sim/app/api/v1/logs/executions/[executionId]/route.ts index 5c2967ef735..f791c13b25f 100644 --- a/apps/sim/app/api/v1/logs/executions/[executionId]/route.ts +++ b/apps/sim/app/api/v1/logs/executions/[executionId]/route.ts @@ -1,10 +1,5 @@ import { db } from '@sim/db' -import { - permissions, - workflow, - workflowExecutionLogs, - workflowExecutionSnapshots, -} from '@sim/db/schema' +import { permissions, workflowExecutionLogs, workflowExecutionSnapshots } from '@sim/db/schema' import { createLogger } from '@sim/logger' import { and, eq } from 'drizzle-orm' import { type NextRequest, NextResponse } from 'next/server' @@ -31,15 +26,13 @@ export async function GET( const rows = await db .select({ log: workflowExecutionLogs, - workflow: workflow, }) .from(workflowExecutionLogs) - .innerJoin(workflow, eq(workflowExecutionLogs.workflowId, workflow.id)) .innerJoin( permissions, and( eq(permissions.entityType, 'workspace'), - eq(permissions.entityId, workflow.workspaceId), + eq(permissions.entityId, workflowExecutionLogs.workspaceId), eq(permissions.userId, userId) ) ) diff --git a/apps/sim/app/api/v1/logs/route.ts b/apps/sim/app/api/v1/logs/route.ts index 83a7b621923..bc9562fd273 100644 --- a/apps/sim/app/api/v1/logs/route.ts +++ b/apps/sim/app/api/v1/logs/route.ts @@ -123,12 +123,12 @@ export async function GET(request: NextRequest) { workflowDescription: workflow.description, }) .from(workflowExecutionLogs) - .innerJoin(workflow, eq(workflowExecutionLogs.workflowId, workflow.id)) + .leftJoin(workflow, eq(workflowExecutionLogs.workflowId, workflow.id)) .innerJoin( permissions, and( 
eq(permissions.entityType, 'workspace'), - eq(permissions.entityId, params.workspaceId), + eq(permissions.entityId, workflowExecutionLogs.workspaceId), eq(permissions.userId, userId) ) ) @@ -168,8 +168,9 @@ export async function GET(request: NextRequest) { if (params.details === 'full') { result.workflow = { id: log.workflowId, - name: log.workflowName, + name: log.workflowName || 'Deleted Workflow', description: log.workflowDescription, + deleted: !log.workflowName, } if (log.cost) { diff --git a/apps/sim/app/api/v1/middleware.ts b/apps/sim/app/api/v1/middleware.ts index 60a7b934749..ad42be802a3 100644 --- a/apps/sim/app/api/v1/middleware.ts +++ b/apps/sim/app/api/v1/middleware.ts @@ -1,7 +1,8 @@ import { createLogger } from '@sim/logger' import { type NextRequest, NextResponse } from 'next/server' import { getHighestPrioritySubscription } from '@/lib/billing/core/subscription' -import { RateLimiter } from '@/lib/core/rate-limiter' +import type { SubscriptionPlan } from '@/lib/core/rate-limiter' +import { getRateLimit, RateLimiter } from '@/lib/core/rate-limiter' import { authenticateV1Request } from '@/app/api/v1/auth' const logger = createLogger('V1Middleware') @@ -14,12 +15,29 @@ export interface RateLimitResult { limit: number retryAfterMs?: number userId?: string + workspaceId?: string + keyType?: 'personal' | 'workspace' error?: string } export async function checkRateLimit( request: NextRequest, - endpoint: 'logs' | 'logs-detail' | 'workflows' | 'workflow-detail' | 'audit-logs' = 'logs' + endpoint: + | 'logs' + | 'logs-detail' + | 'workflows' + | 'workflow-detail' + | 'audit-logs' + | 'tables' + | 'table-detail' + | 'table-rows' + | 'table-row-detail' + | 'table-columns' + | 'files' + | 'file-detail' + | 'knowledge' + | 'knowledge-detail' + | 'knowledge-search' = 'logs' ): Promise { try { const auth = await authenticateV1Request(request) @@ -51,20 +69,18 @@ export async function checkRateLimit( }) } - const rateLimitStatus = await 
rateLimiter.getRateLimitStatusWithSubscription( - userId, - subscription, - 'api-endpoint', - false - ) + const plan = (subscription?.plan || 'free') as SubscriptionPlan + const config = getRateLimit(plan, 'api-endpoint') return { allowed: result.allowed, remaining: result.remaining, resetAt: result.resetAt, - limit: rateLimitStatus.requestsPerMinute, + limit: config.refillRate, retryAfterMs: result.retryAfterMs, userId, + workspaceId: auth.workspaceId, + keyType: auth.keyType, } } catch (error) { logger.error('Rate limit check error', { error }) @@ -89,26 +105,40 @@ export function createRateLimitResponse(result: RateLimitResult): NextResponse { return NextResponse.json({ error: result.error || 'Unauthorized' }, { status: 401, headers }) } - if (!result.allowed) { - const retryAfterSeconds = result.retryAfterMs - ? Math.ceil(result.retryAfterMs / 1000) - : Math.ceil((result.resetAt.getTime() - Date.now()) / 1000) + const retryAfterSeconds = result.retryAfterMs + ? Math.ceil(result.retryAfterMs / 1000) + : Math.ceil((result.resetAt.getTime() - Date.now()) / 1000) - return NextResponse.json( - { - error: 'Rate limit exceeded', - message: `API rate limit exceeded. Please retry after ${result.resetAt.toISOString()}`, - retryAfter: result.resetAt.getTime(), + return NextResponse.json( + { + error: 'Rate limit exceeded', + message: `API rate limit exceeded. Please retry after ${result.resetAt.toISOString()}`, + retryAfter: result.resetAt.getTime(), + }, + { + status: 429, + headers: { + ...headers, + 'Retry-After': retryAfterSeconds.toString(), }, - { - status: 429, - headers: { - ...headers, - 'Retry-After': retryAfterSeconds.toString(), - }, - } + } + ) +} + +/** Verify that a workspace-scoped API key is only used for its own workspace. 
*/ +export function checkWorkspaceScope( + rateLimit: RateLimitResult, + requestedWorkspaceId: string +): NextResponse | null { + if ( + rateLimit.keyType === 'workspace' && + rateLimit.workspaceId && + rateLimit.workspaceId !== requestedWorkspaceId + ) { + return NextResponse.json( + { error: 'API key is not authorized for this workspace' }, + { status: 403 } ) } - - return NextResponse.json({ error: 'Bad request' }, { status: 400, headers }) + return null } diff --git a/apps/sim/app/api/v1/tables/[tableId]/columns/route.ts b/apps/sim/app/api/v1/tables/[tableId]/columns/route.ts new file mode 100644 index 00000000000..fb707274bfc --- /dev/null +++ b/apps/sim/app/api/v1/tables/[tableId]/columns/route.ts @@ -0,0 +1,305 @@ +import { createLogger } from '@sim/logger' +import { type NextRequest, NextResponse } from 'next/server' +import { z } from 'zod' +import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log' +import { generateRequestId } from '@/lib/core/utils/request' +import { + addTableColumn, + deleteColumn, + renameColumn, + updateColumnConstraints, + updateColumnType, +} from '@/lib/table' +import { + accessError, + CreateColumnSchema, + checkAccess, + DeleteColumnSchema, + normalizeColumn, + UpdateColumnSchema, +} from '@/app/api/table/utils' +import { + checkRateLimit, + checkWorkspaceScope, + createRateLimitResponse, +} from '@/app/api/v1/middleware' + +const logger = createLogger('V1TableColumnsAPI') + +export const dynamic = 'force-dynamic' +export const revalidate = 0 + +interface ColumnsRouteParams { + params: Promise<{ tableId: string }> +} + +/** POST /api/v1/tables/[tableId]/columns — Add a column to the table schema. 
*/ +export async function POST(request: NextRequest, { params }: ColumnsRouteParams) { + const requestId = generateRequestId() + const { tableId } = await params + + try { + const rateLimit = await checkRateLimit(request, 'table-columns') + if (!rateLimit.allowed) { + return createRateLimitResponse(rateLimit) + } + + const userId = rateLimit.userId! + + let body: unknown + try { + body = await request.json() + } catch { + return NextResponse.json({ error: 'Request body must be valid JSON' }, { status: 400 }) + } + + const validated = CreateColumnSchema.parse(body) + + const scopeError = checkWorkspaceScope(rateLimit, validated.workspaceId) + if (scopeError) return scopeError + + const result = await checkAccess(tableId, userId, 'write') + if (!result.ok) return accessError(result, requestId, tableId) + + const { table } = result + + if (table.workspaceId !== validated.workspaceId) { + return NextResponse.json({ error: 'Invalid workspace ID' }, { status: 400 }) + } + + const updatedTable = await addTableColumn(tableId, validated.column, requestId) + + recordAudit({ + workspaceId: validated.workspaceId, + actorId: userId, + action: AuditAction.TABLE_UPDATED, + resourceType: AuditResourceType.TABLE, + resourceId: tableId, + resourceName: table.name, + description: `Added column "${validated.column.name}" to table "${table.name}"`, + metadata: { column: validated.column }, + request, + }) + + return NextResponse.json({ + success: true, + data: { + columns: updatedTable.schema.columns.map(normalizeColumn), + }, + }) + } catch (error) { + if (error instanceof z.ZodError) { + return NextResponse.json( + { error: 'Validation error', details: error.errors }, + { status: 400 } + ) + } + + if (error instanceof Error) { + if (error.message.includes('already exists') || error.message.includes('maximum column')) { + return NextResponse.json({ error: error.message }, { status: 400 }) + } + if (error.message === 'Table not found') { + return NextResponse.json({ error: 
error.message }, { status: 404 }) + } + } + + logger.error(`[${requestId}] Error adding column to table ${tableId}:`, error) + return NextResponse.json({ error: 'Failed to add column' }, { status: 500 }) + } +} + +/** PATCH /api/v1/tables/[tableId]/columns — Update a column (rename, type change, constraints). */ +export async function PATCH(request: NextRequest, { params }: ColumnsRouteParams) { + const requestId = generateRequestId() + const { tableId } = await params + + try { + const rateLimit = await checkRateLimit(request, 'table-columns') + if (!rateLimit.allowed) { + return createRateLimitResponse(rateLimit) + } + + const userId = rateLimit.userId! + + let body: unknown + try { + body = await request.json() + } catch { + return NextResponse.json({ error: 'Request body must be valid JSON' }, { status: 400 }) + } + + const validated = UpdateColumnSchema.parse(body) + + const scopeError = checkWorkspaceScope(rateLimit, validated.workspaceId) + if (scopeError) return scopeError + + const result = await checkAccess(tableId, userId, 'write') + if (!result.ok) return accessError(result, requestId, tableId) + + const { table } = result + + if (table.workspaceId !== validated.workspaceId) { + return NextResponse.json({ error: 'Invalid workspace ID' }, { status: 400 }) + } + + const { updates } = validated + let updatedTable = null + + if (updates.name) { + updatedTable = await renameColumn( + { tableId, oldName: validated.columnName, newName: updates.name }, + requestId + ) + } + + if (updates.type) { + updatedTable = await updateColumnType( + { tableId, columnName: updates.name ?? validated.columnName, newType: updates.type }, + requestId + ) + } + + if (updates.required !== undefined || updates.unique !== undefined) { + updatedTable = await updateColumnConstraints( + { + tableId, + columnName: updates.name ?? validated.columnName, + ...(updates.required !== undefined ? { required: updates.required } : {}), + ...(updates.unique !== undefined ? 
{ unique: updates.unique } : {}), + }, + requestId + ) + } + + if (!updatedTable) { + return NextResponse.json({ error: 'No updates specified' }, { status: 400 }) + } + + recordAudit({ + workspaceId: validated.workspaceId, + actorId: userId, + action: AuditAction.TABLE_UPDATED, + resourceType: AuditResourceType.TABLE, + resourceId: tableId, + resourceName: table.name, + description: `Updated column "${validated.columnName}" in table "${table.name}"`, + metadata: { columnName: validated.columnName, updates }, + request, + }) + + return NextResponse.json({ + success: true, + data: { + columns: updatedTable.schema.columns.map(normalizeColumn), + }, + }) + } catch (error) { + if (error instanceof z.ZodError) { + return NextResponse.json( + { error: 'Validation error', details: error.errors }, + { status: 400 } + ) + } + + if (error instanceof Error) { + const msg = error.message + if (msg.includes('not found') || msg.includes('Table not found')) { + return NextResponse.json({ error: msg }, { status: 404 }) + } + if ( + msg.includes('already exists') || + msg.includes('Cannot delete the last column') || + msg.includes('Cannot set column') || + msg.includes('Invalid column') || + msg.includes('exceeds maximum') || + msg.includes('incompatible') || + msg.includes('duplicate') + ) { + return NextResponse.json({ error: msg }, { status: 400 }) + } + } + + logger.error(`[${requestId}] Error updating column in table ${tableId}:`, error) + return NextResponse.json({ error: 'Failed to update column' }, { status: 500 }) + } +} + +/** DELETE /api/v1/tables/[tableId]/columns — Delete a column from the table schema. */ +export async function DELETE(request: NextRequest, { params }: ColumnsRouteParams) { + const requestId = generateRequestId() + const { tableId } = await params + + try { + const rateLimit = await checkRateLimit(request, 'table-columns') + if (!rateLimit.allowed) { + return createRateLimitResponse(rateLimit) + } + + const userId = rateLimit.userId! 
+ + let body: unknown + try { + body = await request.json() + } catch { + return NextResponse.json({ error: 'Request body must be valid JSON' }, { status: 400 }) + } + + const validated = DeleteColumnSchema.parse(body) + + const scopeError = checkWorkspaceScope(rateLimit, validated.workspaceId) + if (scopeError) return scopeError + + const result = await checkAccess(tableId, userId, 'write') + if (!result.ok) return accessError(result, requestId, tableId) + + const { table } = result + + if (table.workspaceId !== validated.workspaceId) { + return NextResponse.json({ error: 'Invalid workspace ID' }, { status: 400 }) + } + + const updatedTable = await deleteColumn( + { tableId, columnName: validated.columnName }, + requestId + ) + + recordAudit({ + workspaceId: validated.workspaceId, + actorId: userId, + action: AuditAction.TABLE_UPDATED, + resourceType: AuditResourceType.TABLE, + resourceId: tableId, + resourceName: table.name, + description: `Deleted column "${validated.columnName}" from table "${table.name}"`, + metadata: { columnName: validated.columnName }, + request, + }) + + return NextResponse.json({ + success: true, + data: { + columns: updatedTable.schema.columns.map(normalizeColumn), + }, + }) + } catch (error) { + if (error instanceof z.ZodError) { + return NextResponse.json( + { error: 'Validation error', details: error.errors }, + { status: 400 } + ) + } + + if (error instanceof Error) { + if (error.message.includes('not found') || error.message === 'Table not found') { + return NextResponse.json({ error: error.message }, { status: 404 }) + } + if (error.message.includes('Cannot delete') || error.message.includes('last column')) { + return NextResponse.json({ error: error.message }, { status: 400 }) + } + } + + logger.error(`[${requestId}] Error deleting column from table ${tableId}:`, error) + return NextResponse.json({ error: 'Failed to delete column' }, { status: 500 }) + } +} diff --git a/apps/sim/app/api/v1/tables/[tableId]/route.ts 
b/apps/sim/app/api/v1/tables/[tableId]/route.ts new file mode 100644 index 00000000000..06c2a1de4fb --- /dev/null +++ b/apps/sim/app/api/v1/tables/[tableId]/route.ts @@ -0,0 +1,142 @@ +import { createLogger } from '@sim/logger' +import { type NextRequest, NextResponse } from 'next/server' +import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log' +import { generateRequestId } from '@/lib/core/utils/request' +import { deleteTable, type TableSchema } from '@/lib/table' +import { accessError, checkAccess, normalizeColumn } from '@/app/api/table/utils' +import { + checkRateLimit, + checkWorkspaceScope, + createRateLimitResponse, +} from '@/app/api/v1/middleware' + +const logger = createLogger('V1TableDetailAPI') + +export const dynamic = 'force-dynamic' +export const revalidate = 0 + +interface TableRouteParams { + params: Promise<{ tableId: string }> +} + +/** GET /api/v1/tables/[tableId] — Get table details. */ +export async function GET(request: NextRequest, { params }: TableRouteParams) { + const requestId = generateRequestId() + + try { + const rateLimit = await checkRateLimit(request, 'table-detail') + if (!rateLimit.allowed) { + return createRateLimitResponse(rateLimit) + } + + const userId = rateLimit.userId! 
+ const { tableId } = await params + const { searchParams } = new URL(request.url) + const workspaceId = searchParams.get('workspaceId') + + if (!workspaceId) { + return NextResponse.json( + { error: 'workspaceId query parameter is required' }, + { status: 400 } + ) + } + + const scopeError = checkWorkspaceScope(rateLimit, workspaceId) + if (scopeError) return scopeError + + const result = await checkAccess(tableId, userId, 'read') + if (!result.ok) return accessError(result, requestId, tableId) + + const { table } = result + + if (table.workspaceId !== workspaceId) { + return NextResponse.json({ error: 'Invalid workspace ID' }, { status: 400 }) + } + + const schemaData = table.schema as TableSchema + + return NextResponse.json({ + success: true, + data: { + table: { + id: table.id, + name: table.name, + description: table.description, + schema: { + columns: schemaData.columns.map(normalizeColumn), + }, + rowCount: table.rowCount, + maxRows: table.maxRows, + createdAt: + table.createdAt instanceof Date + ? table.createdAt.toISOString() + : String(table.createdAt), + updatedAt: + table.updatedAt instanceof Date + ? table.updatedAt.toISOString() + : String(table.updatedAt), + }, + }, + }) + } catch (error) { + logger.error(`[${requestId}] Error getting table:`, error) + return NextResponse.json({ error: 'Failed to get table' }, { status: 500 }) + } +} + +/** DELETE /api/v1/tables/[tableId] — Archive a table. */ +export async function DELETE(request: NextRequest, { params }: TableRouteParams) { + const requestId = generateRequestId() + + try { + const rateLimit = await checkRateLimit(request, 'table-detail') + if (!rateLimit.allowed) { + return createRateLimitResponse(rateLimit) + } + + const userId = rateLimit.userId! 
+ const { tableId } = await params + const { searchParams } = new URL(request.url) + const workspaceId = searchParams.get('workspaceId') + + if (!workspaceId) { + return NextResponse.json( + { error: 'workspaceId query parameter is required' }, + { status: 400 } + ) + } + + const scopeError = checkWorkspaceScope(rateLimit, workspaceId) + if (scopeError) return scopeError + + const result = await checkAccess(tableId, userId, 'write') + if (!result.ok) return accessError(result, requestId, tableId) + + if (result.table.workspaceId !== workspaceId) { + return NextResponse.json({ error: 'Invalid workspace ID' }, { status: 400 }) + } + + await deleteTable(tableId, requestId) + + recordAudit({ + workspaceId, + actorId: userId, + action: AuditAction.TABLE_DELETED, + resourceType: AuditResourceType.TABLE, + resourceId: tableId, + resourceName: result.table.name, + description: `Archived table "${result.table.name}"`, + request, + }) + + return NextResponse.json({ + success: true, + data: { + message: 'Table archived successfully', + }, + }) + } catch (error) { + logger.error(`[${requestId}] Error deleting table:`, error) + return NextResponse.json({ error: 'Failed to delete table' }, { status: 500 }) + } +} diff --git a/apps/sim/app/api/v1/tables/[tableId]/rows/[rowId]/route.ts b/apps/sim/app/api/v1/tables/[tableId]/rows/[rowId]/route.ts new file mode 100644 index 00000000000..bc7901a80d0 --- /dev/null +++ b/apps/sim/app/api/v1/tables/[tableId]/rows/[rowId]/route.ts @@ -0,0 +1,278 @@ +import { db } from '@sim/db' +import { userTableRows } from '@sim/db/schema' +import { createLogger } from '@sim/logger' +import { and, eq } from 'drizzle-orm' +import { type NextRequest, NextResponse } from 'next/server' +import { z } from 'zod' +import { generateRequestId } from '@/lib/core/utils/request' +import type { RowData } from '@/lib/table' +import { updateRow } from '@/lib/table' +import { accessError, checkAccess } from '@/app/api/table/utils' +import { + checkRateLimit, + 
checkWorkspaceScope, + createRateLimitResponse, +} from '@/app/api/v1/middleware' + +const logger = createLogger('V1TableRowAPI') + +export const dynamic = 'force-dynamic' +export const revalidate = 0 + +const UpdateRowSchema = z.object({ + workspaceId: z.string().min(1, 'Workspace ID is required'), + data: z.record(z.unknown(), { required_error: 'Row data is required' }), +}) + +interface RowRouteParams { + params: Promise<{ tableId: string; rowId: string }> +} + +/** GET /api/v1/tables/[tableId]/rows/[rowId] — Get a single row. */ +export async function GET(request: NextRequest, { params }: RowRouteParams) { + const requestId = generateRequestId() + + try { + const rateLimit = await checkRateLimit(request, 'table-row-detail') + if (!rateLimit.allowed) { + return createRateLimitResponse(rateLimit) + } + + const userId = rateLimit.userId! + const { tableId, rowId } = await params + const { searchParams } = new URL(request.url) + const workspaceId = searchParams.get('workspaceId') + + if (!workspaceId) { + return NextResponse.json( + { error: 'workspaceId query parameter is required' }, + { status: 400 } + ) + } + + const scopeError = checkWorkspaceScope(rateLimit, workspaceId) + if (scopeError) return scopeError + + const result = await checkAccess(tableId, userId, 'read') + if (!result.ok) return accessError(result, requestId, tableId) + + if (result.table.workspaceId !== workspaceId) { + return NextResponse.json({ error: 'Invalid workspace ID' }, { status: 400 }) + } + + const [row] = await db + .select({ + id: userTableRows.id, + data: userTableRows.data, + position: userTableRows.position, + createdAt: userTableRows.createdAt, + updatedAt: userTableRows.updatedAt, + }) + .from(userTableRows) + .where( + and( + eq(userTableRows.id, rowId), + eq(userTableRows.tableId, tableId), + eq(userTableRows.workspaceId, workspaceId) + ) + ) + .limit(1) + + if (!row) { + return NextResponse.json({ error: 'Row not found' }, { status: 404 }) + } + + return NextResponse.json({ 
+ success: true, + data: { + row: { + id: row.id, + data: row.data, + position: row.position, + createdAt: + row.createdAt instanceof Date ? row.createdAt.toISOString() : String(row.createdAt), + updatedAt: + row.updatedAt instanceof Date ? row.updatedAt.toISOString() : String(row.updatedAt), + }, + }, + }) + } catch (error) { + logger.error(`[${requestId}] Error getting row:`, error) + return NextResponse.json({ error: 'Failed to get row' }, { status: 500 }) + } +} + +/** PATCH /api/v1/tables/[tableId]/rows/[rowId] — Partial update a single row. */ +export async function PATCH(request: NextRequest, { params }: RowRouteParams) { + const requestId = generateRequestId() + + try { + const rateLimit = await checkRateLimit(request, 'table-row-detail') + if (!rateLimit.allowed) { + return createRateLimitResponse(rateLimit) + } + + const userId = rateLimit.userId! + const { tableId, rowId } = await params + + let body: unknown + try { + body = await request.json() + } catch { + return NextResponse.json({ error: 'Request body must be valid JSON' }, { status: 400 }) + } + + const validated = UpdateRowSchema.parse(body) + + const scopeError = checkWorkspaceScope(rateLimit, validated.workspaceId) + if (scopeError) return scopeError + + const result = await checkAccess(tableId, userId, 'write') + if (!result.ok) return accessError(result, requestId, tableId) + + const { table } = result + + if (table.workspaceId !== validated.workspaceId) { + return NextResponse.json({ error: 'Invalid workspace ID' }, { status: 400 }) + } + + // Fetch existing row to merge partial update + const [existingRow] = await db + .select({ data: userTableRows.data }) + .from(userTableRows) + .where( + and( + eq(userTableRows.id, rowId), + eq(userTableRows.tableId, tableId), + eq(userTableRows.workspaceId, validated.workspaceId) + ) + ) + .limit(1) + + if (!existingRow) { + return NextResponse.json({ error: 'Row not found' }, { status: 404 }) + } + + const mergedData = { + ...(existingRow.data as 
RowData), + ...(validated.data as RowData), + } + + const updatedRow = await updateRow( + { + tableId, + rowId, + data: mergedData, + workspaceId: validated.workspaceId, + }, + table, + requestId + ) + + return NextResponse.json({ + success: true, + data: { + row: { + id: updatedRow.id, + data: updatedRow.data, + position: updatedRow.position, + createdAt: + updatedRow.createdAt instanceof Date + ? updatedRow.createdAt.toISOString() + : updatedRow.createdAt, + updatedAt: + updatedRow.updatedAt instanceof Date + ? updatedRow.updatedAt.toISOString() + : updatedRow.updatedAt, + }, + message: 'Row updated successfully', + }, + }) + } catch (error) { + if (error instanceof z.ZodError) { + return NextResponse.json( + { error: 'Validation error', details: error.errors }, + { status: 400 } + ) + } + + const errorMessage = error instanceof Error ? error.message : String(error) + + if (errorMessage === 'Row not found') { + return NextResponse.json({ error: errorMessage }, { status: 404 }) + } + + if ( + errorMessage.includes('Row size exceeds') || + errorMessage.includes('Schema validation') || + errorMessage.includes('must be unique') || + errorMessage.includes('Unique constraint violation') || + errorMessage.includes('Cannot set unique column') + ) { + return NextResponse.json({ error: errorMessage }, { status: 400 }) + } + + logger.error(`[${requestId}] Error updating row:`, error) + return NextResponse.json({ error: 'Failed to update row' }, { status: 500 }) + } +} + +/** DELETE /api/v1/tables/[tableId]/rows/[rowId] — Delete a single row. */ +export async function DELETE(request: NextRequest, { params }: RowRouteParams) { + const requestId = generateRequestId() + + try { + const rateLimit = await checkRateLimit(request, 'table-row-detail') + if (!rateLimit.allowed) { + return createRateLimitResponse(rateLimit) + } + + const userId = rateLimit.userId! 
+ const { tableId, rowId } = await params + const { searchParams } = new URL(request.url) + const workspaceId = searchParams.get('workspaceId') + + if (!workspaceId) { + return NextResponse.json( + { error: 'workspaceId query parameter is required' }, + { status: 400 } + ) + } + + const scopeError = checkWorkspaceScope(rateLimit, workspaceId) + if (scopeError) return scopeError + + const result = await checkAccess(tableId, userId, 'write') + if (!result.ok) return accessError(result, requestId, tableId) + + if (result.table.workspaceId !== workspaceId) { + return NextResponse.json({ error: 'Invalid workspace ID' }, { status: 400 }) + } + + const [deletedRow] = await db + .delete(userTableRows) + .where( + and( + eq(userTableRows.id, rowId), + eq(userTableRows.tableId, tableId), + eq(userTableRows.workspaceId, workspaceId) + ) + ) + .returning() + + if (!deletedRow) { + return NextResponse.json({ error: 'Row not found' }, { status: 404 }) + } + + return NextResponse.json({ + success: true, + data: { + message: 'Row deleted successfully', + deletedCount: 1, + }, + }) + } catch (error) { + logger.error(`[${requestId}] Error deleting row:`, error) + return NextResponse.json({ error: 'Failed to delete row' }, { status: 500 }) + } +} diff --git a/apps/sim/app/api/v1/tables/[tableId]/rows/route.ts b/apps/sim/app/api/v1/tables/[tableId]/rows/route.ts new file mode 100644 index 00000000000..8021625b1b8 --- /dev/null +++ b/apps/sim/app/api/v1/tables/[tableId]/rows/route.ts @@ -0,0 +1,603 @@ +import { db } from '@sim/db' +import { userTableRows } from '@sim/db/schema' +import { createLogger } from '@sim/logger' +import { and, eq, sql } from 'drizzle-orm' +import { type NextRequest, NextResponse } from 'next/server' +import { z } from 'zod' +import { generateRequestId } from '@/lib/core/utils/request' +import type { Filter, RowData, Sort, TableSchema } from '@/lib/table' +import { + batchInsertRows, + deleteRowsByFilter, + deleteRowsByIds, + insertRow, + TABLE_LIMITS, + 
USER_TABLE_ROWS_SQL_NAME, + updateRowsByFilter, + validateBatchRows, + validateRowData, + validateRowSize, +} from '@/lib/table' +import { buildFilterClause, buildSortClause } from '@/lib/table/sql' +import { accessError, checkAccess } from '@/app/api/table/utils' +import { + checkRateLimit, + checkWorkspaceScope, + createRateLimitResponse, +} from '@/app/api/v1/middleware' + +const logger = createLogger('V1TableRowsAPI') + +export const dynamic = 'force-dynamic' +export const revalidate = 0 + +const InsertRowSchema = z.object({ + workspaceId: z.string().min(1, 'Workspace ID is required'), + data: z.record(z.unknown(), { required_error: 'Row data is required' }), +}) + +const BatchInsertRowsSchema = z.object({ + workspaceId: z.string().min(1, 'Workspace ID is required'), + rows: z + .array(z.record(z.unknown()), { required_error: 'Rows array is required' }) + .min(1, 'At least one row is required') + .max(1000, 'Cannot insert more than 1000 rows per batch'), +}) + +const QueryRowsSchema = z.object({ + workspaceId: z.string().min(1, 'Workspace ID is required'), + filter: z.record(z.unknown()).optional(), + sort: z.record(z.enum(['asc', 'desc'])).optional(), + limit: z + .preprocess( + (val) => (val === null || val === undefined || val === '' ? undefined : Number(val)), + z + .number({ required_error: 'Limit must be a number' }) + .int('Limit must be an integer') + .min(1, 'Limit must be at least 1') + .max(TABLE_LIMITS.MAX_QUERY_LIMIT, `Limit cannot exceed ${TABLE_LIMITS.MAX_QUERY_LIMIT}`) + .optional() + ) + .default(100), + offset: z + .preprocess( + (val) => (val === null || val === undefined || val === '' ? 
undefined : Number(val)), + z + .number({ required_error: 'Offset must be a number' }) + .int('Offset must be an integer') + .min(0, 'Offset must be 0 or greater') + .optional() + ) + .default(0), +}) + +const nonEmptyFilter = z + .record(z.unknown(), { required_error: 'Filter criteria is required' }) + .refine((f) => Object.keys(f).length > 0, { message: 'Filter must not be empty' }) + +const optionalPositiveLimit = (max: number, label: string) => + z.preprocess( + (val) => (val === null || val === undefined || val === '' ? undefined : Number(val)), + z + .number() + .int(`${label} must be an integer`) + .min(1, `${label} must be at least 1`) + .max(max, `Cannot ${label.toLowerCase()} more than ${max} rows per operation`) + .optional() + ) + +const UpdateRowsByFilterSchema = z.object({ + workspaceId: z.string().min(1, 'Workspace ID is required'), + filter: nonEmptyFilter, + data: z.record(z.unknown(), { required_error: 'Update data is required' }), + limit: optionalPositiveLimit(1000, 'Limit'), +}) + +const DeleteRowsByFilterSchema = z.object({ + workspaceId: z.string().min(1, 'Workspace ID is required'), + filter: nonEmptyFilter, + limit: optionalPositiveLimit(1000, 'Limit'), +}) + +const DeleteRowsByIdsSchema = z.object({ + workspaceId: z.string().min(1, 'Workspace ID is required'), + rowIds: z + .array(z.string().min(1), { required_error: 'Row IDs are required' }) + .min(1, 'At least one row ID is required') + .max(1000, 'Cannot delete more than 1000 rows per operation'), +}) + +const DeleteRowsRequestSchema = z.union([DeleteRowsByFilterSchema, DeleteRowsByIdsSchema]) + +interface TableRowsRouteParams { + params: Promise<{ tableId: string }> +} + +async function handleBatchInsert( + requestId: string, + tableId: string, + validated: z.infer, + userId: string +): Promise { + const accessResult = await checkAccess(tableId, userId, 'write') + if (!accessResult.ok) return accessError(accessResult, requestId, tableId) + + const { table } = accessResult + + if 
(validated.workspaceId !== table.workspaceId) { + return NextResponse.json({ error: 'Invalid workspace ID' }, { status: 400 }) + } + + const validation = await validateBatchRows({ + rows: validated.rows as RowData[], + schema: table.schema as TableSchema, + tableId, + }) + if (!validation.valid) return validation.response + + try { + const insertedRows = await batchInsertRows( + { + tableId, + rows: validated.rows as RowData[], + workspaceId: validated.workspaceId, + userId, + }, + table, + requestId + ) + + return NextResponse.json({ + success: true, + data: { + rows: insertedRows.map((r) => ({ + id: r.id, + data: r.data, + position: r.position, + createdAt: r.createdAt instanceof Date ? r.createdAt.toISOString() : r.createdAt, + updatedAt: r.updatedAt instanceof Date ? r.updatedAt.toISOString() : r.updatedAt, + })), + insertedCount: insertedRows.length, + message: `Successfully inserted ${insertedRows.length} rows`, + }, + }) + } catch (error) { + const errorMessage = error instanceof Error ? error.message : String(error) + + if ( + errorMessage.includes('row limit') || + errorMessage.includes('Insufficient capacity') || + errorMessage.includes('Schema validation') || + errorMessage.includes('must be unique') || + errorMessage.includes('Row size exceeds') || + errorMessage.match(/^Row \d+:/) + ) { + return NextResponse.json({ error: errorMessage }, { status: 400 }) + } + + logger.error(`[${requestId}] Error batch inserting rows:`, error) + return NextResponse.json({ error: 'Failed to insert rows' }, { status: 500 }) + } +} + +/** GET /api/v1/tables/[tableId]/rows — Query rows with filtering, sorting, pagination. */ +export async function GET(request: NextRequest, { params }: TableRowsRouteParams) { + const requestId = generateRequestId() + + try { + const rateLimit = await checkRateLimit(request, 'table-rows') + if (!rateLimit.allowed) { + return createRateLimitResponse(rateLimit) + } + + const userId = rateLimit.userId! 
+ const { tableId } = await params + const { searchParams } = new URL(request.url) + + let filter: Record | undefined + let sort: Sort | undefined + + try { + const filterParam = searchParams.get('filter') + const sortParam = searchParams.get('sort') + if (filterParam) { + filter = JSON.parse(filterParam) as Record + } + if (sortParam) { + sort = JSON.parse(sortParam) as Sort + } + } catch { + return NextResponse.json({ error: 'Invalid filter or sort JSON' }, { status: 400 }) + } + + const validated = QueryRowsSchema.parse({ + workspaceId: searchParams.get('workspaceId'), + filter, + sort, + limit: searchParams.get('limit'), + offset: searchParams.get('offset'), + }) + + const scopeError = checkWorkspaceScope(rateLimit, validated.workspaceId) + if (scopeError) return scopeError + + const accessResult = await checkAccess(tableId, userId, 'read') + if (!accessResult.ok) return accessError(accessResult, requestId, tableId) + + const { table } = accessResult + + if (validated.workspaceId !== table.workspaceId) { + return NextResponse.json({ error: 'Invalid workspace ID' }, { status: 400 }) + } + + const baseConditions = [ + eq(userTableRows.tableId, tableId), + eq(userTableRows.workspaceId, validated.workspaceId), + ] + + if (validated.filter) { + const filterClause = buildFilterClause(validated.filter as Filter, USER_TABLE_ROWS_SQL_NAME) + if (filterClause) { + baseConditions.push(filterClause) + } + } + + let query = db + .select({ + id: userTableRows.id, + data: userTableRows.data, + position: userTableRows.position, + createdAt: userTableRows.createdAt, + updatedAt: userTableRows.updatedAt, + }) + .from(userTableRows) + .where(and(...baseConditions)) + + if (validated.sort) { + const schema = table.schema as TableSchema + const sortClause = buildSortClause(validated.sort, USER_TABLE_ROWS_SQL_NAME, schema.columns) + if (sortClause) { + query = query.orderBy(sortClause) as typeof query + } else { + query = query.orderBy(userTableRows.position) as typeof query + } + } 
else { + query = query.orderBy(userTableRows.position) as typeof query + } + + const countQuery = db + .select({ count: sql`count(*)` }) + .from(userTableRows) + .where(and(...baseConditions)) + + const [countResult, rows] = await Promise.all([ + countQuery, + query.limit(validated.limit).offset(validated.offset), + ]) + const totalCount = countResult[0].count + + return NextResponse.json({ + success: true, + data: { + rows: rows.map((r) => ({ + id: r.id, + data: r.data, + position: r.position, + createdAt: r.createdAt instanceof Date ? r.createdAt.toISOString() : String(r.createdAt), + updatedAt: r.updatedAt instanceof Date ? r.updatedAt.toISOString() : String(r.updatedAt), + })), + rowCount: rows.length, + totalCount: Number(totalCount), + limit: validated.limit, + offset: validated.offset, + }, + }) + } catch (error) { + if (error instanceof z.ZodError) { + return NextResponse.json( + { error: 'Validation error', details: error.errors }, + { status: 400 } + ) + } + + logger.error(`[${requestId}] Error querying rows:`, error) + return NextResponse.json({ error: 'Failed to query rows' }, { status: 500 }) + } +} + +/** POST /api/v1/tables/[tableId]/rows — Insert row(s). Supports single or batch. */ +export async function POST(request: NextRequest, { params }: TableRowsRouteParams) { + const requestId = generateRequestId() + + try { + const rateLimit = await checkRateLimit(request, 'table-rows') + if (!rateLimit.allowed) { + return createRateLimitResponse(rateLimit) + } + + const userId = rateLimit.userId! 
+ const { tableId } = await params + + let body: unknown + try { + body = await request.json() + } catch { + return NextResponse.json({ error: 'Request body must be valid JSON' }, { status: 400 }) + } + + if ( + typeof body === 'object' && + body !== null && + 'rows' in body && + Array.isArray((body as Record).rows) + ) { + const batchValidated = BatchInsertRowsSchema.parse(body) + const scopeError = checkWorkspaceScope(rateLimit, batchValidated.workspaceId) + if (scopeError) return scopeError + return handleBatchInsert(requestId, tableId, batchValidated, userId) + } + + const validated = InsertRowSchema.parse(body) + + const scopeError = checkWorkspaceScope(rateLimit, validated.workspaceId) + if (scopeError) return scopeError + + const accessResult = await checkAccess(tableId, userId, 'write') + if (!accessResult.ok) return accessError(accessResult, requestId, tableId) + + const { table } = accessResult + + if (validated.workspaceId !== table.workspaceId) { + return NextResponse.json({ error: 'Invalid workspace ID' }, { status: 400 }) + } + + const rowData = validated.data as RowData + + const validation = await validateRowData({ + rowData, + schema: table.schema as TableSchema, + tableId, + }) + if (!validation.valid) return validation.response + + const row = await insertRow( + { + tableId, + data: rowData, + workspaceId: validated.workspaceId, + userId, + }, + table, + requestId + ) + + return NextResponse.json({ + success: true, + data: { + row: { + id: row.id, + data: row.data, + position: row.position, + createdAt: row.createdAt instanceof Date ? row.createdAt.toISOString() : row.createdAt, + updatedAt: row.updatedAt instanceof Date ? row.updatedAt.toISOString() : row.updatedAt, + }, + message: 'Row inserted successfully', + }, + }) + } catch (error) { + if (error instanceof z.ZodError) { + return NextResponse.json( + { error: 'Validation error', details: error.errors }, + { status: 400 } + ) + } + + const errorMessage = error instanceof Error ? 
error.message : String(error) + + if ( + errorMessage.includes('row limit') || + errorMessage.includes('Insufficient capacity') || + errorMessage.includes('Schema validation') || + errorMessage.includes('must be unique') || + errorMessage.includes('Row size exceeds') + ) { + return NextResponse.json({ error: errorMessage }, { status: 400 }) + } + + logger.error(`[${requestId}] Error inserting row:`, error) + return NextResponse.json({ error: 'Failed to insert row' }, { status: 500 }) + } +} + +/** PUT /api/v1/tables/[tableId]/rows — Bulk update rows by filter. */ +export async function PUT(request: NextRequest, { params }: TableRowsRouteParams) { + const requestId = generateRequestId() + + try { + const rateLimit = await checkRateLimit(request, 'table-rows') + if (!rateLimit.allowed) { + return createRateLimitResponse(rateLimit) + } + + const userId = rateLimit.userId! + const { tableId } = await params + + let body: unknown + try { + body = await request.json() + } catch { + return NextResponse.json({ error: 'Request body must be valid JSON' }, { status: 400 }) + } + + const validated = UpdateRowsByFilterSchema.parse(body) + + const scopeError = checkWorkspaceScope(rateLimit, validated.workspaceId) + if (scopeError) return scopeError + + const accessResult = await checkAccess(tableId, userId, 'write') + if (!accessResult.ok) return accessError(accessResult, requestId, tableId) + + const { table } = accessResult + + if (validated.workspaceId !== table.workspaceId) { + return NextResponse.json({ error: 'Invalid workspace ID' }, { status: 400 }) + } + + const sizeValidation = validateRowSize(validated.data as RowData) + if (!sizeValidation.valid) { + return NextResponse.json( + { error: 'Validation error', details: sizeValidation.errors }, + { status: 400 } + ) + } + + const result = await updateRowsByFilter( + { + tableId, + filter: validated.filter as Filter, + data: validated.data as RowData, + limit: validated.limit, + workspaceId: validated.workspaceId, + }, + 
table, + requestId + ) + + if (result.affectedCount === 0) { + return NextResponse.json({ + success: true, + data: { + message: 'No rows matched the filter criteria', + updatedCount: 0, + }, + }) + } + + return NextResponse.json({ + success: true, + data: { + message: 'Rows updated successfully', + updatedCount: result.affectedCount, + updatedRowIds: result.affectedRowIds, + }, + }) + } catch (error) { + if (error instanceof z.ZodError) { + return NextResponse.json( + { error: 'Validation error', details: error.errors }, + { status: 400 } + ) + } + + const errorMessage = error instanceof Error ? error.message : String(error) + + if ( + errorMessage.includes('Row size exceeds') || + errorMessage.includes('Schema validation') || + errorMessage.includes('must be unique') || + errorMessage.includes('Unique constraint violation') || + errorMessage.includes('Cannot set unique column') || + errorMessage.includes('Filter is required') + ) { + return NextResponse.json({ error: errorMessage }, { status: 400 }) + } + + logger.error(`[${requestId}] Error updating rows by filter:`, error) + return NextResponse.json({ error: 'Failed to update rows' }, { status: 500 }) + } +} + +/** DELETE /api/v1/tables/[tableId]/rows — Delete rows by filter or IDs. */ +export async function DELETE(request: NextRequest, { params }: TableRowsRouteParams) { + const requestId = generateRequestId() + + try { + const rateLimit = await checkRateLimit(request, 'table-rows') + if (!rateLimit.allowed) { + return createRateLimitResponse(rateLimit) + } + + const userId = rateLimit.userId! 
+ const { tableId } = await params + + let body: unknown + try { + body = await request.json() + } catch { + return NextResponse.json({ error: 'Request body must be valid JSON' }, { status: 400 }) + } + + const validated = DeleteRowsRequestSchema.parse(body) + + const scopeError = checkWorkspaceScope(rateLimit, validated.workspaceId) + if (scopeError) return scopeError + + const accessResult = await checkAccess(tableId, userId, 'write') + if (!accessResult.ok) return accessError(accessResult, requestId, tableId) + + const { table } = accessResult + + if (validated.workspaceId !== table.workspaceId) { + return NextResponse.json({ error: 'Invalid workspace ID' }, { status: 400 }) + } + + if ('rowIds' in validated) { + const result = await deleteRowsByIds( + { tableId, rowIds: validated.rowIds, workspaceId: validated.workspaceId }, + requestId + ) + + return NextResponse.json({ + success: true, + data: { + message: + result.deletedCount === 0 + ? 'No matching rows found for the provided IDs' + : 'Rows deleted successfully', + deletedCount: result.deletedCount, + deletedRowIds: result.deletedRowIds, + requestedCount: result.requestedCount, + ...(result.missingRowIds.length > 0 ? { missingRowIds: result.missingRowIds } : {}), + }, + }) + } + + const result = await deleteRowsByFilter( + { + tableId, + filter: validated.filter as Filter, + limit: validated.limit, + workspaceId: validated.workspaceId, + }, + requestId + ) + + return NextResponse.json({ + success: true, + data: { + message: + result.affectedCount === 0 + ? 'No rows matched the filter criteria' + : 'Rows deleted successfully', + deletedCount: result.affectedCount, + deletedRowIds: result.affectedRowIds, + }, + }) + } catch (error) { + if (error instanceof z.ZodError) { + return NextResponse.json( + { error: 'Validation error', details: error.errors }, + { status: 400 } + ) + } + + const errorMessage = error instanceof Error ? 
error.message : String(error) + + if (errorMessage.includes('Filter is required')) { + return NextResponse.json({ error: errorMessage }, { status: 400 }) + } + + logger.error(`[${requestId}] Error deleting rows:`, error) + return NextResponse.json({ error: 'Failed to delete rows' }, { status: 500 }) + } +} diff --git a/apps/sim/app/api/v1/tables/[tableId]/rows/upsert/route.ts b/apps/sim/app/api/v1/tables/[tableId]/rows/upsert/route.ts new file mode 100644 index 00000000000..93f1351a8f2 --- /dev/null +++ b/apps/sim/app/api/v1/tables/[tableId]/rows/upsert/route.ts @@ -0,0 +1,119 @@ +import { createLogger } from '@sim/logger' +import { type NextRequest, NextResponse } from 'next/server' +import { z } from 'zod' +import { generateRequestId } from '@/lib/core/utils/request' +import type { RowData } from '@/lib/table' +import { upsertRow } from '@/lib/table' +import { accessError, checkAccess } from '@/app/api/table/utils' +import { + checkRateLimit, + checkWorkspaceScope, + createRateLimitResponse, +} from '@/app/api/v1/middleware' + +const logger = createLogger('V1TableUpsertAPI') + +export const dynamic = 'force-dynamic' +export const revalidate = 0 + +const UpsertRowSchema = z.object({ + workspaceId: z.string().min(1, 'Workspace ID is required'), + data: z.record(z.unknown(), { required_error: 'Row data is required' }), + conflictTarget: z.string().optional(), +}) + +interface UpsertRouteParams { + params: Promise<{ tableId: string }> +} + +/** POST /api/v1/tables/[tableId]/rows/upsert — Insert or update a row based on unique columns. */ +export async function POST(request: NextRequest, { params }: UpsertRouteParams) { + const requestId = generateRequestId() + + try { + const rateLimit = await checkRateLimit(request, 'table-rows') + if (!rateLimit.allowed) { + return createRateLimitResponse(rateLimit) + } + + const userId = rateLimit.userId! 
+ const { tableId } = await params + + let body: unknown + try { + body = await request.json() + } catch { + return NextResponse.json({ error: 'Request body must be valid JSON' }, { status: 400 }) + } + + const validated = UpsertRowSchema.parse(body) + + const scopeError = checkWorkspaceScope(rateLimit, validated.workspaceId) + if (scopeError) return scopeError + + const result = await checkAccess(tableId, userId, 'write') + if (!result.ok) return accessError(result, requestId, tableId) + + const { table } = result + + if (table.workspaceId !== validated.workspaceId) { + return NextResponse.json({ error: 'Invalid workspace ID' }, { status: 400 }) + } + + const upsertResult = await upsertRow( + { + tableId, + workspaceId: validated.workspaceId, + data: validated.data as RowData, + userId, + conflictTarget: validated.conflictTarget, + }, + table, + requestId + ) + + return NextResponse.json({ + success: true, + data: { + row: { + id: upsertResult.row.id, + data: upsertResult.row.data, + createdAt: + upsertResult.row.createdAt instanceof Date + ? upsertResult.row.createdAt.toISOString() + : upsertResult.row.createdAt, + updatedAt: + upsertResult.row.updatedAt instanceof Date + ? upsertResult.row.updatedAt.toISOString() + : upsertResult.row.updatedAt, + }, + operation: upsertResult.operation, + message: `Row ${upsertResult.operation === 'update' ? 'updated' : 'inserted'} successfully`, + }, + }) + } catch (error) { + if (error instanceof z.ZodError) { + return NextResponse.json( + { error: 'Validation error', details: error.errors }, + { status: 400 } + ) + } + + const errorMessage = error instanceof Error ? 
error.message : String(error) + + if ( + errorMessage.includes('unique column') || + errorMessage.includes('Unique constraint violation') || + errorMessage.includes('conflictTarget') || + errorMessage.includes('row limit') || + errorMessage.includes('Schema validation') || + errorMessage.includes('Upsert requires') || + errorMessage.includes('Row size exceeds') + ) { + return NextResponse.json({ error: errorMessage }, { status: 400 }) + } + + logger.error(`[${requestId}] Error upserting row:`, error) + return NextResponse.json({ error: 'Failed to upsert row' }, { status: 500 }) + } +} diff --git a/apps/sim/app/api/v1/tables/route.ts b/apps/sim/app/api/v1/tables/route.ts new file mode 100644 index 00000000000..09ff717f9cd --- /dev/null +++ b/apps/sim/app/api/v1/tables/route.ts @@ -0,0 +1,260 @@ +import { createLogger } from '@sim/logger' +import { type NextRequest, NextResponse } from 'next/server' +import { z } from 'zod' +import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log' +import { generateRequestId } from '@/lib/core/utils/request' +import { + createTable, + getWorkspaceTableLimits, + listTables, + TABLE_LIMITS, + type TableSchema, +} from '@/lib/table' +import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils' +import { normalizeColumn } from '@/app/api/table/utils' +import { + checkRateLimit, + checkWorkspaceScope, + createRateLimitResponse, +} from '@/app/api/v1/middleware' + +const logger = createLogger('V1TablesAPI') + +export const dynamic = 'force-dynamic' +export const revalidate = 0 + +const ListTablesSchema = z.object({ + workspaceId: z.string().min(1, 'workspaceId query parameter is required'), +}) + +const ColumnSchema = z.object({ + name: z + .string() + .min(1, 'Column name is required') + .max( + TABLE_LIMITS.MAX_COLUMN_NAME_LENGTH, + `Column name must be ${TABLE_LIMITS.MAX_COLUMN_NAME_LENGTH} characters or less` + ) + .regex( + /^[a-z_][a-z0-9_]*$/i, + 'Column name must start with a letter or 
underscore and contain only alphanumeric characters and underscores' + ), + type: z.enum(['string', 'number', 'boolean', 'date', 'json'], { + errorMap: () => ({ + message: 'Column type must be one of: string, number, boolean, date, json', + }), + }), + required: z.boolean().optional().default(false), + unique: z.boolean().optional().default(false), +}) + +const CreateTableSchema = z.object({ + name: z + .string() + .min(1, 'Table name is required') + .max( + TABLE_LIMITS.MAX_TABLE_NAME_LENGTH, + `Table name must be ${TABLE_LIMITS.MAX_TABLE_NAME_LENGTH} characters or less` + ) + .regex( + /^[a-z_][a-z0-9_]*$/i, + 'Table name must start with a letter or underscore and contain only alphanumeric characters and underscores' + ), + description: z + .string() + .max( + TABLE_LIMITS.MAX_DESCRIPTION_LENGTH, + `Description must be ${TABLE_LIMITS.MAX_DESCRIPTION_LENGTH} characters or less` + ) + .optional(), + schema: z.object({ + columns: z + .array(ColumnSchema) + .min(1, 'Table must have at least one column') + .max( + TABLE_LIMITS.MAX_COLUMNS_PER_TABLE, + `Table cannot have more than ${TABLE_LIMITS.MAX_COLUMNS_PER_TABLE} columns` + ), + }), + workspaceId: z.string().min(1, 'Workspace ID is required'), +}) + +/** GET /api/v1/tables — List all tables in a workspace. */ +export async function GET(request: NextRequest) { + const requestId = generateRequestId() + + try { + const rateLimit = await checkRateLimit(request, 'tables') + if (!rateLimit.allowed) { + return createRateLimitResponse(rateLimit) + } + + const userId = rateLimit.userId! 
+ const { searchParams } = new URL(request.url) + + const validation = ListTablesSchema.safeParse({ + workspaceId: searchParams.get('workspaceId'), + }) + if (!validation.success) { + return NextResponse.json( + { error: 'Validation error', details: validation.error.errors }, + { status: 400 } + ) + } + + const { workspaceId } = validation.data + + const scopeError = checkWorkspaceScope(rateLimit, workspaceId) + if (scopeError) return scopeError + + const permission = await getUserEntityPermissions(userId, 'workspace', workspaceId) + if (permission === null) { + return NextResponse.json({ error: 'Access denied' }, { status: 403 }) + } + + const tables = await listTables(workspaceId) + + return NextResponse.json({ + success: true, + data: { + tables: tables.map((t) => { + const schemaData = t.schema as TableSchema + return { + id: t.id, + name: t.name, + description: t.description, + schema: { + columns: schemaData.columns.map(normalizeColumn), + }, + rowCount: t.rowCount, + maxRows: t.maxRows, + createdAt: + t.createdAt instanceof Date ? t.createdAt.toISOString() : String(t.createdAt), + updatedAt: + t.updatedAt instanceof Date ? t.updatedAt.toISOString() : String(t.updatedAt), + } + }), + totalCount: tables.length, + }, + }) + } catch (error) { + if (error instanceof z.ZodError) { + return NextResponse.json( + { error: 'Validation error', details: error.errors }, + { status: 400 } + ) + } + + logger.error(`[${requestId}] Error listing tables:`, error) + return NextResponse.json({ error: 'Failed to list tables' }, { status: 500 }) + } +} + +/** POST /api/v1/tables — Create a new table. */ +export async function POST(request: NextRequest) { + const requestId = generateRequestId() + + try { + const rateLimit = await checkRateLimit(request, 'tables') + if (!rateLimit.allowed) { + return createRateLimitResponse(rateLimit) + } + + const userId = rateLimit.userId! 
+ + let body: unknown + try { + body = await request.json() + } catch { + return NextResponse.json({ error: 'Request body must be valid JSON' }, { status: 400 }) + } + + const params = CreateTableSchema.parse(body) + + const scopeError = checkWorkspaceScope(rateLimit, params.workspaceId) + if (scopeError) return scopeError + + const permission = await getUserEntityPermissions(userId, 'workspace', params.workspaceId) + if (permission === null || permission === 'read') { + return NextResponse.json({ error: 'Access denied' }, { status: 403 }) + } + + const planLimits = await getWorkspaceTableLimits(params.workspaceId) + + const normalizedSchema: TableSchema = { + columns: params.schema.columns.map(normalizeColumn), + } + + const table = await createTable( + { + name: params.name, + description: params.description, + schema: normalizedSchema, + workspaceId: params.workspaceId, + userId, + maxRows: planLimits.maxRowsPerTable, + maxTables: planLimits.maxTables, + }, + requestId + ) + + recordAudit({ + workspaceId: params.workspaceId, + actorId: userId, + action: AuditAction.TABLE_CREATED, + resourceType: AuditResourceType.TABLE, + resourceId: table.id, + resourceName: table.name, + description: `Created table "${table.name}" via API`, + request, + }) + + return NextResponse.json({ + success: true, + data: { + table: { + id: table.id, + name: table.name, + description: table.description, + schema: { + columns: (table.schema as TableSchema).columns.map(normalizeColumn), + }, + rowCount: table.rowCount, + maxRows: table.maxRows, + createdAt: + table.createdAt instanceof Date + ? table.createdAt.toISOString() + : String(table.createdAt), + updatedAt: + table.updatedAt instanceof Date + ? 
table.updatedAt.toISOString() + : String(table.updatedAt), + }, + message: 'Table created successfully', + }, + }) + } catch (error) { + if (error instanceof z.ZodError) { + return NextResponse.json( + { error: 'Validation error', details: error.errors }, + { status: 400 } + ) + } + + if (error instanceof Error) { + if (error.message.includes('maximum table limit')) { + return NextResponse.json({ error: error.message }, { status: 403 }) + } + if ( + error.message.includes('Invalid table name') || + error.message.includes('Invalid schema') || + error.message.includes('already exists') + ) { + return NextResponse.json({ error: error.message }, { status: 400 }) + } + } + + logger.error(`[${requestId}] Error creating table:`, error) + return NextResponse.json({ error: 'Failed to create table' }, { status: 500 }) + } +} diff --git a/apps/sim/app/api/v1/workflows/[id]/route.ts b/apps/sim/app/api/v1/workflows/[id]/route.ts index 658a0f8ea4d..9fe825e6224 100644 --- a/apps/sim/app/api/v1/workflows/[id]/route.ts +++ b/apps/sim/app/api/v1/workflows/[id]/route.ts @@ -1,9 +1,11 @@ import { db } from '@sim/db' -import { permissions, workflow, workflowBlocks } from '@sim/db/schema' +import { workflowBlocks } from '@sim/db/schema' import { createLogger } from '@sim/logger' -import { and, eq } from 'drizzle-orm' +import { eq } from 'drizzle-orm' import { type NextRequest, NextResponse } from 'next/server' +import { getActiveWorkflowRecord } from '@/lib/workflows/active-context' import { extractInputFieldsFromBlocks } from '@/lib/workflows/input-format' +import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils' import { createApiResponse, getUserLimits } from '@/app/api/v1/logs/meta' import { checkRateLimit, createRateLimitResponse } from '@/app/api/v1/middleware' @@ -25,39 +27,20 @@ export async function GET(request: NextRequest, { params }: { params: Promise<{ logger.info(`[${requestId}] Fetching workflow details for ${id}`, { userId }) - const rows = await db 
- .select({ - id: workflow.id, - name: workflow.name, - description: workflow.description, - color: workflow.color, - folderId: workflow.folderId, - workspaceId: workflow.workspaceId, - isDeployed: workflow.isDeployed, - deployedAt: workflow.deployedAt, - runCount: workflow.runCount, - lastRunAt: workflow.lastRunAt, - variables: workflow.variables, - createdAt: workflow.createdAt, - updatedAt: workflow.updatedAt, - }) - .from(workflow) - .innerJoin( - permissions, - and( - eq(permissions.entityType, 'workspace'), - eq(permissions.entityId, workflow.workspaceId), - eq(permissions.userId, userId) - ) - ) - .where(eq(workflow.id, id)) - .limit(1) - - const workflowData = rows[0] + const workflowData = await getActiveWorkflowRecord(id) if (!workflowData) { return NextResponse.json({ error: 'Workflow not found' }, { status: 404 }) } + const permission = await getUserEntityPermissions( + userId, + 'workspace', + workflowData.workspaceId! + ) + if (!permission) { + return NextResponse.json({ error: 'Access denied' }, { status: 403 }) + } + const blockRows = await db .select({ id: workflowBlocks.id, diff --git a/apps/sim/app/api/v1/workflows/route.ts b/apps/sim/app/api/v1/workflows/route.ts index 23bb707f152..267650aff10 100644 --- a/apps/sim/app/api/v1/workflows/route.ts +++ b/apps/sim/app/api/v1/workflows/route.ts @@ -1,9 +1,10 @@ import { db } from '@sim/db' -import { permissions, workflow } from '@sim/db/schema' +import { workflow } from '@sim/db/schema' import { createLogger } from '@sim/logger' -import { and, asc, eq, gt, or } from 'drizzle-orm' +import { and, asc, eq, gt, isNull, or } from 'drizzle-orm' import { type NextRequest, NextResponse } from 'next/server' import { z } from 'zod' +import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils' import { createApiResponse, getUserLimits } from '@/app/api/v1/logs/meta' import { checkRateLimit, createRateLimitResponse } from '@/app/api/v1/middleware' @@ -69,12 +70,12 @@ export async function 
GET(request: NextRequest) { }, }) - const conditions = [ - eq(workflow.workspaceId, params.workspaceId), - eq(permissions.entityType, 'workspace'), - eq(permissions.entityId, params.workspaceId), - eq(permissions.userId, userId), - ] + const permission = await getUserEntityPermissions(userId, 'workspace', params.workspaceId) + if (!permission) { + return NextResponse.json({ error: 'Access denied' }, { status: 403 }) + } + + const conditions = [eq(workflow.workspaceId, params.workspaceId), isNull(workflow.archivedAt)] if (params.folderId) { conditions.push(eq(workflow.folderId, params.folderId)) @@ -124,14 +125,6 @@ export async function GET(request: NextRequest) { updatedAt: workflow.updatedAt, }) .from(workflow) - .innerJoin( - permissions, - and( - eq(permissions.entityType, 'workspace'), - eq(permissions.entityId, params.workspaceId), - eq(permissions.userId, userId) - ) - ) .where(and(...conditions)) .orderBy(...orderByClause) .limit(params.limit + 1) diff --git a/apps/sim/app/api/webhooks/[id]/route.ts b/apps/sim/app/api/webhooks/[id]/route.ts index f1f1fbd628a..88d8f26e0b3 100644 --- a/apps/sim/app/api/webhooks/[id]/route.ts +++ b/apps/sim/app/api/webhooks/[id]/route.ts @@ -1,7 +1,7 @@ import { db } from '@sim/db' import { webhook, workflow } from '@sim/db/schema' import { createLogger } from '@sim/logger' -import { and, eq } from 'drizzle-orm' +import { and, eq, isNull } from 'drizzle-orm' import { type NextRequest, NextResponse } from 'next/server' import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log' import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid' @@ -41,7 +41,7 @@ export async function GET(request: NextRequest, { params }: { params: Promise<{ }) .from(webhook) .innerJoin(workflow, eq(webhook.workflowId, workflow.id)) - .where(eq(webhook.id, id)) + .where(and(eq(webhook.id, id), isNull(webhook.archivedAt))) .limit(1) if (webhooks.length === 0) { @@ -106,7 +106,7 @@ export async function PATCH(request: NextRequest, { 
params }: { params: Promise< }) .from(webhook) .innerJoin(workflow, eq(webhook.workflowId, workflow.id)) - .where(eq(webhook.id, id)) + .where(and(eq(webhook.id, id), isNull(webhook.archivedAt))) .limit(1) if (webhooks.length === 0) { @@ -204,7 +204,13 @@ export async function DELETE( const allCredentialSetWebhooks = await db .select() .from(webhook) - .where(and(eq(webhook.workflowId, webhookData.workflow.id), eq(webhook.blockId, blockId))) + .where( + and( + eq(webhook.workflowId, webhookData.workflow.id), + eq(webhook.blockId, blockId), + isNull(webhook.archivedAt) + ) + ) const webhooksToDelete = allCredentialSetWebhooks.filter( (w) => w.credentialSetId === credentialSetId diff --git a/apps/sim/app/api/webhooks/agentmail/route.ts b/apps/sim/app/api/webhooks/agentmail/route.ts new file mode 100644 index 00000000000..15ecf2693e5 --- /dev/null +++ b/apps/sim/app/api/webhooks/agentmail/route.ts @@ -0,0 +1,277 @@ +import { + db, + mothershipInboxAllowedSender, + mothershipInboxTask, + mothershipInboxWebhook, + permissions, + user, + workspace, +} from '@sim/db' +import { createLogger } from '@sim/logger' +import { tasks } from '@trigger.dev/sdk' +import { and, eq, gt, ne, sql } from 'drizzle-orm' +import { NextResponse } from 'next/server' +import { Webhook } from 'svix' +import { v4 as uuidv4 } from 'uuid' +import { isTriggerDevEnabled } from '@/lib/core/config/feature-flags' +import { executeInboxTask } from '@/lib/mothership/inbox/executor' +import type { AgentMailWebhookPayload, RejectionReason } from '@/lib/mothership/inbox/types' + +const logger = createLogger('AgentMailWebhook') + +const AUTOMATED_SENDERS = ['mailer-daemon@', 'noreply@', 'no-reply@', 'postmaster@'] +const MAX_EMAILS_PER_HOUR = 20 + +export async function POST(req: Request) { + try { + const rawBody = await req.text() + const svixId = req.headers.get('svix-id') + const svixTimestamp = req.headers.get('svix-timestamp') + const svixSignature = req.headers.get('svix-signature') + + const payload 
= JSON.parse(rawBody) as AgentMailWebhookPayload + + if (payload.event_type !== 'message.received') { + return NextResponse.json({ ok: true }) + } + + const { message } = payload + const inboxId = message?.inbox_id + if (!message || !inboxId) { + return NextResponse.json({ ok: true }) + } + + const [result] = await db + .select({ + id: workspace.id, + inboxEnabled: workspace.inboxEnabled, + inboxAddress: workspace.inboxAddress, + inboxProviderId: workspace.inboxProviderId, + webhookSecret: mothershipInboxWebhook.secret, + }) + .from(workspace) + .leftJoin(mothershipInboxWebhook, eq(mothershipInboxWebhook.workspaceId, workspace.id)) + .where(eq(workspace.inboxProviderId, inboxId)) + .limit(1) + + if (!result || !result.webhookSecret) { + if (!result) { + logger.warn('No workspace found for inbox', { inboxId }) + } else { + logger.warn('No webhook secret found for workspace', { workspaceId: result.id }) + } + return NextResponse.json({ error: 'Unauthorized' }, { status: 401 }) + } + + try { + const wh = new Webhook(result.webhookSecret) + wh.verify(rawBody, { + 'svix-id': svixId || '', + 'svix-timestamp': svixTimestamp || '', + 'svix-signature': svixSignature || '', + }) + } catch (verifyErr) { + logger.warn('Webhook signature verification failed', { + workspaceId: result.id, + error: verifyErr instanceof Error ? 
verifyErr.message : 'Unknown error', + }) + return NextResponse.json({ error: 'Unauthorized' }, { status: 401 }) + } + + if (!result.inboxEnabled) { + logger.info('Inbox disabled, rejecting', { workspaceId: result.id }) + return NextResponse.json({ ok: true }) + } + + const fromEmail = extractSenderEmail(message.from_) || '' + logger.info('Webhook received', { fromEmail, from_raw: message.from_, workspaceId: result.id }) + + if (result.inboxAddress && fromEmail === result.inboxAddress.toLowerCase()) { + logger.info('Skipping email from inbox itself', { workspaceId: result.id }) + return NextResponse.json({ ok: true }) + } + + if (AUTOMATED_SENDERS.some((prefix) => fromEmail.startsWith(prefix))) { + await createRejectedTask(result.id, message, 'automated_sender') + return NextResponse.json({ ok: true }) + } + + const emailMessageId = message.message_id + const inReplyTo = message.in_reply_to || null + + const [existingResult, isAllowed, recentCount, parentTaskResult] = await Promise.all([ + emailMessageId + ? db + .select({ id: mothershipInboxTask.id }) + .from(mothershipInboxTask) + .where(eq(mothershipInboxTask.emailMessageId, emailMessageId)) + .limit(1) + : Promise.resolve([]), + isSenderAllowed(fromEmail, result.id), + getRecentTaskCount(result.id), + inReplyTo + ? db + .select({ chatId: mothershipInboxTask.chatId }) + .from(mothershipInboxTask) + .where(eq(mothershipInboxTask.responseMessageId, inReplyTo)) + .limit(1) + : Promise.resolve([]), + ]) + + if (existingResult[0]) { + logger.info('Duplicate webhook, skipping', { emailMessageId }) + return NextResponse.json({ ok: true }) + } + + if (!isAllowed) { + await createRejectedTask(result.id, message, 'sender_not_allowed') + return NextResponse.json({ ok: true }) + } + + if (recentCount >= MAX_EMAILS_PER_HOUR) { + await createRejectedTask(result.id, message, 'rate_limit_exceeded') + return NextResponse.json({ ok: true }) + } + + const chatId = parentTaskResult[0]?.chatId ?? 
null + + const fromName = extractDisplayName(message.from_) + + const taskId = uuidv4() + const bodyText = message.text?.substring(0, 50_000) || null + const bodyHtml = message.html?.substring(0, 50_000) || null + const bodyPreview = (bodyText || '')?.substring(0, 200) || null + + await db.insert(mothershipInboxTask).values({ + id: taskId, + workspaceId: result.id, + fromEmail, + fromName, + subject: message.subject || '(no subject)', + bodyPreview, + bodyText, + bodyHtml, + emailMessageId, + inReplyTo, + agentmailMessageId: message.message_id, + status: 'received', + chatId, + hasAttachments: (message.attachments?.length ?? 0) > 0, + ccRecipients: message.cc?.length ? JSON.stringify(message.cc) : null, + }) + + if (isTriggerDevEnabled) { + try { + const handle = await tasks.trigger('mothership-inbox-execution', { taskId }) + await db + .update(mothershipInboxTask) + .set({ triggerJobId: handle.id }) + .where(eq(mothershipInboxTask.id, taskId)) + } catch (triggerError) { + logger.warn('Trigger.dev dispatch failed, falling back to local execution', { + taskId, + triggerError, + }) + executeInboxTask(taskId).catch((err) => { + logger.error('Local inbox task execution failed', { + taskId, + error: err instanceof Error ? err.message : 'Unknown error', + }) + }) + } + } else { + logger.info('Trigger.dev not available, executing inbox task locally', { taskId }) + executeInboxTask(taskId).catch((err) => { + logger.error('Local inbox task execution failed', { + taskId, + error: err instanceof Error ? err.message : 'Unknown error', + }) + }) + } + + return NextResponse.json({ ok: true }) + } catch (error) { + logger.error('AgentMail webhook error', { + error: error instanceof Error ? 
error.message : 'Unknown error', + }) + return NextResponse.json({ error: 'Internal server error' }, { status: 500 }) + } +} + +async function isSenderAllowed(email: string, workspaceId: string): Promise { + const [allowedSenderResult, memberResult] = await Promise.all([ + db + .select({ id: mothershipInboxAllowedSender.id }) + .from(mothershipInboxAllowedSender) + .where( + and( + eq(mothershipInboxAllowedSender.workspaceId, workspaceId), + eq(mothershipInboxAllowedSender.email, email) + ) + ) + .limit(1), + db + .select({ userId: permissions.userId }) + .from(permissions) + .innerJoin(user, eq(permissions.userId, user.id)) + .where( + and( + eq(permissions.entityType, 'workspace'), + eq(permissions.entityId, workspaceId), + sql`lower(${user.email}) = ${email}` + ) + ) + .limit(1), + ]) + + return !!(allowedSenderResult[0] || memberResult[0]) +} + +async function getRecentTaskCount(workspaceId: string): Promise { + const oneHourAgo = new Date(Date.now() - 60 * 60 * 1000) + const [result] = await db + .select({ count: sql`count(*)::int` }) + .from(mothershipInboxTask) + .where( + and( + eq(mothershipInboxTask.workspaceId, workspaceId), + gt(mothershipInboxTask.createdAt, oneHourAgo), + ne(mothershipInboxTask.status, 'rejected') + ) + ) + return result?.count ?? 0 +} + +async function createRejectedTask( + workspaceId: string, + message: AgentMailWebhookPayload['message'], + reason: RejectionReason +): Promise { + await db.insert(mothershipInboxTask).values({ + id: uuidv4(), + workspaceId, + fromEmail: extractSenderEmail(message.from_) || 'unknown', + fromName: extractDisplayName(message.from_), + subject: message.subject || '(no subject)', + bodyPreview: (message.text || '').substring(0, 200) || null, + emailMessageId: message.message_id, + agentmailMessageId: message.message_id, + status: 'rejected', + rejectionReason: reason, + hasAttachments: (message.attachments?.length ?? 0) > 0, + }) +} + +/** + * Extract the raw email address from AgentMail's from_ field. 
+ * Format: "username@domain.com" or "Display Name " + */ +function extractSenderEmail(from: string): string { + const match = from.match(/<([^>]+)>/) + return (match?.[1] || from).toLowerCase().trim() +} + +function extractDisplayName(from: string): string | null { + const match = from.match(/^(.+?)\s* { + if (existingWebhook) { + await db + .update(webhook) + .set({ + workflowId: existingWebhook.workflowId, + blockId: existingWebhook.blockId, + path: existingWebhook.path, + provider: existingWebhook.provider, + providerConfig: existingWebhook.providerConfig, + credentialSetId: existingWebhook.credentialSetId, + isActive: existingWebhook.isActive, + archivedAt: existingWebhook.archivedAt, + updatedAt: existingWebhook.updatedAt, + }) + .where(eq(webhook.id, savedWebhook.id)) + logger.info(`[${requestId}] Restored previous webhook configuration after failed re-save`, { + webhookId: savedWebhook.id, + }) + return + } + + await db.delete(webhook).where(eq(webhook.id, savedWebhook.id)) +} + // Get all webhooks for the current user export async function GET(request: NextRequest) { const requestId = generateRequestId() @@ -93,6 +123,7 @@ export async function GET(request: NextRequest) { and( eq(webhook.workflowId, workflowId), eq(webhook.blockId, blockId), + isNull(webhook.archivedAt), or( eq(webhook.deploymentVersionId, workflowDeploymentVersion.id), and(isNull(workflowDeploymentVersion.id), isNull(webhook.deploymentVersionId)) @@ -132,7 +163,7 @@ export async function GET(request: NextRequest) { }) .from(webhook) .innerJoin(workflow, eq(webhook.workflowId, workflow.id)) - .where(inArray(workflow.workspaceId, workspaceIds)) + .where(and(inArray(workflow.workspaceId, workspaceIds), isNull(webhook.archivedAt))) logger.info(`[${requestId}] Retrieved ${webhooks.length} workspace-accessible webhooks`) return NextResponse.json({ webhooks }, { status: 200 }) @@ -196,6 +227,7 @@ export async function POST(request: NextRequest) { and( eq(webhook.workflowId, workflowId), 
eq(webhook.blockId, blockId), + isNull(webhook.archivedAt), or( eq(webhook.deploymentVersionId, workflowDeploymentVersion.id), and(isNull(workflowDeploymentVersion.id), isNull(webhook.deploymentVersionId)) @@ -275,6 +307,7 @@ export async function POST(request: NextRequest) { and( eq(webhook.workflowId, workflowId), eq(webhook.blockId, blockId), + isNull(webhook.archivedAt), or( eq(webhook.deploymentVersionId, workflowDeploymentVersion.id), and(isNull(workflowDeploymentVersion.id), isNull(webhook.deploymentVersionId)) @@ -290,7 +323,7 @@ export async function POST(request: NextRequest) { const existingByPath = await db .select({ id: webhook.id, workflowId: webhook.workflowId }) .from(webhook) - .where(eq(webhook.path, finalPath)) + .where(and(eq(webhook.path, finalPath), isNull(webhook.archivedAt))) .limit(1) if (existingByPath.length > 0) { // If a webhook with the same path exists but belongs to a different workflow, return an error @@ -306,6 +339,7 @@ export async function POST(request: NextRequest) { } let savedWebhook: any = null + let existingWebhook: any = null const originalProviderConfig = providerConfig || {} let resolvedProviderConfig = await resolveEnvVarsInObject( originalProviderConfig, @@ -380,7 +414,7 @@ export async function POST(request: NextRequest) { const webhookRows = await db .select() .from(webhook) - .where(eq(webhook.id, wh.id)) + .where(and(eq(webhook.id, wh.id), isNull(webhook.archivedAt))) .limit(1) if (webhookRows.length > 0) { @@ -425,7 +459,7 @@ export async function POST(request: NextRequest) { const primaryWebhookRows = await db .select() .from(webhook) - .where(eq(webhook.id, syncResult.webhooks[0].id)) + .where(and(eq(webhook.id, syncResult.webhooks[0].id), isNull(webhook.archivedAt))) .limit(1) return NextResponse.json( @@ -466,26 +500,53 @@ export async function POST(request: NextRequest) { const userProvided = originalProviderConfig as Record const configToSave: Record = { ...userProvided } - try { - const result = await 
createExternalWebhookSubscription( - request, - createTempWebhookData(), - workflowRecord, - userId, - requestId - ) - const updatedConfig = result.updatedProviderConfig as Record - mergeNonUserFields(configToSave, updatedConfig, userProvided) - resolvedProviderConfig = updatedConfig - externalSubscriptionCreated = result.externalSubscriptionCreated - } catch (err) { - logger.error(`[${requestId}] Error creating external webhook subscription`, err) - return NextResponse.json( - { - error: 'Failed to create external webhook subscription', - details: err instanceof Error ? err.message : 'Unknown error', - }, - { status: 500 } + if (targetWebhookId) { + const existingRows = await db + .select() + .from(webhook) + .where(eq(webhook.id, targetWebhookId)) + .limit(1) + existingWebhook = existingRows[0] || null + } + + const shouldRecreateSubscription = + existingWebhook && + shouldRecreateExternalWebhookSubscription({ + previousProvider: existingWebhook.provider as string, + nextProvider: provider, + previousConfig: ((existingWebhook.providerConfig as Record) || + {}) as Record, + nextConfig: resolvedProviderConfig, + }) + + if (!existingWebhook || shouldRecreateSubscription) { + try { + const result = await createExternalWebhookSubscription( + request, + createTempWebhookData(), + workflowRecord, + userId, + requestId + ) + const updatedConfig = result.updatedProviderConfig as Record + mergeNonUserFields(configToSave, updatedConfig, userProvided) + resolvedProviderConfig = updatedConfig + externalSubscriptionCreated = result.externalSubscriptionCreated + } catch (err) { + logger.error(`[${requestId}] Error creating external webhook subscription`, err) + return NextResponse.json( + { + error: 'Failed to create external webhook subscription', + details: err instanceof Error ? 
err.message : 'Unknown error', + }, + { status: 500 } + ) + } + } else { + mergeNonUserFields( + configToSave, + (existingWebhook.providerConfig as Record) || {}, + userProvided ) } @@ -556,6 +617,17 @@ export async function POST(request: NextRequest) { throw dbError } + if (existingWebhook && shouldRecreateSubscription) { + try { + await cleanupExternalWebhook(existingWebhook, workflowRecord, requestId) + } catch (cleanupError) { + logger.warn( + `[${requestId}] Failed to cleanup previous external webhook subscription ${existingWebhook.id}`, + cleanupError + ) + } + } + // --- Gmail/Outlook webhook setup (these don't require external subscriptions, configure after DB save) --- if (savedWebhook && provider === 'gmail') { logger.info(`[${requestId}] Gmail provider detected. Setting up Gmail webhook configuration.`) @@ -564,7 +636,7 @@ export async function POST(request: NextRequest) { if (!success) { logger.error(`[${requestId}] Failed to configure Gmail polling, rolling back webhook`) - await db.delete(webhook).where(eq(webhook.id, savedWebhook.id)) + await revertSavedWebhook(savedWebhook, existingWebhook, requestId) return NextResponse.json( { error: 'Failed to configure Gmail polling', @@ -580,7 +652,7 @@ export async function POST(request: NextRequest) { `[${requestId}] Error setting up Gmail webhook configuration, rolling back webhook`, err ) - await db.delete(webhook).where(eq(webhook.id, savedWebhook.id)) + await revertSavedWebhook(savedWebhook, existingWebhook, requestId) return NextResponse.json( { error: 'Failed to configure Gmail webhook', @@ -602,7 +674,7 @@ export async function POST(request: NextRequest) { if (!success) { logger.error(`[${requestId}] Failed to configure Outlook polling, rolling back webhook`) - await db.delete(webhook).where(eq(webhook.id, savedWebhook.id)) + await revertSavedWebhook(savedWebhook, existingWebhook, requestId) return NextResponse.json( { error: 'Failed to configure Outlook polling', @@ -618,7 +690,7 @@ export async 
function POST(request: NextRequest) { `[${requestId}] Error setting up Outlook webhook configuration, rolling back webhook`, err ) - await db.delete(webhook).where(eq(webhook.id, savedWebhook.id)) + await revertSavedWebhook(savedWebhook, existingWebhook, requestId) return NextResponse.json( { error: 'Failed to configure Outlook webhook', @@ -638,7 +710,7 @@ export async function POST(request: NextRequest) { if (!success) { logger.error(`[${requestId}] Failed to configure RSS polling, rolling back webhook`) - await db.delete(webhook).where(eq(webhook.id, savedWebhook.id)) + await revertSavedWebhook(savedWebhook, existingWebhook, requestId) return NextResponse.json( { error: 'Failed to configure RSS polling', @@ -654,7 +726,7 @@ export async function POST(request: NextRequest) { `[${requestId}] Error setting up RSS webhook configuration, rolling back webhook`, err ) - await db.delete(webhook).where(eq(webhook.id, savedWebhook.id)) + await revertSavedWebhook(savedWebhook, existingWebhook, requestId) return NextResponse.json( { error: 'Failed to configure RSS webhook', diff --git a/apps/sim/app/api/workflows/[id]/chat/status/route.test.ts b/apps/sim/app/api/workflows/[id]/chat/status/route.test.ts index 3be0cba6e2e..48a36ca069c 100644 --- a/apps/sim/app/api/workflows/[id]/chat/status/route.test.ts +++ b/apps/sim/app/api/workflows/[id]/chat/status/route.test.ts @@ -23,7 +23,9 @@ const { })) vi.mock('drizzle-orm', () => ({ + and: vi.fn((...args: unknown[]) => ({ type: 'and', args })), eq: vi.fn(), + isNull: vi.fn((field: unknown) => ({ type: 'isNull', field })), })) vi.mock('@sim/db', () => ({ @@ -45,6 +47,7 @@ vi.mock('@sim/db/schema', () => ({ password: 'password', isActive: 'isActive', workflowId: 'workflowId', + archivedAt: 'archivedAt', }, })) diff --git a/apps/sim/app/api/workflows/[id]/chat/status/route.ts b/apps/sim/app/api/workflows/[id]/chat/status/route.ts index ef84667d5d3..22d9c7d5532 100644 --- a/apps/sim/app/api/workflows/[id]/chat/status/route.ts +++ 
b/apps/sim/app/api/workflows/[id]/chat/status/route.ts @@ -1,7 +1,7 @@ import { db } from '@sim/db' import { chat } from '@sim/db/schema' import { createLogger } from '@sim/logger' -import { eq } from 'drizzle-orm' +import { and, eq, isNull } from 'drizzle-orm' import type { NextRequest } from 'next/server' import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid' import { generateRequestId } from '@/lib/core/utils/request' @@ -50,7 +50,7 @@ export async function GET(request: NextRequest, { params }: { params: Promise<{ isActive: chat.isActive, }) .from(chat) - .where(eq(chat.workflowId, id)) + .where(and(eq(chat.workflowId, id), isNull(chat.archivedAt))) .limit(1) const isDeployed = deploymentResults.length > 0 && deploymentResults[0].isActive diff --git a/apps/sim/app/api/workflows/[id]/deploy/route.ts b/apps/sim/app/api/workflows/[id]/deploy/route.ts index 1dd8798a3f2..5ad26782382 100644 --- a/apps/sim/app/api/workflows/[id]/deploy/route.ts +++ b/apps/sim/app/api/workflows/[id]/deploy/route.ts @@ -11,6 +11,7 @@ import { saveTriggerWebhooksForDeploy, } from '@/lib/webhooks/deploy' import { + activateWorkflowVersionById, deployWorkflow, loadWorkflowFromNormalizedTables, undeployWorkflow, @@ -154,6 +155,27 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{ .limit(1) const previousVersionId = currentActiveVersion?.id + const rollbackDeployment = async () => { + if (previousVersionId) { + await restorePreviousVersionWebhooks({ + request, + workflow: workflowData as Record, + userId: actorUserId, + previousVersionId, + requestId, + }) + const reactivateResult = await activateWorkflowVersionById({ + workflowId: id, + deploymentVersionId: previousVersionId, + }) + if (reactivateResult.success) { + return + } + } + + await undeployWorkflow({ workflowId: id }) + } + const deployResult = await deployWorkflow({ workflowId: id, deployedBy: actorUserId, @@ -190,7 +212,7 @@ export async function POST(request: NextRequest, { params }: { 
params: Promise<{ requestId, deploymentVersionId, }) - await undeployWorkflow({ workflowId: id }) + await rollbackDeployment() return createErrorResponse( triggerSaveResult.error?.message || 'Failed to save trigger configuration', triggerSaveResult.error?.status || 500 @@ -214,16 +236,7 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{ requestId, deploymentVersionId, }) - if (previousVersionId) { - await restorePreviousVersionWebhooks({ - request, - workflow: workflowData as Record, - userId: actorUserId, - previousVersionId, - requestId, - }) - } - await undeployWorkflow({ workflowId: id }) + await rollbackDeployment() return createErrorResponse(scheduleResult.error || 'Failed to create schedule', 500) } if (scheduleResult.scheduleId) { @@ -364,14 +377,13 @@ export async function DELETE( return createErrorResponse(error.message, error.status) } - // Clean up external webhook subscriptions before undeploying - await cleanupWebhooksForWorkflow(id, workflowData as Record, requestId) - const result = await undeployWorkflow({ workflowId: id }) if (!result.success) { return createErrorResponse(result.error || 'Failed to undeploy workflow', 500) } + await cleanupWebhooksForWorkflow(id, workflowData as Record, requestId) + await removeMcpToolsForWorkflow(id, requestId) logger.info(`[${requestId}] Workflow undeployed successfully: ${id}`) diff --git a/apps/sim/app/api/workflows/[id]/deployments/[version]/revert/route.ts b/apps/sim/app/api/workflows/[id]/deployments/[version]/revert/route.ts index 6050bb4b253..d3762c9181f 100644 --- a/apps/sim/app/api/workflows/[id]/deployments/[version]/revert/route.ts +++ b/apps/sim/app/api/workflows/[id]/deployments/[version]/revert/route.ts @@ -5,7 +5,6 @@ import type { NextRequest } from 'next/server' import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log' import { env } from '@/lib/core/config/env' import { generateRequestId } from '@/lib/core/utils/request' -import { 
syncMcpToolsForWorkflow } from '@/lib/mcp/workflow-mcp-sync' import { saveWorkflowToNormalizedTables } from '@/lib/workflows/persistence/utils' import { validateWorkflowPermissions } from '@/lib/workflows/utils' import { createErrorResponse, createSuccessResponse } from '@/app/api/workflows/utils' @@ -91,13 +90,6 @@ export async function POST( .set({ lastSynced: new Date(), updatedAt: new Date() }) .where(eq(workflow.id, id)) - await syncMcpToolsForWorkflow({ - workflowId: id, - requestId, - state: deployedState, - context: 'revert', - }) - try { const socketServerUrl = env.SOCKET_SERVER_URL || 'http://localhost:3002' await fetch(`${socketServerUrl}/api/workflow-reverted`, { diff --git a/apps/sim/app/api/workflows/[id]/duplicate/route.ts b/apps/sim/app/api/workflows/[id]/duplicate/route.ts index ad37410c9d5..cc00c0c0b7c 100644 --- a/apps/sim/app/api/workflows/[id]/duplicate/route.ts +++ b/apps/sim/app/api/workflows/[id]/duplicate/route.ts @@ -15,6 +15,7 @@ const DuplicateRequestSchema = z.object({ color: z.string().optional(), workspaceId: z.string().optional(), folderId: z.string().nullable().optional(), + newId: z.string().uuid().optional(), }) // POST /api/workflows/[id]/duplicate - Duplicate a workflow with all its blocks, edges, and subflows @@ -32,7 +33,8 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id: try { const body = await req.json() - const { name, description, color, workspaceId, folderId } = DuplicateRequestSchema.parse(body) + const { name, description, color, workspaceId, folderId, newId } = + DuplicateRequestSchema.parse(body) logger.info(`[${requestId}] Duplicating workflow ${sourceWorkflowId} for user ${userId}`) @@ -45,6 +47,7 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id: workspaceId, folderId, requestId, + newWorkflowId: newId, }) try { diff --git a/apps/sim/app/api/workflows/[id]/execute/route.ts b/apps/sim/app/api/workflows/[id]/execute/route.ts index 
1200debd413..15d5d22a6f2 100644 --- a/apps/sim/app/api/workflows/[id]/execute/route.ts +++ b/apps/sim/app/api/workflows/[id]/execute/route.ts @@ -349,11 +349,55 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id: runFromBlock: rawRunFromBlock, } = validation.data + if (isPublicApiAccess && isClientSession) { + return NextResponse.json( + { error: 'Public API callers cannot set isClientSession' }, + { status: 400 } + ) + } + + if (auth.authType === 'api_key') { + if (isClientSession) { + return NextResponse.json( + { error: 'API key callers cannot set isClientSession' }, + { status: 400 } + ) + } + + if (workflowStateOverride) { + return NextResponse.json( + { error: 'API key callers cannot provide workflowStateOverride' }, + { status: 400 } + ) + } + + if (useDraftState) { + return NextResponse.json( + { error: 'API key callers cannot execute draft workflow state' }, + { status: 400 } + ) + } + } + // Resolve runFromBlock snapshot from executionId if needed let resolvedRunFromBlock: | { startBlockId: string; sourceSnapshot: SerializableExecutionState } | undefined if (rawRunFromBlock) { + if (rawRunFromBlock.sourceSnapshot && auth.authType === 'api_key') { + return NextResponse.json( + { error: 'API key callers cannot provide runFromBlock.sourceSnapshot' }, + { status: 400 } + ) + } + + if (rawRunFromBlock.executionId && (auth.authType === 'api_key' || isPublicApiAccess)) { + return NextResponse.json( + { error: 'External callers cannot resume from stored execution snapshots' }, + { status: 400 } + ) + } + if (rawRunFromBlock.sourceSnapshot && !isPublicApiAccess) { // Public API callers cannot inject arbitrary block state via sourceSnapshot. // They must use executionId to resume from a server-stored execution state. 
@@ -362,13 +406,13 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id: sourceSnapshot: rawRunFromBlock.sourceSnapshot as SerializableExecutionState, } } else if (rawRunFromBlock.executionId) { - const { getExecutionState, getLatestExecutionState } = await import( + const { getExecutionStateForWorkflow, getLatestExecutionState } = await import( '@/lib/workflows/executor/execution-state' ) const snapshot = rawRunFromBlock.executionId === 'latest' ? await getLatestExecutionState(workflowId) - : await getExecutionState(rawRunFromBlock.executionId) + : await getExecutionStateForWorkflow(rawRunFromBlock.executionId, workflowId) if (!snapshot) { return NextResponse.json( { @@ -425,6 +469,25 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id: const enableSSE = streamHeader || streamParam === true const executionModeHeader = req.headers.get('X-Execution-Mode') const isAsyncMode = executionModeHeader === 'async' + const requiresWriteExecutionAccess = Boolean( + useDraftState || workflowStateOverride || rawRunFromBlock + ) + + if ( + isAsyncMode && + (body.useDraftState !== undefined || + body.workflowStateOverride !== undefined || + body.runFromBlock !== undefined || + body.stopAfterBlockId !== undefined || + body.selectedOutputs?.length || + body.includeFileBase64 !== undefined || + body.base64MaxBytes !== undefined) + ) { + return NextResponse.json( + { error: 'Async execution does not support draft or override execution controls' }, + { status: 400 } + ) + } logger.info(`[${requestId}] Starting server-side execution`, { workflowId, @@ -460,7 +523,7 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id: const workflowAuthorization = await authorizeWorkflowByWorkspacePermission({ workflowId, userId, - action: shouldUseDraftState ? 'write' : 'read', + action: requiresWriteExecutionAccess ? 
'write' : 'read', }) if (!workflowAuthorization.allowed) { return NextResponse.json( @@ -499,6 +562,13 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id: } const workspaceId = workflow.workspaceId + if (auth.apiKeyType === 'workspace' && auth.workspaceId !== workspaceId) { + return NextResponse.json( + { error: 'API key is not authorized for this workspace' }, + { status: 403 } + ) + } + logger.info(`[${requestId}] Preprocessing passed`, { workflowId, actorUserId, diff --git a/apps/sim/app/api/workflows/[id]/executions/[executionId]/cancel/route.ts b/apps/sim/app/api/workflows/[id]/executions/[executionId]/cancel/route.ts index 49c99e1ede6..842e130eaaa 100644 --- a/apps/sim/app/api/workflows/[id]/executions/[executionId]/cancel/route.ts +++ b/apps/sim/app/api/workflows/[id]/executions/[executionId]/cancel/route.ts @@ -33,6 +33,16 @@ export async function POST( ) } + if ( + auth.apiKeyType === 'workspace' && + workflowAuthorization.workflow?.workspaceId !== auth.workspaceId + ) { + return NextResponse.json( + { error: 'API key is not authorized for this workspace' }, + { status: 403 } + ) + } + logger.info('Cancel execution requested', { workflowId, executionId, userId: auth.userId }) const marked = await markExecutionCancelled(executionId) diff --git a/apps/sim/app/api/workflows/[id]/executions/[executionId]/stream/route.ts b/apps/sim/app/api/workflows/[id]/executions/[executionId]/stream/route.ts index 1f77ff391d6..745c5b7d44e 100644 --- a/apps/sim/app/api/workflows/[id]/executions/[executionId]/stream/route.ts +++ b/apps/sim/app/api/workflows/[id]/executions/[executionId]/stream/route.ts @@ -46,6 +46,16 @@ export async function GET( ) } + if ( + auth.apiKeyType === 'workspace' && + workflowAuthorization.workflow?.workspaceId !== auth.workspaceId + ) { + return NextResponse.json( + { error: 'API key is not authorized for this workspace' }, + { status: 403 } + ) + } + const meta = await getExecutionMeta(executionId) if (!meta) { 
return NextResponse.json({ error: 'Execution buffer not found or expired' }, { status: 404 }) diff --git a/apps/sim/app/api/workflows/[id]/restore/route.ts b/apps/sim/app/api/workflows/[id]/restore/route.ts new file mode 100644 index 00000000000..7e8a76e8a35 --- /dev/null +++ b/apps/sim/app/api/workflows/[id]/restore/route.ts @@ -0,0 +1,55 @@ +import { createLogger } from '@sim/logger' +import { type NextRequest, NextResponse } from 'next/server' +import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid' +import { generateRequestId } from '@/lib/core/utils/request' +import { restoreWorkflow } from '@/lib/workflows/lifecycle' +import { getWorkflowById } from '@/lib/workflows/utils' +import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils' + +const logger = createLogger('RestoreWorkflowAPI') + +export async function POST(request: NextRequest, { params }: { params: Promise<{ id: string }> }) { + const requestId = generateRequestId() + const { id: workflowId } = await params + + try { + const auth = await checkSessionOrInternalAuth(request, { requireWorkflowId: false }) + if (!auth.success || !auth.userId) { + return NextResponse.json({ error: 'Unauthorized' }, { status: 401 }) + } + + const workflowData = await getWorkflowById(workflowId, { includeArchived: true }) + if (!workflowData) { + return NextResponse.json({ error: 'Workflow not found' }, { status: 404 }) + } + + if (workflowData.workspaceId) { + const permission = await getUserEntityPermissions( + auth.userId, + 'workspace', + workflowData.workspaceId + ) + if (permission !== 'admin' && permission !== 'write') { + return NextResponse.json({ error: 'Insufficient permissions' }, { status: 403 }) + } + } else if (workflowData.userId !== auth.userId) { + return NextResponse.json({ error: 'Unauthorized' }, { status: 401 }) + } + + const result = await restoreWorkflow(workflowId, { requestId }) + + if (!result.restored) { + return NextResponse.json({ error: 'Workflow is not archived' }, { 
status: 400 }) + } + + logger.info(`[${requestId}] Restored workflow ${workflowId}`) + + return NextResponse.json({ success: true }) + } catch (error) { + logger.error(`[${requestId}] Error restoring workflow ${workflowId}`, error) + return NextResponse.json( + { error: error instanceof Error ? error.message : 'Internal server error' }, + { status: 500 } + ) + } +} diff --git a/apps/sim/app/api/workflows/[id]/route.test.ts b/apps/sim/app/api/workflows/[id]/route.test.ts index d886e27d466..2000e5093ee 100644 --- a/apps/sim/app/api/workflows/[id]/route.test.ts +++ b/apps/sim/app/api/workflows/[id]/route.test.ts @@ -21,7 +21,7 @@ const mockCheckSessionOrInternalAuth = vi.fn() const mockLoadWorkflowFromNormalizedTables = vi.fn() const mockGetWorkflowById = vi.fn() const mockAuthorizeWorkflowByWorkspacePermission = vi.fn() -const mockDbDelete = vi.fn() +const mockArchiveWorkflow = vi.fn() const mockDbUpdate = vi.fn() const mockDbSelect = vi.fn() @@ -72,9 +72,12 @@ vi.mock('@/lib/workflows/utils', () => ({ }) => mockAuthorizeWorkflowByWorkspacePermission(params), })) +vi.mock('@/lib/workflows/lifecycle', () => ({ + archiveWorkflow: (...args: unknown[]) => mockArchiveWorkflow(...args), +})) + vi.mock('@sim/db', () => ({ db: { - delete: () => mockDbDelete(), update: () => mockDbUpdate(), select: () => mockDbSelect(), }, @@ -297,8 +300,9 @@ describe('Workflow By ID API Route', () => { }), }) - mockDbDelete.mockReturnValue({ - where: vi.fn().mockResolvedValue([{ id: 'workflow-123' }]), + mockArchiveWorkflow.mockResolvedValue({ + archived: true, + workflow: mockWorkflow, }) setupGlobalFetchMock({ ok: true }) @@ -340,8 +344,9 @@ describe('Workflow By ID API Route', () => { }), }) - mockDbDelete.mockReturnValue({ - where: vi.fn().mockResolvedValue([{ id: 'workflow-123' }]), + mockArchiveWorkflow.mockResolvedValue({ + archived: true, + workflow: mockWorkflow, }) setupGlobalFetchMock({ ok: true }) diff --git a/apps/sim/app/api/workflows/[id]/route.ts 
b/apps/sim/app/api/workflows/[id]/route.ts index 19d89e8eeb7..8b79fe2c287 100644 --- a/apps/sim/app/api/workflows/[id]/route.ts +++ b/apps/sim/app/api/workflows/[id]/route.ts @@ -1,14 +1,13 @@ import { db } from '@sim/db' -import { templates, webhook, workflow } from '@sim/db/schema' +import { templates, workflow } from '@sim/db/schema' import { createLogger } from '@sim/logger' import { and, eq, isNull, ne } from 'drizzle-orm' import { type NextRequest, NextResponse } from 'next/server' import { z } from 'zod' import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log' import { AuthType, checkHybridAuth, checkSessionOrInternalAuth } from '@/lib/auth/hybrid' -import { env } from '@/lib/core/config/env' -import { PlatformEvents } from '@/lib/core/telemetry' import { generateRequestId } from '@/lib/core/utils/request' +import { archiveWorkflow } from '@/lib/workflows/lifecycle' import { loadWorkflowFromNormalizedTables } from '@/lib/workflows/persistence/utils' import { authorizeWorkflowByWorkspacePermission, getWorkflowById } from '@/lib/workflows/utils' @@ -49,6 +48,13 @@ export async function GET(request: NextRequest, { params }: { params: Promise<{ return NextResponse.json({ error: 'Workflow not found' }, { status: 404 }) } + if (auth.apiKeyType === 'workspace' && auth.workspaceId !== workflowData.workspaceId) { + return NextResponse.json( + { error: 'API key is not authorized for this workspace' }, + { status: 403 } + ) + } + if (isInternalCall && !userId) { // Internal system calls (e.g. workflow-in-workflow executor) may not carry a userId. // These are already authenticated via internal JWT; allow read access. 
@@ -183,7 +189,7 @@ export async function DELETE( const totalWorkflowsInWorkspace = await db .select({ id: workflow.id }) .from(workflow) - .where(eq(workflow.workspaceId, workflowData.workspaceId)) + .where(and(eq(workflow.workspaceId, workflowData.workspaceId), isNull(workflow.archivedAt))) if (totalWorkflowsInWorkspace.length <= 1) { return NextResponse.json( @@ -241,92 +247,13 @@ export async function DELETE( } } - // Clean up external webhooks before deleting workflow - try { - const { cleanupExternalWebhook } = await import('@/lib/webhooks/provider-subscriptions') - const webhooksToCleanup = await db - .select({ - webhook: webhook, - workflow: { - id: workflow.id, - userId: workflow.userId, - workspaceId: workflow.workspaceId, - }, - }) - .from(webhook) - .innerJoin(workflow, eq(webhook.workflowId, workflow.id)) - .where(eq(webhook.workflowId, workflowId)) - - if (webhooksToCleanup.length > 0) { - logger.info( - `[${requestId}] Found ${webhooksToCleanup.length} webhook(s) to cleanup for workflow ${workflowId}` - ) - - // Clean up each webhook (don't fail if cleanup fails) - for (const webhookData of webhooksToCleanup) { - try { - await cleanupExternalWebhook(webhookData.webhook, webhookData.workflow, requestId) - } catch (cleanupError) { - logger.warn( - `[${requestId}] Failed to cleanup external webhook ${webhookData.webhook.id} during workflow deletion`, - cleanupError - ) - // Continue with deletion even if cleanup fails - } - } - } - } catch (webhookCleanupError) { - logger.warn( - `[${requestId}] Error during webhook cleanup for workflow deletion (continuing with deletion)`, - webhookCleanupError - ) - // Continue with workflow deletion even if webhook cleanup fails - } - - await db.delete(workflow).where(eq(workflow.id, workflowId)) - - try { - PlatformEvents.workflowDeleted({ - workflowId, - workspaceId: workflowData.workspaceId || undefined, - }) - } catch { - // Telemetry should not fail the operation + const archiveResult = await 
archiveWorkflow(workflowId, { requestId }) + if (!archiveResult.workflow) { + return NextResponse.json({ error: 'Workflow not found' }, { status: 404 }) } const elapsed = Date.now() - startTime - logger.info(`[${requestId}] Successfully deleted workflow ${workflowId} in ${elapsed}ms`) - - // Notify Socket.IO system to disconnect users from this workflow's room - // This prevents "Block not found" errors when collaborative updates try to process - // after the workflow has been deleted - try { - const socketUrl = env.SOCKET_SERVER_URL || 'http://localhost:3002' - const socketResponse = await fetch(`${socketUrl}/api/workflow-deleted`, { - method: 'POST', - headers: { - 'Content-Type': 'application/json', - 'x-api-key': env.INTERNAL_API_SECRET, - }, - body: JSON.stringify({ workflowId }), - }) - - if (socketResponse.ok) { - logger.info( - `[${requestId}] Notified Socket.IO server about workflow ${workflowId} deletion` - ) - } else { - logger.warn( - `[${requestId}] Failed to notify Socket.IO server about workflow ${workflowId} deletion` - ) - } - } catch (error) { - logger.warn( - `[${requestId}] Error notifying Socket.IO server about workflow ${workflowId} deletion:`, - error - ) - // Don't fail the deletion if Socket.IO notification fails - } + logger.info(`[${requestId}] Successfully archived workflow ${workflowId} in ${elapsed}ms`) recordAudit({ workspaceId: workflowData.workspaceId || null, @@ -337,8 +264,9 @@ export async function DELETE( resourceType: AuditResourceType.WORKFLOW, resourceId: workflowId, resourceName: workflowData.name, - description: `Deleted workflow "${workflowData.name}"`, + description: `Archived workflow "${workflowData.name}"`, metadata: { + archived: archiveResult.archived, deleteTemplates: deleteTemplatesParam === 'delete', }, request, @@ -417,6 +345,7 @@ export async function PUT(request: NextRequest, { params }: { params: Promise<{ const conditions = [ eq(workflow.workspaceId, workflowData.workspaceId), + isNull(workflow.archivedAt), 
eq(workflow.name, targetName), ne(workflow.id, workflowId), ] diff --git a/apps/sim/app/api/workflows/[id]/state/route.ts b/apps/sim/app/api/workflows/[id]/state/route.ts index 7cca4990882..26a63ecdd81 100644 --- a/apps/sim/app/api/workflows/[id]/state/route.ts +++ b/apps/sim/app/api/workflows/[id]/state/route.ts @@ -8,7 +8,10 @@ import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid' import { env } from '@/lib/core/config/env' import { generateRequestId } from '@/lib/core/utils/request' import { extractAndPersistCustomTools } from '@/lib/workflows/persistence/custom-tools-persistence' -import { saveWorkflowToNormalizedTables } from '@/lib/workflows/persistence/utils' +import { + loadWorkflowFromNormalizedTables, + saveWorkflowToNormalizedTables, +} from '@/lib/workflows/persistence/utils' import { sanitizeAgentToolsInBlocks } from '@/lib/workflows/sanitization/validation' import { authorizeWorkflowByWorkspacePermission } from '@/lib/workflows/utils' import { validateEdges } from '@/stores/workflows/workflow/edge-validation' @@ -109,6 +112,49 @@ const WorkflowStateSchema = z.object({ variables: z.any().optional(), // Workflow variables }) +/** + * GET /api/workflows/[id]/state + * Fetch the current workflow state from normalized tables. + * Used by the client after server-side edits (edit_workflow) to stay in sync. 
+ */ +export async function GET(request: NextRequest, { params }: { params: Promise<{ id: string }> }) { + const { id: workflowId } = await params + + try { + const auth = await checkSessionOrInternalAuth(request, { requireWorkflowId: false }) + if (!auth.success || !auth.userId) { + return NextResponse.json({ error: 'Unauthorized' }, { status: 401 }) + } + + const authorization = await authorizeWorkflowByWorkspacePermission({ + workflowId, + userId: auth.userId, + action: 'read', + }) + if (!authorization.allowed) { + return NextResponse.json({ error: 'Forbidden' }, { status: 403 }) + } + + const normalized = await loadWorkflowFromNormalizedTables(workflowId) + if (!normalized) { + return NextResponse.json({ error: 'Workflow state not found' }, { status: 404 }) + } + + return NextResponse.json({ + blocks: normalized.blocks, + edges: normalized.edges, + loops: normalized.loops || {}, + parallels: normalized.parallels || {}, + }) + } catch (error) { + logger.error('Failed to fetch workflow state', { + workflowId, + error: error instanceof Error ? 
error.message : String(error), + }) + return NextResponse.json({ error: 'Internal server error' }, { status: 500 }) + } +} + /** * PUT /api/workflows/[id]/state * Save complete workflow state to normalized database tables diff --git a/apps/sim/app/api/workflows/route.test.ts b/apps/sim/app/api/workflows/route.test.ts index bff62acfc83..b1d92ca64ed 100644 --- a/apps/sim/app/api/workflows/route.test.ts +++ b/apps/sim/app/api/workflows/route.test.ts @@ -45,6 +45,8 @@ vi.mock('@sim/db/schema', () => ({ id: 'id', folderId: 'folderId', userId: 'userId', + name: 'name', + archivedAt: 'archivedAt', updatedAt: 'updatedAt', workspaceId: 'workspaceId', sortOrder: 'sortOrder', @@ -108,11 +110,16 @@ describe('Workflows API Route - POST ordering', () => { const minResultsQueue: Array> = [ [{ minOrder: 5 }], [{ minOrder: 2 }], + [], ] mockDbSelect.mockImplementation(() => ({ from: vi.fn().mockReturnValue({ - where: vi.fn().mockImplementation(() => Promise.resolve(minResultsQueue.shift() ?? [])), + where: vi.fn().mockImplementation(() => ({ + limit: vi.fn().mockImplementation(() => Promise.resolve(minResultsQueue.shift() ?? [])), + then: (onFulfilled: (value: Array<{ minOrder: number }>) => unknown) => + Promise.resolve(minResultsQueue.shift() ?? []).then(onFulfilled), + })), }), })) @@ -141,11 +148,15 @@ describe('Workflows API Route - POST ordering', () => { }) it('defaults to sortOrder 0 when there are no siblings', async () => { - const minResultsQueue: Array> = [[], []] + const minResultsQueue: Array> = [[], [], []] mockDbSelect.mockImplementation(() => ({ from: vi.fn().mockReturnValue({ - where: vi.fn().mockImplementation(() => Promise.resolve(minResultsQueue.shift() ?? [])), + where: vi.fn().mockImplementation(() => ({ + limit: vi.fn().mockImplementation(() => Promise.resolve(minResultsQueue.shift() ?? [])), + then: (onFulfilled: (value: Array<{ minOrder: number }>) => unknown) => + Promise.resolve(minResultsQueue.shift() ?? 
[]).then(onFulfilled), + })), }), })) diff --git a/apps/sim/app/api/workflows/route.ts b/apps/sim/app/api/workflows/route.ts index 611d808cf61..3181185b75e 100644 --- a/apps/sim/app/api/workflows/route.ts +++ b/apps/sim/app/api/workflows/route.ts @@ -1,21 +1,27 @@ import { db } from '@sim/db' import { permissions, workflow, workflowFolder } from '@sim/db/schema' import { createLogger } from '@sim/logger' -import { and, asc, eq, inArray, isNull, min } from 'drizzle-orm' +import { and, asc, eq, inArray, isNull, min, sql } from 'drizzle-orm' import { type NextRequest, NextResponse } from 'next/server' import { z } from 'zod' import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log' import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid' import { generateRequestId } from '@/lib/core/utils/request' +import { getNextWorkflowColor } from '@/lib/workflows/colors' +import { listWorkflows, type WorkflowScope } from '@/lib/workflows/utils' import { getUserEntityPermissions, workspaceExists } from '@/lib/workspaces/permissions/utils' import { verifyWorkspaceMembership } from '@/app/api/workflows/utils' const logger = createLogger('WorkflowAPI') const CreateWorkflowSchema = z.object({ + id: z.string().uuid().optional(), name: z.string().min(1, 'Name is required'), description: z.string().optional().default(''), - color: z.string().optional().default('#3972F6'), + color: z + .string() + .optional() + .transform((c) => c || getNextWorkflowColor()), workspaceId: z.string().optional(), folderId: z.string().nullable().optional(), sortOrder: z.number().int().optional(), @@ -27,6 +33,7 @@ export async function GET(request: NextRequest) { const startTime = Date.now() const url = new URL(request.url) const workspaceId = url.searchParams.get('workspaceId') + const scope = (url.searchParams.get('scope') ?? 
'active') as WorkflowScope try { const auth = await checkSessionOrInternalAuth(request, { requireWorkflowId: false }) @@ -62,16 +69,16 @@ export async function GET(request: NextRequest) { } } + if (!['active', 'archived', 'all'].includes(scope)) { + return NextResponse.json({ error: 'Invalid scope' }, { status: 400 }) + } + let workflows const orderByClause = [asc(workflow.sortOrder), asc(workflow.createdAt), asc(workflow.id)] if (workspaceId) { - workflows = await db - .select() - .from(workflow) - .where(eq(workflow.workspaceId, workspaceId)) - .orderBy(...orderByClause) + workflows = await listWorkflows(workspaceId, { scope }) } else { const workspacePermissionRows = await db .select({ workspaceId: permissions.entityId }) @@ -84,7 +91,16 @@ export async function GET(request: NextRequest) { workflows = await db .select() .from(workflow) - .where(inArray(workflow.workspaceId, workspaceIds)) + .where( + scope === 'all' + ? inArray(workflow.workspaceId, workspaceIds) + : scope === 'archived' + ? 
and( + inArray(workflow.workspaceId, workspaceIds), + sql`${workflow.archivedAt} IS NOT NULL` + ) + : and(inArray(workflow.workspaceId, workspaceIds), isNull(workflow.archivedAt)) + ) .orderBy(...orderByClause) } @@ -109,6 +125,7 @@ export async function POST(req: NextRequest) { try { const body = await req.json() const { + id: clientId, name, description, color, @@ -140,7 +157,7 @@ export async function POST(req: NextRequest) { ) } - const workflowId = crypto.randomUUID() + const workflowId = clientId || crypto.randomUUID() const now = new Date() logger.info(`[${requestId}] Creating workflow ${workflowId} for user ${userId}`) @@ -173,7 +190,13 @@ export async function POST(req: NextRequest) { db .select({ minOrder: min(workflow.sortOrder) }) .from(workflow) - .where(and(eq(workflow.workspaceId, workspaceId), workflowParentCondition)), + .where( + and( + eq(workflow.workspaceId, workspaceId), + workflowParentCondition, + isNull(workflow.archivedAt) + ) + ), db .select({ minOrder: min(workflowFolder.sortOrder) }) .from(workflowFolder) @@ -191,6 +214,31 @@ export async function POST(req: NextRequest) { sortOrder = minSortOrder != null ? 
minSortOrder - 1 : 0 } + const duplicateConditions = [ + eq(workflow.workspaceId, workspaceId), + isNull(workflow.archivedAt), + eq(workflow.name, name), + ] + + if (folderId) { + duplicateConditions.push(eq(workflow.folderId, folderId)) + } else { + duplicateConditions.push(isNull(workflow.folderId)) + } + + const [duplicateWorkflow] = await db + .select({ id: workflow.id }) + .from(workflow) + .where(and(...duplicateConditions)) + .limit(1) + + if (duplicateWorkflow) { + return NextResponse.json( + { error: `A workflow named "${name}" already exists in this folder` }, + { status: 409 } + ) + } + await db.insert(workflow).values({ id: workflowId, userId, diff --git a/apps/sim/app/api/workspaces/[id]/api-keys/[keyId]/route.ts b/apps/sim/app/api/workspaces/[id]/api-keys/[keyId]/route.ts index d95daf99ee6..bb9a5ff6989 100644 --- a/apps/sim/app/api/workspaces/[id]/api-keys/[keyId]/route.ts +++ b/apps/sim/app/api/workspaces/[id]/api-keys/[keyId]/route.ts @@ -32,7 +32,7 @@ export async function PUT( const userId = session.user.id const permission = await getUserEntityPermissions(userId, 'workspace', workspaceId) - if (!permission || (permission !== 'admin' && permission !== 'write')) { + if (permission !== 'admin') { return NextResponse.json({ error: 'Forbidden' }, { status: 403 }) } @@ -128,7 +128,7 @@ export async function DELETE( const userId = session.user.id const permission = await getUserEntityPermissions(userId, 'workspace', workspaceId) - if (!permission || (permission !== 'admin' && permission !== 'write')) { + if (permission !== 'admin') { return NextResponse.json({ error: 'Forbidden' }, { status: 403 }) } diff --git a/apps/sim/app/api/workspaces/[id]/byok-keys/route.ts b/apps/sim/app/api/workspaces/[id]/byok-keys/route.ts index ab4c9600df9..e3f34529a2c 100644 --- a/apps/sim/app/api/workspaces/[id]/byok-keys/route.ts +++ b/apps/sim/app/api/workspaces/[id]/byok-keys/route.ts @@ -13,7 +13,21 @@ import { getUserEntityPermissions, getWorkspaceById } from 
'@/lib/workspaces/per const logger = createLogger('WorkspaceBYOKKeysAPI') -const VALID_PROVIDERS = ['openai', 'anthropic', 'google', 'mistral'] as const +const VALID_PROVIDERS = [ + 'openai', + 'anthropic', + 'google', + 'mistral', + 'firecrawl', + 'exa', + 'serper', + 'linkup', + 'perplexity', + 'jina', + 'google_cloud', + 'parallel_ai', + 'brandfetch', +] as const const UpsertKeySchema = z.object({ providerId: z.enum(VALID_PROVIDERS), diff --git a/apps/sim/app/api/workspaces/[id]/files/[fileId]/content/route.ts b/apps/sim/app/api/workspaces/[id]/files/[fileId]/content/route.ts new file mode 100644 index 00000000000..24b5eb56cf0 --- /dev/null +++ b/apps/sim/app/api/workspaces/[id]/files/[fileId]/content/route.ts @@ -0,0 +1,94 @@ +import { createLogger } from '@sim/logger' +import { type NextRequest, NextResponse } from 'next/server' +import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log' +import { getSession } from '@/lib/auth' +import { generateRequestId } from '@/lib/core/utils/request' +import { updateWorkspaceFileContent } from '@/lib/uploads/contexts/workspace' +import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils' + +export const dynamic = 'force-dynamic' + +const logger = createLogger('WorkspaceFileContentAPI') + +/** + * PUT /api/workspaces/[id]/files/[fileId]/content + * Update a workspace file's text content (requires write permission) + */ +export async function PUT( + request: NextRequest, + { params }: { params: Promise<{ id: string; fileId: string }> } +) { + const requestId = generateRequestId() + const { id: workspaceId, fileId } = await params + + try { + const session = await getSession() + if (!session?.user?.id) { + return NextResponse.json({ error: 'Unauthorized' }, { status: 401 }) + } + + const userPermission = await getUserEntityPermissions(session.user.id, 'workspace', workspaceId) + if (userPermission !== 'admin' && userPermission !== 'write') { + logger.warn( + `[${requestId}] User 
${session.user.id} lacks write permission for workspace ${workspaceId}` + ) + return NextResponse.json({ error: 'Insufficient permissions' }, { status: 403 }) + } + + const body = await request.json() + const { content } = body as { content: string } + + if (typeof content !== 'string') { + return NextResponse.json({ error: 'Content must be a string' }, { status: 400 }) + } + + const buffer = Buffer.from(content, 'utf-8') + + const maxFileSizeBytes = 50 * 1024 * 1024 + if (buffer.length > maxFileSizeBytes) { + return NextResponse.json( + { error: `File size exceeds ${maxFileSizeBytes / 1024 / 1024}MB limit` }, + { status: 413 } + ) + } + + const updatedFile = await updateWorkspaceFileContent( + workspaceId, + fileId, + session.user.id, + buffer + ) + + logger.info(`[${requestId}] Updated content for workspace file: ${updatedFile.name}`) + + recordAudit({ + workspaceId, + actorId: session.user.id, + actorName: session.user.name, + actorEmail: session.user.email, + action: AuditAction.FILE_UPDATED, + resourceType: AuditResourceType.FILE, + resourceId: fileId, + description: `Updated content of file "${updatedFile.name}"`, + request, + }) + + return NextResponse.json({ + success: true, + file: updatedFile, + }) + } catch (error) { + const errorMessage = error instanceof Error ? error.message : 'Failed to update file content' + const isNotFound = errorMessage.includes('File not found') + const isQuotaExceeded = errorMessage.includes('Storage limit exceeded') + const status = isNotFound ? 404 : isQuotaExceeded ? 
402 : 500 + + if (status === 500) { + logger.error(`[${requestId}] Error updating file content:`, error) + } else { + logger.warn(`[${requestId}] ${errorMessage}`) + } + + return NextResponse.json({ success: false, error: errorMessage }, { status }) + } +} diff --git a/apps/sim/app/api/workspaces/[id]/files/[fileId]/restore/route.ts b/apps/sim/app/api/workspaces/[id]/files/[fileId]/restore/route.ts new file mode 100644 index 00000000000..eae4bae4368 --- /dev/null +++ b/apps/sim/app/api/workspaces/[id]/files/[fileId]/restore/route.ts @@ -0,0 +1,40 @@ +import { createLogger } from '@sim/logger' +import { type NextRequest, NextResponse } from 'next/server' +import { getSession } from '@/lib/auth' +import { generateRequestId } from '@/lib/core/utils/request' +import { restoreWorkspaceFile } from '@/lib/uploads/contexts/workspace' +import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils' + +const logger = createLogger('RestoreWorkspaceFileAPI') + +export async function POST( + request: NextRequest, + { params }: { params: Promise<{ id: string; fileId: string }> } +) { + const requestId = generateRequestId() + const { id: workspaceId, fileId } = await params + + try { + const session = await getSession() + if (!session?.user?.id) { + return NextResponse.json({ error: 'Unauthorized' }, { status: 401 }) + } + + const userPermission = await getUserEntityPermissions(session.user.id, 'workspace', workspaceId) + if (userPermission !== 'admin' && userPermission !== 'write') { + return NextResponse.json({ error: 'Insufficient permissions' }, { status: 403 }) + } + + await restoreWorkspaceFile(workspaceId, fileId) + + logger.info(`[${requestId}] Restored workspace file ${fileId}`) + + return NextResponse.json({ success: true }) + } catch (error) { + logger.error(`[${requestId}] Error restoring workspace file ${fileId}`, error) + return NextResponse.json( + { error: error instanceof Error ? 
error.message : 'Internal server error' }, + { status: 500 } + ) + } +} diff --git a/apps/sim/app/api/workspaces/[id]/files/[fileId]/route.ts b/apps/sim/app/api/workspaces/[id]/files/[fileId]/route.ts index 80c91a2adf0..c440618863e 100644 --- a/apps/sim/app/api/workspaces/[id]/files/[fileId]/route.ts +++ b/apps/sim/app/api/workspaces/[id]/files/[fileId]/route.ts @@ -3,16 +3,84 @@ import { type NextRequest, NextResponse } from 'next/server' import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log' import { getSession } from '@/lib/auth' import { generateRequestId } from '@/lib/core/utils/request' -import { deleteWorkspaceFile } from '@/lib/uploads/contexts/workspace' +import { + deleteWorkspaceFile, + FileConflictError, + renameWorkspaceFile, +} from '@/lib/uploads/contexts/workspace' import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils' export const dynamic = 'force-dynamic' const logger = createLogger('WorkspaceFileAPI') +/** + * PATCH /api/workspaces/[id]/files/[fileId] + * Rename a workspace file (requires write permission) + */ +export async function PATCH( + request: NextRequest, + { params }: { params: Promise<{ id: string; fileId: string }> } +) { + const requestId = generateRequestId() + const { id: workspaceId, fileId } = await params + + try { + const session = await getSession() + if (!session?.user?.id) { + return NextResponse.json({ error: 'Unauthorized' }, { status: 401 }) + } + + const userPermission = await getUserEntityPermissions(session.user.id, 'workspace', workspaceId) + if (userPermission !== 'admin' && userPermission !== 'write') { + logger.warn( + `[${requestId}] User ${session.user.id} lacks write permission for workspace ${workspaceId}` + ) + return NextResponse.json({ error: 'Insufficient permissions' }, { status: 403 }) + } + + const body = await request.json() + const { name } = body + + if (!name || typeof name !== 'string' || !name.trim()) { + return NextResponse.json({ error: 'Name is 
required' }, { status: 400 }) + } + + const updatedFile = await renameWorkspaceFile(workspaceId, fileId, name) + + logger.info(`[${requestId}] Renamed workspace file: ${fileId} to "${updatedFile.name}"`) + + recordAudit({ + workspaceId, + actorId: session.user.id, + actorName: session.user.name, + actorEmail: session.user.email, + action: AuditAction.FILE_UPDATED, + resourceType: AuditResourceType.FILE, + resourceId: fileId, + description: `Renamed file to "${updatedFile.name}"`, + request, + }) + + return NextResponse.json({ + success: true, + file: updatedFile, + }) + } catch (error) { + logger.error(`[${requestId}] Error renaming workspace file:`, error) + return NextResponse.json( + { + success: false, + error: error instanceof Error ? error.message : 'Failed to rename file', + }, + { status: error instanceof FileConflictError ? 409 : 500 } + ) + } +} + /** * DELETE /api/workspaces/[id]/files/[fileId] - * Delete a workspace file (requires write permission) + * Archive a workspace file (requires write permission) */ export async function DELETE( request: NextRequest, @@ -38,7 +106,7 @@ export async function DELETE( await deleteWorkspaceFile(workspaceId, fileId) - logger.info(`[${requestId}] Deleted workspace file: ${fileId}`) + logger.info(`[${requestId}] Archived workspace file: ${fileId}`) recordAudit({ workspaceId, @@ -48,7 +116,7 @@ export async function DELETE( action: AuditAction.FILE_DELETED, resourceType: AuditResourceType.FILE, resourceId: fileId, - description: `Deleted file "${fileId}"`, + description: `Archived file "${fileId}"`, request, }) diff --git a/apps/sim/app/api/workspaces/[id]/files/route.ts b/apps/sim/app/api/workspaces/[id]/files/route.ts index a62575dce0e..d6ceb728e00 100644 --- a/apps/sim/app/api/workspaces/[id]/files/route.ts +++ b/apps/sim/app/api/workspaces/[id]/files/route.ts @@ -3,7 +3,11 @@ import { type NextRequest, NextResponse } from 'next/server' import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log' 
import { getSession } from '@/lib/auth' import { generateRequestId } from '@/lib/core/utils/request' -import { listWorkspaceFiles, uploadWorkspaceFile } from '@/lib/uploads/contexts/workspace' +import { + listWorkspaceFiles, + uploadWorkspaceFile, + type WorkspaceFileScope, +} from '@/lib/uploads/contexts/workspace' import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils' import { verifyWorkspaceMembership } from '@/app/api/workflows/utils' @@ -34,7 +38,12 @@ export async function GET(request: NextRequest, { params }: { params: Promise<{ return NextResponse.json({ error: 'Insufficient permissions' }, { status: 403 }) } - const files = await listWorkspaceFiles(workspaceId) + const scope = (new URL(request.url).searchParams.get('scope') ?? 'active') as WorkspaceFileScope + if (!['active', 'archived', 'all'].includes(scope)) { + return NextResponse.json({ error: 'Invalid scope' }, { status: 400 }) + } + + const files = await listWorkspaceFiles(workspaceId, { scope }) logger.info(`[${requestId}] Listed ${files.length} files for workspace ${workspaceId}`) diff --git a/apps/sim/app/api/workspaces/[id]/inbox/route.ts b/apps/sim/app/api/workspaces/[id]/inbox/route.ts new file mode 100644 index 00000000000..3e64cf34174 --- /dev/null +++ b/apps/sim/app/api/workspaces/[id]/inbox/route.ts @@ -0,0 +1,140 @@ +import { db, mothershipInboxTask, workspace } from '@sim/db' +import { createLogger } from '@sim/logger' +import { eq, sql } from 'drizzle-orm' +import { type NextRequest, NextResponse } from 'next/server' +import { z } from 'zod' +import { getSession } from '@/lib/auth' +import { hasInboxAccess } from '@/lib/billing/core/subscription' +import { disableInbox, enableInbox, updateInboxAddress } from '@/lib/mothership/inbox/lifecycle' +import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils' + +const logger = createLogger('InboxConfigAPI') + +const patchSchema = z.object({ + enabled: z.boolean().optional(), + username: 
z.string().min(1).max(64).optional(), +}) + +export async function GET(_req: NextRequest, { params }: { params: Promise<{ id: string }> }) { + const { id: workspaceId } = await params + const session = await getSession() + if (!session?.user?.id) { + return NextResponse.json({ error: 'Unauthorized' }, { status: 401 }) + } + + const [hasAccess, permission] = await Promise.all([ + hasInboxAccess(session.user.id), + getUserEntityPermissions(session.user.id, 'workspace', workspaceId), + ]) + if (!hasAccess) { + return NextResponse.json({ error: 'Sim Mailer requires a Max plan' }, { status: 403 }) + } + if (!permission) { + return NextResponse.json({ error: 'Not found' }, { status: 404 }) + } + + const [wsResult, statsResult] = await Promise.all([ + db + .select({ + inboxEnabled: workspace.inboxEnabled, + inboxAddress: workspace.inboxAddress, + }) + .from(workspace) + .where(eq(workspace.id, workspaceId)) + .limit(1), + db + .select({ + status: mothershipInboxTask.status, + count: sql`count(*)::int`, + }) + .from(mothershipInboxTask) + .where(eq(mothershipInboxTask.workspaceId, workspaceId)) + .groupBy(mothershipInboxTask.status), + ]) + + const [ws] = wsResult + if (!ws) { + return NextResponse.json({ error: 'Workspace not found' }, { status: 404 }) + } + + const stats = { + total: 0, + completed: 0, + processing: 0, + failed: 0, + } + for (const row of statsResult) { + const count = Number(row.count) + stats.total += count + if (row.status === 'completed') stats.completed = count + else if (row.status === 'processing') stats.processing = count + else if (row.status === 'failed') stats.failed = count + } + + return NextResponse.json({ + enabled: ws.inboxEnabled, + address: ws.inboxAddress, + taskStats: stats, + }) +} + +export async function PATCH(req: NextRequest, { params }: { params: Promise<{ id: string }> }) { + const { id: workspaceId } = await params + const session = await getSession() + if (!session?.user?.id) { + return NextResponse.json({ error: 
'Unauthorized' }, { status: 401 }) + } + + const [hasAccess, permission] = await Promise.all([ + hasInboxAccess(session.user.id), + getUserEntityPermissions(session.user.id, 'workspace', workspaceId), + ]) + if (!hasAccess) { + return NextResponse.json({ error: 'Sim Mailer requires a Max plan' }, { status: 403 }) + } + if (permission !== 'admin') { + return NextResponse.json({ error: 'Admin access required' }, { status: 403 }) + } + + try { + const body = patchSchema.parse(await req.json()) + + if (body.enabled === true) { + const [current] = await db + .select({ inboxEnabled: workspace.inboxEnabled }) + .from(workspace) + .where(eq(workspace.id, workspaceId)) + .limit(1) + if (current?.inboxEnabled) { + return NextResponse.json({ error: 'Inbox is already enabled' }, { status: 409 }) + } + const config = await enableInbox(workspaceId, { username: body.username }) + return NextResponse.json(config) + } + + if (body.enabled === false) { + await disableInbox(workspaceId) + return NextResponse.json({ enabled: false, address: null }) + } + + if (body.username) { + const config = await updateInboxAddress(workspaceId, body.username) + return NextResponse.json(config) + } + + return NextResponse.json({ error: 'No valid update provided' }, { status: 400 }) + } catch (error) { + if (error instanceof z.ZodError) { + return NextResponse.json({ error: 'Invalid request', details: error.errors }, { status: 400 }) + } + + logger.error('Inbox config update failed', { + workspaceId, + error: error instanceof Error ? error.message : 'Unknown error', + }) + return NextResponse.json( + { error: error instanceof Error ? 
error.message : 'Failed to update inbox' }, + { status: 500 } + ) + } +} diff --git a/apps/sim/app/api/workspaces/[id]/inbox/senders/route.ts b/apps/sim/app/api/workspaces/[id]/inbox/senders/route.ts new file mode 100644 index 00000000000..3b48f75db04 --- /dev/null +++ b/apps/sim/app/api/workspaces/[id]/inbox/senders/route.ts @@ -0,0 +1,172 @@ +import { db, mothershipInboxAllowedSender, permissions, user } from '@sim/db' +import { createLogger } from '@sim/logger' +import { and, eq } from 'drizzle-orm' +import { type NextRequest, NextResponse } from 'next/server' +import { v4 as uuidv4 } from 'uuid' +import { z } from 'zod' +import { getSession } from '@/lib/auth' +import { hasInboxAccess } from '@/lib/billing/core/subscription' +import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils' + +const logger = createLogger('InboxSendersAPI') + +const addSenderSchema = z.object({ + email: z.string().email('Invalid email address'), + label: z.string().max(100).optional(), +}) + +const deleteSenderSchema = z.object({ + senderId: z.string().min(1), +}) + +export async function GET(_req: NextRequest, { params }: { params: Promise<{ id: string }> }) { + const { id: workspaceId } = await params + const session = await getSession() + if (!session?.user?.id) { + return NextResponse.json({ error: 'Unauthorized' }, { status: 401 }) + } + + const [hasAccess, permission] = await Promise.all([ + hasInboxAccess(session.user.id), + getUserEntityPermissions(session.user.id, 'workspace', workspaceId), + ]) + if (!hasAccess) { + return NextResponse.json({ error: 'Sim Mailer requires a Max plan' }, { status: 403 }) + } + if (!permission) { + return NextResponse.json({ error: 'Not found' }, { status: 404 }) + } + + const [senders, members] = await Promise.all([ + db + .select({ + id: mothershipInboxAllowedSender.id, + email: mothershipInboxAllowedSender.email, + label: mothershipInboxAllowedSender.label, + createdAt: mothershipInboxAllowedSender.createdAt, + }) + 
.from(mothershipInboxAllowedSender) + .where(eq(mothershipInboxAllowedSender.workspaceId, workspaceId)) + .orderBy(mothershipInboxAllowedSender.createdAt), + db + .select({ + email: user.email, + name: user.name, + }) + .from(permissions) + .innerJoin(user, eq(permissions.userId, user.id)) + .where(and(eq(permissions.entityType, 'workspace'), eq(permissions.entityId, workspaceId))), + ]) + + return NextResponse.json({ + senders: senders.map((s) => ({ + id: s.id, + email: s.email, + label: s.label, + createdAt: s.createdAt, + })), + workspaceMembers: members.map((m) => ({ + email: m.email, + name: m.name, + isAutoAllowed: true, + })), + }) +} + +export async function POST(req: NextRequest, { params }: { params: Promise<{ id: string }> }) { + const { id: workspaceId } = await params + const session = await getSession() + if (!session?.user?.id) { + return NextResponse.json({ error: 'Unauthorized' }, { status: 401 }) + } + + const [hasAccess, permission] = await Promise.all([ + hasInboxAccess(session.user.id), + getUserEntityPermissions(session.user.id, 'workspace', workspaceId), + ]) + if (!hasAccess) { + return NextResponse.json({ error: 'Sim Mailer requires a Max plan' }, { status: 403 }) + } + if (permission !== 'admin') { + return NextResponse.json({ error: 'Admin access required' }, { status: 403 }) + } + + try { + const { email, label } = addSenderSchema.parse(await req.json()) + const normalizedEmail = email.toLowerCase() + + const [existing] = await db + .select({ id: mothershipInboxAllowedSender.id }) + .from(mothershipInboxAllowedSender) + .where( + and( + eq(mothershipInboxAllowedSender.workspaceId, workspaceId), + eq(mothershipInboxAllowedSender.email, normalizedEmail) + ) + ) + .limit(1) + + if (existing) { + return NextResponse.json({ error: 'Sender already exists' }, { status: 409 }) + } + + const [sender] = await db + .insert(mothershipInboxAllowedSender) + .values({ + id: uuidv4(), + workspaceId, + email: normalizedEmail, + label: label || null, + 
addedBy: session.user.id, + }) + .returning() + + return NextResponse.json({ sender }) + } catch (error) { + if (error instanceof z.ZodError) { + return NextResponse.json({ error: 'Invalid request', details: error.errors }, { status: 400 }) + } + logger.error('Failed to add sender', { workspaceId, error }) + return NextResponse.json({ error: 'Failed to add sender' }, { status: 500 }) + } +} + +export async function DELETE(req: NextRequest, { params }: { params: Promise<{ id: string }> }) { + const { id: workspaceId } = await params + const session = await getSession() + if (!session?.user?.id) { + return NextResponse.json({ error: 'Unauthorized' }, { status: 401 }) + } + + const [hasAccess, permission] = await Promise.all([ + hasInboxAccess(session.user.id), + getUserEntityPermissions(session.user.id, 'workspace', workspaceId), + ]) + if (!hasAccess) { + return NextResponse.json({ error: 'Sim Mailer requires a Max plan' }, { status: 403 }) + } + if (permission !== 'admin') { + return NextResponse.json({ error: 'Admin access required' }, { status: 403 }) + } + + try { + const { senderId } = deleteSenderSchema.parse(await req.json()) + + await db + .delete(mothershipInboxAllowedSender) + .where( + and( + eq(mothershipInboxAllowedSender.id, senderId), + eq(mothershipInboxAllowedSender.workspaceId, workspaceId) + ) + ) + + return NextResponse.json({ ok: true }) + } catch (error) { + if (error instanceof z.ZodError) { + return NextResponse.json({ error: 'Invalid request', details: error.errors }, { status: 400 }) + } + logger.error('Failed to delete sender', { workspaceId, error }) + return NextResponse.json({ error: 'Failed to delete sender' }, { status: 500 }) + } +} diff --git a/apps/sim/app/api/workspaces/[id]/inbox/tasks/route.ts b/apps/sim/app/api/workspaces/[id]/inbox/tasks/route.ts new file mode 100644 index 00000000000..8deb40cb670 --- /dev/null +++ b/apps/sim/app/api/workspaces/[id]/inbox/tasks/route.ts @@ -0,0 +1,88 @@ +import { db, mothershipInboxTask } from 
'@sim/db' +import { createLogger } from '@sim/logger' +import { and, desc, eq, lt } from 'drizzle-orm' +import { type NextRequest, NextResponse } from 'next/server' +import { getSession } from '@/lib/auth' +import { hasInboxAccess } from '@/lib/billing/core/subscription' +import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils' + +const logger = createLogger('InboxTasksAPI') + +export async function GET(req: NextRequest, { params }: { params: Promise<{ id: string }> }) { + const { id: workspaceId } = await params + const session = await getSession() + if (!session?.user?.id) { + return NextResponse.json({ error: 'Unauthorized' }, { status: 401 }) + } + + const [hasAccess, permission] = await Promise.all([ + hasInboxAccess(session.user.id), + getUserEntityPermissions(session.user.id, 'workspace', workspaceId), + ]) + if (!hasAccess) { + return NextResponse.json({ error: 'Sim Mailer requires a Max plan' }, { status: 403 }) + } + if (!permission) { + return NextResponse.json({ error: 'Not found' }, { status: 404 }) + } + + const url = new URL(req.url) + const status = url.searchParams.get('status') || 'all' + const limit = Math.min(Number(url.searchParams.get('limit') || '20'), 50) + const cursor = url.searchParams.get('cursor') // ISO date string for cursor-based pagination + + const conditions = [eq(mothershipInboxTask.workspaceId, workspaceId)] + + const validStatuses = ['received', 'processing', 'completed', 'failed', 'rejected'] as const + if (status !== 'all') { + if (!validStatuses.includes(status as (typeof validStatuses)[number])) { + return NextResponse.json({ error: 'Invalid status filter' }, { status: 400 }) + } + conditions.push(eq(mothershipInboxTask.status, status)) + } + + if (cursor) { + const cursorDate = new Date(cursor) + if (Number.isNaN(cursorDate.getTime())) { + return NextResponse.json({ error: 'Invalid cursor value' }, { status: 400 }) + } + conditions.push(lt(mothershipInboxTask.createdAt, cursorDate)) + } + + const tasks 
= await db + .select({ + id: mothershipInboxTask.id, + fromEmail: mothershipInboxTask.fromEmail, + fromName: mothershipInboxTask.fromName, + subject: mothershipInboxTask.subject, + bodyPreview: mothershipInboxTask.bodyPreview, + status: mothershipInboxTask.status, + hasAttachments: mothershipInboxTask.hasAttachments, + resultSummary: mothershipInboxTask.resultSummary, + errorMessage: mothershipInboxTask.errorMessage, + rejectionReason: mothershipInboxTask.rejectionReason, + chatId: mothershipInboxTask.chatId, + createdAt: mothershipInboxTask.createdAt, + completedAt: mothershipInboxTask.completedAt, + }) + .from(mothershipInboxTask) + .where(and(...conditions)) + .orderBy(desc(mothershipInboxTask.createdAt)) + .limit(limit + 1) // Fetch one extra to determine hasMore + + const hasMore = tasks.length > limit + const resultTasks = hasMore ? tasks.slice(0, limit) : tasks + const nextCursor = + hasMore && resultTasks.length > 0 + ? resultTasks[resultTasks.length - 1].createdAt.toISOString() + : null + + return NextResponse.json({ + tasks: resultTasks, + pagination: { + limit, + hasMore, + nextCursor, + }, + }) +} diff --git a/apps/sim/app/api/workspaces/[id]/members/route.ts b/apps/sim/app/api/workspaces/[id]/members/route.ts new file mode 100644 index 00000000000..987946d5a3c --- /dev/null +++ b/apps/sim/app/api/workspaces/[id]/members/route.ts @@ -0,0 +1,39 @@ +import { createLogger } from '@sim/logger' +import { type NextRequest, NextResponse } from 'next/server' +import { getSession } from '@/lib/auth' +import { + getUserEntityPermissions, + getWorkspaceMemberProfiles, +} from '@/lib/workspaces/permissions/utils' + +const logger = createLogger('WorkspaceMembersAPI') + +/** + * GET /api/workspaces/[id]/members + * + * Returns lightweight member profiles (id, name, image) for a workspace. + * Intended for UI display (avatars, owner cells) without the overhead of + * full permission data. 
+ */ +export async function GET(_request: NextRequest, { params }: { params: Promise<{ id: string }> }) { + try { + const { id: workspaceId } = await params + const session = await getSession() + + if (!session?.user?.id) { + return NextResponse.json({ error: 'Authentication required' }, { status: 401 }) + } + + const permission = await getUserEntityPermissions(session.user.id, 'workspace', workspaceId) + if (permission === null) { + return NextResponse.json({ error: 'Workspace not found or access denied' }, { status: 404 }) + } + + const members = await getWorkspaceMemberProfiles(workspaceId) + + return NextResponse.json({ members }) + } catch (error) { + logger.error('Error fetching workspace members:', error) + return NextResponse.json({ error: 'Failed to fetch workspace members' }, { status: 500 }) + } +} diff --git a/apps/sim/app/api/workspaces/[id]/notifications/[notificationId]/route.ts b/apps/sim/app/api/workspaces/[id]/notifications/[notificationId]/route.ts index ddc27300180..96acb82811d 100644 --- a/apps/sim/app/api/workspaces/[id]/notifications/[notificationId]/route.ts +++ b/apps/sim/app/api/workspaces/[id]/notifications/[notificationId]/route.ts @@ -8,13 +8,12 @@ import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log' import { getSession } from '@/lib/auth' import { encryptSecret } from '@/lib/core/security/encryption' import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils' -import { CORE_TRIGGER_TYPES } from '@/stores/logs/filters/types' import { MAX_EMAIL_RECIPIENTS, MAX_WORKFLOW_IDS } from '../constants' const logger = createLogger('WorkspaceNotificationAPI') const levelFilterSchema = z.array(z.enum(['info', 'error'])) -const triggerFilterSchema = z.array(z.enum(CORE_TRIGGER_TYPES)) +const triggerFilterSchema = z.array(z.string().min(1)) const alertRuleSchema = z.enum([ 'consecutive_failures', diff --git a/apps/sim/app/api/workspaces/[id]/notifications/route.ts 
b/apps/sim/app/api/workspaces/[id]/notifications/route.ts index 6fc8f4866c7..6c46cef900a 100644 --- a/apps/sim/app/api/workspaces/[id]/notifications/route.ts +++ b/apps/sim/app/api/workspaces/[id]/notifications/route.ts @@ -9,14 +9,13 @@ import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log' import { getSession } from '@/lib/auth' import { encryptSecret } from '@/lib/core/security/encryption' import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils' -import { CORE_TRIGGER_TYPES } from '@/stores/logs/filters/types' import { MAX_EMAIL_RECIPIENTS, MAX_NOTIFICATIONS_PER_TYPE, MAX_WORKFLOW_IDS } from './constants' const logger = createLogger('WorkspaceNotificationsAPI') const notificationTypeSchema = z.enum(['webhook', 'email', 'slack']) const levelFilterSchema = z.array(z.enum(['info', 'error'])) -const triggerFilterSchema = z.array(z.enum(CORE_TRIGGER_TYPES)) +const triggerFilterSchema = z.array(z.string().min(1)) const alertRuleSchema = z.enum([ 'consecutive_failures', @@ -82,7 +81,7 @@ const createNotificationSchema = z workflowIds: z.array(z.string()).max(MAX_WORKFLOW_IDS).default([]), allWorkflows: z.boolean().default(false), levelFilter: levelFilterSchema.default(['info', 'error']), - triggerFilter: triggerFilterSchema.default([...CORE_TRIGGER_TYPES]), + triggerFilter: triggerFilterSchema.default([]), includeFinalOutput: z.boolean().default(false), includeTraceSpans: z.boolean().default(false), includeRateLimits: z.boolean().default(false), diff --git a/apps/sim/app/api/workspaces/[id]/route.ts b/apps/sim/app/api/workspaces/[id]/route.ts index 503773be0d3..cf2ed3826d8 100644 --- a/apps/sim/app/api/workspaces/[id]/route.ts +++ b/apps/sim/app/api/workspaces/[id]/route.ts @@ -1,19 +1,24 @@ import { workflow } from '@sim/db/schema' import { createLogger } from '@sim/logger' -import { and, eq, inArray } from 'drizzle-orm' +import { and, eq, inArray, isNull } from 'drizzle-orm' import { type NextRequest, NextResponse } from 
'next/server' import { z } from 'zod' import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log' import { getSession } from '@/lib/auth' +import { archiveWorkspace } from '@/lib/workspaces/lifecycle' const logger = createLogger('WorkspaceByIdAPI') import { db } from '@sim/db' -import { knowledgeBase, permissions, templates, workspace } from '@sim/db/schema' +import { permissions, templates, workspace } from '@sim/db/schema' import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils' const patchWorkspaceSchema = z.object({ name: z.string().trim().min(1).optional(), + color: z + .string() + .regex(/^#[0-9a-fA-F]{6}$/) + .optional(), billedAccountUserId: z.string().optional(), allowPersonalApiKeys: z.boolean().optional(), }) @@ -80,7 +85,7 @@ export async function GET(request: NextRequest, { params }: { params: Promise<{ const workspaceDetails = await db .select() .from(workspace) - .where(eq(workspace.id, workspaceId)) + .where(and(eq(workspace.id, workspaceId), isNull(workspace.archivedAt))) .then((rows) => rows[0]) if (!workspaceDetails) { @@ -113,10 +118,11 @@ export async function PATCH(request: NextRequest, { params }: { params: Promise< try { const body = patchWorkspaceSchema.parse(await request.json()) - const { name, billedAccountUserId, allowPersonalApiKeys } = body + const { name, color, billedAccountUserId, allowPersonalApiKeys } = body if ( name === undefined && + color === undefined && billedAccountUserId === undefined && allowPersonalApiKeys === undefined ) { @@ -126,7 +132,7 @@ export async function PATCH(request: NextRequest, { params }: { params: Promise< const existingWorkspace = await db .select() .from(workspace) - .where(eq(workspace.id, workspaceId)) + .where(and(eq(workspace.id, workspaceId), isNull(workspace.archivedAt))) .then((rows) => rows[0]) if (!existingWorkspace) { @@ -139,6 +145,10 @@ export async function PATCH(request: NextRequest, { params }: { params: Promise< updateData.name = name } + if 
(color !== undefined) { + updateData.color = color + } + if (allowPersonalApiKeys !== undefined) { updateData.allowPersonalApiKeys = Boolean(allowPersonalApiKeys) } @@ -233,67 +243,37 @@ export async function DELETE( const [workspaceRecord] = await db .select({ name: workspace.name }) .from(workspace) - .where(eq(workspace.id, workspaceId)) + .where(and(eq(workspace.id, workspaceId), isNull(workspace.archivedAt))) .limit(1) - // Delete workspace and all related data in a transaction - let workspaceWorkflowCount = 0 - await db.transaction(async (tx) => { - // Get all workflows in this workspace before deletion - const workspaceWorkflows = await tx - .select({ id: workflow.id }) - .from(workflow) - .where(eq(workflow.workspaceId, workspaceId)) - - workspaceWorkflowCount = workspaceWorkflows.length - - if (workspaceWorkflows.length > 0) { - const workflowIds = workspaceWorkflows.map((w) => w.id) - - // Handle templates based on user choice - if (deleteTemplates) { - // Delete published templates that reference these workflows - await tx.delete(templates).where(inArray(templates.workflowId, workflowIds)) - logger.info(`Deleted templates for workflows in workspace ${workspaceId}`) - } else { - // Set workflowId to null for templates to create "orphaned" templates - // This allows templates to remain without source workflows - await tx - .update(templates) - .set({ workflowId: null }) - .where(inArray(templates.workflowId, workflowIds)) - logger.info( - `Updated templates to orphaned status for workflows in workspace ${workspaceId}` - ) - } + const workspaceWorkflows = await db + .select({ id: workflow.id }) + .from(workflow) + .where(eq(workflow.workspaceId, workspaceId)) + + const workflowIds = workspaceWorkflows.map((entry) => entry.id) + + if (workflowIds.length > 0) { + if (deleteTemplates) { + await db.delete(templates).where(inArray(templates.workflowId, workflowIds)) + } else { + await db + .update(templates) + .set({ workflowId: null }) + 
.where(inArray(templates.workflowId, workflowIds)) } + } - // Delete all workflows in the workspace - database cascade will handle all workflow-related data - // The database cascade will handle deleting related workflow_blocks, workflow_edges, workflow_subflows, - // workflow_logs, workflow_execution_snapshots, workflow_execution_logs, workflow_execution_trace_spans, - // workflow_schedule, webhook, chat, and memory records - await tx.delete(workflow).where(eq(workflow.workspaceId, workspaceId)) - - // Clear workspace ID from knowledge bases instead of deleting them - // This allows knowledge bases to become "unassigned" rather than being deleted - await tx - .update(knowledgeBase) - .set({ workspaceId: null, updatedAt: new Date() }) - .where(eq(knowledgeBase.workspaceId, workspaceId)) - - // Delete all permissions associated with this workspace - await tx - .delete(permissions) - .where(and(eq(permissions.entityType, 'workspace'), eq(permissions.entityId, workspaceId))) - - // Delete the workspace itself - await tx.delete(workspace).where(eq(workspace.id, workspaceId)) - - logger.info(`Successfully deleted workspace ${workspaceId} and all related data`) + const archiveResult = await archiveWorkspace(workspaceId, { + requestId: `workspace-${workspaceId}`, }) + if (!archiveResult.archived && !workspaceRecord) { + return NextResponse.json({ error: 'Workspace not found' }, { status: 404 }) + } + recordAudit({ - workspaceId: null, + workspaceId, actorId: session.user.id, actorName: session.user.name, actorEmail: session.user.email, @@ -301,11 +281,12 @@ export async function DELETE( resourceType: AuditResourceType.WORKSPACE, resourceId: workspaceId, resourceName: workspaceRecord?.name, - description: `Deleted workspace "${workspaceRecord?.name || workspaceId}"`, + description: `Archived workspace "${workspaceRecord?.name || workspaceId}"`, metadata: { affected: { - workflows: workspaceWorkflowCount, + workflows: workflowIds.length, }, + archived: 
archiveResult.archived, deleteTemplates, }, request, diff --git a/apps/sim/app/api/workspaces/invitations/[invitationId]/route.test.ts b/apps/sim/app/api/workspaces/invitations/[invitationId]/route.test.ts index 65f80c17799..3df5bd76884 100644 --- a/apps/sim/app/api/workspaces/invitations/[invitationId]/route.test.ts +++ b/apps/sim/app/api/workspaces/invitations/[invitationId]/route.test.ts @@ -4,6 +4,7 @@ import { beforeEach, describe, expect, it, vi } from 'vitest' const mockGetSession = vi.fn() const mockHasWorkspaceAdminAccess = vi.fn() +const mockGetWorkspaceById = vi.fn() let dbSelectResults: any[] = [] let dbSelectCallIndex = 0 @@ -63,6 +64,7 @@ vi.mock('@/lib/auth', () => ({ vi.mock('@/lib/workspaces/permissions/utils', () => ({ hasWorkspaceAdminAccess: (userId: string, workspaceId: string) => mockHasWorkspaceAdminAccess(userId, workspaceId), + getWorkspaceById: (id: string) => mockGetWorkspaceById(id), })) vi.mock('@/lib/credentials/environment', () => ({ @@ -120,8 +122,9 @@ vi.mock('@sim/db/schema', () => ({ })) vi.mock('drizzle-orm', () => ({ - eq: vi.fn((a, b) => ({ type: 'eq', a, b })), - and: vi.fn((...args) => ({ type: 'and', args })), + eq: vi.fn((a: unknown, b: unknown) => ({ type: 'eq', a, b })), + and: vi.fn((...args: unknown[]) => ({ type: 'and', args })), + isNull: vi.fn((field: unknown) => ({ type: 'isNull', field })), })) vi.mock('crypto', () => ({ @@ -164,6 +167,7 @@ describe('Workspace Invitation [invitationId] API Route', () => { vi.clearAllMocks() dbSelectResults = [] dbSelectCallIndex = 0 + mockGetWorkspaceById.mockResolvedValue({ id: 'workspace-456', name: 'Test Workspace' }) }) describe('GET /api/workspaces/invitations/[invitationId]', () => { @@ -240,7 +244,9 @@ describe('Workspace Invitation [invitationId] API Route', () => { const response = await GET(request, { params }) expect(response.status).toBe(307) - expect(response.headers.get('location')).toBe('https://test.sim.ai/workspace/workspace-456/w') + 
expect(response.headers.get('location')).toBe( + 'https://test.sim.ai/workspace/workspace-456/home' + ) }) it('should redirect to error page with token preserved when invitation expired', async () => { @@ -495,7 +501,7 @@ describe('Workspace Invitation [invitationId] API Route', () => { expect(response2.status).toBe(307) expect(response2.headers.get('location')).toBe( - 'https://test.sim.ai/workspace/workspace-456/w' + 'https://test.sim.ai/workspace/workspace-456/home' ) }) }) diff --git a/apps/sim/app/api/workspaces/invitations/[invitationId]/route.ts b/apps/sim/app/api/workspaces/invitations/[invitationId]/route.ts index fac6b6f6dae..723b2954de0 100644 --- a/apps/sim/app/api/workspaces/invitations/[invitationId]/route.ts +++ b/apps/sim/app/api/workspaces/invitations/[invitationId]/route.ts @@ -10,7 +10,7 @@ import { workspaceInvitation, } from '@sim/db/schema' import { createLogger } from '@sim/logger' -import { and, eq } from 'drizzle-orm' +import { and, eq, isNull } from 'drizzle-orm' import { type NextRequest, NextResponse } from 'next/server' import { WorkspaceInvitationEmail } from '@/components/emails' import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log' @@ -19,7 +19,7 @@ import { getBaseUrl } from '@/lib/core/utils/urls' import { syncWorkspaceEnvCredentials } from '@/lib/credentials/environment' import { sendEmail } from '@/lib/messaging/email/mailer' import { getFromEmailAddress } from '@/lib/messaging/email/utils' -import { hasWorkspaceAdminAccess } from '@/lib/workspaces/permissions/utils' +import { getWorkspaceById, hasWorkspaceAdminAccess } from '@/lib/workspaces/permissions/utils' const logger = createLogger('WorkspaceInvitationAPI') @@ -74,7 +74,7 @@ export async function GET( const workspaceDetails = await db .select() .from(workspace) - .where(eq(workspace.id, invitation.workspaceId)) + .where(and(eq(workspace.id, invitation.workspaceId), isNull(workspace.archivedAt))) .then((rows) => rows[0]) if (!workspaceDetails) { @@ 
-141,7 +141,7 @@ export async function GET( .where(eq(workspaceInvitation.id, invitation.id)) return NextResponse.redirect( - new URL(`/workspace/${invitation.workspaceId}/w`, getBaseUrl()) + new URL(`/workspace/${invitation.workspaceId}/home`, getBaseUrl()) ) } @@ -193,7 +193,9 @@ export async function GET( request: req, }) - return NextResponse.redirect(new URL(`/workspace/${invitation.workspaceId}/w`, getBaseUrl())) + return NextResponse.redirect( + new URL(`/workspace/${invitation.workspaceId}/home`, getBaseUrl()) + ) } return NextResponse.json({ @@ -235,6 +237,11 @@ export async function DELETE( return NextResponse.json({ error: 'Invitation not found' }, { status: 404 }) } + const activeWorkspace = await getWorkspaceById(invitation.workspaceId) + if (!activeWorkspace) { + return NextResponse.json({ error: 'Workspace not found' }, { status: 404 }) + } + const hasAdminAccess = await hasWorkspaceAdminAccess(session.user.id, invitation.workspaceId) if (!hasAdminAccess) { diff --git a/apps/sim/app/api/workspaces/invitations/route.test.ts b/apps/sim/app/api/workspaces/invitations/route.test.ts index 0919385d0f9..248e721258d 100644 --- a/apps/sim/app/api/workspaces/invitations/route.test.ts +++ b/apps/sim/app/api/workspaces/invitations/route.test.ts @@ -15,6 +15,7 @@ const { mockGetEmailDomain, mockValidateInvitationsAllowed, mockRandomUUID, + mockGetWorkspaceById, } = vi.hoisted(() => { const mockGetSession = vi.fn() const mockInsertValues = vi.fn().mockResolvedValue(undefined) @@ -24,6 +25,7 @@ const { const mockGetEmailDomain = vi.fn().mockReturnValue('sim.ai') const mockValidateInvitationsAllowed = vi.fn().mockResolvedValue(undefined) const mockRandomUUID = vi.fn().mockReturnValue('mock-uuid-1234') + const mockGetWorkspaceById = vi.fn() const mockDbResults: { value: any[] } = { value: [] } @@ -52,6 +54,7 @@ const { mockGetEmailDomain, mockValidateInvitationsAllowed, mockRandomUUID, + mockGetWorkspaceById, } }) @@ -111,6 +114,10 @@ vi.mock('@/lib/core/config/env', 
async () => { return createEnvMock() }) +vi.mock('@/lib/workspaces/permissions/utils', () => ({ + getWorkspaceById: mockGetWorkspaceById, +})) + vi.mock('@/lib/core/utils/urls', () => ({ getEmailDomain: mockGetEmailDomain, })) @@ -135,6 +142,7 @@ vi.mock('drizzle-orm', () => ({ inArray: vi .fn() .mockImplementation((field: any, values: any) => ({ type: 'inArray', field, values })), + isNull: vi.fn().mockImplementation((field: any) => ({ type: 'isNull', field })), })) vi.mock('@/ee/access-control/utils/permission-check', () => ({ @@ -176,6 +184,7 @@ describe('Workspace Invitations API Route', () => { mockRender.mockResolvedValue('email content') mockGetEmailDomain.mockReturnValue('sim.ai') mockValidateInvitationsAllowed.mockResolvedValue(undefined) + mockGetWorkspaceById.mockResolvedValue({ id: 'workspace-1', name: 'Test Workspace' }) }) describe('GET /api/workspaces/invitations', () => { @@ -291,9 +300,9 @@ describe('Workspace Invitations API Route', () => { it('should return 404 when workspace is not found', async () => { mockGetSession.mockResolvedValue({ user: { id: 'user-123' } }) + mockGetWorkspaceById.mockResolvedValueOnce(null) mockDbResults.value = [ [{ permissionType: 'admin' }], // User has admin permissions - [], // Workspace not found ] const req = createMockRequest('POST', { diff --git a/apps/sim/app/api/workspaces/invitations/route.ts b/apps/sim/app/api/workspaces/invitations/route.ts index 543cc73727c..208e0a0e267 100644 --- a/apps/sim/app/api/workspaces/invitations/route.ts +++ b/apps/sim/app/api/workspaces/invitations/route.ts @@ -10,7 +10,7 @@ import { workspaceInvitation, } from '@sim/db/schema' import { createLogger } from '@sim/logger' -import { and, eq, inArray } from 'drizzle-orm' +import { and, eq, inArray, isNull } from 'drizzle-orm' import { type NextRequest, NextResponse } from 'next/server' import { WorkspaceInvitationEmail } from '@/components/emails' import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log' @@ -19,6 
+19,7 @@ import { PlatformEvents } from '@/lib/core/telemetry' import { getBaseUrl } from '@/lib/core/utils/urls' import { sendEmail } from '@/lib/messaging/email/mailer' import { getFromEmailAddress } from '@/lib/messaging/email/utils' +import { getWorkspaceById } from '@/lib/workspaces/permissions/utils' import { InvitationsNotAllowedError, validateInvitationsAllowed, @@ -50,6 +51,7 @@ export async function GET(req: NextRequest) { eq(permissions.userId, session.user.id) ) ) + .where(isNull(workspace.archivedAt)) if (userWorkspaces.length === 0) { return NextResponse.json({ invitations: [] }) @@ -114,10 +116,15 @@ export async function POST(req: NextRequest) { ) } + const activeWorkspace = await getWorkspaceById(workspaceId) + if (!activeWorkspace) { + return NextResponse.json({ error: 'Workspace not found' }, { status: 404 }) + } + const workspaceDetails = await db .select() .from(workspace) - .where(eq(workspace.id, workspaceId)) + .where(and(eq(workspace.id, workspaceId), isNull(workspace.archivedAt))) .then((rows) => rows[0]) if (!workspaceDetails) { diff --git a/apps/sim/app/api/workspaces/route.ts b/apps/sim/app/api/workspaces/route.ts index 79c2c436df6..66493a5eb02 100644 --- a/apps/sim/app/api/workspaces/route.ts +++ b/apps/sim/app/api/workspaces/route.ts @@ -1,7 +1,7 @@ import { db } from '@sim/db' import { permissions, workflow, workspace } from '@sim/db/schema' import { createLogger } from '@sim/logger' -import { and, desc, eq, isNull } from 'drizzle-orm' +import { and, desc, eq, isNull, sql } from 'drizzle-orm' import { NextResponse } from 'next/server' import { z } from 'zod' import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log' @@ -9,22 +9,33 @@ import { getSession } from '@/lib/auth' import { PlatformEvents } from '@/lib/core/telemetry' import { buildDefaultWorkflowArtifacts } from '@/lib/workflows/defaults' import { saveWorkflowToNormalizedTables } from '@/lib/workflows/persistence/utils' +import { getRandomWorkspaceColor } 
from '@/lib/workspaces/colors' +import type { WorkspaceScope } from '@/lib/workspaces/utils' const logger = createLogger('Workspaces') const createWorkspaceSchema = z.object({ name: z.string().trim().min(1, 'Name is required'), + color: z + .string() + .regex(/^#[0-9a-fA-F]{6}$/) + .optional(), skipDefaultWorkflow: z.boolean().optional().default(false), }) // Get all workspaces for the current user -export async function GET() { +export async function GET(request: Request) { const session = await getSession() if (!session?.user?.id) { return NextResponse.json({ error: 'Unauthorized' }, { status: 401 }) } + const scope = (new URL(request.url).searchParams.get('scope') ?? 'active') as WorkspaceScope + if (!['active', 'archived', 'all'].includes(scope)) { + return NextResponse.json({ error: 'Invalid scope' }, { status: 400 }) + } + const userWorkspaces = await db .select({ workspace: workspace, @@ -32,10 +43,24 @@ export async function GET() { }) .from(permissions) .innerJoin(workspace, eq(permissions.entityId, workspace.id)) - .where(and(eq(permissions.userId, session.user.id), eq(permissions.entityType, 'workspace'))) + .where( + scope === 'all' + ? and(eq(permissions.userId, session.user.id), eq(permissions.entityType, 'workspace')) + : scope === 'archived' + ? 
and( + eq(permissions.userId, session.user.id), + eq(permissions.entityType, 'workspace'), + sql`${workspace.archivedAt} IS NOT NULL` + ) + : and( + eq(permissions.userId, session.user.id), + eq(permissions.entityType, 'workspace'), + isNull(workspace.archivedAt) + ) + ) .orderBy(desc(workspace.createdAt)) - if (userWorkspaces.length === 0) { + if (scope === 'active' && userWorkspaces.length === 0) { const defaultWorkspace = await createDefaultWorkspace(session.user.id, session.user.name) await migrateExistingWorkflows(session.user.id, defaultWorkspace.id) @@ -43,7 +68,9 @@ export async function GET() { return NextResponse.json({ workspaces: [defaultWorkspace] }) } - await ensureWorkflowsHaveWorkspace(session.user.id, userWorkspaces[0].workspace.id) + if (scope === 'active') { + await ensureWorkflowsHaveWorkspace(session.user.id, userWorkspaces[0].workspace.id) + } const workspacesWithPermissions = userWorkspaces.map( ({ workspace: workspaceDetails, permissionType }) => ({ @@ -65,9 +92,9 @@ export async function POST(req: Request) { } try { - const { name, skipDefaultWorkflow } = createWorkspaceSchema.parse(await req.json()) + const { name, color, skipDefaultWorkflow } = createWorkspaceSchema.parse(await req.json()) - const newWorkspace = await createWorkspace(session.user.id, name, skipDefaultWorkflow) + const newWorkspace = await createWorkspace(session.user.id, name, skipDefaultWorkflow, color) recordAudit({ workspaceId: newWorkspace.id, @@ -96,16 +123,23 @@ async function createDefaultWorkspace(userId: string, userName?: string | null) return createWorkspace(userId, workspaceName) } -async function createWorkspace(userId: string, name: string, skipDefaultWorkflow = false) { +async function createWorkspace( + userId: string, + name: string, + skipDefaultWorkflow = false, + explicitColor?: string +) { const workspaceId = crypto.randomUUID() const workflowId = crypto.randomUUID() const now = new Date() + const color = explicitColor || getRandomWorkspaceColor() try 
{ await db.transaction(async (tx) => { await tx.insert(workspace).values({ id: workspaceId, name, + color, ownerId: userId, billedAccountUserId: userId, allowPersonalApiKeys: true, @@ -174,6 +208,7 @@ async function createWorkspace(userId: string, name: string, skipDefaultWorkflow return { id: workspaceId, name, + color, ownerId: userId, billedAccountUserId: userId, allowPersonalApiKeys: true, diff --git a/apps/sim/app/chat/[identifier]/page.tsx b/apps/sim/app/chat/[identifier]/page.tsx index 9ba983fc5ca..a90238b644c 100644 --- a/apps/sim/app/chat/[identifier]/page.tsx +++ b/apps/sim/app/chat/[identifier]/page.tsx @@ -1,5 +1,10 @@ +import type { Metadata } from 'next' import ChatClient from '@/app/chat/[identifier]/chat' +export const metadata: Metadata = { + title: 'Chat', +} + export default async function ChatPage({ params }: { params: Promise<{ identifier: string }> }) { const { identifier } = await params return diff --git a/apps/sim/app/chat/components/auth/email/email-auth.tsx b/apps/sim/app/chat/components/auth/email/email-auth.tsx index d6ba3de5326..d84b9a8b6b8 100644 --- a/apps/sim/app/chat/components/auth/email/email-auth.tsx +++ b/apps/sim/app/chat/components/auth/email/email-auth.tsx @@ -52,7 +52,7 @@ export default function EmailAuth({ identifier, onAuthSuccess }: EmailAuthProps) useEffect(() => { if (countdown > 0) { - const timer = setTimeout(() => setCountdown(countdown - 1), 1000) + const timer = setTimeout(() => setCountdown((c) => c - 1), 1000) return () => clearTimeout(timer) } if (countdown === 0 && isResendDisabled) { diff --git a/apps/sim/app/chat/components/input/input.tsx b/apps/sim/app/chat/components/input/input.tsx index 5e385d7f3a9..5c9bfea95be 100644 --- a/apps/sim/app/chat/components/input/input.tsx +++ b/apps/sim/app/chat/components/input/input.tsx @@ -5,6 +5,7 @@ import { useEffect, useRef, useState } from 'react' import { motion } from 'framer-motion' import { Paperclip, Send, Square, X } from 'lucide-react' import { Badge, 
Tooltip } from '@/components/emcn' +import { CHAT_ACCEPT_ATTRIBUTE } from '@/lib/uploads/utils/validation' import { VoiceInput } from '@/app/chat/components/input/voice-input' const logger = createLogger('ChatInput') @@ -15,6 +16,13 @@ const PLACEHOLDER_MOBILE = 'Enter a message' const PLACEHOLDER_DESKTOP = 'Enter a message or click the mic to speak' const MAX_TEXTAREA_HEIGHT = 120 // Max height in pixels (e.g., for about 3-4 lines) const MAX_TEXTAREA_HEIGHT_MOBILE = 100 // Smaller for mobile +const IS_STT_AVAILABLE = + typeof window !== 'undefined' && + !!( + (window as Window & { SpeechRecognition?: unknown; webkitSpeechRecognition?: unknown }) + .SpeechRecognition || + (window as Window & { webkitSpeechRecognition?: unknown }).webkitSpeechRecognition + ) interface AttachedFile { id: string @@ -42,10 +50,6 @@ export const ChatInput: React.FC<{ const [dragCounter, setDragCounter] = useState(0) const isDragOver = dragCounter > 0 - // Check if speech-to-text is available in the browser - const isSttAvailable = - typeof window !== 'undefined' && !!(window.SpeechRecognition || window.webkitSpeechRecognition) - // Function to adjust textarea height const adjustTextareaHeight = () => { if (textareaRef.current) { @@ -195,7 +199,7 @@ export const ChatInput: React.FC<{
{/* Voice Input Only */} - {isSttAvailable && ( + {IS_STT_AVAILABLE && (
@@ -268,7 +272,7 @@ export const ChatInput: React.FC<{ > {/* File Previews */} {attachedFiles.length > 0 && ( -
+
{attachedFiles.map((file) => { const formatFileSize = (bytes: number) => { if (bytes === 0) return '0 B' @@ -348,7 +352,7 @@ export const ChatInput: React.FC<{ ref={fileInputRef} type='file' multiple - accept='.pdf,.csv,.doc,.docx,.txt,.md,.xlsx,.xls,.html,.htm,.pptx,.ppt,.json,.xml,.rtf,image/*' + accept={CHAT_ACCEPT_ATTRIBUTE} onChange={(e) => { handleFileSelect(e.target.files) if (fileInputRef.current) { @@ -406,7 +410,7 @@ export const ChatInput: React.FC<{
{/* Voice Input */} - {isSttAvailable && ( + {IS_STT_AVAILABLE && (
diff --git a/apps/sim/app/chat/components/input/voice-input.tsx b/apps/sim/app/chat/components/input/voice-input.tsx index e23ece4a29b..53a97a583df 100644 --- a/apps/sim/app/chat/components/input/voice-input.tsx +++ b/apps/sim/app/chat/components/input/voice-input.tsx @@ -31,11 +31,9 @@ interface SpeechRecognitionStatic { new (): SpeechRecognition } -declare global { - interface Window { - SpeechRecognition?: SpeechRecognitionStatic - webkitSpeechRecognition?: SpeechRecognitionStatic - } +type WindowWithSpeech = Window & { + SpeechRecognition?: SpeechRecognitionStatic + webkitSpeechRecognition?: SpeechRecognitionStatic } interface VoiceInputProps { @@ -57,8 +55,9 @@ export function VoiceInput({ // Check if speech recognition is supported useEffect(() => { - const SpeechRecognition = window.SpeechRecognition || window.webkitSpeechRecognition - setIsSupported(!!SpeechRecognition) + const w = window as WindowWithSpeech + const SpeechRecognitionCtor = w.SpeechRecognition || w.webkitSpeechRecognition + setIsSupported(!!SpeechRecognitionCtor) }, []) const handleVoiceClick = useCallback(() => { diff --git a/apps/sim/app/chat/components/message/message.tsx b/apps/sim/app/chat/components/message/message.tsx index 7a8f4546d4e..a9186ae020e 100644 --- a/apps/sim/app/chat/components/message/message.tsx +++ b/apps/sim/app/chat/components/message/message.tsx @@ -1,6 +1,6 @@ 'use client' -import { memo, useMemo, useState } from 'react' +import { memo, useState } from 'react' import { Check, Copy, File as FileIcon, FileText, Image as ImageIcon } from 'lucide-react' import { Tooltip } from '@/components/emcn' import { @@ -8,6 +8,7 @@ import { ChatFileDownloadAll, } from '@/app/chat/components/message/components/file-download' import MarkdownRenderer from '@/app/chat/components/message/components/markdown-renderer' +import { useThrottledValue } from '@/hooks/use-throttled-value' export interface ChatAttachment { id: string @@ -39,16 +40,15 @@ export interface ChatMessage { } function 
EnhancedMarkdownRenderer({ content }: { content: string }) { - return + const throttled = useThrottledValue(content) + return } export const ClientChatMessage = memo( function ClientChatMessage({ message }: { message: ChatMessage }) { const [isCopied, setIsCopied] = useState(false) - const isJsonObject = useMemo(() => { - return typeof message.content === 'object' && message.content !== null - }, [message.content]) + const isJsonObject = typeof message.content === 'object' && message.content !== null // Since tool calls are now handled via SSE events and stored in message.toolCalls, // we can use the content directly without parsing diff --git a/apps/sim/app/chat/components/voice-interface/voice-interface.tsx b/apps/sim/app/chat/components/voice-interface/voice-interface.tsx index 6f2d0653b4e..fd7f291c31a 100644 --- a/apps/sim/app/chat/components/voice-interface/voice-interface.tsx +++ b/apps/sim/app/chat/components/voice-interface/voice-interface.tsx @@ -36,11 +36,9 @@ interface SpeechRecognitionStatic { new (): SpeechRecognition } -declare global { - interface Window { - SpeechRecognition?: SpeechRecognitionStatic - webkitSpeechRecognition?: SpeechRecognitionStatic - } +type WindowWithSpeech = Window & { + SpeechRecognition?: SpeechRecognitionStatic + webkitSpeechRecognition?: SpeechRecognitionStatic } interface VoiceInterfaceProps { @@ -93,7 +91,11 @@ export function VoiceInterface({ const responseTimeoutRef = useRef(null) const isSupported = - typeof window !== 'undefined' && !!(window.SpeechRecognition || window.webkitSpeechRecognition) + typeof window !== 'undefined' && + !!( + (window as WindowWithSpeech).SpeechRecognition || + (window as WindowWithSpeech).webkitSpeechRecognition + ) useEffect(() => { isMutedRef.current = isMuted @@ -214,7 +216,8 @@ export function VoiceInterface({ const setupSpeechRecognition = useCallback(() => { if (!isSupported) return - const SpeechRecognition = window.SpeechRecognition || window.webkitSpeechRecognition + const w = 
window as WindowWithSpeech + const SpeechRecognition = w.SpeechRecognition || w.webkitSpeechRecognition if (!SpeechRecognition) return const recognition = new SpeechRecognition() diff --git a/apps/sim/app/chat/hooks/use-chat-streaming.ts b/apps/sim/app/chat/hooks/use-chat-streaming.ts index e0208709311..79dfb02f40a 100644 --- a/apps/sim/app/chat/hooks/use-chat-streaming.ts +++ b/apps/sim/app/chat/hooks/use-chat-streaming.ts @@ -78,18 +78,15 @@ export function useChatStreaming() { abortControllerRef.current.abort() abortControllerRef.current = null - // Add a message indicating the response was stopped + const latestContent = accumulatedTextRef.current + setMessages((prev) => { const lastMessage = prev[prev.length - 1] - // Only modify if the last message is from the assistant (as expected) if (lastMessage && lastMessage.type === 'assistant') { - // Append a note that the response was stopped + const content = latestContent || lastMessage.content const updatedContent = - lastMessage.content + - (lastMessage.content - ? '\n\n_Response stopped by user._' - : '_Response stopped by user._') + content + (content ? 
'\n\n_Response stopped by user._' : '_Response stopped by user._') return [ ...prev.slice(0, -1), @@ -100,7 +97,6 @@ export function useChatStreaming() { return prev }) - // Reset streaming state immediately setIsStreamingResponse(false) accumulatedTextRef.current = '' lastStreamedPositionRef.current = 0 @@ -139,9 +135,49 @@ export function useChatStreaming() { let accumulatedText = '' let lastAudioPosition = 0 - // Track which blocks have streamed content (like chat panel) const messageIdMap = new Map() const messageId = crypto.randomUUID() + + const UI_BATCH_MAX_MS = 50 + let uiDirty = false + let uiRAF: number | null = null + let uiTimer: ReturnType | null = null + let lastUIFlush = 0 + + const flushUI = () => { + if (uiRAF !== null) { + cancelAnimationFrame(uiRAF) + uiRAF = null + } + if (uiTimer !== null) { + clearTimeout(uiTimer) + uiTimer = null + } + if (!uiDirty) return + uiDirty = false + lastUIFlush = performance.now() + const snapshot = accumulatedText + setMessages((prev) => + prev.map((msg) => { + if (msg.id !== messageId) return msg + if (!msg.isStreaming) return msg + return { ...msg, content: snapshot } + }) + ) + } + + const scheduleUIFlush = () => { + if (uiRAF !== null) return + const elapsed = performance.now() - lastUIFlush + if (elapsed >= UI_BATCH_MAX_MS) { + flushUI() + return + } + uiRAF = requestAnimationFrame(flushUI) + if (uiTimer === null) { + uiTimer = setTimeout(flushUI, Math.max(0, UI_BATCH_MAX_MS - elapsed)) + } + } setMessages((prev) => [ ...prev, { @@ -165,6 +201,7 @@ export function useChatStreaming() { const { done, value } = await reader.read() if (done) { + flushUI() // Stream any remaining text for TTS if ( shouldPlayAudio && @@ -217,6 +254,7 @@ export function useChatStreaming() { } if (eventType === 'final' && json.data) { + flushUI() const finalData = json.data as { success: boolean error?: string | { message?: string } @@ -367,6 +405,7 @@ export function useChatStreaming() { } accumulatedText += contentChunk + 
accumulatedTextRef.current = accumulatedText logger.debug('[useChatStreaming] Received chunk', { blockId, chunkLength: contentChunk.length, @@ -374,11 +413,8 @@ export function useChatStreaming() { messageId, chunk: contentChunk.substring(0, 20), }) - setMessages((prev) => - prev.map((msg) => - msg.id === messageId ? { ...msg, content: accumulatedText } : msg - ) - ) + uiDirty = true + scheduleUIFlush() // Real-time TTS for voice mode if (shouldPlayAudio && streamingOptions?.audioStreamHandler) { @@ -419,10 +455,13 @@ export function useChatStreaming() { } } catch (error) { logger.error('Error processing stream:', error) + flushUI() setMessages((prev) => prev.map((msg) => (msg.id === messageId ? { ...msg, isStreaming: false } : msg)) ) } finally { + if (uiRAF !== null) cancelAnimationFrame(uiRAF) + if (uiTimer !== null) clearTimeout(uiTimer) setIsStreamingResponse(false) abortControllerRef.current = null diff --git a/apps/sim/app/favicon.ico b/apps/sim/app/favicon.ico deleted file mode 100644 index 9aa82bcf046763fb94ffcb3235ad88f6c2bb3626..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 15086 zcmeHO*>jXd5MN8nXa59zu*&kuCtn1_TT$`0yv6duQi#YcB2plzC@3P}g(4~_3IdiW zD57G#MMXTgRgg+vUyV-0AAz|5AU#g~Nzi+;o{!LF$PfvFzmpdwVbnfJn zbErGzW}cACos!GtI(5oFAD7Fm#k=EBv%S|bx!h=ca3a1zmt0R2zHjbNPP+2g`cwOK zH(hFKQ4ESyc_GGHVoum?=bFu?bB(WZ+y%Y@D=|0ly(3{y@)0q26qzaTKH-mbN09gC zuxIZZuVu!Mj31c{WHOM+!2eAKPP<1;x4yBQhPo@BOXtXUTJrX5SJdfU>9^_mVB0xX z>eHH<sNKL*MoqFY@m`3A^`-z3uU)_|PY;+Mr{&C*O*6TEbl1 ze%(V7{@4o4Uc$Of;wH@%J90X3y(?k!4#nzMtQT|k05N9*|EBFaKljEG@VlS5slXqC zgw-3%l4E{?W(7qhQO_(i)GF*)BK))dmcJ!-?k+{rF52Jrx?opF> zp?<&4jk0CV>R)~MfzeHzBX-0z@V9?q?)g0tcKxILwolE`HK;27b8U!|YsNY~bDu%3 z-1J2fxF=BTz-OwD|4Fup{oFxf@1H8}ozKNDUx$4PIX4ysdv(z_ioNROpZF=~w>>6) z#kzJaES_wquq_e>OX?I>N7MrYH3 z@UMaX!C0lb!{Q8)eb7J9p*#ml+u0|qf$lyO{R8~e$ze|&bX>jI0aFV4hcX3y!+oIo zpTC>M5As`f#_y6$vc`F8`%l6CaVXeBL!K+_v-I6HBPDFanF9QF+^nYU>APlXd;2)> 
zrcT1Hf5qN1wo!lEXQJ){o>>NTn#C{pgWTC^?~6GdXE5iUpTGb32etwq)Y$i{#dIH1 zpwSIy@7S3VxX(DhLC>NM^KJ%f+4onDkifGS=a-vT8_)8qN9tPpWvg^tzY4NJ+n_oA z6CTbd%7|O=mGapReL?)vL?9hrcAY?@H z)&gAG2L6~EMvI&CX`Bz+DN}4G_WefenAhUkwH80W=UPK{-1}ck_~m!uehR<-q0bif znzr*h>Z*rc5eq$Q?wu5$2S2Cl<|wtk`RC!0H+fwaG=Z!d!` z;uvW^D4!w^%z1;NuDTfZCiOyFv6q`qnRm2PL%oSI@$Jf}U(0;gY8kH77{m`*<-YL5 zPd-+Pc6-pt%fFkmR2uFC?J;hp#-MIa^3MHIDY_Y*jMteAWHOM+Kqdp33>+yL=*Tf( z&4uVtxerOs^Z5hJf#}Hj8z~0`He*N0g=myN(_CIq_P#P4V4hYdlx`>o%+u<6_wlAn z?^@fX##QQm5^43Wl|1cA(QQhnvhyybUrC;q($1c7H&16qR#z7<=qq7t4>4}__a)y6 zYx)b}7g9eXeTDp8ynYS-mXh^N{ffUW_ZxKS*JoVa!$0d@TP*hO3Gn>^3u9ixrbhp1 z_rYQxnW3@lV)~5PGRCUd)A;aNst=rgY2rxlOKZ%JF%iZm0%B4A-7m%0u8^>Gr`V@v zi-o_Ov1<>1D&r&q-=v2Rzj!>xgXnixA8;DK$508sZxO%f+sM!KzO)8E-K!r&pFQIj z^yk7SOW$Ew`KNGhhm}6p?&ABSFPMBn{J|nd%~*o}4fGMWJFN7{56T$VAADtuf^mhg zWrx@Yr-?7 zz)@EI5Wi5LJ$c9c2J(%usk@#m`01~BP{QB45i9yK%2cV`6+JUv5q9i}F%RTw*q#ro z{QD)}m!;1*oSV5=%nf72KK_Qrf7st+pD-S0@!p{?<4%q_0Y$XR{>EN$?|p@}->SS0 z8@K6xQ2+C#!-P28j1PtRG-3T_v6JVD|7o3s?YpC_Ag{yLOcXc!lemt+$C#8`ybQ8^ zkohvq?STw3?qmBxXY|0HAnptA6FUm=AnFn7BgT9$KztB;RM&^`K=4|14D}ddt*Wn> z|HBwdOLK3bo$+Aq3IE-yb{VhLc(^vuPmGCCH?+Ec3-fDnJn9CC zeJ9!wu)}!=YLo-a{>-U@ZRnnvFU&t>zOIhJTrT7n@IGR19HaIT?@wxpev6ao9%8XK^zYYauTG!|Iqb13VsfTqC=OyaP5j`%`g~ z=Lzpn%;VvlI3DvB?WpORGvjdgGCdzD*sy61#Wi^}7x^}aY7eMAF#v0abt@jX8h^KC O542?u=KHo)N&N>ImdASl diff --git a/apps/sim/app/form/[identifier]/page.tsx b/apps/sim/app/form/[identifier]/page.tsx index ba4004789d4..10f5a6e7fce 100644 --- a/apps/sim/app/form/[identifier]/page.tsx +++ b/apps/sim/app/form/[identifier]/page.tsx @@ -1,5 +1,10 @@ +import type { Metadata } from 'next' import Form from '@/app/form/[identifier]/form' +export const metadata: Metadata = { + title: 'Form', +} + export default async function FormPage({ params }: { params: Promise<{ identifier: string }> }) { const { identifier } = await params return
diff --git a/apps/sim/app/invite/[id]/invite.tsx b/apps/sim/app/invite/[id]/invite.tsx index caa2659d47d..10658d8f7f0 100644 --- a/apps/sim/app/invite/[id]/invite.tsx +++ b/apps/sim/app/invite/[id]/invite.tsx @@ -200,7 +200,7 @@ export default function Invite() { }, [searchParams, inviteId]) useEffect(() => { - if (!session?.user || !token) return + if (!session?.user) return async function fetchInvitationDetails() { setIsLoading(true) @@ -301,7 +301,7 @@ export default function Invite() { } fetchInvitationDetails() - }, [session?.user, inviteId, token]) + }, [session?.user, inviteId]) const handleAcceptInvitation = async () => { if (!session?.user) return diff --git a/apps/sim/app/invite/[id]/page.tsx b/apps/sim/app/invite/[id]/page.tsx index 2f22144abc6..e04a2ca7743 100644 --- a/apps/sim/app/invite/[id]/page.tsx +++ b/apps/sim/app/invite/[id]/page.tsx @@ -1,3 +1,9 @@ +import type { Metadata } from 'next' import Invite from '@/app/invite/[id]/invite' +export const metadata: Metadata = { + title: 'Invite', + robots: { index: false }, +} + export default Invite diff --git a/apps/sim/app/layout.tsx b/apps/sim/app/layout.tsx index 33c504a5d39..dc3fb762019 100644 --- a/apps/sim/app/layout.tsx +++ b/apps/sim/app/layout.tsx @@ -110,13 +110,19 @@ export default function RootLayout({ children }: { children: React.ReactNode }) if (stored) { var parsed = JSON.parse(stored); var state = parsed && parsed.state; - var width = state && state.sidebarWidth; - var maxSidebarWidth = window.innerWidth * 0.3; + var isCollapsed = state && state.isCollapsed; - if (width >= 232 && width <= maxSidebarWidth) { - document.documentElement.style.setProperty('--sidebar-width', width + 'px'); - } else if (width > maxSidebarWidth) { - document.documentElement.style.setProperty('--sidebar-width', maxSidebarWidth + 'px'); + if (isCollapsed) { + document.documentElement.style.setProperty('--sidebar-width', '51px'); + } else { + var width = state && state.sidebarWidth; + var maxSidebarWidth = 
window.innerWidth * 0.3; + + if (width >= 248 && width <= maxSidebarWidth) { + document.documentElement.style.setProperty('--sidebar-width', width + 'px'); + } else if (width > maxSidebarWidth) { + document.documentElement.style.setProperty('--sidebar-width', maxSidebarWidth + 'px'); + } } } } catch (e) { diff --git a/apps/sim/app/llms-full.txt/route.ts b/apps/sim/app/llms-full.txt/route.ts index 47eaedfc6e8..7e89c74c73d 100644 --- a/apps/sim/app/llms-full.txt/route.ts +++ b/apps/sim/app/llms-full.txt/route.ts @@ -3,20 +3,20 @@ import { getBaseUrl } from '@/lib/core/utils/urls' export async function GET() { const baseUrl = getBaseUrl() - const llmsFullContent = `# Sim - AI Agent Workflow Builder + const llmsFullContent = `# Sim — Build AI Agents & Run Your Agentic Workforce -> Sim is an open-source AI agent workflow builder used by 70,000+ developers at startups to Fortune 500 companies. Build and deploy agentic workflows with a visual drag-and-drop canvas. SOC2 and HIPAA compliant. +> Sim is the open-source platform to build AI agents and run your agentic workforce. Connect 1,000+ integrations and LLMs to deploy and orchestrate agentic workflows. ## Overview -Sim provides a visual interface for building AI agent workflows. Instead of writing code, users drag and drop blocks onto a canvas and connect them to create complex AI automations. Each block represents a step in the workflow - an LLM call, a tool invocation, an API request, or a code execution. +Sim lets teams create agents, workflows, knowledge bases, tables, and docs. Over 100,000 builders use Sim — from startups to Fortune 500 companies. Teams connect their tools and data, build agents that execute real workflows across systems, and manage them with full observability. SOC2 and HIPAA compliant. 
## Product Details - **Product Name**: Sim -- **Category**: AI Development Tools / Workflow Automation +- **Category**: AI Agent Platform / Agentic Workflow Orchestration - **Deployment**: Cloud (SaaS) and Self-hosted options -- **Pricing**: Free tier, Pro ($20/month), Team ($40/month), Enterprise (custom) +- **Pricing**: Free tier, Pro ($25/month, 6K credits), Max ($100/month, 25K credits), Team plans available, Enterprise (custom) - **Compliance**: SOC2 Type II, HIPAA compliant ## Core Concepts @@ -66,7 +66,7 @@ Sim supports all major LLM providers: - Amazon Bedrock ### Integrations -100+ pre-built integrations including: +1,000+ pre-built integrations including: - **Communication**: Slack, Discord, Email (Gmail, Outlook), SMS (Twilio) - **Productivity**: Notion, Airtable, Google Sheets, Google Docs - **Development**: GitHub, GitLab, Jira, Linear @@ -81,6 +81,12 @@ Built-in support for: - Semantic search and retrieval - Chunking strategies (fixed size, semantic, recursive) +### Tables +Built-in table creation and management: +- Structured data storage +- Queryable tables for agent workflows +- Native integrations + ### Code Execution - Sandboxed JavaScript/TypeScript execution - Access to npm packages diff --git a/apps/sim/app/llms.txt/route.ts b/apps/sim/app/llms.txt/route.ts index 23e458f626c..e38d203c1b0 100644 --- a/apps/sim/app/llms.txt/route.ts +++ b/apps/sim/app/llms.txt/route.ts @@ -5,16 +5,16 @@ export async function GET() { const llmsContent = `# Sim -> Sim is an open-source AI agent workflow builder. 70,000+ developers at startups to Fortune 500 companies deploy agentic workflows on the Sim platform. SOC2 and HIPAA compliant. +> Sim is the open-source platform to build AI agents and run your agentic workforce. Connect 1,000+ integrations and LLMs to deploy and orchestrate agentic workflows. -Sim provides a visual drag-and-drop interface for building and deploying AI agent workflows. Connect to 100+ integrations and ship production-ready AI automations. 
+Sim lets teams create agents, workflows, knowledge bases, tables, and docs. Over 100,000 builders use Sim — from startups to Fortune 500 companies. SOC2 and HIPAA compliant. ## Core Pages -- [Homepage](${baseUrl}): Main landing page with product overview and features +- [Homepage](${baseUrl}): Product overview, features, and pricing - [Templates](${baseUrl}/templates): Pre-built workflow templates to get started quickly - [Changelog](${baseUrl}/changelog): Product updates and release notes -- [Sim Studio Blog](${baseUrl}/studio): Announcements, insights, and guides for AI workflows +- [Sim Studio Blog](${baseUrl}/studio): Announcements, insights, and guides ## Documentation @@ -29,28 +29,31 @@ Sim provides a visual drag-and-drop interface for building and deploying AI agen - **Block**: Individual step (LLM call, tool call, HTTP request, code execution) - **Trigger**: Event or schedule that initiates workflow execution - **Execution**: A single run of a workflow with logs and outputs +- **Knowledge Base**: Vector-indexed document store for retrieval-augmented generation ## Capabilities -- Visual workflow builder with drag-and-drop canvas -- Multi-model LLM orchestration (OpenAI, Anthropic, Google, Mistral, xAI) -- Retrieval-augmented generation (RAG) with vector databases -- 100+ integrations (Slack, Gmail, Notion, Airtable, databases) +- AI agent creation and deployment +- Agentic workflow orchestration +- 1,000+ integrations (Slack, Gmail, Notion, Airtable, databases, and more) +- Multi-model LLM orchestration (OpenAI, Anthropic, Google, Mistral, xAI, Perplexity) +- Knowledge base creation with retrieval-augmented generation (RAG) +- Table creation and management +- Document creation and processing - Scheduled and webhook-triggered executions -- Real-time collaboration and version control ## Use Cases -- AI agent workflow automation -- RAG pipelines and document processing -- Chatbot and copilot workflows for SaaS -- Email and customer support automation +- AI 
agent deployment and orchestration +- Knowledge bases and RAG pipelines +- Document creation and processing +- Customer support automation - Internal operations (sales, marketing, legal, finance) ## Links - [GitHub Repository](https://github.com/simstudioai/sim): Open-source codebase -- [Discord Community](https://discord.gg/Hr4UWYEcTT): Get help and connect with users +- [Discord Community](https://discord.gg/Hr4UWYEcTT): Get help and connect with 100,000+ builders - [X/Twitter](https://x.com/simdotai): Product updates and announcements ## Optional diff --git a/apps/sim/app/manifest.ts b/apps/sim/app/manifest.ts index bfd0784215e..77c92d0c394 100644 --- a/apps/sim/app/manifest.ts +++ b/apps/sim/app/manifest.ts @@ -5,10 +5,10 @@ export default function manifest(): MetadataRoute.Manifest { const brand = getBrandConfig() return { - name: brand.name === 'Sim' ? 'Sim - AI Agent Workflow Builder' : brand.name, + name: brand.name === 'Sim' ? 'Sim — Build AI Agents & Run Your Agentic Workforce' : brand.name, short_name: brand.name, description: - 'Open-source AI agent workflow builder. 70,000+ developers build and deploy agentic workflows on Sim. Visual drag-and-drop interface for creating AI automations. SOC2 and HIPAA compliant.', + 'Sim is the open-source platform to build AI agents and run your agentic workforce. 
Connect 1,000+ integrations and LLMs to orchestrate agentic workflows.', start_url: '/', scope: '/', display: 'standalone', diff --git a/apps/sim/app/page.tsx b/apps/sim/app/page.tsx index 2c533dea8ac..9c3a649fdbd 100644 --- a/apps/sim/app/page.tsx +++ b/apps/sim/app/page.tsx @@ -1,16 +1,18 @@ import type { Metadata } from 'next' import { getBaseUrl } from '@/lib/core/utils/urls' -import Landing from '@/app/(landing)/landing' +import Landing from '@/app/(home)/landing' + +export const dynamic = 'force-dynamic' const baseUrl = getBaseUrl() export const metadata: Metadata = { metadataBase: new URL(baseUrl), - title: 'Sim - AI Agent Workflow Builder | Open Source Platform', + title: 'Sim — Build AI Agents & Run Your Agentic Workforce', description: - 'Open-source AI agent workflow builder used by 70,000+ developers. Build and deploy agentic workflows with a visual drag-and-drop canvas. Connect 100+ apps and ship SOC2 & HIPAA-ready AI automations from startups to Fortune 500.', + 'Sim is the open-source platform to build AI agents and run your agentic workforce. Connect 1,000+ integrations and LLMs to orchestrate agentic workflows.', keywords: - 'AI agent workflow builder, agentic workflows, open source AI, visual workflow builder, AI automation, LLM workflows, AI agents, workflow automation, no-code AI, SOC2 compliant, HIPAA compliant, enterprise AI', + 'AI agents, agentic workforce, open-source AI agent platform, agentic workflows, LLM orchestration, AI automation, knowledge base, workflow builder, AI integrations, SOC2 compliant, HIPAA compliant, enterprise AI', authors: [{ name: 'Sim' }], creator: 'Sim', publisher: 'Sim', @@ -20,9 +22,9 @@ export const metadata: Metadata = { telephone: false, }, openGraph: { - title: 'Sim - AI Agent Workflow Builder | Open Source', + title: 'Sim — Build AI Agents & Run Your Agentic Workforce', description: - 'Open-source platform used by 70,000+ developers. 
Design, deploy, and monitor agentic workflows with a visual drag-and-drop interface, 100+ integrations, and enterprise-grade security.', + 'Sim is the open-source platform to build AI agents and run your agentic workforce. Connect 1,000+ integrations and LLMs to orchestrate agentic workflows. Create agents, workflows, knowledge bases, tables, and docs. Join over 100,000 builders.', type: 'website', url: baseUrl, siteName: 'Sim', @@ -32,7 +34,7 @@ export const metadata: Metadata = { url: '/logo/426-240/primary/small.png', width: 2130, height: 1200, - alt: 'Sim - AI Agent Workflow Builder', + alt: 'Sim — Build AI Agents & Run Your Agentic Workforce', type: 'image/png', }, ], @@ -41,12 +43,12 @@ export const metadata: Metadata = { card: 'summary_large_image', site: '@simdotai', creator: '@simdotai', - title: 'Sim - AI Agent Workflow Builder | Open Source', + title: 'Sim — Build AI Agents & Run Your Agentic Workforce', description: - 'Open-source platform for agentic workflows. 70,000+ developers. Visual builder. 100+ integrations. SOC2 & HIPAA compliant.', + 'Sim is the open-source platform to build AI agents and run your agentic workforce. 
Connect 1,000+ integrations and LLMs to orchestrate agentic workflows.', images: { url: '/logo/426-240/primary/small.png', - alt: 'Sim - AI Agent Workflow Builder', + alt: 'Sim — Build AI Agents & Run Your Agentic Workforce', }, }, alternates: { @@ -72,12 +74,14 @@ export const metadata: Metadata = { classification: 'AI Development Tools', referrer: 'origin-when-cross-origin', other: { - 'llm:content-type': 'AI workflow builder, visual programming, no-code AI development', + 'llm:content-type': + 'AI agent platform, agentic workforce, agentic workflows, LLM orchestration', 'llm:use-cases': - 'email automation, Slack bots, Discord moderation, data analysis, customer support, content generation, agentic automations', + 'AI agents, agentic workforce, agentic workflows, knowledge bases, tables, document creation, email automation, Slack bots, data analysis, customer support, content generation', 'llm:integrations': - 'OpenAI, Anthropic, Google AI, Slack, Gmail, Discord, Notion, Airtable, Supabase', - 'llm:pricing': 'free tier available, pro $20/month, team $40/month, enterprise custom', + 'OpenAI, Anthropic, Google AI, Mistral, xAI, Perplexity, Slack, Gmail, Discord, Notion, Airtable, Supabase', + 'llm:pricing': + 'free tier available, pro $25/month, max $100/month, team plans available, enterprise custom', 'llm:region': 'global', 'llm:languages': 'en', }, diff --git a/apps/sim/app/resume/[workflowId]/[executionId]/page.tsx b/apps/sim/app/resume/[workflowId]/[executionId]/page.tsx index bfd2e2fac22..8ad86d3222c 100644 --- a/apps/sim/app/resume/[workflowId]/[executionId]/page.tsx +++ b/apps/sim/app/resume/[workflowId]/[executionId]/page.tsx @@ -1,6 +1,12 @@ +import type { Metadata } from 'next' import { PauseResumeManager } from '@/lib/workflows/executor/human-in-the-loop-manager' import ResumeExecutionPage from '@/app/resume/[workflowId]/[executionId]/resume-page-client' +export const metadata: Metadata = { + title: 'Resume Execution', + robots: { index: false }, +} + 
export const runtime = 'nodejs' export const dynamic = 'force-dynamic' diff --git a/apps/sim/app/templates/[id]/template.tsx b/apps/sim/app/templates/[id]/template.tsx index e64252e0d65..9bb7f26f7cb 100644 --- a/apps/sim/app/templates/[id]/template.tsx +++ b/apps/sim/app/templates/[id]/template.tsx @@ -19,17 +19,15 @@ import { Breadcrumb, Button, Copy, + DropdownMenu, + DropdownMenuContent, + DropdownMenuItem, + DropdownMenuTrigger, Popover, PopoverContent, PopoverItem, PopoverTrigger, } from '@/components/emcn' -import { - DropdownMenu, - DropdownMenuContent, - DropdownMenuItem, - DropdownMenuTrigger, -} from '@/components/ui/dropdown-menu' import { Skeleton } from '@/components/ui/skeleton' import { VerifiedBadge } from '@/components/ui/verified-badge' import { useSession } from '@/lib/auth/auth-client' @@ -704,9 +702,9 @@ export default function TemplateDetails({ isWorkspaceContext = false }: Template - + {workspaces.length === 0 ? ( - + No workspaces with write access ) : ( @@ -714,11 +712,10 @@ export default function TemplateDetails({ isWorkspaceContext = false }: Template handleWorkspaceSelectForEdit(workspace.id)} - className='flex cursor-pointer items-center justify-between' >
- {workspace.name} - + {workspace.name} + {workspace.permissions} access
diff --git a/apps/sim/app/templates/page.tsx b/apps/sim/app/templates/page.tsx index c233949a45f..c74818acd3b 100644 --- a/apps/sim/app/templates/page.tsx +++ b/apps/sim/app/templates/page.tsx @@ -1,11 +1,18 @@ import { db } from '@sim/db' import { permissions, templateCreators, templates, workspace } from '@sim/db/schema' import { and, desc, eq } from 'drizzle-orm' +import type { Metadata } from 'next' import { redirect } from 'next/navigation' import { getSession } from '@/lib/auth' import type { Template } from '@/app/templates/templates' import Templates from '@/app/templates/templates' +export const metadata: Metadata = { + title: 'Templates', + description: + 'Browse pre-built workflow templates to get started quickly with AI agents, automations, and integrations.', +} + /** * Public templates list page. * Redirects authenticated users to their workspace-scoped templates page. diff --git a/apps/sim/app/unsubscribe/page.tsx b/apps/sim/app/unsubscribe/page.tsx index 8f6e6bf6881..d1b3ec2de10 100644 --- a/apps/sim/app/unsubscribe/page.tsx +++ b/apps/sim/app/unsubscribe/page.tsx @@ -1,3 +1,9 @@ +import type { Metadata } from 'next' import Unsubscribe from '@/app/unsubscribe/unsubscribe' +export const metadata: Metadata = { + title: 'Unsubscribe', + robots: { index: false }, +} + export default Unsubscribe diff --git a/apps/sim/app/workspace/[workspaceId]/components/error/error.tsx b/apps/sim/app/workspace/[workspaceId]/components/error/error.tsx new file mode 100644 index 00000000000..524a7f88601 --- /dev/null +++ b/apps/sim/app/workspace/[workspaceId]/components/error/error.tsx @@ -0,0 +1,64 @@ +'use client' + +import { useEffect } from 'react' +import { createLogger } from '@sim/logger' +import { RefreshCw } from 'lucide-react' +import { Button } from '@/components/emcn' + +interface ErrorAction { + label: string + icon?: React.ReactNode + onClick: () => void + variant?: 'default' | 'ghost' +} + +export interface ErrorStateProps { + error: Error & { digest?: 
string } + reset: () => void + title: string + description: string + loggerName: string + secondaryAction?: ErrorAction +} + +export function ErrorState({ + error, + reset, + title, + description, + loggerName, + secondaryAction, +}: ErrorStateProps) { + const logger = createLogger(loggerName) + + useEffect(() => { + logger.error(`${loggerName} error:`, { error: error.message, digest: error.digest }) + }, [error, logger, loggerName]) + + return ( +
+
+
+

{title}

+

{description}

+
+
+ {secondaryAction && ( + + )} + +
+
+
+ ) +} diff --git a/apps/sim/app/workspace/[workspaceId]/components/error/index.ts b/apps/sim/app/workspace/[workspaceId]/components/error/index.ts new file mode 100644 index 00000000000..1fdff4534b8 --- /dev/null +++ b/apps/sim/app/workspace/[workspaceId]/components/error/index.ts @@ -0,0 +1 @@ +export { ErrorState, type ErrorStateProps } from './error' diff --git a/apps/sim/app/workspace/[workspaceId]/components/index.ts b/apps/sim/app/workspace/[workspaceId]/components/index.ts new file mode 100644 index 00000000000..4a08e4f6c79 --- /dev/null +++ b/apps/sim/app/workspace/[workspaceId]/components/index.ts @@ -0,0 +1,28 @@ +export { ErrorState, type ErrorStateProps } from './error' +export { InlineRenameInput } from './inline-rename-input' +export { ownerCell } from './resource/components/owner-cell/owner-cell' +export type { + BreadcrumbEditing, + BreadcrumbItem, + CreateAction, + DropdownOption, + HeaderAction, +} from './resource/components/resource-header' +export { ResourceHeader } from './resource/components/resource-header' +export type { + ColumnOption, + FilterTag, + SearchConfig, + SortConfig, +} from './resource/components/resource-options-bar' +export { ResourceOptionsBar } from './resource/components/resource-options-bar' +export { timeCell } from './resource/components/time-cell/time-cell' +export type { + PaginationConfig, + ResourceCell, + ResourceColumn, + ResourceRow, + ResourceTableProps, + SelectableConfig, +} from './resource/resource' +export { Resource, ResourceTable } from './resource/resource' diff --git a/apps/sim/app/workspace/[workspaceId]/components/inline-rename-input.tsx b/apps/sim/app/workspace/[workspaceId]/components/inline-rename-input.tsx new file mode 100644 index 00000000000..c4311844752 --- /dev/null +++ b/apps/sim/app/workspace/[workspaceId]/components/inline-rename-input.tsx @@ -0,0 +1,38 @@ +'use client' + +import { useEffect, useRef } from 'react' + +interface InlineRenameInputProps { + value: string + onChange: (value: 
string) => void + onSubmit: () => void + onCancel: () => void +} + +export function InlineRenameInput({ value, onChange, onSubmit, onCancel }: InlineRenameInputProps) { + const inputRef = useRef(null) + + useEffect(() => { + const el = inputRef.current + if (el) { + el.focus() + el.select() + } + }, []) + + return ( + onChange(e.target.value)} + onKeyDown={(e) => { + if (e.key === 'Enter') onSubmit() + if (e.key === 'Escape') onCancel() + }} + onBlur={onSubmit} + onClick={(e) => e.stopPropagation()} + className='min-w-0 flex-1 truncate border-0 bg-transparent p-0 font-medium text-[14px] text-[var(--text-body)] outline-none focus:outline-none focus:ring-0' + /> + ) +} diff --git a/apps/sim/app/workspace/[workspaceId]/components/resource/components/index.ts b/apps/sim/app/workspace/[workspaceId]/components/resource/components/index.ts new file mode 100644 index 00000000000..5b63ad787ff --- /dev/null +++ b/apps/sim/app/workspace/[workspaceId]/components/resource/components/index.ts @@ -0,0 +1,4 @@ +export * from './owner-cell' +export * from './resource-header' +export * from './resource-options-bar' +export * from './time-cell' diff --git a/apps/sim/app/workspace/[workspaceId]/components/resource/components/owner-cell/index.ts b/apps/sim/app/workspace/[workspaceId]/components/resource/components/owner-cell/index.ts new file mode 100644 index 00000000000..fa102e05d3a --- /dev/null +++ b/apps/sim/app/workspace/[workspaceId]/components/resource/components/owner-cell/index.ts @@ -0,0 +1 @@ +export { ownerCell } from './owner-cell' diff --git a/apps/sim/app/workspace/[workspaceId]/components/resource/components/owner-cell/owner-cell.tsx b/apps/sim/app/workspace/[workspaceId]/components/resource/components/owner-cell/owner-cell.tsx new file mode 100644 index 00000000000..26358376026 --- /dev/null +++ b/apps/sim/app/workspace/[workspaceId]/components/resource/components/owner-cell/owner-cell.tsx @@ -0,0 +1,43 @@ +'use client' + +import type { ResourceCell } from 
'@/app/workspace/[workspaceId]/components/resource/resource' +import type { WorkspaceMember } from '@/hooks/queries/workspace' + +function OwnerAvatar({ name, image }: { name: string; image: string | null }) { + if (image) { + return ( + {name} + ) + } + + return ( + + {name.charAt(0).toUpperCase()} + + ) +} + +/** + * Resolves a user ID into a ResourceCell with an avatar icon and display name. + * Returns null label while members are still loading to avoid flashing raw IDs. + */ +export function ownerCell( + userId: string | null | undefined, + members?: WorkspaceMember[] +): ResourceCell { + if (!userId) return { label: null } + if (!members) return { label: null } + + const member = members.find((m) => m.userId === userId) + if (!member) return { label: null } + + return { + icon: , + label: member.name, + } +} diff --git a/apps/sim/app/workspace/[workspaceId]/components/resource/components/resource-header/index.ts b/apps/sim/app/workspace/[workspaceId]/components/resource/components/resource-header/index.ts new file mode 100644 index 00000000000..697f8c9aacc --- /dev/null +++ b/apps/sim/app/workspace/[workspaceId]/components/resource/components/resource-header/index.ts @@ -0,0 +1,8 @@ +export type { + BreadcrumbEditing, + BreadcrumbItem, + CreateAction, + DropdownOption, + HeaderAction, +} from './resource-header' +export { ResourceHeader } from './resource-header' diff --git a/apps/sim/app/workspace/[workspaceId]/components/resource/components/resource-header/resource-header.tsx b/apps/sim/app/workspace/[workspaceId]/components/resource/components/resource-header/resource-header.tsx new file mode 100644 index 00000000000..7c94ac098c1 --- /dev/null +++ b/apps/sim/app/workspace/[workspaceId]/components/resource/components/resource-header/resource-header.tsx @@ -0,0 +1,214 @@ +import { Fragment, memo } from 'react' +import { + Button, + ChevronDown, + DropdownMenu, + DropdownMenuContent, + DropdownMenuItem, + DropdownMenuTrigger, + Plus, +} from 
'@/components/emcn' +import { cn } from '@/lib/core/utils/cn' +import { InlineRenameInput } from '@/app/workspace/[workspaceId]/components/inline-rename-input' + +export interface DropdownOption { + label: string + icon?: React.ElementType + onClick: () => void + disabled?: boolean +} + +export interface BreadcrumbEditing { + isEditing: boolean + value: string + onChange: (value: string) => void + onSubmit: () => void + onCancel: () => void +} + +export interface BreadcrumbItem { + label: string + onClick?: () => void + dropdownItems?: DropdownOption[] + editing?: BreadcrumbEditing +} + +export interface HeaderAction { + label: string + icon?: React.ElementType + onClick: () => void + disabled?: boolean +} + +export interface CreateAction { + label: string + onClick: () => void + disabled?: boolean +} + +interface ResourceHeaderProps { + icon?: React.ElementType + title?: string + breadcrumbs?: BreadcrumbItem[] + create?: CreateAction + actions?: HeaderAction[] +} + +export const ResourceHeader = memo(function ResourceHeader({ + icon: Icon, + title, + breadcrumbs, + create, + actions, +}: ResourceHeaderProps) { + const hasBreadcrumbs = breadcrumbs && breadcrumbs.length > 0 + + return ( +
+
+
+ {hasBreadcrumbs ? ( + breadcrumbs.map((crumb, i) => ( + + {i > 0 && ( + / + )} + + + )) + ) : ( + <> + {Icon && } + {title && ( +

{title}

+ )} + + )} +
+
+ {actions?.map((action) => { + const ActionIcon = action.icon + return ( + + ) + })} + {create && ( + + )} +
+
+
+ ) +}) + +function BreadcrumbSegment({ + icon: Icon, + label, + onClick, + dropdownItems, + editing, +}: { + icon?: React.ElementType + label: string + onClick?: () => void + dropdownItems?: DropdownOption[] + editing?: BreadcrumbEditing +}) { + if (editing?.isEditing) { + return ( + + {Icon && } + + + ) + } + + const content = ( + <> + {Icon && } + {label} + + ) + + if (dropdownItems && dropdownItems.length > 0) { + return ( + + + + + + {dropdownItems.map((item) => { + const ItemIcon = item.icon + return ( + + {ItemIcon && } + {item.label} + + ) + })} + + + ) + } + + if (onClick) { + return ( + + ) + } + + return ( + + {content} + + ) +} diff --git a/apps/sim/app/workspace/[workspaceId]/components/resource/components/resource-options-bar/index.ts b/apps/sim/app/workspace/[workspaceId]/components/resource/components/resource-options-bar/index.ts new file mode 100644 index 00000000000..ba2be6c912c --- /dev/null +++ b/apps/sim/app/workspace/[workspaceId]/components/resource/components/resource-options-bar/index.ts @@ -0,0 +1,8 @@ +export type { + ColumnOption, + FilterTag, + SearchConfig, + SearchTag, + SortConfig, +} from './resource-options-bar' +export { ResourceOptionsBar } from './resource-options-bar' diff --git a/apps/sim/app/workspace/[workspaceId]/components/resource/components/resource-options-bar/resource-options-bar.tsx b/apps/sim/app/workspace/[workspaceId]/components/resource/components/resource-options-bar/resource-options-bar.tsx new file mode 100644 index 00000000000..b64349112cd --- /dev/null +++ b/apps/sim/app/workspace/[workspaceId]/components/resource/components/resource-options-bar/resource-options-bar.tsx @@ -0,0 +1,226 @@ +import { memo, type ReactNode } from 'react' +import * as PopoverPrimitive from '@radix-ui/react-popover' +import { + ArrowDown, + ArrowUp, + ArrowUpDown, + Button, + DropdownMenu, + DropdownMenuContent, + DropdownMenuItem, + DropdownMenuSeparator, + DropdownMenuTrigger, + ListFilter, + Search, + X, +} from 
'@/components/emcn' +import { cn } from '@/lib/core/utils/cn' + +type SortDirection = 'asc' | 'desc' + +export interface ColumnOption { + id: string + label: string + type?: string + icon?: React.ElementType +} + +export interface SortConfig { + options: ColumnOption[] + active: { column: string; direction: SortDirection } | null + onSort: (column: string, direction: SortDirection) => void + onClear?: () => void +} + +export interface FilterTag { + label: string + onRemove: () => void +} + +export interface SearchTag { + label: string + value: string + onRemove: () => void +} + +export interface SearchConfig { + value: string + onChange: (value: string) => void + placeholder?: string + inputRef?: React.RefObject + onKeyDown?: (e: React.KeyboardEvent) => void + onFocus?: () => void + onBlur?: () => void + tags?: SearchTag[] + highlightedTagIndex?: number | null + onClearAll?: () => void + dropdown?: ReactNode + dropdownRef?: React.RefObject +} + +interface ResourceOptionsBarProps { + search?: SearchConfig + sort?: SortConfig + filter?: ReactNode + filterTags?: FilterTag[] + extras?: ReactNode +} + +export const ResourceOptionsBar = memo(function ResourceOptionsBar({ + search, + sort, + filter, + filterTags, + extras, +}: ResourceOptionsBarProps) { + const hasContent = search || sort || filter || extras || (filterTags && filterTags.length > 0) + if (!hasContent) return null + + return ( +
+
+ {search && ( +
+ +
+ {search.tags?.map((tag, i) => ( + + ))} + search.onChange(e.target.value)} + onKeyDown={search.onKeyDown} + onFocus={search.onFocus} + onBlur={search.onBlur} + placeholder={search.tags?.length ? '' : (search.placeholder ?? 'Search...')} + className='min-w-[80px] flex-1 bg-transparent py-[4px] text-[12px] text-[var(--text-secondary)] outline-none placeholder:text-[var(--text-subtle)]' + /> +
+ {search.tags?.length || search.value ? ( + + ) : null} + {search.dropdown && ( +
+ {search.dropdown} +
+ )} +
+ )} +
+ {extras} + {filterTags?.map((tag) => ( + + ))} + {filter && ( + + + + + + + {filter} + + + + )} + {sort && } +
+
+
+ ) +}) + +function SortDropdown({ config }: { config: SortConfig }) { + const { options, active, onSort, onClear } = config + + return ( + + + + + + {options.map((option) => { + const isActive = active?.column === option.id + const Icon = option.icon + const DirectionIcon = isActive ? (active.direction === 'asc' ? ArrowUp : ArrowDown) : null + + return ( + { + if (isActive) { + onSort(option.id, active.direction === 'asc' ? 'desc' : 'asc') + } else { + onSort(option.id, 'desc') + } + }} + > + {Icon && } + {option.label} + {DirectionIcon && ( + + )} + + ) + })} + {active && onClear && ( + <> + + + + Clear sort + + + )} + + + ) +} diff --git a/apps/sim/app/workspace/[workspaceId]/components/resource/components/time-cell/index.ts b/apps/sim/app/workspace/[workspaceId]/components/resource/components/time-cell/index.ts new file mode 100644 index 00000000000..20db143be94 --- /dev/null +++ b/apps/sim/app/workspace/[workspaceId]/components/resource/components/time-cell/index.ts @@ -0,0 +1 @@ +export { timeCell } from './time-cell' diff --git a/apps/sim/app/workspace/[workspaceId]/components/resource/components/time-cell/time-cell.ts b/apps/sim/app/workspace/[workspaceId]/components/resource/components/time-cell/time-cell.ts new file mode 100644 index 00000000000..fb970451599 --- /dev/null +++ b/apps/sim/app/workspace/[workspaceId]/components/resource/components/time-cell/time-cell.ts @@ -0,0 +1,81 @@ +import type { ResourceCell } from '@/app/workspace/[workspaceId]/components/resource/resource' + +const SECOND = 1000 +const MINUTE = 60 * SECOND +const HOUR = 60 * MINUTE +const DAY = 24 * HOUR + +const ORDINAL_RULES: [number, string][] = [ + [1, 'st'], + [2, 'nd'], + [3, 'rd'], +] + +function ordinalSuffix(day: number): string { + if (day >= 11 && day <= 13) return 'th' + return ORDINAL_RULES.find(([d]) => day % 10 === d)?.[1] ?? 
'th' +} + +const MONTH_NAMES = [ + 'January', + 'February', + 'March', + 'April', + 'May', + 'June', + 'July', + 'August', + 'September', + 'October', + 'November', + 'December', +] as const + +function formatFullDate(date: Date): string { + const month = MONTH_NAMES[date.getMonth()] + const day = date.getDate() + const year = date.getFullYear() + return `${month} ${day}${ordinalSuffix(day)}, ${year}` +} + +function pluralize(value: number, unit: string): string { + return `${value} ${unit}${value === 1 ? '' : 's'}` +} + +/** + * Formats a date string into a human-friendly relative time label. + * + * - Within ~1 minute of now: "Now" + * - Under 1 hour: "X minute(s) ago" / "X minute(s)" + * - Under 24 hours: "X hour(s) ago" / "X hour(s)" + * - Under 2 days: "X day(s) ago" / "X day(s)" + * - Beyond 2 days: full date (e.g. "March 4th, 2026") + */ +export function timeCell(dateValue: string | Date | null | undefined): ResourceCell { + if (!dateValue) return { label: null } + + const date = dateValue instanceof Date ? dateValue : new Date(dateValue) + const now = new Date() + const diff = now.getTime() - date.getTime() + const absDiff = Math.abs(diff) + const isPast = diff > 0 + + if (absDiff < MINUTE) return { label: 'Now' } + + if (absDiff < HOUR) { + const minutes = Math.floor(absDiff / MINUTE) + return { label: isPast ? `${pluralize(minutes, 'minute')} ago` : pluralize(minutes, 'minute') } + } + + if (absDiff < DAY) { + const hours = Math.floor(absDiff / HOUR) + return { label: isPast ? `${pluralize(hours, 'hour')} ago` : pluralize(hours, 'hour') } + } + + if (absDiff < 2 * DAY) { + const days = Math.floor(absDiff / DAY) + return { label: isPast ? 
`${pluralize(days, 'day')} ago` : pluralize(days, 'day') } + } + + return { label: formatFullDate(date) } +} diff --git a/apps/sim/app/workspace/[workspaceId]/components/resource/resource.tsx b/apps/sim/app/workspace/[workspaceId]/components/resource/resource.tsx new file mode 100644 index 00000000000..63300948f9f --- /dev/null +++ b/apps/sim/app/workspace/[workspaceId]/components/resource/resource.tsx @@ -0,0 +1,552 @@ +'use client' +import { memo, type ReactNode, useCallback, useEffect, useMemo, useRef, useState } from 'react' +import { ChevronLeft, ChevronRight } from 'lucide-react' +import { ArrowDown, ArrowUp, Button, Checkbox, Loader, Plus, Skeleton } from '@/components/emcn' +import { cn } from '@/lib/core/utils/cn' +import type { BreadcrumbItem, CreateAction, HeaderAction } from './components/resource-header' +import { ResourceHeader } from './components/resource-header' +import type { FilterTag, SearchConfig, SortConfig } from './components/resource-options-bar' +import { ResourceOptionsBar } from './components/resource-options-bar' + +export interface ResourceColumn { + id: string + header: string + widthMultiplier?: number +} + +export interface ResourceCell { + icon?: ReactNode + label?: string | null + content?: ReactNode +} + +export interface ResourceRow { + id: string + cells: Record + sortValues?: Record +} + +export interface SelectableConfig { + selectedIds: Set + onSelectRow: (id: string, checked: boolean) => void + onSelectAll: (checked: boolean) => void + isAllSelected: boolean + disabled?: boolean +} + +export interface PaginationConfig { + currentPage: number + totalPages: number + onPageChange: (page: number) => void +} + +interface ResourceProps { + icon: React.ElementType + title: string + breadcrumbs?: BreadcrumbItem[] + create?: CreateAction + search?: SearchConfig + defaultSort?: string + sort?: SortConfig + headerActions?: HeaderAction[] + columns: ResourceColumn[] + rows: ResourceRow[] + selectedRowId?: string | null + selectable?: 
SelectableConfig + onRowClick?: (rowId: string) => void + onRowHover?: (rowId: string) => void + onRowContextMenu?: (e: React.MouseEvent, rowId: string) => void + isLoading?: boolean + onContextMenu?: (e: React.MouseEvent) => void + filter?: ReactNode + filterTags?: FilterTag[] + extras?: ReactNode + pagination?: PaginationConfig + emptyMessage?: string + overlay?: ReactNode +} + +const EMPTY_CELL_PLACEHOLDER = '- - -' +const SKELETON_ROW_COUNT = 5 + +/** + * Shared page shell for resource list pages (tables, files, knowledge, schedules, logs). + * Renders the header, toolbar with search, and a data table from column/row definitions. + */ +export function Resource({ + icon, + title, + breadcrumbs, + create, + search, + defaultSort, + sort: sortOverride, + headerActions, + columns, + rows, + selectedRowId, + selectable, + onRowClick, + onRowHover, + onRowContextMenu, + isLoading, + onContextMenu, + filter, + filterTags, + extras, + pagination, + emptyMessage, + overlay, +}: ResourceProps) { + return ( +
+ + + +
+ ) +} + +export interface ResourceTableProps { + columns: ResourceColumn[] + rows: ResourceRow[] + defaultSort?: string + sort?: SortConfig + selectedRowId?: string | null + selectable?: SelectableConfig + onRowClick?: (rowId: string) => void + onRowHover?: (rowId: string) => void + onRowContextMenu?: (e: React.MouseEvent, rowId: string) => void + isLoading?: boolean + create?: CreateAction + onLoadMore?: () => void + hasMore?: boolean + isLoadingMore?: boolean + pagination?: PaginationConfig + emptyMessage?: string + overlay?: ReactNode +} + +/** + * Data table body extracted from Resource for independent composition. + * Use directly when rendering a table without the Resource header/toolbar. + */ +export const ResourceTable = memo(function ResourceTable({ + columns, + rows, + defaultSort, + sort: externalSort, + selectedRowId, + selectable, + onRowClick, + onRowHover, + onRowContextMenu, + isLoading, + create, + onLoadMore, + hasMore, + isLoadingMore, + pagination, + emptyMessage, + overlay, +}: ResourceTableProps) { + const headerRef = useRef(null) + const loadMoreRef = useRef(null) + const sortEnabled = defaultSort != null + const [internalSort, setInternalSort] = useState<{ column: string; direction: 'asc' | 'desc' }>({ + column: defaultSort ?? '', + direction: 'desc', + }) + + const handleBodyScroll = useCallback((e: React.UIEvent) => { + if (headerRef.current) { + headerRef.current.scrollLeft = e.currentTarget.scrollLeft + } + }, []) + + const handleSort = useCallback((column: string, direction: 'asc' | 'desc') => { + setInternalSort({ column, direction }) + }, []) + + const displayRows = useMemo(() => { + if (!sortEnabled || externalSort) return rows + return [...rows].sort((a, b) => { + const col = internalSort.column + const aVal = a.sortValues?.[col] ?? a.cells[col]?.label ?? '' + const bVal = b.sortValues?.[col] ?? b.cells[col]?.label ?? '' + const cmp = + typeof aVal === 'number' && typeof bVal === 'number' + ? 
aVal - bVal + : String(aVal).localeCompare(String(bVal)) + return internalSort.direction === 'asc' ? -cmp : cmp + }) + }, [rows, internalSort, sortEnabled, externalSort]) + + useEffect(() => { + if (!onLoadMore || !hasMore) return + const el = loadMoreRef.current + if (!el) return + const observer = new IntersectionObserver( + ([entry]) => { + if (entry.isIntersecting) onLoadMore() + }, + { rootMargin: '200px' } + ) + observer.observe(el) + return () => observer.disconnect() + }, [onLoadMore, hasMore]) + + const hasCheckbox = selectable != null + const totalColSpan = columns.length + (hasCheckbox ? 1 : 0) + + if (isLoading) { + return ( + + ) + } + + if (rows.length === 0 && emptyMessage) { + return ( +
+ {emptyMessage} +
+ ) + } + + return ( +
+
+ + + + + {hasCheckbox && ( + + )} + {columns.map((col) => { + if (!sortEnabled) { + return ( + + ) + } + const isActive = internalSort.column === col.id + const SortIcon = internalSort.direction === 'asc' ? ArrowUp : ArrowDown + return ( + + ) + })} + + +
+ selectable.onSelectAll(checked as boolean)} + disabled={selectable.disabled} + aria-label='Select all' + /> + + {col.header} + + +
+
+
+ + + + {displayRows.map((row) => { + const isSelected = selectable?.selectedIds.has(row.id) ?? false + return ( + onRowClick?.(row.id)} + onMouseEnter={onRowHover ? () => onRowHover(row.id) : undefined} + onContextMenu={(e) => onRowContextMenu?.(e, row.id)} + > + {hasCheckbox && ( + + )} + {columns.map((col, colIdx) => { + const cell = row.cells[col.id] + return ( + + ) + })} + + ) + })} + {create && ( + + + + )} + +
+ + selectable.onSelectRow(row.id, checked as boolean) + } + disabled={selectable.disabled} + aria-label='Select row' + onClick={(e) => e.stopPropagation()} + /> + + +
+ + + {create.label} + +
+ {hasMore && ( +
+ {isLoadingMore && ( + + )} +
+ )} +
+ {overlay} + {pagination && pagination.totalPages > 1 && ( + + )} +
+ ) +}) + +function Pagination({ + currentPage, + totalPages, + onPageChange, +}: { + currentPage: number + totalPages: number + onPageChange: (page: number) => void +}) { + return ( +
+
+ +
+ {Array.from({ length: Math.min(totalPages, 5) }, (_, i) => { + let page: number + if (totalPages <= 5) { + page = i + 1 + } else if (currentPage <= 3) { + page = i + 1 + } else if (currentPage >= totalPages - 2) { + page = totalPages - 4 + i + } else { + page = currentPage - 2 + i + } + if (page < 1 || page > totalPages) return null + return ( + + ) + })} +
+ +
+
+ ) +} + +function CellContent({ cell, primary }: { cell: ResourceCell; primary?: boolean }) { + if (cell.content) return <>{cell.content} + return ( + + {cell.icon && {cell.icon}} + {cell.label} + + ) +} + +function ResourceColGroup({ + columns, + hasCheckbox, +}: { + columns: ResourceColumn[] + hasCheckbox?: boolean +}) { + return ( + + {hasCheckbox && } + {columns.map((col, colIdx) => ( + + ))} + + ) +} + +function DataTableSkeleton({ + columns, + rowCount, + hasCheckbox, +}: { + columns: ResourceColumn[] + rowCount: number + hasCheckbox?: boolean +}) { + return ( + <> +
+ + + + + {hasCheckbox && ( + + )} + {columns.map((col) => ( + + ))} + + +
+ + +
+ +
+
+
+
+ + + + {Array.from({ length: rowCount }, (_, i) => ( + + {hasCheckbox && ( + + )} + {columns.map((col, colIdx) => ( + + ))} + + ))} + +
+ + + + {colIdx === 0 && } + + +
+
+ + ) +} diff --git a/apps/sim/app/workspace/[workspaceId]/error.tsx b/apps/sim/app/workspace/[workspaceId]/error.tsx index 2504830ecb4..c681cf70ee3 100644 --- a/apps/sim/app/workspace/[workspaceId]/error.tsx +++ b/apps/sim/app/workspace/[workspaceId]/error.tsx @@ -1,5 +1,20 @@ 'use client' -import { NextError } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/error' +import { ErrorState } from '@/app/workspace/[workspaceId]/components' -export default NextError +interface WorkspaceErrorProps { + error: Error & { digest?: string } + reset: () => void +} + +export default function WorkspaceError({ error, reset }: WorkspaceErrorProps) { + return ( + + ) +} diff --git a/apps/sim/app/workspace/[workspaceId]/files/[fileId]/view/file-viewer.tsx b/apps/sim/app/workspace/[workspaceId]/files/[fileId]/view/file-viewer.tsx index fb858aa0d4f..b939d50898d 100644 --- a/apps/sim/app/workspace/[workspaceId]/files/[fileId]/view/file-viewer.tsx +++ b/apps/sim/app/workspace/[workspaceId]/files/[fileId]/view/file-viewer.tsx @@ -13,7 +13,7 @@ export function FileViewer({ file }: FileViewerProps) { const serveUrl = `/api/files/serve/${encodeURIComponent(file.key)}?context=workspace` return ( -
+