diff --git a/.changeset/big-carrots-fail.md b/.changeset/big-carrots-fail.md deleted file mode 100644 index 0dc095499b..0000000000 --- a/.changeset/big-carrots-fail.md +++ /dev/null @@ -1,5 +0,0 @@ ---- -"trigger.dev": patch ---- - -Fix dev runs diff --git a/.changeset/blue-eyes-tickle.md b/.changeset/blue-eyes-tickle.md deleted file mode 100644 index ab4ca8b92c..0000000000 --- a/.changeset/blue-eyes-tickle.md +++ /dev/null @@ -1,6 +0,0 @@ ---- -"trigger.dev": patch -"@trigger.dev/core": patch ---- - -The dev command will now use the platform-provided engine URL diff --git a/.changeset/breezy-turtles-talk.md b/.changeset/breezy-turtles-talk.md deleted file mode 100644 index da208cb2c8..0000000000 --- a/.changeset/breezy-turtles-talk.md +++ /dev/null @@ -1,10 +0,0 @@ ---- -"@trigger.dev/react-hooks": patch -"@trigger.dev/sdk": patch -"trigger.dev": patch -"@trigger.dev/build": patch -"@trigger.dev/core": patch -"@trigger.dev/rsc": patch ---- - -Run Engine 2.0 (alpha) diff --git a/.changeset/cool-elephants-carry.md b/.changeset/cool-elephants-carry.md new file mode 100644 index 0000000000..ab9668e1e7 --- /dev/null +++ b/.changeset/cool-elephants-carry.md @@ -0,0 +1,5 @@ +--- +"trigger.dev": patch +--- + +fix(cli): update command should preserve existing package.json order diff --git a/.changeset/cuddly-boats-press.md b/.changeset/cuddly-boats-press.md deleted file mode 100644 index 7d44263936..0000000000 --- a/.changeset/cuddly-boats-press.md +++ /dev/null @@ -1,5 +0,0 @@ ---- -"trigger.dev": patch ---- - -Add external log exporters and fix missing external trace exporters in deployed tasks diff --git a/.changeset/curvy-dogs-share.md b/.changeset/curvy-dogs-share.md deleted file mode 100644 index a0071042aa..0000000000 --- a/.changeset/curvy-dogs-share.md +++ /dev/null @@ -1,5 +0,0 @@ ---- -"@trigger.dev/sdk": patch ---- - -When you create a Waitpoint token using `wait.createToken()` you get a URL back that can be used to complete it by making an HTTP POST request. diff --git a/.changeset/eighty-rings-divide.md b/.changeset/eighty-rings-divide.md deleted file mode 100644 index 193b46ca7b..0000000000 --- a/.changeset/eighty-rings-divide.md +++ /dev/null @@ -1,5 +0,0 @@ ---- -"@trigger.dev/core": patch ---- - -Configurable queue consumer count in supervisor session diff --git a/.changeset/flat-pianos-live.md b/.changeset/flat-pianos-live.md deleted file mode 100644 index c1f915195c..0000000000 --- a/.changeset/flat-pianos-live.md +++ /dev/null @@ -1,6 +0,0 @@ ---- -"trigger.dev": patch -"@trigger.dev/core": patch ---- - -Runtime agnostic SDK config via env vars diff --git a/.changeset/four-needles-add.md b/.changeset/four-needles-add.md deleted file mode 100644 index 7dd18092df..0000000000 --- a/.changeset/four-needles-add.md +++ /dev/null @@ -1,12 +0,0 @@ ---- -"@trigger.dev/redis-worker": major -"@trigger.dev/react-hooks": major -"@trigger.dev/sdk": major -"trigger.dev": major -"@trigger.dev/python": major -"@trigger.dev/build": major -"@trigger.dev/core": major -"@trigger.dev/rsc": major ---- - -Trigger.dev v4 release. 
Please see our upgrade to v4 docs to view the full changelog: https://trigger.dev/docs/upgrade-to-v4 diff --git a/.changeset/fuzzy-snakes-beg.md b/.changeset/fuzzy-snakes-beg.md deleted file mode 100644 index f5bd55cdeb..0000000000 --- a/.changeset/fuzzy-snakes-beg.md +++ /dev/null @@ -1,5 +0,0 @@ ---- -"@trigger.dev/core": patch ---- - -Add supervisor http client option to disable debug logs diff --git a/.changeset/gentle-waves-suffer.md b/.changeset/gentle-waves-suffer.md deleted file mode 100644 index d96452b775..0000000000 --- a/.changeset/gentle-waves-suffer.md +++ /dev/null @@ -1,5 +0,0 @@ ---- -"@trigger.dev/sdk": patch ---- - -Fixed an issue with realtime streams that timeout and resume streaming dropping chunks diff --git a/.changeset/gold-insects-invite.md b/.changeset/gold-insects-invite.md deleted file mode 100644 index 7b260ed607..0000000000 --- a/.changeset/gold-insects-invite.md +++ /dev/null @@ -1,6 +0,0 @@ ---- -"trigger.dev": patch -"@trigger.dev/core": patch ---- - -Expose esbuild `keepNames` option (experimental) diff --git a/.changeset/green-lions-relate.md b/.changeset/green-lions-relate.md deleted file mode 100644 index da2a96b29d..0000000000 --- a/.changeset/green-lions-relate.md +++ /dev/null @@ -1,5 +0,0 @@ ---- -"@trigger.dev/sdk": patch ---- - -The envvars.list() and retrieve() functions receive isSecret for each value. Secret values are always redacted. diff --git a/.changeset/grumpy-wasps-fold.md b/.changeset/grumpy-wasps-fold.md deleted file mode 100644 index 78b74f73b7..0000000000 --- a/.changeset/grumpy-wasps-fold.md +++ /dev/null @@ -1,6 +0,0 @@ ---- -"trigger.dev": patch -"@trigger.dev/core": patch ---- - -Add `experimental_autoDetectExternal` trigger config option diff --git a/.changeset/hip-cups-wave.md b/.changeset/hip-cups-wave.md deleted file mode 100644 index c21b94e37b..0000000000 --- a/.changeset/hip-cups-wave.md +++ /dev/null @@ -1,5 +0,0 @@ ---- -"@trigger.dev/sdk": patch ---- - -Fix issue where realtime streams would cut off after 5 minutes diff --git a/.changeset/honest-files-decide.md b/.changeset/honest-files-decide.md deleted file mode 100644 index 6bc65f34cc..0000000000 --- a/.changeset/honest-files-decide.md +++ /dev/null @@ -1,5 +0,0 @@ ---- -"@trigger.dev/sdk": patch ---- - -Deprecate toolTask and replace with `ai.tool(mySchemaTask)` diff --git a/.changeset/itchy-frogs-care.md b/.changeset/itchy-frogs-care.md deleted file mode 100644 index 72cb57c867..0000000000 --- a/.changeset/itchy-frogs-care.md +++ /dev/null @@ -1,5 +0,0 @@ ---- -"trigger.dev": patch ---- - -Log images sizes for self-hosted deploys diff --git a/.changeset/itchy-games-sort.md b/.changeset/itchy-games-sort.md deleted file mode 100644 index 3f04f68228..0000000000 --- a/.changeset/itchy-games-sort.md +++ /dev/null @@ -1,6 +0,0 @@ ---- -"@trigger.dev/sdk": patch -"trigger.dev": patch ---- - -Display clickable links in Cursor terminal diff --git a/.changeset/late-chairs-ring.md b/.changeset/late-chairs-ring.md deleted file mode 100644 index cd7c9f3620..0000000000 --- a/.changeset/late-chairs-ring.md +++ /dev/null @@ -1,5 +0,0 @@ ---- -"trigger.dev": patch ---- - -Fix init.ts in custom trigger dirs diff --git a/.changeset/late-dancers-smile.md b/.changeset/late-dancers-smile.md deleted file mode 100644 index 58026740d8..0000000000 --- a/.changeset/late-dancers-smile.md +++ /dev/null @@ -1,6 +0,0 @@ ---- -"trigger.dev": patch -"@trigger.dev/core": patch ---- - -Add import timings and bundle size analysis, the dev command will now warn about slow imports diff --git 
a/.changeset/lazy-panthers-shop.md b/.changeset/lazy-panthers-shop.md deleted file mode 100644 index fa622e087e..0000000000 --- a/.changeset/lazy-panthers-shop.md +++ /dev/null @@ -1,5 +0,0 @@ ---- -"@trigger.dev/core": patch ---- - -Improve structured logs diff --git a/.changeset/lazy-plums-fetch.md b/.changeset/lazy-plums-fetch.md deleted file mode 100644 index 515d6f85d0..0000000000 --- a/.changeset/lazy-plums-fetch.md +++ /dev/null @@ -1,5 +0,0 @@ ---- -"@trigger.dev/build": patch ---- - -syncVercelEnvVars() fix for syncing the wrong preview branch env vars diff --git a/.changeset/light-peas-melt.md b/.changeset/light-peas-melt.md deleted file mode 100644 index 52d184b6cc..0000000000 --- a/.changeset/light-peas-melt.md +++ /dev/null @@ -1,5 +0,0 @@ ---- -"trigger.dev": patch ---- - -Fix update command version range handling diff --git a/.changeset/moody-squids-count.md b/.changeset/moody-squids-count.md deleted file mode 100644 index e475088102..0000000000 --- a/.changeset/moody-squids-count.md +++ /dev/null @@ -1,5 +0,0 @@ ---- -"trigger.dev": patch ---- - -Init command will now correctly install v4-beta packages diff --git a/.changeset/nasty-cobras-wonder.md b/.changeset/nasty-cobras-wonder.md deleted file mode 100644 index 0cd7c417d1..0000000000 --- a/.changeset/nasty-cobras-wonder.md +++ /dev/null @@ -1,5 +0,0 @@ ---- -"trigger.dev": patch ---- - -Fix metadata collapsing correctness diff --git a/.changeset/nice-colts-boil.md b/.changeset/nice-colts-boil.md deleted file mode 100644 index bd395ae9dd..0000000000 --- a/.changeset/nice-colts-boil.md +++ /dev/null @@ -1,5 +0,0 @@ ---- -"trigger.dev": patch ---- - -Improve warm start times by eagerly creating the child TaskRunProcess when a previous run as completed diff --git a/.changeset/ninety-games-grow.md b/.changeset/ninety-games-grow.md deleted file mode 100644 index df22eff4ee..0000000000 --- a/.changeset/ninety-games-grow.md +++ /dev/null @@ -1,11 +0,0 @@ ---- -"trigger.dev": patch -"@trigger.dev/core": patch ---- - -- Resolve issue where CLI could get stuck during deploy finalization -- Unify local and remote build logic, with multi-platform build support -- Improve switch command; now accepts profile name as an argument -- Registry configuration is now fully managed by the webapp -- The deploy `--self-hosted` flag is no longer required -- Enhance deployment error reporting and image digest retrieval diff --git a/.changeset/orange-rocks-grow.md b/.changeset/orange-rocks-grow.md deleted file mode 100644 index c8a6bdaa23..0000000000 --- a/.changeset/orange-rocks-grow.md +++ /dev/null @@ -1,5 +0,0 @@ ---- -"trigger.dev": patch ---- - -Fix init.ts detection when using the sentry esbuild plugin diff --git a/.changeset/plenty-dolphins-act.md b/.changeset/plenty-dolphins-act.md deleted file mode 100644 index 59d2c7fc44..0000000000 --- a/.changeset/plenty-dolphins-act.md +++ /dev/null @@ -1,8 +0,0 @@ ---- -"trigger.dev": patch -"@trigger.dev/core": patch ---- - -- Correctly resolve waitpoints that come in early -- Ensure correct state before requesting suspension -- Fix race conditions in snapshot processing diff --git a/.changeset/polite-impalas-care.md b/.changeset/polite-impalas-care.md deleted file mode 100644 index 134ff3dd4a..0000000000 --- a/.changeset/polite-impalas-care.md +++ /dev/null @@ -1,5 +0,0 @@ ---- -"trigger.dev": patch ---- - -Fixes runLimiter check on #dequeueRuns diff --git a/.changeset/polite-lies-fix.md b/.changeset/polite-lies-fix.md deleted file mode 100644 index 6e60a77604..0000000000 --- 
a/.changeset/polite-lies-fix.md +++ /dev/null @@ -1,5 +0,0 @@ ---- -"trigger.dev": patch ---- - -Update nypm package to support test-based bun.lock files diff --git a/.changeset/pre.json b/.changeset/pre.json deleted file mode 100644 index d41de9b5b0..0000000000 --- a/.changeset/pre.json +++ /dev/null @@ -1,79 +0,0 @@ -{ - "mode": "pre", - "tag": "v4-beta", - "initialVersions": { - "coordinator": "0.0.1", - "docker-provider": "0.0.1", - "kubernetes-provider": "0.0.1", - "supervisor": "0.0.1", - "webapp": "1.0.0", - "@trigger.dev/build": "3.3.17", - "trigger.dev": "3.3.17", - "@trigger.dev/core": "3.3.17", - "@trigger.dev/python": "3.3.17", - "@trigger.dev/react-hooks": "3.3.17", - "@trigger.dev/redis-worker": "3.3.17", - "@trigger.dev/rsc": "3.3.17", - "@trigger.dev/sdk": "3.3.17" - }, - "changesets": [ - "big-carrots-fail", - "blue-eyes-tickle", - "breezy-turtles-talk", - "cuddly-boats-press", - "curvy-dogs-share", - "eighty-rings-divide", - "flat-pianos-live", - "four-needles-add", - "fuzzy-snakes-beg", - "gentle-waves-suffer", - "gold-insects-invite", - "green-lions-relate", - "grumpy-wasps-fold", - "hip-cups-wave", - "honest-files-decide", - "itchy-frogs-care", - "itchy-games-sort", - "late-chairs-ring", - "late-dancers-smile", - "lazy-panthers-shop", - "lazy-plums-fetch", - "light-peas-melt", - "moody-squids-count", - "nasty-cobras-wonder", - "nice-colts-boil", - "ninety-games-grow", - "orange-rocks-grow", - "plenty-dolphins-act", - "polite-impalas-care", - "polite-lies-fix", - "rare-beds-accept", - "real-rats-drop", - "red-chairs-begin", - "red-wasps-cover", - "shiny-kiwis-beam", - "silly-cows-serve", - "silly-timers-repair", - "small-dancers-smell", - "smart-coins-hammer", - "smooth-planets-flow", - "sour-mirrors-accept", - "spotty-ducks-punch", - "spotty-pants-wink", - "sweet-dolphins-invent", - "tender-jobs-collect", - "thick-bikes-laugh", - "tidy-books-smell", - "tiny-buckets-teach", - "tricky-houses-invite", - "twelve-actors-hide", - "two-tigers-dream", - "weak-jobs-hide", - "weak-parents-sip", - "wet-deers-think", - "wet-steaks-reflect", - "wild-mirrors-return", - "witty-cherries-tan", - "witty-donkeys-unite" - ] -} diff --git a/.changeset/rare-beds-accept.md b/.changeset/rare-beds-accept.md deleted file mode 100644 index dccd97a96a..0000000000 --- a/.changeset/rare-beds-accept.md +++ /dev/null @@ -1,5 +0,0 @@ ---- -"@trigger.dev/core": patch ---- - -Add verbose structured log level diff --git a/.changeset/real-rats-drop.md b/.changeset/real-rats-drop.md deleted file mode 100644 index 953794afd4..0000000000 --- a/.changeset/real-rats-drop.md +++ /dev/null @@ -1,5 +0,0 @@ ---- -"@trigger.dev/sdk": patch ---- - -Add onCancel lifecycle hook diff --git a/.changeset/red-chairs-begin.md b/.changeset/red-chairs-begin.md deleted file mode 100644 index e54857e552..0000000000 --- a/.changeset/red-chairs-begin.md +++ /dev/null @@ -1,5 +0,0 @@ ---- -"trigger.dev": patch ---- - -Added AI assistance link when you have build errors diff --git a/.changeset/red-wasps-cover.md b/.changeset/red-wasps-cover.md deleted file mode 100644 index 035e7549fa..0000000000 --- a/.changeset/red-wasps-cover.md +++ /dev/null @@ -1,5 +0,0 @@ ---- -"@trigger.dev/core": patch ---- - -Suppress external instrumentation for fetch calls from ApiClient diff --git a/.changeset/shiny-kiwis-beam.md b/.changeset/shiny-kiwis-beam.md deleted file mode 100644 index c01b131162..0000000000 --- a/.changeset/shiny-kiwis-beam.md +++ /dev/null @@ -1,5 +0,0 @@ ---- -"trigger.dev": patch ---- - -Handle flush errors gracefully in dev 
diff --git a/.changeset/silly-cows-serve.md b/.changeset/silly-cows-serve.md deleted file mode 100644 index d655dbed80..0000000000 --- a/.changeset/silly-cows-serve.md +++ /dev/null @@ -1,5 +0,0 @@ ---- -"trigger.dev": patch ---- - -Added support for Preview branches in v4 projects diff --git a/.changeset/silly-timers-repair.md b/.changeset/silly-timers-repair.md deleted file mode 100644 index 711fbc7f2b..0000000000 --- a/.changeset/silly-timers-repair.md +++ /dev/null @@ -1,5 +0,0 @@ ---- -"trigger.dev": patch ---- - -Can now set project ref using the TRIGGER_PROJECT_REF env var diff --git a/.changeset/small-birds-arrive.md b/.changeset/small-birds-arrive.md deleted file mode 100644 index cf1039b83e..0000000000 --- a/.changeset/small-birds-arrive.md +++ /dev/null @@ -1,16 +0,0 @@ ---- -"@trigger.dev/react-hooks": patch ---- - -Added the ability to specify a "createdAt" filter when subscribing to tags in our useRealtime hooks: - -```tsx -// Only subscribe to runs created in the last 10 hours -useRealtimeRunWithTags("my-tag", { createdAt: "10h" }) -``` - -You can also now choose to skip subscribing to specific columns by specifying the `skipColumns` option: - -```tsx -useRealtimeRun(run.id, { skipColumns: ["usageDurationMs"] }); -``` diff --git a/.changeset/small-dancers-smell.md b/.changeset/small-dancers-smell.md deleted file mode 100644 index ff9b68c00f..0000000000 --- a/.changeset/small-dancers-smell.md +++ /dev/null @@ -1,6 +0,0 @@ ---- -"@trigger.dev/sdk": patch -"@trigger.dev/core": patch ---- - -Improve metadata flushing efficiency by collapsing operations diff --git a/.changeset/smart-coins-hammer.md b/.changeset/smart-coins-hammer.md deleted file mode 100644 index bea810c6ca..0000000000 --- a/.changeset/smart-coins-hammer.md +++ /dev/null @@ -1,5 +0,0 @@ ---- -"@trigger.dev/core": patch ---- - -fix: Realtime streams: prevent enqueuing into closed ReadableStream diff --git a/.changeset/smooth-planets-flow.md b/.changeset/smooth-planets-flow.md deleted file mode 100644 index 708932fcca..0000000000 --- a/.changeset/smooth-planets-flow.md +++ /dev/null @@ -1,5 +0,0 @@ ---- -"trigger.dev": patch ---- - -Update profile switcher diff --git a/.changeset/sour-mirrors-accept.md b/.changeset/sour-mirrors-accept.md deleted file mode 100644 index 34084228ca..0000000000 --- a/.changeset/sour-mirrors-accept.md +++ /dev/null @@ -1,6 +0,0 @@ ---- -"trigger.dev": patch -"@trigger.dev/core": patch ---- - -Improve usage flushing diff --git a/.changeset/spotty-ducks-punch.md b/.changeset/spotty-ducks-punch.md deleted file mode 100644 index f6c0298011..0000000000 --- a/.changeset/spotty-ducks-punch.md +++ /dev/null @@ -1,5 +0,0 @@ ---- -"trigger.dev": patch ---- - -fix: default machine config indexing now works diff --git a/.changeset/spotty-pants-wink.md b/.changeset/spotty-pants-wink.md deleted file mode 100644 index 7021ecc8fa..0000000000 --- a/.changeset/spotty-pants-wink.md +++ /dev/null @@ -1,6 +0,0 @@ ---- -"trigger.dev": patch -"@trigger.dev/core": patch ---- - -Prevent large outputs from overwriting each other diff --git a/.changeset/sweet-dolphins-invent.md b/.changeset/sweet-dolphins-invent.md deleted file mode 100644 index df758a89e9..0000000000 --- a/.changeset/sweet-dolphins-invent.md +++ /dev/null @@ -1,5 +0,0 @@ ---- -"trigger.dev": patch ---- - -Always print full deploy logs in CI diff --git a/.changeset/tender-jobs-collect.md b/.changeset/tender-jobs-collect.md deleted file mode 100644 index 829c628b6d..0000000000 --- a/.changeset/tender-jobs-collect.md +++ /dev/null @@ -1,5 +0,0 @@ 
---- -"trigger.dev": patch ---- - -TriggerApiError 4xx errors will no longer cause tasks to be retried diff --git a/.changeset/thick-bikes-laugh.md b/.changeset/thick-bikes-laugh.md deleted file mode 100644 index 7166bfca64..0000000000 --- a/.changeset/thick-bikes-laugh.md +++ /dev/null @@ -1,5 +0,0 @@ ---- -"@trigger.dev/build": patch ---- - -Add ffmpeg v7 support to existing extension: `ffmpeg({ version: "7" })` diff --git a/.changeset/tidy-books-smell.md b/.changeset/tidy-books-smell.md deleted file mode 100644 index b8ecf87f55..0000000000 --- a/.changeset/tidy-books-smell.md +++ /dev/null @@ -1,8 +0,0 @@ ---- -"trigger.dev": patch -"@trigger.dev/core": patch ---- - -- Fix polling interval reset bug that could create duplicate intervals -- Protect against unexpected attempt number changes -- Prevent run execution zombies after warm starts \ No newline at end of file diff --git a/.changeset/tiny-buckets-teach.md b/.changeset/tiny-buckets-teach.md deleted file mode 100644 index fdf3ae3a94..0000000000 --- a/.changeset/tiny-buckets-teach.md +++ /dev/null @@ -1,5 +0,0 @@ ---- -"trigger.dev": patch ---- - -Fix stalled run detection diff --git a/.changeset/tricky-houses-invite.md b/.changeset/tricky-houses-invite.md deleted file mode 100644 index e21e7b5818..0000000000 --- a/.changeset/tricky-houses-invite.md +++ /dev/null @@ -1,6 +0,0 @@ ---- -"trigger.dev": patch -"@trigger.dev/core": patch ---- - -Managed run controller performance and reliability improvements diff --git a/.changeset/twelve-actors-hide.md b/.changeset/twelve-actors-hide.md deleted file mode 100644 index 7187b92ab0..0000000000 --- a/.changeset/twelve-actors-hide.md +++ /dev/null @@ -1,5 +0,0 @@ ---- -"trigger.dev": patch ---- - -Fix init.ts auto-import for deployed workers diff --git a/.changeset/two-tigers-dream.md b/.changeset/two-tigers-dream.md deleted file mode 100644 index b4fee01cbe..0000000000 --- a/.changeset/two-tigers-dream.md +++ /dev/null @@ -1,5 +0,0 @@ ---- -"@trigger.dev/sdk": patch ---- - -maintain proper context in metadata.root and parent getters diff --git a/.changeset/weak-jobs-hide.md b/.changeset/weak-jobs-hide.md deleted file mode 100644 index 0be1f49588..0000000000 --- a/.changeset/weak-jobs-hide.md +++ /dev/null @@ -1,7 +0,0 @@ ---- -"@trigger.dev/sdk": patch -"trigger.dev": patch -"@trigger.dev/core": patch ---- - -v4: New lifecycle hooks diff --git a/.changeset/weak-parents-sip.md b/.changeset/weak-parents-sip.md deleted file mode 100644 index fb8589baea..0000000000 --- a/.changeset/weak-parents-sip.md +++ /dev/null @@ -1,5 +0,0 @@ ---- -"trigger.dev": patch ---- - -Output esbuild metafile, can be inspected after `deploy --dry run` diff --git a/.changeset/wet-deers-think.md b/.changeset/wet-deers-think.md deleted file mode 100644 index 9002d7b94f..0000000000 --- a/.changeset/wet-deers-think.md +++ /dev/null @@ -1,6 +0,0 @@ ---- -"trigger.dev": patch -"@trigger.dev/core": patch ---- - -Fix QUEUED status snapshot handler diff --git a/.changeset/wet-steaks-reflect.md b/.changeset/wet-steaks-reflect.md deleted file mode 100644 index 3a77741689..0000000000 --- a/.changeset/wet-steaks-reflect.md +++ /dev/null @@ -1,5 +0,0 @@ ---- -"trigger.dev": patch ---- - -If you pass a directory when calling deploy we validate it exists and give helpful hints diff --git a/.changeset/wicked-ads-walk.md b/.changeset/wicked-ads-walk.md deleted file mode 100644 index c9190c709f..0000000000 --- a/.changeset/wicked-ads-walk.md +++ /dev/null @@ -1,6 +0,0 @@ ---- -"@trigger.dev/react-hooks": patch -"@trigger.dev/core": 
patch ---- - -Fixes an issue with realtime when re-subscribing to a run, that would temporarily display stale data and the changes. Now when re-subscribing to a run only the latest changes will be vended diff --git a/.changeset/wild-mirrors-return.md b/.changeset/wild-mirrors-return.md deleted file mode 100644 index baee7565a3..0000000000 --- a/.changeset/wild-mirrors-return.md +++ /dev/null @@ -1,6 +0,0 @@ ---- -"trigger.dev": patch -"@trigger.dev/core": patch ---- - -Expose esbuild `minify` option (experimental) diff --git a/.changeset/witty-cherries-tan.md b/.changeset/witty-cherries-tan.md deleted file mode 100644 index 062f1c68de..0000000000 --- a/.changeset/witty-cherries-tan.md +++ /dev/null @@ -1,5 +0,0 @@ ---- -"trigger.dev": patch ---- - -Fix `syncEnvVars` for non-preview deployments diff --git a/.changeset/witty-donkeys-unite.md b/.changeset/witty-donkeys-unite.md deleted file mode 100644 index f1a17eb7ff..0000000000 --- a/.changeset/witty-donkeys-unite.md +++ /dev/null @@ -1,5 +0,0 @@ ---- -"@trigger.dev/build": patch ---- - -Add playwright extension diff --git a/.cursor/commands/deslop.md b/.cursor/commands/deslop.md new file mode 100644 index 0000000000..d82835663f --- /dev/null +++ b/.cursor/commands/deslop.md @@ -0,0 +1,11 @@ +# Remove AI code slop + +Check the diff against main, and remove all AI generated slop introduced in this branch. + +This includes: +- Extra comments that a human wouldn't add or is inconsistent with the rest of the file +- Extra defensive checks or try/catch blocks that are abnormal for that area of the codebase (especially if called by trusted / validated codepaths) +- Casts to any to get around type issues +- Any other style that is inconsistent with the file + +Report at the end with only a 1-3 sentence summary of what you changed \ No newline at end of file diff --git a/.cursor/mcp.json b/.cursor/mcp.json index 9b3221784d..da39e4ffaf 100644 --- a/.cursor/mcp.json +++ b/.cursor/mcp.json @@ -1,7 +1,3 @@ { - "mcpServers": { - "trigger.dev": { - "url": "http://localhost:3333/sse" - } - } -} \ No newline at end of file + "mcpServers": {} +} diff --git a/.cursor/rules/migrations.mdc b/.cursor/rules/migrations.mdc new file mode 100644 index 0000000000..370c87c051 --- /dev/null +++ b/.cursor/rules/migrations.mdc @@ -0,0 +1,6 @@ +--- +description: how to create and apply database migrations +alwaysApply: false +--- + +Follow our [migrations.md](mdc:ai/references/migrations.md) guide for how to create and apply database migrations. diff --git a/.cursor/rules/webapp.mdc b/.cursor/rules/webapp.mdc index 6cda973951..a362f14fe1 100644 --- a/.cursor/rules/webapp.mdc +++ b/.cursor/rules/webapp.mdc @@ -6,7 +6,7 @@ alwaysApply: false The main trigger.dev webapp, which powers it's API and dashboard and makes up the docker image that is produced as an OSS image, is a Remix 2.1.0 app that uses an express server, written in TypeScript. The following subsystems are either included in the webapp or are used by the webapp in another part of the monorepo: -- `@trigger.dev/database` exports a Prisma 5.4.1 client that is used extensively in the webapp to access a PostgreSQL instance. The schema file is [schema.prisma](mdc:internal-packages/database/prisma/schema.prisma) +- `@trigger.dev/database` exports a Prisma 6.14.0 client that is used extensively in the webapp to access a PostgreSQL instance. 
The schema file is [schema.prisma](mdc:internal-packages/database/prisma/schema.prisma) - `@trigger.dev/core` is a published package and is used to share code between the `@trigger.dev/sdk` and the webapp. It includes functionality but also a load of Zod schemas for data validation. When importing from `@trigger.dev/core` in the webapp, we never import the root `@trigger.dev/core` path, instead we favor one of the subpath exports that you can find in [package.json](mdc:packages/core/package.json) - `@internal/run-engine` has all the code needed to trigger a run and take it through its lifecycle to completion. - `@trigger.dev/redis-worker` is a custom redis based background job/worker system that's used in the webapp and also used inside the run engine. @@ -31,7 +31,10 @@ We originally built the Trigger.dev "Run Engine" not as a single system, but just spread around the webapp codebase. - The batch trigger API endpoint is [api.v1.tasks.batch.ts](mdc:apps/webapp/app/routes/api.v1.tasks.batch.ts) - Setup code for the prisma client is in [db.server.ts](mdc:apps/webapp/app/db.server.ts) - The run engine is configured in [runEngine.server.ts](mdc:apps/webapp/app/v3/runEngine.server.ts) -- All the "services" that are found in app/v3/services/**/*.server.ts - The code for the TaskEvent data, which is the otel data sent from tasks to our servers, is in both the [eventRepository.server.ts](mdc:apps/webapp/app/v3/eventRepository.server.ts) and also the [otlpExporter.server.ts](mdc:apps/webapp/app/v3/otlpExporter.server.ts). The otel endpoints which are hit from production and development otel exporters are [otel.v1.logs.ts](mdc:apps/webapp/app/routes/otel.v1.logs.ts) and [otel.v1.traces.ts](mdc:apps/webapp/app/routes/otel.v1.traces.ts) -- We use "presenters" to move more complex loader code into a class, and you can find those at app/v3/presenters/**/*.server.ts +- All the "services" that are found in app/v3/services/\*_/_.server.ts +- The code for the TaskEvent data, which is the otel data sent from tasks to our servers, is in both the [eventRepository.server.ts](mdc:apps/webapp/app/v3/eventRepository.server.ts) and also the [otlpExporter.server.ts](mdc:apps/webapp/app/v3/otlpExporter.server.ts). The otel endpoints which are hit from production and development otel exporters are [otel.v1.logs.ts](mdc:apps/webapp/app/routes/otel.v1.logs.ts) and [otel.v1.traces.ts](mdc:apps/webapp/app/routes/otel.v1.traces.ts) +- We use "presenters" to move more complex loader code into a class, and you can find those at app/v3/presenters/\*_/_.server.ts +- All the "services" that are found in app/v3/services/\*_/_.server.ts +- The code for the TaskEvent data, which is the otel data sent from tasks to our servers, is in both the [eventRepository.server.ts](mdc:apps/webapp/app/v3/eventRepository.server.ts) and also the [otlpExporter.server.ts](mdc:apps/webapp/app/v3/otlpExporter.server.ts). The otel endpoints which are hit from production and development otel exporters are [otel.v1.logs.ts](mdc:apps/webapp/app/routes/otel.v1.logs.ts) and [otel.v1.traces.ts](mdc:apps/webapp/app/routes/otel.v1.traces.ts) +- We use "presenters" to move more complex loader code into a class, and you can find those at app/v3/presenters/\*_/_.server.ts diff --git a/.cursor/rules/writing-tasks.mdc b/.cursor/rules/writing-tasks.mdc index 6090b85f09..5116d083e2 100644 --- a/.cursor/rules/writing-tasks.mdc +++ b/.cursor/rules/writing-tasks.mdc @@ -431,28 +431,6 @@ export async function POST(request: Request) { } ``` -### tasks.triggerAndPoll() - -Triggers a task and polls until completion. Not recommended for web requests as it blocks until the run completes. Consider using Realtime docs for better alternatives. 
- -```ts -import { tasks } from "@trigger.dev/sdk/v3"; -import type { emailSequence } from "~/trigger/emails"; - -export async function POST(request: Request) { - const data = await request.json(); - const result = await tasks.triggerAndPoll<typeof emailSequence>( - "email-sequence", - { - to: data.email, - name: data.name, - }, - { pollIntervalMs: 5000 } - ); - return Response.json(result); -} -``` - ### batch.trigger() Triggers multiple runs of different tasks at once, useful when you need to execute multiple tasks simultaneously. diff --git a/.dockerignore b/.dockerignore index d3f8720a9e..a3ea4db8ee 100644 --- a/.dockerignore +++ b/.dockerignore @@ -16,6 +16,8 @@ **/dist **/node_modules +**/generated/prisma + apps/webapp/build apps/webapp/public/build diff --git a/.env.example b/.env.example index cf1245b434..35c8c976ff 100644 --- a/.env.example +++ b/.env.example @@ -13,6 +13,14 @@ APP_ORIGIN=http://localhost:3030 ELECTRIC_ORIGIN=http://localhost:3060 NODE_ENV=development +# Clickhouse +CLICKHOUSE_URL=http://default:password@localhost:8123 +RUN_REPLICATION_CLICKHOUSE_URL=http://default:password@localhost:8123 +RUN_REPLICATION_ENABLED=1 + +# Set this to UTC because Node.js uses the system timezone +TZ="UTC" + # Redis is used for the v3 queuing and v2 concurrency control REDIS_HOST="localhost" REDIS_PORT="6379" @@ -26,9 +34,9 @@ DEPLOY_REGISTRY_HOST=localhost:5000 # OPTIONAL VARIABLES # This is used for validating emails that are allowed to log in. Every email that does not match this regex will be rejected. -# WHITELISTED_EMAILS="authorized@yahoo\.com|authorized@gmail\.com" +# WHITELISTED_EMAILS="^(authorized@yahoo\.com|authorized@gmail\.com)$" # Accounts with these emails will get global admin rights. This grants access to the admin UI. -# ADMIN_EMAILS="admin@example\.com|another-admin@example\.com" +# ADMIN_EMAILS="^(admin@example\.com|another-admin@example\.com)$" # This is used for logging in via GitHub. You can leave these commented out if you don't want to use GitHub for authentication. 
# AUTH_GITHUB_CLIENT_ID= # AUTH_GITHUB_CLIENT_SECRET= @@ -77,4 +85,10 @@ POSTHOG_PROJECT_KEY= # These control the server-side internal telemetry # INTERNAL_OTEL_TRACE_EXPORTER_URL= # INTERNAL_OTEL_TRACE_LOGGING_ENABLED=1 -# INTERNAL_OTEL_TRACE_INSTRUMENT_PRISMA_ENABLED=0, +# INTERNAL_OTEL_TRACE_INSTRUMENT_PRISMA_ENABLED=0 + +# Enable local observability stack (requires `pnpm run docker` to start otel-collector) +# Uncomment these to send metrics to the local Prometheus via OTEL Collector: +# INTERNAL_OTEL_METRIC_EXPORTER_ENABLED=1 +# INTERNAL_OTEL_METRIC_EXPORTER_URL=http://localhost:4318/v1/metrics +# INTERNAL_OTEL_METRIC_EXPORTER_INTERVAL_MS=15000 \ No newline at end of file diff --git a/.github/workflows/changesets-pr.yml b/.github/workflows/changesets-pr.yml new file mode 100644 index 0000000000..ec21972361 --- /dev/null +++ b/.github/workflows/changesets-pr.yml @@ -0,0 +1,102 @@ +name: ๐Ÿฆ‹ Changesets PR + +on: + push: + branches: + - main + paths: + - "packages/**" + - ".changeset/**" + - "package.json" + - "pnpm-lock.yaml" + +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true + +jobs: + release-pr: + name: Create Release PR + runs-on: ubuntu-latest + permissions: + contents: write + pull-requests: write + if: github.repository == 'triggerdotdev/trigger.dev' + steps: + - name: Checkout + uses: actions/checkout@v4 + with: + fetch-depth: 0 + + - name: Setup pnpm + uses: pnpm/action-setup@v4 + + - name: Setup node + uses: buildjet/setup-node@v4 + with: + node-version: 20.19.0 + cache: "pnpm" + + - name: Install dependencies + run: pnpm install --frozen-lockfile + + - name: Create release PR + id: changesets + uses: changesets/action@v1 + with: + version: pnpm run changeset:version + commit: "chore: release" + title: "chore: release" + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + + - name: Update PR title with version + if: steps.changesets.outputs.published != 'true' + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + run: | + PR_NUMBER=$(gh pr list --head changeset-release/main --json number --jq '.[0].number') + if [ -n "$PR_NUMBER" ]; then + git fetch origin changeset-release/main + # we arbitrarily reference the version of the cli package here; it is the same for all package releases + VERSION=$(git show origin/changeset-release/main:packages/cli-v3/package.json | jq -r '.version') + gh pr edit "$PR_NUMBER" --title "chore: release v$VERSION" + fi + + update-lockfile: + name: Update lockfile on release PR + runs-on: ubuntu-latest + needs: release-pr + permissions: + contents: write + steps: + - name: Checkout release branch + uses: actions/checkout@v4 + with: + ref: changeset-release/main + + - name: Setup pnpm + uses: pnpm/action-setup@v4 + with: + version: 10.23.0 + + - name: Setup node + uses: buildjet/setup-node@v4 + with: + node-version: 20.19.0 + + - name: Install and update lockfile + run: pnpm install --no-frozen-lockfile + + - name: Commit and push lockfile + run: | + set -e + if git diff --quiet pnpm-lock.yaml; then + echo "No lockfile changes" + else + git config user.name "github-actions[bot]" + git config user.email "github-actions[bot]@users.noreply.github.com" + git add pnpm-lock.yaml + git commit -m "chore: update lockfile for release" + git push origin changeset-release/main + fi diff --git a/.github/workflows/e2e.yml b/.github/workflows/e2e.yml index db78308a43..97170c2225 100644 --- a/.github/workflows/e2e.yml +++ b/.github/workflows/e2e.yml @@ -31,12 +31,12 @@ jobs: - name: โŽ” Setup pnpm uses: pnpm/action-setup@v4 with: - 
version: 8.15.5 + version: 10.23.0 - name: โŽ” Setup node uses: buildjet/setup-node@v4 with: - node-version: 20.11.1 + node-version: 20.19.0 - name: ๐Ÿ“ฅ Download deps run: pnpm install --frozen-lockfile --filter trigger.dev... diff --git a/.github/workflows/pr_checks.yml b/.github/workflows/pr_checks.yml index b00475ebfa..b6be1eddfa 100644 --- a/.github/workflows/pr_checks.yml +++ b/.github/workflows/pr_checks.yml @@ -5,6 +5,7 @@ on: types: [opened, synchronize, reopened] paths-ignore: - "docs/**" + - ".changeset/**" concurrency: group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }} diff --git a/.github/workflows/publish-webapp.yml b/.github/workflows/publish-webapp.yml index ed5a259a85..6fcc30209a 100644 --- a/.github/workflows/publish-webapp.yml +++ b/.github/workflows/publish-webapp.yml @@ -86,3 +86,8 @@ jobs: BUILD_GIT_SHA=${{ steps.set_build_info.outputs.BUILD_GIT_SHA }} BUILD_GIT_REF_NAME=${{ steps.set_build_info.outputs.BUILD_GIT_REF_NAME }} BUILD_TIMESTAMP_SECONDS=${{ steps.set_build_info.outputs.BUILD_TIMESTAMP_SECONDS }} + SENTRY_RELEASE=${{ steps.set_build_info.outputs.BUILD_GIT_SHA }} + SENTRY_ORG=triggerdev + SENTRY_PROJECT=trigger-cloud + secrets: | + sentry_auth_token=${{ secrets.SENTRY_AUTH_TOKEN }} diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml index 3acda8a7f0..6213499c5a 100644 --- a/.github/workflows/publish.yml +++ b/.github/workflows/publish.yml @@ -1,6 +1,7 @@ name: ๐Ÿš€ Publish Trigger.dev Docker on: + workflow_dispatch: workflow_call: inputs: image_tag: diff --git a/.github/workflows/release-helm.yml b/.github/workflows/release-helm.yml new file mode 100644 index 0000000000..c6efd382ff --- /dev/null +++ b/.github/workflows/release-helm.yml @@ -0,0 +1,143 @@ +name: ๐Ÿงญ Helm Chart Release + +on: + push: + tags: + - 'helm-v*' + workflow_dispatch: + inputs: + chart_version: + description: 'Chart version to release' + required: true + type: string + +env: + REGISTRY: ghcr.io + CHART_NAME: trigger + +jobs: + lint-and-test: + runs-on: ubuntu-latest + permissions: + contents: read + steps: + - name: Checkout + uses: actions/checkout@v4 + + - name: Set up Helm + uses: azure/setup-helm@v4 + with: + version: "3.18.3" + + - name: Build dependencies + run: helm dependency build ./hosting/k8s/helm/ + + - name: Extract dependency charts + run: | + cd ./hosting/k8s/helm/ + for file in ./charts/*.tgz; do echo "Extracting $file"; tar -xzf "$file" -C ./charts; done + + - name: Lint Helm Chart + run: | + helm lint ./hosting/k8s/helm/ + + - name: Render templates + run: | + helm template test-release ./hosting/k8s/helm/ \ + --values ./hosting/k8s/helm/values.yaml \ + --output-dir ./helm-output + + - name: Validate manifests + uses: docker://ghcr.io/yannh/kubeconform:v0.7.0 + with: + entrypoint: '/kubeconform' + args: "-summary -output json ./helm-output" + + release: + needs: lint-and-test + runs-on: ubuntu-latest + permissions: + contents: write # for gh-release + packages: write + steps: + - name: Checkout + uses: actions/checkout@v4 + + - name: Set up Helm + uses: azure/setup-helm@v4 + with: + version: "3.18.3" + + - name: Build dependencies + run: helm dependency build ./hosting/k8s/helm/ + + - name: Extract dependency charts + run: | + cd ./hosting/k8s/helm/ + for file in ./charts/*.tgz; do echo "Extracting $file"; tar -xzf "$file" -C ./charts; done + + - name: Log in to Container Registry + uses: docker/login-action@v3 + with: + registry: ${{ env.REGISTRY }} + username: ${{ github.actor }} + password: ${{ 
secrets.GITHUB_TOKEN }} + + - name: Extract version from tag or input + id: version + run: | + if [ "${{ github.event_name }}" = "workflow_dispatch" ]; then + VERSION="${{ github.event.inputs.chart_version }}" + else + VERSION="${{ github.ref_name }}" + VERSION="${VERSION#helm-v}" + fi + echo "version=$VERSION" >> $GITHUB_OUTPUT + echo "Releasing version: $VERSION" + + - name: Check Chart.yaml version matches release version + run: | + VERSION="${{ steps.version.outputs.version }}" + CHART_VERSION=$(grep '^version:' ./hosting/k8s/helm/Chart.yaml | awk '{print $2}') + echo "Chart.yaml version: $CHART_VERSION" + echo "Release version: $VERSION" + if [ "$CHART_VERSION" != "$VERSION" ]; then + echo "โŒ Chart.yaml version does not match release version!" + exit 1 + fi + echo "โœ… Chart.yaml version matches release version." + + - name: Package Helm Chart + run: | + helm package ./hosting/k8s/helm/ --destination /tmp/ + + - name: Push Helm Chart to GHCR + run: | + VERSION="${{ steps.version.outputs.version }}" + CHART_PACKAGE="/tmp/${{ env.CHART_NAME }}-${VERSION}.tgz" + + # Push to GHCR OCI registry + helm push "$CHART_PACKAGE" "oci://${{ env.REGISTRY }}/${{ github.repository_owner }}/charts" + + - name: Create GitHub Release + id: release + uses: softprops/action-gh-release@v1 + if: github.event_name == 'push' + with: + tag_name: ${{ github.ref_name }} + name: "Helm Chart ${{ steps.version.outputs.version }}" + body: | + ### Installation + ```bash + helm upgrade --install trigger \ + oci://${{ env.REGISTRY }}/${{ github.repository_owner }}/charts/${{ env.CHART_NAME }} \ + --version "${{ steps.version.outputs.version }}" + ``` + + ### Changes + See commit history for detailed changes in this release. + files: | + /tmp/${{ env.CHART_NAME }}-${{ steps.version.outputs.version }}.tgz + token: ${{ secrets.GITHUB_TOKEN }} + draft: true + prerelease: true diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 686240aaef..684d36dec3 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -1,98 +1,185 @@ name: ๐Ÿฆ‹ Changesets Release -permissions: - contents: write - on: - push: + pull_request: + types: [closed] branches: - main - paths-ignore: - - "docs/**" - - "**.md" - - ".github/CODEOWNERS" - - ".github/ISSUE_TEMPLATE/**" + workflow_dispatch: + inputs: + type: + description: "Select release type" + required: true + type: choice + options: + - release + - prerelease + default: "prerelease" + ref: + description: "The ref (branch, tag, or SHA) to checkout and release from" + required: true + type: string + prerelease_tag: + description: "The npm dist-tag for the prerelease (e.g., 'v4-prerelease')" + required: false + type: string + default: "prerelease" concurrency: - group: ${{ github.workflow }}-${{ github.ref }} - cancel-in-progress: true + group: ${{ github.workflow }} + cancel-in-progress: false jobs: + show-release-summary: + name: ๐Ÿ“‹ Release Summary + runs-on: ubuntu-latest + if: | + github.repository == 'triggerdotdev/trigger.dev' && + github.event_name == 'pull_request' && + github.event.pull_request.merged == true && + github.event.pull_request.head.ref == 'changeset-release/main' + steps: + - name: Show release summary + env: + PR_BODY: ${{ github.event.pull_request.body }} + run: | + echo "$PR_BODY" | sed -n '/^# Releases/,$p' >> $GITHUB_STEP_SUMMARY + release: - name: ๐Ÿฆ‹ Changesets Release + name: ๐Ÿš€ Release npm packages runs-on: ubuntu-latest + environment: npm-publish permissions: contents: write packages: write - pull-requests: 
write - if: github.repository == 'triggerdotdev/trigger.dev' + id-token: write + if: | + github.repository == 'triggerdotdev/trigger.dev' && + ( + (github.event_name == 'workflow_dispatch' && github.event.inputs.type == 'release') || + (github.event_name == 'pull_request' && github.event.pull_request.merged == true && github.event.pull_request.head.ref == 'changeset-release/main') + ) outputs: published: ${{ steps.changesets.outputs.published }} published_packages: ${{ steps.changesets.outputs.publishedPackages }} published_package_version: ${{ steps.get_version.outputs.package_version }} steps: - - name: โฌ‡๏ธ Checkout repo + - name: Checkout repo uses: actions/checkout@v4 with: fetch-depth: 0 + ref: ${{ github.event_name == 'workflow_dispatch' && github.event.inputs.ref || github.sha }} - - name: โŽ” Setup pnpm + - name: Verify ref is on main + if: github.event_name == 'workflow_dispatch' + run: | + if ! git merge-base --is-ancestor ${{ github.event.inputs.ref }} origin/main; then + echo "Error: ref must be an ancestor of main (i.e., already merged)" + exit 1 + fi + + - name: Setup pnpm uses: pnpm/action-setup@v4 with: - version: 8.15.5 + version: 10.23.0 - - name: โŽ” Setup node + - name: Setup node uses: buildjet/setup-node@v4 with: - node-version: 20.11.1 + node-version: 20.19.0 cache: "pnpm" - - name: ๐Ÿ“ฅ Download deps + # npm v11.5.1 or newer is required for OIDC support + # https://github.blog/changelog/2025-07-31-npm-trusted-publishing-with-oidc-is-generally-available/#whats-new + - name: Setup npm 11.x for OIDC + run: npm install -g npm@11.6.4 + + - name: Install dependencies run: pnpm install --frozen-lockfile - - name: ๐Ÿ“€ Generate Prisma Client + - name: Generate Prisma client run: pnpm run generate - - name: ๐Ÿ—๏ธ Build + - name: Build run: pnpm run build --filter "@trigger.dev/*" --filter "trigger.dev" - - name: ๐Ÿ”Ž Type check + - name: Type check run: pnpm run typecheck --filter "@trigger.dev/*" --filter "trigger.dev" - - name: ๐Ÿ” Setup npm auth - run: | - echo "registry=https://registry.npmjs.org" >> ~/.npmrc - echo "//registry.npmjs.org/:_authToken=${{ secrets.NPM_TOKEN }}" >> ~/.npmrc - - # This action has two responsibilities. The first time the workflow runs - # (initial push to the `main` branch) it will create a new branch and - # then open a PR with the related changes for the new version. After the - # PR is merged, the workflow will run again and this action will build + - # publish to npm. 
- - name: ๐Ÿš€ PR / Publish - if: ${{ !env.ACT }} + - name: Publish id: changesets uses: changesets/action@v1 with: - version: pnpm run changeset:version - commit: "chore: Update version for release" - title: "chore: Update version for release" publish: pnpm run changeset:release createGithubReleases: true env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - NPM_TOKEN: ${{ secrets.NPM_TOKEN }} - - # - name: ๐Ÿš€ PR / Publish (mock) - # if: ${{ env.ACT }} - # id: changesets - # run: | - # echo "published=true" >> "$GITHUB_OUTPUT" - # echo "publishedPackages=[{\"name\": \"@xx/xx\", \"version\": \"1.2.0\"}, {\"name\": \"@xx/xy\", \"version\": \"0.8.9\"}]" >> "$GITHUB_OUTPUT" - - name: ๐Ÿ“ฆ Get package version + - name: Show package version if: steps.changesets.outputs.published == 'true' id: get_version run: | package_version=$(echo '${{ steps.changesets.outputs.publishedPackages }}' | jq -r '.[0].version') echo "package_version=${package_version}" >> "$GITHUB_OUTPUT" + + # this triggers the publish workflow for the docker images + - name: Create and push Docker tag + if: steps.changesets.outputs.published == 'true' + run: | + set -e + git tag "v.docker.${{ steps.get_version.outputs.package_version }}" + git push origin "v.docker.${{ steps.get_version.outputs.package_version }}" + + # The prerelease job needs to be on the same workflow file due to a limitation related to how npm verifies OIDC claims. + prerelease: + name: ๐Ÿงช Prerelease + runs-on: ubuntu-latest + environment: npm-publish + permissions: + contents: read + id-token: write + if: github.repository == 'triggerdotdev/trigger.dev' && github.event_name == 'workflow_dispatch' && github.event.inputs.type == 'prerelease' + steps: + - name: Checkout repo + uses: actions/checkout@v4 + with: + fetch-depth: 0 + ref: ${{ github.event.inputs.ref }} + + - name: Setup pnpm + uses: pnpm/action-setup@v4 + with: + version: 10.23.0 + + - name: Setup node + uses: buildjet/setup-node@v4 + with: + node-version: 20.19.0 + cache: "pnpm" + + # npm v11.5.1 or newer is required for OIDC support + # https://github.blog/changelog/2025-07-31-npm-trusted-publishing-with-oidc-is-generally-available/#whats-new + - name: Setup npm 11.x for OIDC + run: npm install -g npm@11.6.4 + + - name: Download deps + run: pnpm install --frozen-lockfile + + - name: Generate Prisma Client + run: pnpm run generate + + - name: Snapshot version + run: pnpm exec changeset version --snapshot ${{ github.event.inputs.prerelease_tag }} + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + + - name: Clean + run: pnpm run clean --filter "@trigger.dev/*" --filter "trigger.dev" + + - name: Build + run: pnpm run build --filter "@trigger.dev/*" --filter "trigger.dev" + + - name: Publish prerelease + run: pnpm exec changeset publish --no-git-tag --snapshot --tag ${{ github.event.inputs.prerelease_tag }} + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/typecheck.yml b/.github/workflows/typecheck.yml index acd1fb3d64..3eb98e5177 100644 --- a/.github/workflows/typecheck.yml +++ b/.github/workflows/typecheck.yml @@ -19,12 +19,12 @@ jobs: - name: โŽ” Setup pnpm uses: pnpm/action-setup@v4 with: - version: 8.15.5 + version: 10.23.0 - name: โŽ” Setup node uses: buildjet/setup-node@v4 with: - node-version: 20.11.1 + node-version: 20.19.0 cache: "pnpm" - name: ๐Ÿ“ฅ Download deps @@ -35,6 +35,8 @@ jobs: - name: ๐Ÿ”Ž Type check run: pnpm run typecheck + env: + NODE_OPTIONS: --max-old-space-size=8192 - name: ๐Ÿ”Ž Check exports run: pnpm run check-exports diff --git 
a/.github/workflows/unit-tests-internal.yml b/.github/workflows/unit-tests-internal.yml index 5acac054a6..e903e0145a 100644 --- a/.github/workflows/unit-tests-internal.yml +++ b/.github/workflows/unit-tests-internal.yml @@ -53,12 +53,12 @@ jobs: - name: โŽ” Setup pnpm uses: pnpm/action-setup@v4 with: - version: 8.15.5 + version: 10.23.0 - name: โŽ” Setup node uses: buildjet/setup-node@v4 with: - node-version: 20.11.1 + node-version: 20.19.0 cache: "pnpm" # ..to avoid rate limits when pulling images @@ -111,12 +111,12 @@ jobs: - name: โŽ” Setup pnpm uses: pnpm/action-setup@v4 with: - version: 8.15.5 + version: 10.23.0 - name: โŽ” Setup node uses: buildjet/setup-node@v4 with: - node-version: 20.11.1 + node-version: 20.19.0 # no cache enabled, we're not installing deps - name: Download blob reports from GitHub Actions Artifacts diff --git a/.github/workflows/unit-tests-packages.yml b/.github/workflows/unit-tests-packages.yml index cfa5e88baa..d321037703 100644 --- a/.github/workflows/unit-tests-packages.yml +++ b/.github/workflows/unit-tests-packages.yml @@ -53,12 +53,12 @@ jobs: - name: โŽ” Setup pnpm uses: pnpm/action-setup@v4 with: - version: 8.15.5 + version: 10.23.0 - name: โŽ” Setup node uses: buildjet/setup-node@v4 with: - node-version: 20.11.1 + node-version: 20.19.0 cache: "pnpm" # ..to avoid rate limits when pulling images @@ -111,12 +111,12 @@ jobs: - name: โŽ” Setup pnpm uses: pnpm/action-setup@v4 with: - version: 8.15.5 + version: 10.23.0 - name: โŽ” Setup node uses: buildjet/setup-node@v4 with: - node-version: 20.11.1 + node-version: 20.19.0 # no cache enabled, we're not installing deps - name: Download blob reports from GitHub Actions Artifacts diff --git a/.github/workflows/unit-tests-webapp.yml b/.github/workflows/unit-tests-webapp.yml index e96af168c0..e587bb3891 100644 --- a/.github/workflows/unit-tests-webapp.yml +++ b/.github/workflows/unit-tests-webapp.yml @@ -12,8 +12,8 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - shardIndex: [1, 2, 3, 4, 5, 6, 7, 8, 9, 10] - shardTotal: [10] + shardIndex: [1, 2, 3, 4, 5, 6, 7, 8] + shardTotal: [8] env: DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }} SHARD_INDEX: ${{ matrix.shardIndex }} @@ -53,12 +53,12 @@ jobs: - name: โŽ” Setup pnpm uses: pnpm/action-setup@v4 with: - version: 8.15.5 + version: 10.23.0 - name: โŽ” Setup node uses: buildjet/setup-node@v4 with: - node-version: 20.11.1 + node-version: 20.19.0 cache: "pnpm" # ..to avoid rate limits when pulling images @@ -85,8 +85,9 @@ jobs: DIRECT_URL: postgresql://postgres:postgres@localhost:5432/postgres SESSION_SECRET: "secret" MAGIC_LINK_SECRET: "secret" - ENCRYPTION_KEY: "secret" + ENCRYPTION_KEY: "dummy-encryption-keeeey-32-bytes" DEPLOY_REGISTRY_HOST: "docker.io" + CLICKHOUSE_URL: "http://default:password@localhost:8123" - name: Gather all reports if: ${{ !cancelled() }} @@ -118,12 +119,12 @@ jobs: - name: โŽ” Setup pnpm uses: pnpm/action-setup@v4 with: - version: 8.15.5 + version: 10.23.0 - name: โŽ” Setup node uses: buildjet/setup-node@v4 with: - node-version: 20.11.1 + node-version: 20.19.0 # no cache enabled, we're not installing deps - name: Download blob reports from GitHub Actions Artifacts diff --git a/.gitignore b/.gitignore index 9bee46fc27..8267c9fbab 100644 --- a/.gitignore +++ b/.gitignore @@ -29,12 +29,10 @@ yarn-debug.log* yarn-error.log* # local env files -.env.docker +.env +.env.* .docker/*.env -.env.local -.env.development.local -.env.test.local -.env.production.local +!.env.example # turbo .turbo @@ -63,4 +61,6 @@ apps/**/public/build 
/packages/core/src/package.json /packages/trigger-sdk/src/package.json /packages/python/src/package.json -.claude \ No newline at end of file +.claude +.mcp.log +.cursor/debug.log \ No newline at end of file diff --git a/.npmrc b/.npmrc deleted file mode 100644 index c83a08985c..0000000000 --- a/.npmrc +++ /dev/null @@ -1,5 +0,0 @@ -link-workspace-packages=false -public-hoist-pattern[]=*prisma* -prefer-workspace-packages=true -update-notifier=false -side-effects-cache=false \ No newline at end of file diff --git a/.nvmrc b/.nvmrc index 2efc7e111f..3bf34c2761 100644 --- a/.nvmrc +++ b/.nvmrc @@ -1 +1 @@ -v20.11.1 \ No newline at end of file +v20.19.0 \ No newline at end of file diff --git a/.vscode/launch.json b/.vscode/launch.json index 6d08392086..d135aa70a2 100644 --- a/.vscode/launch.json +++ b/.vscode/launch.json @@ -59,7 +59,7 @@ "request": "launch", "name": "Debug V3 Dev CLI", "command": "pnpm exec trigger dev", - "cwd": "${workspaceFolder}/references/v3-catalog", + "cwd": "${workspaceFolder}/references/hello-world", "sourceMaps": true }, { @@ -83,7 +83,7 @@ "request": "launch", "name": "Debug V3 Deploy CLI", "command": "pnpm exec trigger deploy --self-hosted --load-image", - "cwd": "${workspaceFolder}/references/v3-catalog", + "cwd": "${workspaceFolder}/references/hello-world", "sourceMaps": true }, { @@ -91,7 +91,7 @@ "request": "launch", "name": "Debug V3 list-profiles CLI", "command": "pnpm exec trigger list-profiles --log-level debug", - "cwd": "${workspaceFolder}/references/v3-catalog", + "cwd": "${workspaceFolder}/references/hello-world", "sourceMaps": true }, { @@ -99,7 +99,7 @@ "request": "launch", "name": "Debug V3 update CLI", "command": "pnpm exec trigger update", - "cwd": "${workspaceFolder}/references/v3-catalog", + "cwd": "${workspaceFolder}/references/hello-world", "sourceMaps": true }, { @@ -107,7 +107,7 @@ "request": "launch", "name": "Debug V3 Management", "command": "pnpm run management", - "cwd": "${workspaceFolder}/references/v3-catalog", + "cwd": "${workspaceFolder}/references/hello-world", "sourceMaps": true }, { diff --git a/.vscode/settings.json b/.vscode/settings.json index f8a7bd0697..12aefeb358 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -6,5 +6,6 @@ "**/node_modules/**": true, "packages/cli-v3/e2e": true }, - "vitest.disableWorkspaceWarning": true + "vitest.disableWorkspaceWarning": true, + "typescript.experimental.useTsgo": false } diff --git a/AGENTS.md b/AGENTS.md index 11e926fe94..846c6d827c 100644 --- a/AGENTS.md +++ b/AGENTS.md @@ -13,7 +13,7 @@ This repository is a pnpm monorepo managed with Turbo. It contains multiple apps See `ai/references/repo.md` for a more complete explanation of the workspaces. ## Development setup -1. Install dependencies with `pnpm i` (pnpm `8.15.5` and Node.js `20.11.1` are required). +1. Install dependencies with `pnpm i` (pnpm `10.23.0` and Node.js `20.11.1` are required). 2. Copy `.env.example` to `.env` and generate a random 16 byte hex string for `ENCRYPTION_KEY` (`openssl rand -hex 16`). Update other secrets if needed. 3. Start the local services with Docker: ```bash diff --git a/CHANGESETS.md b/CHANGESETS.md index cf66007661..722fe64eb4 100644 --- a/CHANGESETS.md +++ b/CHANGESETS.md @@ -30,14 +30,16 @@ Please follow the best-practice of adding changesets in the same commit as the c ## Snapshot instructions -1. Delete the `.changeset/pre.json` file (if it exists) +1. Update the `.changeset/config.json` file to set the `"changelog"` field to this: -2. 
Do a temporary commit (do NOT push this, you should undo it after) +```json +"changelog": "@changesets/cli/changelog", +``` -3. Copy the `GITHUB_TOKEN` line from the .env file +2. Do a temporary commit (do NOT push this, you should undo it after) -4. Run `GITHUB_TOKEN=github_pat_12345 ./scripts/publish-prerelease.sh re2` +3. Run `./scripts/publish-prerelease.sh prerelease` -Make sure to replace the token with yours. `re2` is the tag that will be used for the pre-release. +You can choose a different tag if you want, but usually `prerelease` is fine. -5. Undo the commit where you deleted the pre.json file. +5. Undo the commit where you updated the config.json file. diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 709dcca47c..5924f89da3 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -15,7 +15,7 @@ branch are tagged into a release periodically. ### Prerequisites - [Node.js](https://nodejs.org/en) version 20.11.1 -- [pnpm package manager](https://pnpm.io/installation) version 8.15.5 +- [pnpm package manager](https://pnpm.io/installation) version 10.23.0 - [Docker](https://www.docker.com/get-started/) - [protobuf](https://github.com/protocolbuffers/protobuf) @@ -36,7 +36,7 @@ branch are tagged into a release periodically. ``` 3. Ensure you are on the correct version of Node.js (20.11.1). If you are using `nvm`, there is an `.nvmrc` file that will automatically select the correct version of Node.js when you navigate to the repository. -4. Run `corepack enable` to use the correct version of pnpm (`8.15.5`) as specified in the root `package.json` file. +4. Run `corepack enable` to use the correct version of pnpm (`10.23.0`) as specified in the root `package.json` file. 5. Install the required packages using pnpm. ``` @@ -84,17 +84,17 @@ branch are tagged into a release periodically. 2. Once the app is running click the magic link button and enter your email. You will automatically be logged in, since you are running locally. Create an Org and your first project in the dashboard. -## Manual testing using v3-catalog +## Manual testing using hello-world -We use the `/references/v3-catalog` subdirectory as a staging ground for testing changes to the SDK (`@trigger.dev/sdk` at `/packages/trigger-sdk`), the Core package (`@trigger.dev/core` at `packages/core`), the CLI (`trigger.dev` at `/packages/cli-v3`) and the platform (The remix app at `/apps/webapp`). The instructions below will get you started on using the `v3-catalog` for local development of Trigger.dev (v3). +We use the `/references/hello-world` subdirectory as a staging ground for testing changes to the SDK (`@trigger.dev/sdk` at `/packages/trigger-sdk`), the Core package (`@trigger.dev/core` at `packages/core`), the CLI (`trigger.dev` at `/packages/cli-v3`) and the platform (The remix app at `/apps/webapp`). The instructions below will get you started on using the `hello-world` for local development of Trigger.dev. ### First-time setup First, make sure you are running the webapp according to the instructions above. Then: -1. Visit http://localhost:3030 in your browser and create a new V3 project called "v3-catalog". +1. Visit http://localhost:3030 in your browser and create a new V3 project called "hello-world". -2. In Postgres go to the "Projects" table and for the project you create change the `externalRef` to `yubjwjsfkxnylobaqvqz`. +2. In Postgres go to the "Projects" table and for the project you create change the `externalRef` to `proj_rrkpdguyagvsoktglnod`. 3. 
Build the CLI @@ -105,10 +105,10 @@ pnpm run build --filter trigger.dev pnpm i ``` -4. Change into the `/references/v3-catalog` directory and authorize the CLI to the local server: +4. Change into the `/references/hello-world` directory and authorize the CLI to the local server: ```sh -cd references/v3-catalog +cd references/hello-world cp .env.example .env pnpm exec trigger login -a http://localhost:3030 ``` @@ -118,7 +118,7 @@ This will open a new browser window and authorize the CLI against your local use You can optionally pass a `--profile` flag to the `login` command, which will allow you to use the CLI with separate accounts/servers. We suggest using a profile called `local` for your local development: ```sh -cd references/v3-catalog +cd references/hello-world pnpm exec trigger login -a http://localhost:3030 --profile local # later when you run the dev or deploy command: pnpm exec trigger dev --profile local @@ -137,84 +137,29 @@ The following steps should be followed any time you start working on a new featu pnpm run dev --filter trigger.dev --filter "@trigger.dev/*" ``` -3. Open another terminal window, and change into the `/references/v3-catalog` directory. +3. Open another terminal window, and change into the `/references/hello-world` directory. -4. You'll need to run the following commands to setup prisma and migrate the database: +4. Run the `dev` command, which will register all the local tasks with the platform and allow you to start testing task execution: ```sh -pnpm exec prisma migrate deploy -pnpm run generate:prisma -``` - -5. Run the `dev` command, which will register all the local tasks with the platform and allow you to start testing task execution: - -```sh -# in /references/v3-catalog +# in /references/hello-world pnpm exec trigger dev ``` If you want additional debug logging, you can use the `--log-level debug` flag: ```sh -# in /references/v3-catalog +# in /references/hello-world pnpm exec trigger dev --log-level debug ``` -6. If you make any changes in the CLI/Core/SDK, you'll need to `CTRL+C` to exit the `dev` command and restart it to pickup changes. Any changes to the files inside of the `v3-catalog/src/trigger` dir will automatically be rebuilt by the `dev` command. - -7. Navigate to the `v3-catalog` project in your local dashboard at localhost:3030 and you should see the list of tasks. - -8. Go to the "Test" page in the sidebar and select a task. Then enter a payload and click "Run test". You can tell what the payloads should be by looking at the relevant task file inside the `/references/v3-catalog/src/trigger` folder. Many of them accept an empty payload. - -9. Feel free to add additional files in `v3-catalog/src/trigger` to test out specific aspects of the system, or add in edge cases. - -## Running end-to-end webapp tests (deprecated) +6. If you make any changes in the CLI/Core/SDK, you'll need to `CTRL+C` to exit the `dev` command and restart it to pickup changes. Any changes to the files inside of the `hello-world/src/trigger` dir will automatically be rebuilt by the `dev` command. -To run the end-to-end tests, follow the steps below: - -1. Set up environment variables (copy example envs into the correct place) - -```sh -cp ./.env.example ./.env -cp ./references/nextjs-test/.env.example ./references/nextjs-test/.env.local -``` - -2. Set up dependencies - -```sh -# Build packages -pnpm run build --filter @references/nextjs-test^... -pnpm --filter @trigger.dev/database generate +7. 
Navigate to the `hello-world` project in your local dashboard at localhost:3030 and you should see the list of tasks. -# Move trigger-cli bin to correct place -pnpm install --frozen-lockfile +8. Go to the "Test" page in the sidebar and select a task. Then enter a payload and click "Run test". You can tell what the payloads should be by looking at the relevant task file inside the `/references/hello-world/src/trigger` folder. Many of them accept an empty payload. -# Install playwrite browsers (ONE TIME ONLY) -npx playwright install -``` - -3. Set up the database - -```sh -pnpm run docker -pnpm run db:migrate -pnpm run db:seed -``` - -4. Run the end-to-end tests - -```sh -pnpm run test:e2e -``` - -### Cleanup - -The end-to-end tests use a `setup` and `teardown` script to seed the database with test data. If the test runner doesn't exit cleanly, then the database can be left in a state where the tests can't run because the `setup` script will try to create data that already exists. If this happens, you can manually delete the `users` and `organizations` from the database using prisma studio: - -```sh -# With the database running (i.e. pnpm run docker) -pnpm run db:studio -``` +9. Feel free to add additional files in `hello-world/src/trigger` to test out specific aspects of the system, or add in edge cases. ## Adding and running migrations diff --git a/README.md b/README.md index dab0551dc0..0d7f1ca293 100644 --- a/README.md +++ b/README.md @@ -1,44 +1,76 @@
- - - - Trigger.dev logo - - -### Open source background jobs and AI infrastructure -[Discord](https://trigger.dev/discord) | [Website](https://trigger.dev) | [Issues](https://github.com/triggerdotdev/trigger.dev/issues) | [Docs](https://trigger.dev/docs) +![Trigger.dev logo](https://content.trigger.dev/github-header-banner.jpg) -[![Twitter](https://img.shields.io/twitter/url/https/twitter.com/triggerdotdev.svg?style=social&label=Follow%20%40trigger.dev)](https://twitter.com/triggerdotdev) +### Build and deploy fullyโ€‘managed AI agents and workflows + +[Website](https://trigger.dev) | [Docs](https://trigger.dev/docs) | [Issues](https://github.com/triggerdotdev/trigger.dev/issues) | [Example projects](https://github.com/triggerdotdev/examples) | [Feature requests](https://triggerdev.featurebase.app/) | [Public roadmap](https://triggerdev.featurebase.app/roadmap) | [Self-hosting](https://trigger.dev/docs/self-hosting/overview) + +[![Open Source](https://img.shields.io/badge/Open%20Source-%E2%9D%A4-red.svg)](https://github.com/triggerdotdev/trigger.dev) +[![License](https://img.shields.io/badge/license-Apache%202.0-blue.svg)](https://github.com/triggerdotdev/trigger.dev/blob/main/LICENSE) +[![npm](https://img.shields.io/npm/v/@trigger.dev/sdk.svg?label=npm)](https://www.npmjs.com/package/@trigger.dev/sdk) +[![SDK downloads](https://img.shields.io/npm/dm/@trigger.dev/sdk.svg?label=SDK%20downloads)](https://www.npmjs.com/package/@trigger.dev/sdk) + +[![Twitter Follow](https://img.shields.io/twitter/follow/triggerdotdev?style=social)](https://twitter.com/triggerdotdev) +[![Discord](https://img.shields.io/discord/1066956501299777596?logo=discord&logoColor=white&color=7289da)](https://discord.gg/nkqV9xBYWy) +[![Ask DeepWiki](https://deepwiki.com/badge.svg)](https://deepwiki.com/triggerdotdev/trigger.dev) +[![GitHub stars](https://img.shields.io/github/stars/triggerdotdev/trigger.dev?style=social)](https://github.com/triggerdotdev/trigger.dev)
## About Trigger.dev -Trigger.dev is an open source platform and SDK which allows you to create long-running background jobs. Write normal async code, deploy, and never hit a timeout. +Trigger.dev is the open-source platform for building AI workflows in TypeScript. Long-running tasks with retries, queues, observability, and elastic scaling. + +## The platform designed for building AI agents + +Build [AI agents](https://trigger.dev/product/ai-agents) using all the frameworks, services and LLMs you're used to, deploy them to Trigger.dev and get durable, long-running tasks with retries, queues, observability, and elastic scaling out of the box. + +- **Long-running without timeouts**: Execute your tasks with absolutely no timeouts, unlike AWS Lambda, Vercel, and other serverless platforms. + +- **Durability, retries & queues**: Build rock solid agents and AI applications using our durable tasks, retries, queues and idempotency. -### Key features: +- **True runtime freedom**: Customize your deployed tasks with system packages โ€“ run browsers, Python scripts, FFmpeg and more. -- JavaScript and TypeScript SDK -- No timeouts -- Retries (with exponential backoff) -- Queues and concurrency controls -- Schedules and crons -- Full Observability; logs, live trace views, advanced filtering -- React hooks to interact with the Trigger API from your React app -- Pipe LLM streams straight to your users through the Realtime API -- Trigger tasks and display the run status and metadata anywhere in your app -- Custom alerts, get notified by email, Slack or webhooks -- No infrastructure to manage -- Elastic (scaling) -- Works with your existing tech stack +- **Human-in-the-loop**: Programmatically pause your tasks until a human can approve, reject or give feedback. -## In your codebase +- **Realtime apps & streaming**: Move your background jobs to the foreground by subscribing to runs or streaming AI responses to your app. + +- **Observability & monitoring**: Each run has full tracing and logs. Configure error alerts to catch bugs fast. + +## Key features: + +- **[JavaScript and TypeScript SDK](https://trigger.dev/docs/tasks/overview)** - Build background tasks using familiar programming models +- **[Long-running tasks](https://trigger.dev/docs/runs/max-duration)** - Handle resource-heavy tasks without timeouts +- **[Durable cron schedules](https://trigger.dev/docs/tasks/scheduled#scheduled-tasks-cron)** - Create and attach recurring schedules of up to a year +- **[Trigger.dev Realtime](https://trigger.dev/docs/realtime/overview)** - Trigger, subscribe to, and get real-time updates for runs, with LLM streaming support +- **[Build extensions](https://trigger.dev/docs/config/extensions/overview#build-extensions)** - Hook directly into the build system and customize the build process. Run Python scripts, FFmpeg, browsers, and more. 
+- **[React hooks](https://trigger.dev/docs/frontend/react-hooks#react-hooks)** - Interact with the Trigger.dev API on your frontend using our React hooks package +- **[Batch triggering](https://trigger.dev/docs/triggering#tasks-batchtrigger)** - Use batchTrigger() to initiate multiple runs of a task with custom payloads and options +- **[Structured inputs / outputs](https://trigger.dev/docs/tasks/schemaTask#schematask)** - Define precise data schemas for your tasks with runtime payload validation +- **[Waits](https://trigger.dev/docs/wait)** - Add waits to your tasks to pause execution for a specified duration +- **[Preview branches](https://trigger.dev/docs/deployment/preview-branches)** - Create isolated environments for testing and development. Integrates with Vercel and git workflows +- **[Waitpoints](https://trigger.dev/docs/wait-for-token#wait-for-token)** - Add human-in-the-loop judgment at critical decision points without disrupting workflow +- **[Concurrency & queues](https://trigger.dev/docs/queue-concurrency#concurrency-and-queues)** - Set concurrency rules to manage how multiple tasks execute +- **[Multiple environments](https://trigger.dev/docs/how-it-works#dev-mode)** - Support for DEV, PREVIEW, STAGING, and PROD environments +- **[No infrastructure to manage](https://trigger.dev/docs/how-it-works#trigger-dev-architecture)** - Auto-scaling infrastructure that eliminates timeouts and server management +- **[Automatic retries](https://trigger.dev/docs/errors-retrying)** - If your task encounters an uncaught error, we automatically attempt to run it again +- **[Checkpointing](https://trigger.dev/docs/how-it-works#the-checkpoint-resume-system)** - Tasks are inherently durable, thanks to our checkpointing feature +- **[Versioning](https://trigger.dev/docs/versioning)** - Atomic versioning allows you to deploy new versions without affecting running tasks +- **[Machines](https://trigger.dev/docs/machines)** - Configure the number of vCPUs and GBs of RAM you want the task to use +- **[Observability & monitoring](https://trigger.dev/product/observability-and-monitoring)** - Monitor every aspect of your tasks' performance with comprehensive logging and visualization tools +- **[Logging & tracing](https://trigger.dev/docs/logging)** - Comprehensive logging and tracing for all your tasks +- **[Tags](https://trigger.dev/docs/tags#tags)** - Attach up to ten tags to each run, allowing you to filter via the dashboard, realtime, and the SDK +- **[Run metadata](https://trigger.dev/docs/runs/metadata#run-metadata)** - Attach metadata to runs which updates as the run progresses and is available to use in your frontend for live updates +- **[Bulk actions](https://trigger.dev/docs/bulk-actions)** - Perform actions on multiple runs simultaneously, including replaying and cancelling +- **[Real-time alerts](https://trigger.dev/docs/troubleshooting-alerts#alerts)** - Choose your preferred notification method for run failures and deployments + +## Write tasks in your codebase Create tasks where they belong: in your codebase. Version control, localhost, test and review like you're already used to. ```ts -import { task } from "@trigger.dev/sdk/v3"; +import { task } from "@trigger.dev/sdk"; //1. You need to export each task export const helloWorld = task({ @@ -58,13 +90,13 @@ Use our SDK to write tasks in your codebase. 
There's no infrastructure to manage ## Environments -We support `Development`, `Staging`, and `Production` environments, allowing you to test your tasks before deploying them to production. +We support `Development`, `Staging`, `Preview`, and `Production` environments, allowing you to test your tasks before deploying them to production. ## Full visibility of every job run View every task in every run so you can tell exactly what happened. We provide a full trace view of every task run so you can see what happened at every step. -![Trace view image](https://imagedelivery.net/3TbraffuDZ4aEf8KWOmI_w/7c1b347f-004c-4482-38a7-3f6fa9c00d00/public) +![Trace view image](https://content.trigger.dev/trace-view.png) # Getting started @@ -73,14 +105,19 @@ The quickest way to get started is to create an account and project in our [web ### Useful links: - [Quick start](https://trigger.dev/docs/quick-start) - get up and running in minutes -- [How it works](https://trigger.dev/docs/v3/how-it-works) - understand how Trigger.dev works under the hood +- [How it works](https://trigger.dev/docs/how-it-works) - understand how Trigger.dev works under the hood - [Guides and examples](https://trigger.dev/docs/guides/introduction) - walk-through guides and code examples for popular frameworks and use cases ## Self-hosting -If you prefer to self-host Trigger.dev, you can follow our [self-hosting guide](https://trigger.dev/docs/v3/open-source-self-hosting#overview). +If you prefer to self-host Trigger.dev, you can follow our [self-hosting guides](https://trigger.dev/docs/self-hosting/overview): + +- [Docker self-hosting guide](https://trigger.dev/docs/self-hosting/docker) - use Docker Compose to spin up a Trigger.dev instance +- [Kubernetes self-hosting guide](https://trigger.dev/docs/self-hosting/kubernetes) - use our official Helm chart to deploy Trigger.dev to your Kubernetes cluster + +## Support and community -We also have a dedicated self-hosting channel in our [Discord server](https://trigger.dev/discord) for support. +We have a large active community in our official [Discord server](https://trigger.dev/discord) for support, including a dedicated channel for self-hosting. ## Development diff --git a/ai/references/migrations.md b/ai/references/migrations.md new file mode 100644 index 0000000000..c6fbf79e9d --- /dev/null +++ b/ai/references/migrations.md @@ -0,0 +1,121 @@ +## Creating and applying migrations + +We use prisma migrations to manage the database schema. Please follow the following steps when editing the `internal-packages/database/prisma/schema.prisma` file: + +Edit the `schema.prisma` file to add or modify the schema. + +Create a new migration file but don't apply it yet: + +```bash +cd internal-packages/database +pnpm run db:migrate:dev:create --name "add_new_column_to_table" +``` + +The migration file will be created in the `prisma/migrations` directory, but it will have a bunch of edits to the schema that are not needed and will need to be removed before we can apply the migration. 
Here's an example of what the migration file might look like: + +```sql +-- AlterEnum +ALTER TYPE "public"."TaskRunExecutionStatus" ADD VALUE 'DELAYED'; + +-- AlterTable +ALTER TABLE "public"."TaskRun" ADD COLUMN "debounce" JSONB; + +-- AlterTable +ALTER TABLE "public"."_BackgroundWorkerToBackgroundWorkerFile" ADD CONSTRAINT "_BackgroundWorkerToBackgroundWorkerFile_AB_pkey" PRIMARY KEY ("A", "B"); + +-- DropIndex +DROP INDEX "public"."_BackgroundWorkerToBackgroundWorkerFile_AB_unique"; + +-- AlterTable +ALTER TABLE "public"."_BackgroundWorkerToTaskQueue" ADD CONSTRAINT "_BackgroundWorkerToTaskQueue_AB_pkey" PRIMARY KEY ("A", "B"); + +-- DropIndex +DROP INDEX "public"."_BackgroundWorkerToTaskQueue_AB_unique"; + +-- AlterTable +ALTER TABLE "public"."_TaskRunToTaskRunTag" ADD CONSTRAINT "_TaskRunToTaskRunTag_AB_pkey" PRIMARY KEY ("A", "B"); + +-- DropIndex +DROP INDEX "public"."_TaskRunToTaskRunTag_AB_unique"; + +-- AlterTable +ALTER TABLE "public"."_WaitpointRunConnections" ADD CONSTRAINT "_WaitpointRunConnections_AB_pkey" PRIMARY KEY ("A", "B"); + +-- DropIndex +DROP INDEX "public"."_WaitpointRunConnections_AB_unique"; + +-- AlterTable +ALTER TABLE "public"."_completedWaitpoints" ADD CONSTRAINT "_completedWaitpoints_AB_pkey" PRIMARY KEY ("A", "B"); + +-- DropIndex +DROP INDEX "public"."_completedWaitpoints_AB_unique"; + +-- CreateIndex +CREATE INDEX "SecretStore_key_idx" ON "public"."SecretStore"("key" text_pattern_ops); + +-- CreateIndex +CREATE INDEX "TaskRun_runtimeEnvironmentId_id_idx" ON "public"."TaskRun"("runtimeEnvironmentId", "id" DESC); + +-- CreateIndex +CREATE INDEX "TaskRun_runtimeEnvironmentId_createdAt_idx" ON "public"."TaskRun"("runtimeEnvironmentId", "createdAt" DESC); +``` + +All the following lines should be removed: + +```sql +-- AlterTable +ALTER TABLE "public"."_BackgroundWorkerToBackgroundWorkerFile" ADD CONSTRAINT "_BackgroundWorkerToBackgroundWorkerFile_AB_pkey" PRIMARY KEY ("A", "B"); + +-- DropIndex +DROP INDEX "public"."_BackgroundWorkerToBackgroundWorkerFile_AB_unique"; + +-- AlterTable +ALTER TABLE "public"."_BackgroundWorkerToTaskQueue" ADD CONSTRAINT "_BackgroundWorkerToTaskQueue_AB_pkey" PRIMARY KEY ("A", "B"); + +-- DropIndex +DROP INDEX "public"."_BackgroundWorkerToTaskQueue_AB_unique"; + +-- AlterTable +ALTER TABLE "public"."_TaskRunToTaskRunTag" ADD CONSTRAINT "_TaskRunToTaskRunTag_AB_pkey" PRIMARY KEY ("A", "B"); + +-- DropIndex +DROP INDEX "public"."_TaskRunToTaskRunTag_AB_unique"; + +-- AlterTable +ALTER TABLE "public"."_WaitpointRunConnections" ADD CONSTRAINT "_WaitpointRunConnections_AB_pkey" PRIMARY KEY ("A", "B"); + +-- DropIndex +DROP INDEX "public"."_WaitpointRunConnections_AB_unique"; + +-- AlterTable +ALTER TABLE "public"."_completedWaitpoints" ADD CONSTRAINT "_completedWaitpoints_AB_pkey" PRIMARY KEY ("A", "B"); + +-- DropIndex +DROP INDEX "public"."_completedWaitpoints_AB_unique"; + +-- CreateIndex +CREATE INDEX "SecretStore_key_idx" ON "public"."SecretStore"("key" text_pattern_ops); + +-- CreateIndex +CREATE INDEX "TaskRun_runtimeEnvironmentId_id_idx" ON "public"."TaskRun"("runtimeEnvironmentId", "id" DESC); + +-- CreateIndex +CREATE INDEX "TaskRun_runtimeEnvironmentId_createdAt_idx" ON "public"."TaskRun"("runtimeEnvironmentId", "createdAt" DESC); +``` + +Leaving only this: + +```sql +-- AlterEnum +ALTER TYPE "public"."TaskRunExecutionStatus" ADD VALUE 'DELAYED'; + +-- AlterTable +ALTER TABLE "public"."TaskRun" ADD COLUMN "debounce" JSONB; +``` + +After editing the migration file, apply the migration: + +```bash +cd 
internal-packages/database +pnpm run db:migrate:deploy && pnpm run generate +``` diff --git a/ai/references/repo.md b/ai/references/repo.md index 0e9b49b460..4f67bde2b4 100644 --- a/ai/references/repo.md +++ b/ai/references/repo.md @@ -1,6 +1,6 @@ ## Repo Overview -This is a pnpm 8.15.5 monorepo that uses turborepo @turbo.json. The following workspaces are relevant +This is a pnpm 10.23.0 monorepo that uses turborepo @turbo.json. The following workspaces are relevant ## Apps diff --git a/apps/supervisor/Containerfile b/apps/supervisor/Containerfile index b0b1ba9271..d5bb5862e9 100644 --- a/apps/supervisor/Containerfile +++ b/apps/supervisor/Containerfile @@ -5,7 +5,7 @@ WORKDIR /app FROM node-22-alpine AS pruner COPY --chown=node:node . . -RUN npx -q turbo@1.10.9 prune --scope=supervisor --docker +RUN npx -q turbo@2.5.4 prune --scope=supervisor --docker FROM node-22-alpine AS base @@ -16,7 +16,7 @@ COPY --from=pruner --chown=node:node /app/out/json/ . COPY --from=pruner --chown=node:node /app/out/pnpm-lock.yaml ./pnpm-lock.yaml COPY --from=pruner --chown=node:node /app/out/pnpm-workspace.yaml ./pnpm-workspace.yaml -RUN corepack enable && corepack prepare --activate +RUN corepack enable && corepack prepare pnpm@10.23.0 --activate FROM base AS deps-fetcher RUN apk add --no-cache python3-dev py3-setuptools make g++ gcc linux-headers @@ -37,7 +37,7 @@ COPY --chown=node:node scripts/updateVersion.ts scripts/updateVersion.ts RUN pnpm run generate && \ pnpm run --filter supervisor... build&& \ - pnpm deploy --filter=supervisor --prod /prod/supervisor + pnpm deploy --legacy --filter=supervisor --prod /prod/supervisor FROM base AS runner diff --git a/apps/supervisor/package.json b/apps/supervisor/package.json index ae36549272..e9609bf154 100644 --- a/apps/supervisor/package.json +++ b/apps/supervisor/package.json @@ -13,13 +13,14 @@ "typecheck": "tsc --noEmit" }, "dependencies": { + "@aws-sdk/client-ecr": "^3.839.0", "@kubernetes/client-node": "^1.0.0", "@trigger.dev/core": "workspace:*", "dockerode": "^4.0.6", "prom-client": "^15.1.0", "socket.io": "4.7.4", "std-env": "^3.8.0", - "zod": "3.23.8" + "zod": "3.25.76" }, "devDependencies": { "@types/dockerode": "^3.3.33" diff --git a/apps/supervisor/src/env.ts b/apps/supervisor/src/env.ts index fd6bd61050..1605a21637 100644 --- a/apps/supervisor/src/env.ts +++ b/apps/supervisor/src/env.ts @@ -15,7 +15,7 @@ const Env = z.object({ OTEL_EXPORTER_OTLP_ENDPOINT: z.string().url(), // set on the runners // Workload API settings (coordinator mode) - the workload API is what the run controller connects to - TRIGGER_WORKLOAD_API_ENABLED: BoolEnv.default("true"), + TRIGGER_WORKLOAD_API_ENABLED: BoolEnv.default(true), TRIGGER_WORKLOAD_API_PROTOCOL: z .string() .transform((s) => z.enum(["http", "https"]).parse(s.toLowerCase())) @@ -32,11 +32,19 @@ const Env = z.object({ RUNNER_PRETTY_LOGS: BoolEnv.default(false), // Dequeue settings (provider mode) - TRIGGER_DEQUEUE_ENABLED: BoolEnv.default("true"), + TRIGGER_DEQUEUE_ENABLED: BoolEnv.default(true), TRIGGER_DEQUEUE_INTERVAL_MS: z.coerce.number().int().default(250), TRIGGER_DEQUEUE_IDLE_INTERVAL_MS: z.coerce.number().int().default(1000), - TRIGGER_DEQUEUE_MAX_RUN_COUNT: z.coerce.number().int().default(10), - TRIGGER_DEQUEUE_MAX_CONSUMER_COUNT: z.coerce.number().int().default(1), + TRIGGER_DEQUEUE_MAX_RUN_COUNT: z.coerce.number().int().default(1), + TRIGGER_DEQUEUE_MIN_CONSUMER_COUNT: z.coerce.number().int().default(1), + TRIGGER_DEQUEUE_MAX_CONSUMER_COUNT: z.coerce.number().int().default(10), + 
TRIGGER_DEQUEUE_SCALING_STRATEGY: z.enum(["none", "smooth", "aggressive"]).default("none"), + TRIGGER_DEQUEUE_SCALING_UP_COOLDOWN_MS: z.coerce.number().int().default(5000), // 5 seconds + TRIGGER_DEQUEUE_SCALING_DOWN_COOLDOWN_MS: z.coerce.number().int().default(30000), // 30 seconds + TRIGGER_DEQUEUE_SCALING_TARGET_RATIO: z.coerce.number().default(1.0), // Target ratio of queue items to consumers (1.0 = 1 item per consumer) + TRIGGER_DEQUEUE_SCALING_EWMA_ALPHA: z.coerce.number().min(0).max(1).default(0.3), // Smooths queue length measurements (0=historical, 1=current) + TRIGGER_DEQUEUE_SCALING_BATCH_WINDOW_MS: z.coerce.number().int().positive().default(1000), // Batch window for metrics processing (ms) + TRIGGER_DEQUEUE_SCALING_DAMPING_FACTOR: z.coerce.number().min(0).max(1).default(0.7), // Smooths consumer count changes after EWMA (0=no scaling, 1=immediate) // Optional services TRIGGER_WARM_START_URL: z.string().optional(), @@ -49,7 +57,7 @@ const Env = z.object({ RESOURCE_MONITOR_OVERRIDE_MEMORY_TOTAL_GB: z.coerce.number().optional(), // Docker settings - DOCKER_API_VERSION: z.string().default("v1.41"), + DOCKER_API_VERSION: z.string().optional(), DOCKER_PLATFORM: z.string().optional(), // e.g. linux/amd64, linux/arm64 DOCKER_STRIP_IMAGE_DIGEST: BoolEnv.default(true), DOCKER_REGISTRY_USERNAME: z.string().optional(), @@ -76,6 +84,17 @@ const Env = z.object({ KUBERNETES_IMAGE_PULL_SECRETS: z.string().optional(), // csv KUBERNETES_EPHEMERAL_STORAGE_SIZE_LIMIT: z.string().default("10Gi"), KUBERNETES_EPHEMERAL_STORAGE_SIZE_REQUEST: z.string().default("2Gi"), + KUBERNETES_STRIP_IMAGE_DIGEST: BoolEnv.default(false), + KUBERNETES_CPU_REQUEST_MIN_CORES: z.coerce.number().min(0).default(0), + KUBERNETES_CPU_REQUEST_RATIO: z.coerce.number().min(0).max(1).default(0.75), // Ratio of CPU limit, so 0.75 = 75% of CPU limit + KUBERNETES_MEMORY_REQUEST_MIN_GB: z.coerce.number().min(0).default(0), + KUBERNETES_MEMORY_REQUEST_RATIO: z.coerce.number().min(0).max(1).default(1), // Ratio of memory limit, so 1 = 100% of memory limit + KUBERNETES_MEMORY_OVERHEAD_GB: z.coerce.number().min(0).optional(), // Optional memory overhead to add to the limit in GB + KUBERNETES_SCHEDULER_NAME: z.string().optional(), // Custom scheduler name for pods + + // Placement tags settings + PLACEMENT_TAGS_ENABLED: BoolEnv.default(false), + PLACEMENT_TAGS_PREFIX: z.string().default("node.cluster.x-k8s.io"), // Metrics METRICS_ENABLED: BoolEnv.default(true), diff --git a/apps/supervisor/src/envUtil.ts b/apps/supervisor/src/envUtil.ts index 95d44d6c45..917f984cc3 100644 --- a/apps/supervisor/src/envUtil.ts +++ b/apps/supervisor/src/envUtil.ts @@ -3,7 +3,7 @@ import { SimpleStructuredLogger } from "@trigger.dev/core/v3/utils/structuredLog const logger = new SimpleStructuredLogger("env-util"); -export const BoolEnv = z.preprocess((val) => { +const baseBoolEnv = z.preprocess((val) => { if (typeof val !== "string") { return val; } @@ -11,6 +11,11 @@ export const BoolEnv = z.preprocess((val) => { return ["true", "1"].includes(val.toLowerCase().trim()); }, z.boolean()); +// Create a type-safe version that only accepts boolean defaults +export const BoolEnv = baseBoolEnv as Omit & { + default: (value: boolean) => z.ZodDefault; +}; + export const AdditionalEnvVars = z.preprocess((val) => { if (typeof val !== "string") { return val; diff --git a/apps/supervisor/src/index.ts b/apps/supervisor/src/index.ts index 83fe89c1ed..0e274b3039 100644 --- a/apps/supervisor/src/index.ts +++ b/apps/supervisor/src/index.ts @@ -128,7 +128,18 @@ class 
ManagedSupervisor { dequeueIdleIntervalMs: env.TRIGGER_DEQUEUE_IDLE_INTERVAL_MS, queueConsumerEnabled: env.TRIGGER_DEQUEUE_ENABLED, maxRunCount: env.TRIGGER_DEQUEUE_MAX_RUN_COUNT, - maxConsumerCount: env.TRIGGER_DEQUEUE_MAX_CONSUMER_COUNT, + metricsRegistry: register, + scaling: { + strategy: env.TRIGGER_DEQUEUE_SCALING_STRATEGY, + minConsumerCount: env.TRIGGER_DEQUEUE_MIN_CONSUMER_COUNT, + maxConsumerCount: env.TRIGGER_DEQUEUE_MAX_CONSUMER_COUNT, + scaleUpCooldownMs: env.TRIGGER_DEQUEUE_SCALING_UP_COOLDOWN_MS, + scaleDownCooldownMs: env.TRIGGER_DEQUEUE_SCALING_DOWN_COOLDOWN_MS, + targetRatio: env.TRIGGER_DEQUEUE_SCALING_TARGET_RATIO, + ewmaAlpha: env.TRIGGER_DEQUEUE_SCALING_EWMA_ALPHA, + batchWindowMs: env.TRIGGER_DEQUEUE_SCALING_BATCH_WINDOW_MS, + dampingFactor: env.TRIGGER_DEQUEUE_SCALING_DAMPING_FACTOR, + }, runNotificationsEnabled: env.TRIGGER_WORKLOAD_API_ENABLED, heartbeatIntervalSeconds: env.TRIGGER_WORKER_HEARTBEAT_INTERVAL_SECONDS, sendRunDebugLogs: env.SEND_RUN_DEBUG_LOGS, @@ -233,6 +244,12 @@ class ManagedSupervisor { } try { + if (!message.deployment.friendlyId) { + // mostly a type guard, deployments always exists for deployed environments + // a proper fix would be to use a discriminated union schema to differentiate between dequeued runs in dev and in deployed environments. + throw new Error("Deployment is missing"); + } + await this.workloadManager.create({ dequeuedAt: message.dequeuedAt, envId: message.environment.id, @@ -241,12 +258,15 @@ class ManagedSupervisor { machine: message.run.machine, orgId: message.organization.id, projectId: message.project.id, + deploymentFriendlyId: message.deployment.friendlyId, + deploymentVersion: message.backgroundWorker.version, runId: message.run.id, runFriendlyId: message.run.friendlyId, version: message.version, nextAttemptNumber: message.run.attemptNumber, snapshotId: message.snapshot.id, snapshotFriendlyId: message.snapshot.friendlyId, + placementTags: message.placementTags, }); // Disabled for now diff --git a/apps/supervisor/src/services/failedPodHandler.ts b/apps/supervisor/src/services/failedPodHandler.ts index 26a589e677..0721724376 100644 --- a/apps/supervisor/src/services/failedPodHandler.ts +++ b/apps/supervisor/src/services/failedPodHandler.ts @@ -25,6 +25,7 @@ export class FailedPodHandler { private readonly informer: Informer; private readonly reconnectIntervalMs: number; + private reconnecting = false; // Metrics private readonly register: Registry; @@ -250,21 +251,48 @@ export class FailedPodHandler { } private makeOnError(informerName: string) { - return () => this.onError(informerName); + return (err?: unknown) => this.onError(informerName, err); } - private async onError(informerName: string) { + private async onError(informerName: string, err?: unknown) { if (!this.isRunning) { this.logger.warn("onError: informer not running"); return; } - this.logger.error("error event fired", { informerName }); - this.informerEventsTotal.inc({ namespace: this.namespace, verb: "error" }); + // Guard against multiple simultaneous reconnections + if (this.reconnecting) { + this.logger.debug("onError: reconnection already in progress, skipping", { + informerName, + }); + return; + } - // Reconnect on errors - await setTimeout(this.reconnectIntervalMs); - await this.informer.start(); + this.reconnecting = true; + + try { + const error = err instanceof Error ? 
err : undefined; + this.logger.error("error event fired", { + informerName, + error: error?.message, + errorType: error?.name, + }); + this.informerEventsTotal.inc({ namespace: this.namespace, verb: "error" }); + + // Reconnect on errors + await setTimeout(this.reconnectIntervalMs); + await this.informer.start(); + } catch (handlerError) { + const error = handlerError instanceof Error ? handlerError : undefined; + this.logger.error("onError: reconnection attempt failed", { + informerName, + error: error?.message, + errorType: error?.name, + errorStack: error?.stack, + }); + } finally { + this.reconnecting = false; + } } private makeOnConnect(informerName: string) { diff --git a/apps/supervisor/src/util.ts b/apps/supervisor/src/util.ts index 7cb554cd03..4fcda27b2a 100644 --- a/apps/supervisor/src/util.ts +++ b/apps/supervisor/src/util.ts @@ -1,5 +1,15 @@ import { isMacOS, isWindows } from "std-env"; +export function normalizeDockerHostUrl(url: string) { + const $url = new URL(url); + + if ($url.hostname === "localhost") { + $url.hostname = getDockerHostDomain(); + } + + return $url.toString(); +} + export function getDockerHostDomain() { return isMacOS || isWindows ? "host.docker.internal" : "localhost"; } diff --git a/apps/supervisor/src/workloadManager/docker.ts b/apps/supervisor/src/workloadManager/docker.ts index e3b39bfed6..d6651d325a 100644 --- a/apps/supervisor/src/workloadManager/docker.ts +++ b/apps/supervisor/src/workloadManager/docker.ts @@ -5,17 +5,19 @@ import { type WorkloadManagerOptions, } from "./types.js"; import { env } from "../env.js"; -import { getDockerHostDomain, getRunnerId } from "../util.js"; +import { getDockerHostDomain, getRunnerId, normalizeDockerHostUrl } from "../util.js"; import Docker from "dockerode"; import { tryCatch } from "@trigger.dev/core"; +import { ECRAuthService } from "./ecrAuth.js"; export class DockerWorkloadManager implements WorkloadManager { private readonly logger = new SimpleStructuredLogger("docker-workload-manager"); private readonly docker: Docker; private readonly runnerNetworks: string[]; - private readonly auth?: Docker.AuthConfig; + private readonly staticAuth?: Docker.AuthConfig; private readonly platformOverride?: string; + private readonly ecrAuthService?: ECRAuthService; constructor(private opts: WorkloadManagerOptions) { this.docker = new Docker({ @@ -44,13 +46,18 @@ export class DockerWorkloadManager implements WorkloadManager { url: env.DOCKER_REGISTRY_URL, }); - this.auth = { + this.staticAuth = { username: env.DOCKER_REGISTRY_USERNAME, password: env.DOCKER_REGISTRY_PASSWORD, serveraddress: env.DOCKER_REGISTRY_URL, }; + } else if (ECRAuthService.hasAWSCredentials()) { + this.logger.info("๐Ÿ‹ AWS credentials found, initializing ECR auth service"); + this.ecrAuthService = new ECRAuthService(); } else { - this.logger.warn("๐Ÿ‹ No Docker registry credentials provided, skipping auth"); + this.logger.warn( + "๐Ÿ‹ No Docker registry credentials or AWS credentials provided, skipping auth" + ); } } @@ -65,6 +72,8 @@ export class DockerWorkloadManager implements WorkloadManager { `TRIGGER_DEQUEUED_AT_MS=${opts.dequeuedAt.getTime()}`, `TRIGGER_POD_SCHEDULED_AT_MS=${Date.now()}`, `TRIGGER_ENV_ID=${opts.envId}`, + `TRIGGER_DEPLOYMENT_ID=${opts.deploymentFriendlyId}`, + `TRIGGER_DEPLOYMENT_VERSION=${opts.deploymentVersion}`, `TRIGGER_RUN_ID=${opts.runFriendlyId}`, `TRIGGER_SNAPSHOT_ID=${opts.snapshotFriendlyId}`, `TRIGGER_SUPERVISOR_API_PROTOCOL=${this.opts.workloadApiProtocol}`, @@ -78,7 +87,7 @@ export class DockerWorkloadManager 
implements WorkloadManager { ]; if (this.opts.warmStartUrl) { - envVars.push(`TRIGGER_WARM_START_URL=${this.opts.warmStartUrl}`); + envVars.push(`TRIGGER_WARM_START_URL=${normalizeDockerHostUrl(this.opts.warmStartUrl)}`); } if (this.opts.metadataUrl) { @@ -160,9 +169,12 @@ export class DockerWorkloadManager implements WorkloadManager { imageArchitecture: inspectResult?.Architecture, }); + // Get auth config (static or ECR) + const authConfig = await this.getAuthConfig(); + // Ensure the image is present const [createImageError, imageResponseReader] = await tryCatch( - this.docker.createImage(this.auth, { + this.docker.createImage(authConfig, { fromImage: imageRef, ...(this.platformOverride ? { platform: this.platformOverride } : {}), }) @@ -216,6 +228,26 @@ export class DockerWorkloadManager implements WorkloadManager { logger.debug("create succeeded", { startResult, containerId: container.id }); } + /** + * Get authentication config for Docker operations + * Uses static credentials if available, otherwise attempts ECR auth + */ + private async getAuthConfig(): Promise { + // Use static credentials if available + if (this.staticAuth) { + return this.staticAuth; + } + + // Use ECR auth if service is available + if (this.ecrAuthService) { + const ecrAuth = await this.ecrAuthService.getAuthConfig(); + return ecrAuth || undefined; + } + + // No auth available + return undefined; + } + private async attachContainerToNetworks({ containerId, networkNames, diff --git a/apps/supervisor/src/workloadManager/ecrAuth.ts b/apps/supervisor/src/workloadManager/ecrAuth.ts new file mode 100644 index 0000000000..33e98f6319 --- /dev/null +++ b/apps/supervisor/src/workloadManager/ecrAuth.ts @@ -0,0 +1,144 @@ +import { ECRClient, GetAuthorizationTokenCommand } from "@aws-sdk/client-ecr"; +import { SimpleStructuredLogger } from "@trigger.dev/core/v3/utils/structuredLogger"; +import { tryCatch } from "@trigger.dev/core"; +import Docker from "dockerode"; + +interface ECRTokenCache { + token: string; + username: string; + serverAddress: string; + expiresAt: Date; +} + +export class ECRAuthService { + private readonly logger = new SimpleStructuredLogger("ecr-auth-service"); + private readonly ecrClient: ECRClient; + private tokenCache: ECRTokenCache | null = null; + + constructor() { + this.ecrClient = new ECRClient(); + + this.logger.info("๐Ÿ” ECR Auth Service initialized", { + region: this.ecrClient.config.region, + }); + } + + /** + * Check if we have AWS credentials configured + */ + static hasAWSCredentials(): boolean { + if (process.env.AWS_ACCESS_KEY_ID && process.env.AWS_SECRET_ACCESS_KEY) { + return true; + } + + if ( + process.env.AWS_PROFILE || + process.env.AWS_ROLE_ARN || + process.env.AWS_WEB_IDENTITY_TOKEN_FILE + ) { + return true; + } + + return false; + } + + /** + * Check if the current token is still valid with a 10-minute buffer + */ + private isTokenValid(): boolean { + if (!this.tokenCache) { + return false; + } + + const now = new Date(); + const bufferMs = 10 * 60 * 1000; // 10 minute buffer before expiration + return now < new Date(this.tokenCache.expiresAt.getTime() - bufferMs); + } + + /** + * Get a fresh ECR authorization token from AWS + */ + private async fetchNewToken(): Promise { + const [error, response] = await tryCatch( + this.ecrClient.send(new GetAuthorizationTokenCommand({})) + ); + + if (error) { + this.logger.error("Failed to get ECR authorization token", { error }); + return null; + } + + const authData = response.authorizationData?.[0]; + if (!authData?.authorizationToken 
|| !authData.proxyEndpoint) { + this.logger.error("Invalid ECR authorization response", { authData }); + return null; + } + + // Decode the base64 token to get username:password + const decoded = Buffer.from(authData.authorizationToken, "base64").toString("utf-8"); + const [username, password] = decoded.split(":", 2); + + if (!username || !password) { + this.logger.error("Failed to parse ECR authorization token"); + return null; + } + + const expiresAt = authData.expiresAt || new Date(Date.now() + 12 * 60 * 60 * 1000); // Default 12 hours + + const tokenCache: ECRTokenCache = { + token: password, + username, + serverAddress: authData.proxyEndpoint, + expiresAt, + }; + + this.logger.info("๐Ÿ” Successfully fetched ECR token", { + username, + serverAddress: authData.proxyEndpoint, + expiresAt: expiresAt.toISOString(), + }); + + return tokenCache; + } + + /** + * Get ECR auth config for Docker operations + * Returns cached token if valid, otherwise fetches a new one + */ + async getAuthConfig(): Promise { + // Check if cached token is still valid + if (this.isTokenValid()) { + this.logger.debug("Using cached ECR token"); + return { + username: this.tokenCache!.username, + password: this.tokenCache!.token, + serveraddress: this.tokenCache!.serverAddress, + }; + } + + // Fetch new token + this.logger.info("Fetching new ECR authorization token"); + const newToken = await this.fetchNewToken(); + + if (!newToken) { + return null; + } + + // Cache the new token + this.tokenCache = newToken; + + return { + username: newToken.username, + password: newToken.token, + serveraddress: newToken.serverAddress, + }; + } + + /** + * Clear the cached token (useful for testing or forcing refresh) + */ + clearCache(): void { + this.tokenCache = null; + this.logger.debug("ECR token cache cleared"); + } +} diff --git a/apps/supervisor/src/workloadManager/kubernetes.ts b/apps/supervisor/src/workloadManager/kubernetes.ts index d596e18ed6..96fbd7a274 100644 --- a/apps/supervisor/src/workloadManager/kubernetes.ts +++ b/apps/supervisor/src/workloadManager/kubernetes.ts @@ -4,7 +4,8 @@ import { type WorkloadManagerCreateOptions, type WorkloadManagerOptions, } from "./types.js"; -import type { EnvironmentType, MachinePreset } from "@trigger.dev/core/v3"; +import type { EnvironmentType, MachinePreset, PlacementTag } from "@trigger.dev/core/v3"; +import { PlacementTagProcessor } from "@trigger.dev/core/v3/serverOnly"; import { env } from "../env.js"; import { type K8sApi, createK8sApi, type k8s } from "../clients/kubernetes.js"; import { getRunnerId } from "../util.js"; @@ -17,9 +18,21 @@ export class KubernetesWorkloadManager implements WorkloadManager { private readonly logger = new SimpleStructuredLogger("kubernetes-workload-provider"); private k8s: K8sApi; private namespace = env.KUBERNETES_NAMESPACE; + private placementTagProcessor: PlacementTagProcessor; + + // Resource settings + private readonly cpuRequestMinCores = env.KUBERNETES_CPU_REQUEST_MIN_CORES; + private readonly cpuRequestRatio = env.KUBERNETES_CPU_REQUEST_RATIO; + private readonly memoryRequestMinGb = env.KUBERNETES_MEMORY_REQUEST_MIN_GB; + private readonly memoryRequestRatio = env.KUBERNETES_MEMORY_REQUEST_RATIO; + private readonly memoryOverheadGb = env.KUBERNETES_MEMORY_OVERHEAD_GB; constructor(private opts: WorkloadManagerOptions) { this.k8s = createK8sApi(); + this.placementTagProcessor = new PlacementTagProcessor({ + enabled: env.PLACEMENT_TAGS_ENABLED, + prefix: env.PLACEMENT_TAGS_PREFIX, + }); if (opts.workloadApiDomain) { 
this.logger.warn("[KubernetesWorkloadManager] โš ๏ธ Custom workload API domain", { @@ -28,6 +41,39 @@ export class KubernetesWorkloadManager implements WorkloadManager { } } + private addPlacementTags( + podSpec: Omit, + placementTags?: PlacementTag[] + ): Omit { + const nodeSelector = this.placementTagProcessor.convertToNodeSelector( + placementTags, + podSpec.nodeSelector + ); + + return { + ...podSpec, + nodeSelector, + }; + } + + private stripImageDigest(imageRef: string): string { + if (!env.KUBERNETES_STRIP_IMAGE_DIGEST) { + return imageRef; + } + + const atIndex = imageRef.lastIndexOf("@"); + + if (atIndex === -1) { + return imageRef; + } + + return imageRef.substring(0, atIndex); + } + + private clamp(value: number, min: number, max: number): number { + return Math.min(Math.max(value, min), max); + } + async create(opts: WorkloadManagerCreateOptions) { this.logger.log("[KubernetesWorkloadManager] Creating container", { opts }); @@ -48,12 +94,12 @@ export class KubernetesWorkloadManager implements WorkloadManager { }, }, spec: { - ...this.#defaultPodSpec, + ...this.addPlacementTags(this.#defaultPodSpec, opts.placementTags), terminationGracePeriodSeconds: 60 * 60, containers: [ { name: "run-controller", - image: opts.image, + image: this.stripImageDigest(opts.image), ports: [ { containerPort: 8000, @@ -77,6 +123,14 @@ export class KubernetesWorkloadManager implements WorkloadManager { name: "TRIGGER_ENV_ID", value: opts.envId, }, + { + name: "TRIGGER_DEPLOYMENT_ID", + value: opts.deploymentFriendlyId, + }, + { + name: "TRIGGER_DEPLOYMENT_VERSION", + value: opts.deploymentVersion, + }, { name: "TRIGGER_SNAPSHOT_ID", value: opts.snapshotFriendlyId, @@ -228,9 +282,18 @@ export class KubernetesWorkloadManager implements WorkloadManager { restartPolicy: "Never", automountServiceAccountToken: false, imagePullSecrets: this.getImagePullSecrets(), - nodeSelector: { - nodetype: env.KUBERNETES_WORKER_NODETYPE_LABEL, - }, + ...(env.KUBERNETES_SCHEDULER_NAME + ? { + schedulerName: env.KUBERNETES_SCHEDULER_NAME, + } + : {}), + ...(env.KUBERNETES_WORKER_NODETYPE_LABEL + ? { + nodeSelector: { + nodetype: env.KUBERNETES_WORKER_NODETYPE_LABEL, + }, + } + : {}), }; } @@ -252,20 +315,32 @@ export class KubernetesWorkloadManager implements WorkloadManager { envtype: this.#envTypeToLabelValue(opts.envType), org: opts.orgId, project: opts.projectId, + machine: opts.machine.name, }; } #getResourceRequestsForMachine(preset: MachinePreset): ResourceQuantities { + const cpuRequest = preset.cpu * this.cpuRequestRatio; + const memoryRequest = preset.memory * this.memoryRequestRatio; + + // Clamp between min and max + const clampedCpu = this.clamp(cpuRequest, this.cpuRequestMinCores, preset.cpu); + const clampedMemory = this.clamp(memoryRequest, this.memoryRequestMinGb, preset.memory); + return { - cpu: `${preset.cpu * 0.75}`, - memory: `${preset.memory}G`, + cpu: `${clampedCpu}`, + memory: `${clampedMemory}G`, }; } #getResourceLimitsForMachine(preset: MachinePreset): ResourceQuantities { + const memoryLimit = this.memoryOverheadGb + ? 
preset.memory + this.memoryOverheadGb + : preset.memory; + return { cpu: `${preset.cpu}`, - memory: `${preset.memory}G`, + memory: `${memoryLimit}G`, }; } diff --git a/apps/supervisor/src/workloadManager/types.ts b/apps/supervisor/src/workloadManager/types.ts index b3cd418f1e..90b6195779 100644 --- a/apps/supervisor/src/workloadManager/types.ts +++ b/apps/supervisor/src/workloadManager/types.ts @@ -1,4 +1,4 @@ -import { type EnvironmentType, type MachinePreset } from "@trigger.dev/core/v3"; +import type { EnvironmentType, MachinePreset, PlacementTag } from "@trigger.dev/core/v3"; export interface WorkloadManagerOptions { workloadApiProtocol: "http" | "https"; @@ -23,11 +23,14 @@ export interface WorkloadManagerCreateOptions { version: string; nextAttemptNumber?: number; dequeuedAt: Date; + placementTags?: PlacementTag[]; // identifiers envId: string; envType: EnvironmentType; orgId: string; projectId: string; + deploymentFriendlyId: string; + deploymentVersion: string; runId: string; runFriendlyId: string; snapshotId: string; diff --git a/apps/supervisor/src/workloadServer/index.ts b/apps/supervisor/src/workloadServer/index.ts index e7e391bce3..35d53d3609 100644 --- a/apps/supervisor/src/workloadServer/index.ts +++ b/apps/supervisor/src/workloadServer/index.ts @@ -16,7 +16,6 @@ import { type WorkloadRunAttemptCompleteResponseBody, WorkloadRunAttemptStartRequestBody, type WorkloadRunAttemptStartResponseBody, - type WorkloadRunLatestSnapshotResponseBody, WorkloadRunSnapshotsSinceResponseBody, type WorkloadServerToClientEvents, type WorkloadSuspendRunResponseBody, @@ -126,7 +125,7 @@ export class WorkloadServer extends EventEmitter { } private createHttpServer({ host, port }: { host: string; port: number }) { - return new HttpServer({ + const httpServer = new HttpServer({ port, host, metrics: { @@ -322,28 +321,6 @@ export class WorkloadServer extends EventEmitter { }, } ) - .route("/api/v1/workload-actions/runs/:runFriendlyId/snapshots/latest", "GET", { - paramsSchema: WorkloadActionParams.pick({ runFriendlyId: true }), - handler: async ({ req, reply, params }) => { - const latestSnapshotResponse = await this.workerClient.getLatestSnapshot( - params.runFriendlyId, - this.runnerIdFromRequest(req) - ); - - if (!latestSnapshotResponse.success) { - this.logger.error("Failed to get latest snapshot", { - runId: params.runFriendlyId, - error: latestSnapshotResponse.error, - }); - reply.empty(500); - return; - } - - reply.json({ - execution: latestSnapshotResponse.data.execution, - } satisfies WorkloadRunLatestSnapshotResponseBody); - }, - }) .route( "/api/v1/workload-actions/runs/:runFriendlyId/snapshots/since/:snapshotFriendlyId", "GET", @@ -369,23 +346,6 @@ export class WorkloadServer extends EventEmitter { }, } ) - .route("/api/v1/workload-actions/runs/:runFriendlyId/logs/debug", "POST", { - paramsSchema: WorkloadActionParams.pick({ runFriendlyId: true }), - bodySchema: WorkloadDebugLogRequestBody, - handler: async ({ req, reply, params, body }) => { - reply.empty(204); - - if (!env.SEND_RUN_DEBUG_LOGS) { - return; - } - - await this.workerClient.sendDebugLog( - params.runFriendlyId, - body, - this.runnerIdFromRequest(req) - ); - }, - }) .route("/api/v1/workload-actions/deployments/:deploymentId/dequeue", "GET", { paramsSchema: z.object({ deploymentId: z.string(), @@ -410,6 +370,31 @@ export class WorkloadServer extends EventEmitter { reply.json(dequeueResponse.data satisfies WorkloadDequeueFromVersionResponseBody); }, }); + + if (env.SEND_RUN_DEBUG_LOGS) { + 
httpServer.route("/api/v1/workload-actions/runs/:runFriendlyId/logs/debug", "POST", { + paramsSchema: WorkloadActionParams.pick({ runFriendlyId: true }), + bodySchema: WorkloadDebugLogRequestBody, + handler: async ({ req, reply, params, body }) => { + reply.empty(204); + + await this.workerClient.sendDebugLog( + params.runFriendlyId, + body, + this.runnerIdFromRequest(req) + ); + }, + }); + } else { + // Lightweight mock route without schemas + httpServer.route("/api/v1/workload-actions/runs/:runFriendlyId/logs/debug", "POST", { + handler: async ({ reply }) => { + reply.empty(204); + }, + }); + } + + return httpServer; } private createWebsocketServer() { diff --git a/apps/webapp/.env b/apps/webapp/.env new file mode 120000 index 0000000000..c7360fb82d --- /dev/null +++ b/apps/webapp/.env @@ -0,0 +1 @@ +../../.env \ No newline at end of file diff --git a/apps/webapp/.gitignore b/apps/webapp/.gitignore index 8b81451ead..595ab180e1 100644 --- a/apps/webapp/.gitignore +++ b/apps/webapp/.gitignore @@ -9,7 +9,8 @@ node_modules /app/styles/tailwind.css - +# Ensure the .env symlink is not removed by accident +!.env # Storybook build outputs build-storybook.log @@ -18,4 +19,5 @@ build-storybook.log storybook-static /prisma/seed.js -/prisma/populate.js \ No newline at end of file +/prisma/populate.js +.memory-snapshots \ No newline at end of file diff --git a/apps/webapp/app/api/versions.ts b/apps/webapp/app/api/versions.ts new file mode 100644 index 0000000000..250d214b07 --- /dev/null +++ b/apps/webapp/app/api/versions.ts @@ -0,0 +1,57 @@ +import { + API_VERSION_HEADER_NAME, + API_VERSION as CORE_API_VERSION, +} from "@trigger.dev/core/v3/serverOnly"; +import { z } from "zod"; + +export const CURRENT_API_VERSION = CORE_API_VERSION; + +export const NON_SPECIFIC_API_VERSION = "none"; + +export type API_VERSIONS = typeof CURRENT_API_VERSION | typeof NON_SPECIFIC_API_VERSION; + +export function getApiVersion(request: Request): API_VERSIONS { + const apiVersion = request.headers.get(API_VERSION_HEADER_NAME); + + if (apiVersion === CURRENT_API_VERSION) { + return apiVersion; + } + + return NON_SPECIFIC_API_VERSION; +} + +// This has been copied from the core package to allow us to use these types in the webapp +export const RunStatusUnspecifiedApiVersion = z.enum([ + /// Task is waiting for a version update because it cannot execute without additional information (task, queue, etc.). Replaces WAITING_FOR_DEPLOY + "PENDING_VERSION", + /// Task hasn't been deployed yet but is waiting to be executed + "WAITING_FOR_DEPLOY", + /// Task is waiting to be executed by a worker + "QUEUED", + /// Task is currently being executed by a worker + "EXECUTING", + /// Task has failed and is waiting to be retried + "REATTEMPTING", + /// Task has been paused by the system, and will be resumed by the system + "FROZEN", + /// Task has been completed successfully + "COMPLETED", + /// Task has been canceled by the user + "CANCELED", + /// Task has been completed with errors + "FAILED", + /// Task has crashed and won't be retried, most likely the worker ran out of resources, e.g. 
memory or storage + "CRASHED", + /// Task was interrupted during execution, mostly this happens in development environments + "INTERRUPTED", + /// Task has failed to complete, due to an error in the system + "SYSTEM_FAILURE", + /// Task has been scheduled to run at a specific time + "DELAYED", + /// Task has expired and won't be executed + "EXPIRED", + /// Task has reached it's maxDuration and has been stopped + "TIMED_OUT", +]); + +export type RunStatusUnspecifiedApiVersion = z.infer; diff --git a/apps/webapp/app/assets/icons/BunLogoIcon.tsx b/apps/webapp/app/assets/icons/BunLogoIcon.tsx new file mode 100644 index 0000000000..b7357189f7 --- /dev/null +++ b/apps/webapp/app/assets/icons/BunLogoIcon.tsx @@ -0,0 +1,94 @@ +export function BunLogoIcon({ className }: { className?: string }) { + return ( + + + + + + + + + + + + + + + + + + + + + + + + + + + ); +} diff --git a/apps/webapp/app/assets/icons/ClockRotateLeftIcon.tsx b/apps/webapp/app/assets/icons/ClockRotateLeftIcon.tsx new file mode 100644 index 0000000000..edef4f87b7 --- /dev/null +++ b/apps/webapp/app/assets/icons/ClockRotateLeftIcon.tsx @@ -0,0 +1,15 @@ +export function ClockRotateLeftIcon({ className }: { className?: string }) { + return ( + + + + + + ); +} diff --git a/apps/webapp/app/assets/icons/CloudProviderIcon.tsx b/apps/webapp/app/assets/icons/CloudProviderIcon.tsx new file mode 100644 index 0000000000..6c16252824 --- /dev/null +++ b/apps/webapp/app/assets/icons/CloudProviderIcon.tsx @@ -0,0 +1,76 @@ +export function CloudProviderIcon({ + provider, + className, +}: { + provider: "aws" | "digitalocean" | (string & {}); + className?: string; +}) { + switch (provider) { + case "aws": + return ; + case "digitalocean": + return ; + default: + return null; + } +} + +export function AWS({ className }: { className?: string }) { + return ( + + + + + + ); +} + +export function DigitalOcean({ className }: { className?: string }) { + return ( + + + + + + + + + + + + + + ); +} diff --git a/apps/webapp/app/assets/icons/ConcurrencyIcon.tsx b/apps/webapp/app/assets/icons/ConcurrencyIcon.tsx new file mode 100644 index 0000000000..710ba4e6fa --- /dev/null +++ b/apps/webapp/app/assets/icons/ConcurrencyIcon.tsx @@ -0,0 +1,13 @@ +export function ConcurrencyIcon({ className }: { className?: string }) { + return ( + + + + + + + + + + ); +} diff --git a/apps/webapp/app/assets/icons/ListBulletIcon.tsx b/apps/webapp/app/assets/icons/ListBulletIcon.tsx new file mode 100644 index 0000000000..3ca7636a90 --- /dev/null +++ b/apps/webapp/app/assets/icons/ListBulletIcon.tsx @@ -0,0 +1,30 @@ +export function ListBulletIcon({ className }: { className?: string }) { + return ( + + + + + + + + + ); +} diff --git a/apps/webapp/app/assets/icons/ListCheckedIcon.tsx b/apps/webapp/app/assets/icons/ListCheckedIcon.tsx new file mode 100644 index 0000000000..29cb828f5d --- /dev/null +++ b/apps/webapp/app/assets/icons/ListCheckedIcon.tsx @@ -0,0 +1,48 @@ +export function ListCheckedIcon({ className }: { className?: string }) { + return ( + + + + + + + + + ); +} diff --git a/apps/webapp/app/assets/icons/MachineIcon.tsx b/apps/webapp/app/assets/icons/MachineIcon.tsx new file mode 100644 index 0000000000..f07e7467b0 --- /dev/null +++ b/apps/webapp/app/assets/icons/MachineIcon.tsx @@ -0,0 +1,221 @@ +import { cn } from "~/utils/cn"; + +export function MachineIcon({ preset, className }: { preset?: string; className?: string }) { + if (!preset) { + return ; + } + + switch (preset) { + case "no-machine": + return ; + case "micro": + return ; + case "small-1x": + return ; + 
case "small-2x": + return ; + case "medium-1x": + return ; + case "medium-2x": + return ; + case "large-1x": + return ; + case "large-2x": + return ; + default: + return ; + } +} + +export function MachineDefaultIcon({ className }: { className?: string }) { + return ( + + + + + + ); +} + +function MachineIconNoMachine({ className }: { className?: string }) { + return ( + + + + + + + ); +} + +function MachineIconMicro({ className }: { className?: string }) { + return ( + + + + + + ); +} + +function MachineIconSmall1x({ className }: { className?: string }) { + return ( + + + + + + ); +} + +function MachineIconSmall2x({ className }: { className?: string }) { + return ( + + + + + + ); +} + +function MachineIconMedium1x({ className }: { className?: string }) { + return ( + + + + + + ); +} + +function MachineIconMedium2x({ className }: { className?: string }) { + return ( + + + + + + ); +} + +function MachineIconLarge1x({ className }: { className?: string }) { + return ( + + + + + + ); +} + +function MachineIconLarge2x({ className }: { className?: string }) { + return ( + + + + + + ); +} diff --git a/apps/webapp/app/assets/icons/MoveToBottomIcon.tsx b/apps/webapp/app/assets/icons/MoveToBottomIcon.tsx new file mode 100644 index 0000000000..997550e926 --- /dev/null +++ b/apps/webapp/app/assets/icons/MoveToBottomIcon.tsx @@ -0,0 +1,27 @@ +export function MoveToBottomIcon({ className }: { className?: string }) { + return ( + + + + + + ); +} diff --git a/apps/webapp/app/assets/icons/MoveToTopIcon.tsx b/apps/webapp/app/assets/icons/MoveToTopIcon.tsx new file mode 100644 index 0000000000..46938fd391 --- /dev/null +++ b/apps/webapp/app/assets/icons/MoveToTopIcon.tsx @@ -0,0 +1,34 @@ +export function MoveToTopIcon({ className }: { className?: string }) { + return ( + + + + + + + + + + + + + ); +} diff --git a/apps/webapp/app/assets/icons/MoveUpIcon.tsx b/apps/webapp/app/assets/icons/MoveUpIcon.tsx new file mode 100644 index 0000000000..6e5d8a84ba --- /dev/null +++ b/apps/webapp/app/assets/icons/MoveUpIcon.tsx @@ -0,0 +1,41 @@ +export function MoveUpIcon({ className }: { className?: string }) { + return ( + + + + + + + + + + + + + + ); +} diff --git a/apps/webapp/app/assets/icons/NodejsLogoIcon.tsx b/apps/webapp/app/assets/icons/NodejsLogoIcon.tsx new file mode 100644 index 0000000000..234dd079e1 --- /dev/null +++ b/apps/webapp/app/assets/icons/NodejsLogoIcon.tsx @@ -0,0 +1,15 @@ +export function NodejsLogoIcon({ className }: { className?: string }) { + return ( + + + + + + + + + + + + ); +} diff --git a/apps/webapp/app/assets/icons/RegionIcons.tsx b/apps/webapp/app/assets/icons/RegionIcons.tsx new file mode 100644 index 0000000000..098d5bc98c --- /dev/null +++ b/apps/webapp/app/assets/icons/RegionIcons.tsx @@ -0,0 +1,106 @@ +export function FlagIcon({ + region, + className, +}: { + region: "usa" | "europe" | (string & {}); + className?: string; +}) { + switch (region) { + case "usa": + return ; + case "europe": + return ; + default: + return null; + } +} + +export function FlagUSA({ className }: { className?: string }) { + return ( + + + + + + + + + + + + + + + + + + ); +} + +export function FlagEurope({ className }: { className?: string }) { + return ( + + + + + + + + + + + + ); +} diff --git a/apps/webapp/app/assets/icons/SnakedArrowIcon.tsx b/apps/webapp/app/assets/icons/SnakedArrowIcon.tsx new file mode 100644 index 0000000000..0766cce1b4 --- /dev/null +++ b/apps/webapp/app/assets/icons/SnakedArrowIcon.tsx @@ -0,0 +1,20 @@ +export function SnakedArrowIcon({ className }: { className?: string }) { + 
return ( + + + + + ); +} diff --git a/apps/webapp/app/assets/icons/StreamsIcon.tsx b/apps/webapp/app/assets/icons/StreamsIcon.tsx new file mode 100644 index 0000000000..73cc480f4d --- /dev/null +++ b/apps/webapp/app/assets/icons/StreamsIcon.tsx @@ -0,0 +1,10 @@ +export function StreamsIcon({ className }: { className?: string }) { + return ( + + + + + + ); +} + diff --git a/apps/webapp/app/assets/images/blurred-dashboard-background-menu-bottom.jpg b/apps/webapp/app/assets/images/blurred-dashboard-background-menu-bottom.jpg new file mode 100644 index 0000000000..2a993f8212 Binary files /dev/null and b/apps/webapp/app/assets/images/blurred-dashboard-background-menu-bottom.jpg differ diff --git a/apps/webapp/app/assets/images/blurred-dashboard-background-menu-top.jpg b/apps/webapp/app/assets/images/blurred-dashboard-background-menu-top.jpg new file mode 100644 index 0000000000..8aca8563cd Binary files /dev/null and b/apps/webapp/app/assets/images/blurred-dashboard-background-menu-top.jpg differ diff --git a/apps/webapp/app/assets/images/blurred-dashboard-background-table.jpg b/apps/webapp/app/assets/images/blurred-dashboard-background-table.jpg new file mode 100644 index 0000000000..a2ae4029fe Binary files /dev/null and b/apps/webapp/app/assets/images/blurred-dashboard-background-table.jpg differ diff --git a/apps/webapp/app/assets/images/open-bulk-actions-panel.png b/apps/webapp/app/assets/images/open-bulk-actions-panel.png new file mode 100644 index 0000000000..a1b48f3864 Binary files /dev/null and b/apps/webapp/app/assets/images/open-bulk-actions-panel.png differ diff --git a/apps/webapp/app/assets/images/select-runs-individually.png b/apps/webapp/app/assets/images/select-runs-individually.png new file mode 100644 index 0000000000..31a5d048a8 Binary files /dev/null and b/apps/webapp/app/assets/images/select-runs-individually.png differ diff --git a/apps/webapp/app/assets/images/select-runs-using-filters.png b/apps/webapp/app/assets/images/select-runs-using-filters.png new file mode 100644 index 0000000000..78ce487d0f Binary files /dev/null and b/apps/webapp/app/assets/images/select-runs-using-filters.png differ diff --git a/apps/webapp/app/assets/logos/GoogleLogo.tsx b/apps/webapp/app/assets/logos/GoogleLogo.tsx new file mode 100644 index 0000000000..e0ff9597f0 --- /dev/null +++ b/apps/webapp/app/assets/logos/GoogleLogo.tsx @@ -0,0 +1,22 @@ +export function GoogleLogo({ className }: { className?: string }) { + return ( + + + + + + + ); +} diff --git a/apps/webapp/app/components/BackgroundWrapper.tsx b/apps/webapp/app/components/BackgroundWrapper.tsx new file mode 100644 index 0000000000..ecff3af6dd --- /dev/null +++ b/apps/webapp/app/components/BackgroundWrapper.tsx @@ -0,0 +1,48 @@ +import { type ReactNode } from "react"; +import blurredDashboardBackgroundMenuTop from "~/assets/images/blurred-dashboard-background-menu-top.jpg"; +import blurredDashboardBackgroundMenuBottom from "~/assets/images/blurred-dashboard-background-menu-bottom.jpg"; +import blurredDashboardBackgroundTable from "~/assets/images/blurred-dashboard-background-table.jpg"; + +export function BackgroundWrapper({ children }: { children: ReactNode }) { + return ( +
+ {/* Left menu top background - fixed width 260px, maintains aspect ratio */} +
+ + {/* Left menu bottom background - fixed width 260px, maintains aspect ratio */} +
+ + {/* Right table background - fixed width 2000px, positioned next to menu */} +
+ + {/* Content layer */} +
{children}
+
+ ); +} diff --git a/apps/webapp/app/components/BlankStatePanels.tsx b/apps/webapp/app/components/BlankStatePanels.tsx index f3a4b3faa5..d0e798f168 100644 --- a/apps/webapp/app/components/BlankStatePanels.tsx +++ b/apps/webapp/app/components/BlankStatePanels.tsx @@ -5,27 +5,36 @@ import { ChatBubbleLeftRightIcon, ClockIcon, PlusIcon, + QuestionMarkCircleIcon, RectangleGroupIcon, RectangleStackIcon, ServerStackIcon, Squares2X2Icon, } from "@heroicons/react/20/solid"; import { useLocation } from "react-use"; -import { TaskIcon } from "~/assets/icons/TaskIcon"; +import { BranchEnvironmentIconSmall } from "~/assets/icons/EnvironmentIcons"; +import { WaitpointTokenIcon } from "~/assets/icons/WaitpointTokenIcon"; +import openBulkActionsPanel from "~/assets/images/open-bulk-actions-panel.png"; +import selectRunsIndividually from "~/assets/images/select-runs-individually.png"; +import selectRunsUsingFilters from "~/assets/images/select-runs-using-filters.png"; import { useEnvironment } from "~/hooks/useEnvironment"; +import { useFeatures } from "~/hooks/useFeatures"; import { useOrganization } from "~/hooks/useOrganizations"; import { useProject } from "~/hooks/useProject"; import { type MinimumEnvironment } from "~/presenters/SelectBestEnvironmentPresenter.server"; +import { NewBranchPanel } from "~/routes/_app.orgs.$organizationSlug.projects.$projectParam.env.$envParam.branches/route"; import { docsPath, v3BillingPath, + v3CreateBulkActionPath, v3EnvironmentPath, v3EnvironmentVariablesPath, v3NewProjectAlertPath, v3NewSchedulePath, } from "~/utils/pathBuilder"; +import { AskAI } from "./AskAI"; import { InlineCode } from "./code/InlineCode"; -import { environmentFullTitle } from "./environments/EnvironmentLabel"; +import { environmentFullTitle, EnvironmentIcon } from "./environments/EnvironmentLabel"; import { Feedback } from "./Feedback"; import { EnvironmentSelector } from "./navigation/EnvironmentSelector"; import { Button, LinkButton } from "./primitives/Buttons"; @@ -34,14 +43,22 @@ import { InfoPanel } from "./primitives/InfoPanel"; import { Paragraph } from "./primitives/Paragraph"; import { StepNumber } from "./primitives/StepNumber"; import { TextLink } from "./primitives/TextLink"; -import { InitCommandV3, PackageManagerProvider, TriggerDevStepV3 } from "./SetupCommands"; +import { SimpleTooltip } from "./primitives/Tooltip"; +import { + InitCommandV3, + PackageManagerProvider, + TriggerDeployStep, + TriggerDevStepV3, +} from "./SetupCommands"; import { StepContentContainer } from "./StepContentContainer"; -import { WaitpointTokenIcon } from "~/assets/icons/WaitpointTokenIcon"; -import { BranchEnvironmentIconSmall } from "~/assets/icons/EnvironmentIcons"; -import { useFeatures } from "~/hooks/useFeatures"; -import { DialogContent, DialogTrigger, Dialog } from "./primitives/Dialog"; import { V4Badge } from "./V4Badge"; -import { NewBranchPanel } from "~/routes/_app.orgs.$organizationSlug.projects.$projectParam.env.$envParam.branches/route"; +import { + ClientTabs, + ClientTabsContent, + ClientTabsList, + ClientTabsTrigger, +} from "./primitives/ClientTabs"; +import { GitHubSettingsPanel } from "~/routes/resources.orgs.$organizationSlug.projects.$projectParam.env.$envParam.github"; export function HasNoTasksDev() { return ( @@ -83,28 +100,7 @@ export function HasNoTasksDev() { } export function HasNoTasksDeployed({ environment }: { environment: MinimumEnvironment }) { - return ( - - How to deploy tasks - - } - > - - Run the CLI deploy command to - deploy your tasks to the 
{environmentFullTitle(environment)} environment. - - - ); + return ; } export function SchedulesNoPossibleTaskPanel() { @@ -222,45 +218,7 @@ export function TestHasNoTasks() { } export function DeploymentsNone() { - const organization = useOrganization(); - const project = useProject(); - const environment = useEnvironment(); - - return ( - - - There are several ways to deploy your tasks. You can use the CLI or a Continuous Integration - service like GitHub Actions. Make sure you{" "} - - set your environment variables - {" "} - first. - -
- - Deploy with the CLI - - - Deploy with GitHub actions - -
-
- ); + return ; } export function DeploymentsNoneDev() { @@ -269,46 +227,52 @@ export function DeploymentsNoneDev() { const environment = useEnvironment(); return ( -
- - + <> +
+
+ + Deploy your tasks +
+
+ + } + content="Deploy docs" + /> + + } + content="Troubleshooting docs" + /> + +
+
+ + + This is the Development environment. When you're ready to deploy your tasks, switch to a different environment. - - There are several ways to deploy your tasks. You can use the CLI or a Continuous - Integration service like GitHub Actions. Make sure you{" "} - - set your environment variables - {" "} - first. - -
- - Deploy with the CLI - - - Deploy with GitHub actions - -
-
- -
+ + + ); } @@ -474,6 +438,10 @@ export function BranchesNoBranchableEnvironment() { Preview branches in Trigger.dev create isolated environments for testing new features before production. + + You must be on to access preview branches. Read our{" "} + upgrade to v4 guide to learn more. + ); } @@ -569,3 +537,152 @@ export function SwitcherPanel({ title = "Switch to a deployed environment" }: {
); } + +export function BulkActionsNone() { + const organization = useOrganization(); + const project = useProject(); + const environment = useEnvironment(); + + return ( +
+
+ Create a bulk action +
+ + New bulk action + +
+
+ + + Select runs from the runs page individually. +
+ Select runs individually +
+
+
+
+ + OR + +
+
+ + + + Use the filter menu on the runs page to select just the runs you want to bulk action. + +
+ Select runs using filters +
+
+ + Click the “Bulk actions” button in the top right of the runs page. +
+ Open the bulk action panel +
+
+
+ ); +} + +function DeploymentOnboardingSteps() { + const environment = useEnvironment(); + const organization = useOrganization(); + const project = useProject(); + + return ( + +
+
+ + Deploy your tasks to {environmentFullTitle(environment)} +
+
+ + } + content="Deploy docs" + /> + + } + content="Troubleshooting docs" + /> + +
+
+ + + + GitHub + + + Manual + + + GitHub Actions + + + + + + + Deploy automatically with every push. Read the{" "} + full guide. + +
+ +
+
+
+ + + + + This will deploy your tasks to the {environmentFullTitle(environment)} environment. + Read the full guide. + + + + + + + + + Read the GitHub Actions guide to + get started. + + + +
+ + + + This page will automatically refresh when your tasks are deployed. + +
+ ); +} diff --git a/apps/webapp/app/components/BulkActionFilterSummary.tsx b/apps/webapp/app/components/BulkActionFilterSummary.tsx new file mode 100644 index 0000000000..073940d7d0 --- /dev/null +++ b/apps/webapp/app/components/BulkActionFilterSummary.tsx @@ -0,0 +1,268 @@ +import { z } from "zod"; +import { + filterIcon, + filterTitle, + type TaskRunListSearchFilterKey, + type TaskRunListSearchFilters, +} from "./runs/v3/RunFilters"; +import { Paragraph } from "./primitives/Paragraph"; +import simplur from "simplur"; +import { appliedSummary, dateFromString, timeFilterRenderValues } from "./runs/v3/SharedFilters"; +import { formatNumber } from "~/utils/numberFormatter"; +import { SpinnerWhite } from "./primitives/Spinner"; +import { ArrowPathIcon, CheckIcon, XCircleIcon } from "@heroicons/react/20/solid"; +import { XCircleIcon as XCircleIconOutline } from "@heroicons/react/24/outline"; +import assertNever from "assert-never"; +import { AppliedFilter } from "./primitives/AppliedFilter"; +import { runStatusTitle } from "./runs/v3/TaskRunStatus"; +import type { TaskRunStatus } from "@trigger.dev/database"; + +export const BulkActionMode = z.union([z.literal("selected"), z.literal("filter")]); +export type BulkActionMode = z.infer; +export const BulkActionAction = z.union([z.literal("cancel"), z.literal("replay")]); +export type BulkActionAction = z.infer; + +export function BulkActionFilterSummary({ + selected, + final = false, + mode, + action, + filters, +}: { + selected?: number; + final?: boolean; + mode: BulkActionMode; + action: BulkActionAction; + filters: TaskRunListSearchFilters; +}) { + switch (mode) { + case "selected": + return ( + + You {!final ? "have " : " "}individually selected {simplur`${selected} run[|s]`} to be{" "} + . + + ); + case "filter": { + const { label, valueLabel, rangeType } = timeFilterRenderValues({ + from: filters.from ? dateFromString(`${filters.from}`) : undefined, + to: filters.to ? dateFromString(`${filters.to}`) : undefined, + period: filters.period, + }); + + return ( +
+ + You {!final ? "have " : " "}selected{" "} + + {final ? selected : } + {" "} + runs to be using these filters: + +
+ + {Object.entries(filters).map(([key, value]) => { + if (!value && key !== "period") { + return null; + } + + const typedKey = key as TaskRunListSearchFilterKey; + + switch (typedKey) { + case "cursor": + case "direction": + case "environments": + //We need to handle time differently because we have a default + case "period": + case "from": + case "to": { + return null; + } + case "tasks": { + const values = Array.isArray(value) ? value : [`${value}`]; + return ( + + ); + } + case "versions": { + const values = Array.isArray(value) ? value : [`${value}`]; + return ( + + ); + } + case "statuses": { + const values = Array.isArray(value) ? value : [`${value}`]; + return ( + runStatusTitle(v as TaskRunStatus)))} + removable={false} + /> + ); + } + case "tags": { + const values = Array.isArray(value) ? value : [`${value}`]; + return ( + + ); + } + case "bulkId": { + return ( + + ); + } + case "rootOnly": { + return ( + + ) : ( + + ) + } + removable={false} + /> + ); + } + case "runId": { + return ( + + ); + } + case "batchId": { + return ( + + ); + } + case "scheduleId": { + return ( + + ); + } + case "queues": { + const values = Array.isArray(value) ? value : [`${value}`]; + return ( + v.replace("task/", "")))} + removable={false} + /> + ); + } + case "machines": { + const values = Array.isArray(value) ? value : [`${value}`]; + return ( + + ); + } + default: { + assertNever(typedKey); + } + } + })} +
+
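// Aside: a minimal sketch of how BulkActionFilterSummary might be rendered in both modes.
// The filter values are illustrative assumptions about TaskRunListSearchFilters' optional
// fields; only the `tasks` and `tags` handling is confirmed by the switch above:

import { BulkActionFilterSummary } from "~/components/BulkActionFilterSummary";

export function SummaryExamples() {
  return (
    <>
      {/* 12 runs picked by hand, about to be replayed */}
      <BulkActionFilterSummary mode="selected" action="replay" selected={12} filters={{}} />
      {/* Runs matched by the applied filters, about to be canceled */}
      <BulkActionFilterSummary
        mode="filter"
        action="cancel"
        filters={{ tasks: ["send-email"], tags: ["production"] }}
      />
    </>
  );
}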
+ ); + } + } +} + +function Action({ action }: { action: BulkActionAction }) { + switch (action) { + case "cancel": + return ( + + + Canceled + + ); + case "replay": + return ( + + + Replayed + + ); + } +} + +export function EstimatedCount({ count }: { count?: number }) { + if (typeof count === "number") { + return <>~{formatNumber(count)}; + } + + return ; +} diff --git a/apps/webapp/app/components/CloudProvider.tsx b/apps/webapp/app/components/CloudProvider.tsx new file mode 100644 index 0000000000..acf8cff550 --- /dev/null +++ b/apps/webapp/app/components/CloudProvider.tsx @@ -0,0 +1,10 @@ +export function cloudProviderTitle(provider: "aws" | "digitalocean" | (string & {})) { + switch (provider) { + case "aws": + return "Amazon Web Services"; + case "digitalocean": + return "Digital Ocean"; + default: + return provider; + } +} diff --git a/apps/webapp/app/components/DefinitionTooltip.tsx b/apps/webapp/app/components/DefinitionTooltip.tsx index 0e2d4d4363..5bb3a71399 100644 --- a/apps/webapp/app/components/DefinitionTooltip.tsx +++ b/apps/webapp/app/components/DefinitionTooltip.tsx @@ -14,7 +14,7 @@ export function DefinitionTip({ return ( - + {children} diff --git a/apps/webapp/app/components/Feedback.tsx b/apps/webapp/app/components/Feedback.tsx index cba709aba4..ecfd4e88c9 100644 --- a/apps/webapp/app/components/Feedback.tsx +++ b/apps/webapp/app/components/Feedback.tsx @@ -2,7 +2,7 @@ import { conform, useForm } from "@conform-to/react"; import { parse } from "@conform-to/zod"; import { InformationCircleIcon, ArrowUpCircleIcon } from "@heroicons/react/20/solid"; import { EnvelopeIcon } from "@heroicons/react/24/solid"; -import { Form, useActionData, useLocation, useNavigation } from "@remix-run/react"; +import { Form, useActionData, useLocation, useNavigation, useSearchParams } from "@remix-run/react"; import { type ReactNode, useEffect, useState } from "react"; import { type FeedbackType, feedbackTypeLabel, schema } from "~/routes/resources.feedback"; import { Button } from "./primitives/Buttons"; @@ -23,10 +23,12 @@ import { DialogClose } from "@radix-ui/react-dialog"; type FeedbackProps = { button: ReactNode; defaultValue?: FeedbackType; + onOpenChange?: (open: boolean) => void; }; -export function Feedback({ button, defaultValue = "bug" }: FeedbackProps) { +export function Feedback({ button, defaultValue = "bug", onOpenChange }: FeedbackProps) { const [open, setOpen] = useState(false); + const [searchParams, setSearchParams] = useSearchParams(); const location = useLocation(); const lastSubmission = useActionData(); const navigation = useNavigation(); @@ -52,8 +54,26 @@ export function Feedback({ button, defaultValue = "bug" }: FeedbackProps) { } }, [navigation, form]); + // Handle URL param functionality + useEffect(() => { + const open = searchParams.get("feedbackPanel"); + if (open) { + setType(open as FeedbackType); + setOpen(true); + // Clone instead of mutating in place + const next = new URLSearchParams(searchParams); + next.delete("feedbackPanel"); + setSearchParams(next); + } + }, [searchParams]); + + const handleOpenChange = (value: boolean) => { + setOpen(value); + onOpenChange?.(value); + }; + return ( - + {button} Contact us diff --git a/apps/webapp/app/components/ImpersonationBanner.tsx b/apps/webapp/app/components/ImpersonationBanner.tsx index 1ef5bff641..a459f78a05 100644 --- a/apps/webapp/app/components/ImpersonationBanner.tsx +++ b/apps/webapp/app/components/ImpersonationBanner.tsx @@ -9,7 +9,7 @@ export function ImpersonationBanner() {
- +
; +export function AppContainer({ + children, + className, +}: { + children: React.ReactNode; + className?: string; +}) { + return ( +
+ {children} +
+ ); } export function MainBody({ children }: { children: React.ReactNode }) { diff --git a/apps/webapp/app/components/metrics/BigNumber.tsx b/apps/webapp/app/components/metrics/BigNumber.tsx index 7c4441be34..df3fa9e0a4 100644 --- a/apps/webapp/app/components/metrics/BigNumber.tsx +++ b/apps/webapp/app/components/metrics/BigNumber.tsx @@ -14,7 +14,7 @@ interface BigNumberProps { valueClassName?: string; defaultValue?: number; accessory?: ReactNode; - suffix?: string; + suffix?: ReactNode; suffixClassName?: string; compactThreshold?: number; } diff --git a/apps/webapp/app/components/navigation/AccountSideMenu.tsx b/apps/webapp/app/components/navigation/AccountSideMenu.tsx index 0c04044d91..30bf2c1925 100644 --- a/apps/webapp/app/components/navigation/AccountSideMenu.tsx +++ b/apps/webapp/app/components/navigation/AccountSideMenu.tsx @@ -1,8 +1,13 @@ -import { ShieldCheckIcon, UserCircleIcon } from "@heroicons/react/20/solid"; +import { LockClosedIcon, ShieldCheckIcon, UserCircleIcon } from "@heroicons/react/20/solid"; import { ArrowLeftIcon } from "@heroicons/react/24/solid"; -import { type User } from "@trigger.dev/database"; +import type { User } from "@trigger.dev/database"; import { cn } from "~/utils/cn"; -import { accountPath, personalAccessTokensPath, rootPath } from "~/utils/pathBuilder"; +import { + accountPath, + accountSecurityPath, + personalAccessTokensPath, + rootPath, +} from "~/utils/pathBuilder"; import { LinkButton } from "../primitives/Buttons"; import { SideMenuHeader } from "./SideMenuHeader"; import { SideMenuItem } from "./SideMenuItem"; @@ -42,6 +47,13 @@ export function AccountSideMenu({ user }: { user: User }) { to={personalAccessTokensPath()} data-action="tokens" /> +
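// Aside: the Feedback dialog above now opens itself when a `feedbackPanel` search param is
// present, uses the param value as the feedback type, and then removes the param from the
// URL. A minimal deep-link sketch; only the "bug" type is confirmed here, other
// FeedbackType values are assumptions:

import { Link } from "@remix-run/react";

export function ReportBugLink() {
  // Navigating to the current route with ?feedbackPanel=bug pops the contact dialog
  // pre-set to the "bug" feedback type, after which the param is stripped.
  return <Link to="?feedbackPanel=bug">Report a bug</Link>;
}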
diff --git a/apps/webapp/app/components/navigation/OrganizationSettingsSideMenu.tsx b/apps/webapp/app/components/navigation/OrganizationSettingsSideMenu.tsx index ad6543756a..7303142f3d 100644 --- a/apps/webapp/app/components/navigation/OrganizationSettingsSideMenu.tsx +++ b/apps/webapp/app/components/navigation/OrganizationSettingsSideMenu.tsx @@ -1,4 +1,5 @@ import { + BellAlertIcon, ChartBarIcon, Cog8ToothIcon, CreditCardIcon, @@ -12,6 +13,7 @@ import { organizationSettingsPath, organizationTeamPath, rootPath, + v3BillingAlertsPath, v3BillingPath, v3UsagePath, } from "~/utils/pathBuilder"; @@ -67,27 +69,34 @@ export function OrganizationSettingsSideMenu({
{isManagedCloud && ( - - )} - {isManagedCloud && ( - {currentPlan?.v3Subscription?.plan?.title} - ) : undefined - } - /> + <> + + {currentPlan?.v3Subscription?.plan?.title} + ) : undefined + } + /> + + )} { const handleScroll = () => { @@ -140,16 +148,18 @@ export function SideMenu({ >
- +
+ +
{isAdmin && !user.isImpersonating ? ( @@ -270,6 +280,13 @@ export function SideMenu({ + } /> + {isManagedCloud && ( + + )} + } + />
-
+ -
+
+ +
+
{organization.title}
@@ -584,8 +624,6 @@ function SelectorDivider() { } function HelpAndAI() { - const features = useFeatures(); - return ( <> diff --git a/apps/webapp/app/components/primitives/Accordion.tsx b/apps/webapp/app/components/primitives/Accordion.tsx index beb61b2012..c39a245b98 100644 --- a/apps/webapp/app/components/primitives/Accordion.tsx +++ b/apps/webapp/app/components/primitives/Accordion.tsx @@ -4,6 +4,7 @@ import * as React from "react"; import * as AccordionPrimitive from "@radix-ui/react-accordion"; import { ChevronDown } from "lucide-react"; import { cn } from "~/utils/cn"; +import { Icon, type RenderIcon } from "./Icon"; const Accordion = AccordionPrimitive.Root; @@ -13,26 +14,36 @@ const AccordionItem = React.forwardRef< >(({ className, ...props }, ref) => ( )); AccordionItem.displayName = "AccordionItem"; +type AccordionTriggerProps = React.ComponentPropsWithoutRef & { + leadingIcon?: RenderIcon; + leadingIconClassName?: string; +}; + const AccordionTrigger = React.forwardRef< React.ElementRef, - React.ComponentPropsWithoutRef ->(({ className, children, ...props }, ref) => ( + AccordionTriggerProps +>(({ className, children, leadingIcon, leadingIconClassName, ...props }, ref) => ( svg]:rotate-180", + "flex flex-1 items-center justify-between py-2 pl-2 pr-3 text-sm text-text-bright transition group-hover:border-grid-bright hover:bg-grid-dimmed [&[data-state=open]>svg]:rotate-180", className )} {...props} > - {children} +
+ {leadingIcon && ( + + )} +
{children}
+
diff --git a/apps/webapp/app/components/primitives/AppliedFilter.tsx b/apps/webapp/app/components/primitives/AppliedFilter.tsx index c67cc82a9e..b1ba1cb81e 100644 --- a/apps/webapp/app/components/primitives/AppliedFilter.tsx +++ b/apps/webapp/app/components/primitives/AppliedFilter.tsx @@ -1,21 +1,26 @@ import { XMarkIcon } from "@heroicons/react/20/solid"; -import { ReactNode } from "react"; +import { type ReactNode } from "react"; import { cn } from "~/utils/cn"; const variants = { + "secondary/small": { + box: "h-6 bg-secondary rounded pl-1.5 gap-1.5 text-xs divide-x divide-black/15 group-hover:bg-charcoal-600 group-hover:border-charcoal-550 text-text-bright border border-charcoal-600", + clear: "size-6 text-text-bright hover:text-text-bright transition-colors", + }, "tertiary/small": { box: "h-6 bg-tertiary rounded pl-1.5 gap-1.5 text-xs divide-x divide-black/15 group-hover:bg-charcoal-600", clear: "size-6 text-text-dimmed hover:text-text-bright transition-colors", }, - "minimal/small": { - box: "h-6 hover:bg-tertiary rounded pl-1.5 gap-1.5 text-xs", - clear: "size-6 text-text-dimmed hover:text-text-bright transition-colors", + "minimal/medium": { + box: "rounded gap-1.5 text-sm", + clear: "size-6 text-text-dimmed transition-colors", }, }; type Variant = keyof typeof variants; type AppliedFilterProps = { + icon?: ReactNode; label: ReactNode; value: ReactNode; removable?: boolean; @@ -25,11 +30,12 @@ type AppliedFilterProps = { }; export function AppliedFilter({ + icon, label, value, removable = true, onRemove, - variant = "tertiary/small", + variant = "secondary/small", className, }: AppliedFilterProps) { const variantClassName = variants[variant]; @@ -42,11 +48,14 @@ export function AppliedFilter({ className )} > -
-
- {label}: +
+
+ {icon} +
+ {label}: +
-
+
{value}
diff --git a/apps/webapp/app/components/primitives/Avatar.tsx b/apps/webapp/app/components/primitives/Avatar.tsx index a774dd27f9..0cb74c2ba6 100644 --- a/apps/webapp/app/components/primitives/Avatar.tsx +++ b/apps/webapp/app/components/primitives/Avatar.tsx @@ -6,7 +6,7 @@ import { RocketLaunchIcon, StarIcon, } from "@heroicons/react/20/solid"; -import { type Prisma } from "@trigger.dev/database"; +import type { Prisma } from "@trigger.dev/database"; import { z } from "zod"; import { logger } from "~/services/logger.server"; import { cn } from "~/utils/cn"; diff --git a/apps/webapp/app/components/primitives/Badge.tsx b/apps/webapp/app/components/primitives/Badge.tsx index 04a033ba02..a92957e268 100644 --- a/apps/webapp/app/components/primitives/Badge.tsx +++ b/apps/webapp/app/components/primitives/Badge.tsx @@ -4,14 +4,14 @@ import { cn } from "~/utils/cn"; const variants = { default: "grid place-items-center rounded-full px-2 h-5 tracking-wider text-xxs bg-charcoal-750 text-text-bright uppercase whitespace-nowrap", - small: - "grid place-items-center rounded-full px-[0.4rem] h-4 tracking-wider text-xxs bg-background-dimmed text-text-dimmed uppercase whitespace-nowrap", "extra-small": "grid place-items-center border border-charcoal-650 rounded-sm px-1 h-4 text-xxs bg-background-bright text-blue-500 whitespace-nowrap", - outline: - "grid place-items-center rounded-sm px-1.5 h-5 tracking-wider text-xxs border border-dimmed text-text-dimmed uppercase whitespace-nowrap", + small: + "grid place-items-center border border-charcoal-650 rounded-sm px-1 h-5 text-xs bg-background-bright text-blue-500 whitespace-nowrap", "outline-rounded": "grid place-items-center rounded-full px-1 h-4 tracking-wider text-xxs border border-blue-500 text-blue-500 uppercase whitespace-nowrap", + rounded: + "grid place-items-center rounded-full px-1.5 h-4 text-xxs border bg-blue-600 text-text-bright uppercase whitespace-nowrap", }; type BadgeProps = React.HTMLAttributes & { diff --git a/apps/webapp/app/components/primitives/Buttons.tsx b/apps/webapp/app/components/primitives/Buttons.tsx index bafd772b0a..c2845f1040 100644 --- a/apps/webapp/app/components/primitives/Buttons.tsx +++ b/apps/webapp/app/components/primitives/Buttons.tsx @@ -163,6 +163,8 @@ const allVariants = { variant: variant, }; +export type ButtonVariant = keyof typeof variant; + export type ButtonContentPropsType = { children?: React.ReactNode; LeadingIcon?: RenderIcon; @@ -173,7 +175,7 @@ export type ButtonContentPropsType = { textAlignLeft?: boolean; className?: string; shortcut?: ShortcutDefinition; - variant: keyof typeof variant; + variant: ButtonVariant; shortcutPosition?: "before-trailing-icon" | "after-trailing-icon"; tooltip?: ReactNode; iconSpacing?: string; @@ -274,7 +276,7 @@ export function ButtonContent(props: ButtonContentPropsType) { {buttonContent} - + {tooltip} {shortcut && renderShortcutKey()} @@ -296,19 +298,17 @@ export const Button = forwardRef( const innerRef = useRef(null); useImperativeHandle(ref, () => innerRef.current as HTMLButtonElement); - if (props.shortcut) { - useShortcutKeys({ - shortcut: props.shortcut, - action: (e) => { - if (innerRef.current) { - innerRef.current.click(); - e.preventDefault(); - e.stopPropagation(); - } - }, - disabled, - }); - } + useShortcutKeys({ + shortcut: props.shortcut, + action: (e) => { + if (innerRef.current) { + innerRef.current.click(); + e.preventDefault(); + e.stopPropagation(); + } + }, + disabled: disabled || !props.shortcut, + }); return ( ); diff --git 
a/apps/webapp/app/components/primitives/CopyableText.tsx b/apps/webapp/app/components/primitives/CopyableText.tsx index 99664b3dc3..fa02e56472 100644 --- a/apps/webapp/app/components/primitives/CopyableText.tsx +++ b/apps/webapp/app/components/primitives/CopyableText.tsx @@ -3,55 +3,95 @@ import { useState } from "react"; import { SimpleTooltip } from "~/components/primitives/Tooltip"; import { useCopy } from "~/hooks/useCopy"; import { cn } from "~/utils/cn"; +import { Button } from "./Buttons"; export function CopyableText({ value, copyValue, className, + asChild, + variant, }: { value: string; copyValue?: string; className?: string; + asChild?: boolean; + variant?: "icon-right" | "text-below"; }) { const [isHovered, setIsHovered] = useState(false); const { copy, copied } = useCopy(copyValue ?? value); - return ( - setIsHovered(false)} - > - setIsHovered(true)}>{value} + const resolvedVariant = variant ?? "icon-right"; + + if (resolvedVariant === "icon-right") { + return ( e.stopPropagation()} - className={cn( - "absolute -right-6 top-0 z-10 size-6 font-sans", - isHovered ? "flex" : "hidden" - )} + className={cn("group relative inline-flex h-6 items-center", className)} + onMouseLeave={() => setIsHovered(false)} > - - {copied ? ( - - ) : ( - - )} - - } - content={copied ? "Copied!" : "Copy"} - className="font-sans" - disableHoverableContent - /> + setIsHovered(true)}>{value} + e.stopPropagation()} + className={cn( + "absolute -right-6 top-0 z-10 size-6 font-sans", + isHovered ? "flex" : "hidden" + )} + > + + {copied ? ( + + ) : ( + + )} + + } + content={copied ? "Copied!" : "Copy"} + className="font-sans" + disableHoverableContent + asChild={asChild} + /> + - - ); + ); + } + + if (resolvedVariant === "text-below") { + return ( + { + e.stopPropagation(); + copy(); + }} + className={cn( + "cursor-pointer bg-transparent py-0 px-1 text-left text-text-bright transition-colors hover:text-white hover:bg-transparent", + className + )} + > + {value} + + } + content={copied ? "Copied" : "Click to copy"} + className="font-sans px-2 py-1" + disableHoverableContent + open={isHovered || copied} + onOpenChange={setIsHovered} + /> + ); + } + + return null; } diff --git a/apps/webapp/app/components/primitives/DateTime.tsx b/apps/webapp/app/components/primitives/DateTime.tsx index 9ce1b7957c..258a18d538 100644 --- a/apps/webapp/app/components/primitives/DateTime.tsx +++ b/apps/webapp/app/components/primitives/DateTime.tsx @@ -13,7 +13,9 @@ type DateTimeProps = { includeTime?: boolean; showTimezone?: boolean; showTooltip?: boolean; + hideDate?: boolean; previousDate?: Date | string | null; // Add optional previous date for comparison + hour12?: boolean; }; export const DateTime = ({ @@ -23,6 +25,7 @@ export const DateTime = ({ includeTime = true, showTimezone = false, showTooltip = true, + hour12 = true, }: DateTimeProps) => { const locales = useLocales(); const [localTimeZone, setLocalTimeZone] = useState("UTC"); @@ -50,7 +53,8 @@ export const DateTime = ({ timeZone ?? localTimeZone, locales, includeSeconds, - includeTime + includeTime, + hour12 ).replace(/\s/g, String.fromCharCode(32))} {showTimezone ? ` (${timeZone ?? "UTC"})` : null} @@ -66,7 +70,8 @@ export function formatDateTime( timeZone: string, locales: string[], includeSeconds: boolean, - includeTime: boolean + includeTime: boolean, + hour12: boolean = true ): string { return new Intl.DateTimeFormat(locales, { year: "numeric", @@ -76,6 +81,7 @@ export function formatDateTime( minute: includeTime ? 
"numeric" : undefined, second: includeTime && includeSeconds ? "numeric" : undefined, timeZone, + hour12, }).format(date); } @@ -122,7 +128,7 @@ export function formatDateTimeISO(date: Date, timeZone: string): string { } // New component that only shows date when it changes -export const SmartDateTime = ({ date, previousDate = null, timeZone = "UTC" }: DateTimeProps) => { +export const SmartDateTime = ({ date, previousDate = null, timeZone = "UTC", hour12 = true }: DateTimeProps) => { const locales = useLocales(); const realDate = typeof date === "string" ? new Date(date) : date; const realPrevDate = previousDate @@ -132,8 +138,8 @@ export const SmartDateTime = ({ date, previousDate = null, timeZone = "UTC" }: D : null; // Initial formatted values - const initialTimeOnly = formatTimeOnly(realDate, timeZone, locales); - const initialWithDate = formatSmartDateTime(realDate, timeZone, locales); + const initialTimeOnly = formatTimeOnly(realDate, timeZone, locales, hour12); + const initialWithDate = formatSmartDateTime(realDate, timeZone, locales, hour12); // State for the formatted time const [formattedDateTime, setFormattedDateTime] = useState( @@ -150,10 +156,10 @@ export const SmartDateTime = ({ date, previousDate = null, timeZone = "UTC" }: D // Format with appropriate function setFormattedDateTime( showDatePart - ? formatSmartDateTime(realDate, userTimeZone, locales) - : formatTimeOnly(realDate, userTimeZone, locales) + ? formatSmartDateTime(realDate, userTimeZone, locales, hour12) + : formatTimeOnly(realDate, userTimeZone, locales, hour12) ); - }, [locales, realDate, realPrevDate]); + }, [locales, realDate, realPrevDate, hour12]); return {formattedDateTime.replace(/\s/g, String.fromCharCode(32))}; }; @@ -168,7 +174,7 @@ function isSameDay(date1: Date, date2: Date): boolean { } // Format with date and time -function formatSmartDateTime(date: Date, timeZone: string, locales: string[]): string { +function formatSmartDateTime(date: Date, timeZone: string, locales: string[], hour12: boolean = true): string { return new Intl.DateTimeFormat(locales, { month: "short", day: "numeric", @@ -178,18 +184,20 @@ function formatSmartDateTime(date: Date, timeZone: string, locales: string[]): s timeZone, // @ts-ignore fractionalSecondDigits works in most modern browsers fractionalSecondDigits: 3, + hour12, }).format(date); } // Format time only -function formatTimeOnly(date: Date, timeZone: string, locales: string[]): string { +function formatTimeOnly(date: Date, timeZone: string, locales: string[], hour12: boolean = true): string { return new Intl.DateTimeFormat(locales, { - hour: "numeric", + hour: "2-digit", minute: "numeric", second: "numeric", timeZone, // @ts-ignore fractionalSecondDigits works in most modern browsers fractionalSecondDigits: 3, + hour12, }).format(date); } @@ -198,6 +206,8 @@ export const DateTimeAccurate = ({ timeZone = "UTC", previousDate = null, showTooltip = true, + hideDate = false, + hour12 = true, }: DateTimeProps) => { const locales = useLocales(); const [localTimeZone, setLocalTimeZone] = useState("UTC"); @@ -214,11 +224,13 @@ export const DateTimeAccurate = ({ }, []); // Smart formatting based on whether date changed - const formattedDateTime = realPrevDate + const formattedDateTime = hideDate + ? formatTimeOnly(realDate, localTimeZone, locales, hour12) + : realPrevDate ? isSameDay(realDate, realPrevDate) - ? 
formatTimeOnly(realDate, localTimeZone, locales) - : formatDateTimeAccurate(realDate, localTimeZone, locales) - : formatDateTimeAccurate(realDate, localTimeZone, locales); + ? formatTimeOnly(realDate, localTimeZone, locales, hour12) + : formatDateTimeAccurate(realDate, localTimeZone, locales, hour12) + : formatDateTimeAccurate(realDate, localTimeZone, locales, hour12); if (!showTooltip) return {formattedDateTime.replace(/\s/g, String.fromCharCode(32))}; @@ -241,7 +253,7 @@ export const DateTimeAccurate = ({ ); }; -function formatDateTimeAccurate(date: Date, timeZone: string, locales: string[]): string { +function formatDateTimeAccurate(date: Date, timeZone: string, locales: string[], hour12: boolean = true): string { const formattedDateTime = new Intl.DateTimeFormat(locales, { month: "short", day: "numeric", @@ -251,26 +263,27 @@ function formatDateTimeAccurate(date: Date, timeZone: string, locales: string[]) timeZone, // @ts-ignore fractionalSecondDigits works in most modern browsers fractionalSecondDigits: 3, + hour12, }).format(date); return formattedDateTime; } -export const DateTimeShort = ({ date, timeZone = "UTC" }: DateTimeProps) => { +export const DateTimeShort = ({ date, timeZone = "UTC", hour12 = true }: DateTimeProps) => { const locales = useLocales(); const realDate = typeof date === "string" ? new Date(date) : date; - const initialFormattedDateTime = formatDateTimeShort(realDate, timeZone, locales); + const initialFormattedDateTime = formatDateTimeShort(realDate, timeZone, locales, hour12); const [formattedDateTime, setFormattedDateTime] = useState(initialFormattedDateTime); useEffect(() => { const resolvedOptions = Intl.DateTimeFormat().resolvedOptions(); - setFormattedDateTime(formatDateTimeShort(realDate, resolvedOptions.timeZone, locales)); - }, [locales, realDate]); + setFormattedDateTime(formatDateTimeShort(realDate, resolvedOptions.timeZone, locales, hour12)); + }, [locales, realDate, hour12]); return {formattedDateTime.replace(/\s/g, String.fromCharCode(32))}; }; -function formatDateTimeShort(date: Date, timeZone: string, locales: string[]): string { +function formatDateTimeShort(date: Date, timeZone: string, locales: string[], hour12: boolean = true): string { const formattedDateTime = new Intl.DateTimeFormat(locales, { hour: "numeric", minute: "numeric", @@ -278,6 +291,7 @@ function formatDateTimeShort(date: Date, timeZone: string, locales: string[]): s timeZone, // @ts-ignore fractionalSecondDigits works in most modern browsers fractionalSecondDigits: 3, + hour12, }).format(date); return formattedDateTime; diff --git a/apps/webapp/app/components/primitives/Dialog.tsx b/apps/webapp/app/components/primitives/Dialog.tsx index 5ac179646b..7c28193717 100644 --- a/apps/webapp/app/components/primitives/Dialog.tsx +++ b/apps/webapp/app/components/primitives/Dialog.tsx @@ -36,8 +36,10 @@ DialogOverlay.displayName = DialogPrimitive.Overlay.displayName; const DialogContent = React.forwardRef< React.ElementRef, - React.ComponentPropsWithoutRef ->(({ className, children, ...props }, ref) => ( + React.ComponentPropsWithoutRef & { + showCloseButton?: boolean; + } +>(({ className, children, showCloseButton = true, ...props }, ref) => (
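// Aside: the hour12 flag threaded through the DateTime helpers above is passed straight to
// Intl.DateTimeFormat, toggling between 12-hour and 24-hour clocks. The locale and timezone
// below are arbitrary examples:

const sample = new Date("2024-01-15T21:30:00Z");

const twelveHour = new Intl.DateTimeFormat("en-US", {
  hour: "numeric",
  minute: "numeric",
  timeZone: "UTC",
  hour12: true,
}).format(sample); // "9:30 PM"

const twentyFourHour = new Intl.DateTimeFormat("en-US", {
  hour: "numeric",
  minute: "numeric",
  timeZone: "UTC",
  hour12: false,
}).format(sample); // "21:30"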
{children} - - - - Close - + {showCloseButton && ( + + + + Close + + )}
)); diff --git a/apps/webapp/app/components/primitives/DurationPicker.tsx b/apps/webapp/app/components/primitives/DurationPicker.tsx new file mode 100644 index 0000000000..e4f5af6520 --- /dev/null +++ b/apps/webapp/app/components/primitives/DurationPicker.tsx @@ -0,0 +1,201 @@ +import { Input } from "~/components/primitives/Input"; +import { cn } from "~/utils/cn"; +import React, { useRef, useState, useEffect } from "react"; +import { Button } from "./Buttons"; + +export interface DurationPickerProps { + id?: string; // used for the hidden input for form submission + name?: string; // used for the hidden input for form submission + defaultValueSeconds?: number; + value?: number; + onChange?: (totalSeconds: number) => void; + variant?: "small" | "medium"; + showClearButton?: boolean; +} + +export function DurationPicker({ + name, + defaultValueSeconds: defaultValue = 0, + value: controlledValue, + onChange, + variant = "small", + showClearButton = true, +}: DurationPickerProps) { + // Use controlled value if provided, otherwise use default + const initialValue = controlledValue ?? defaultValue; + + const defaultHours = Math.floor(initialValue / 3600); + const defaultMinutes = Math.floor((initialValue % 3600) / 60); + const defaultSeconds = initialValue % 60; + + const [hours, setHours] = useState(defaultHours); + const [minutes, setMinutes] = useState(defaultMinutes); + const [seconds, setSeconds] = useState(defaultSeconds); + + const minuteRef = useRef(null); + const hourRef = useRef(null); + const secondRef = useRef(null); + + const totalSeconds = hours * 3600 + minutes * 60 + seconds; + + const isEmpty = hours === 0 && minutes === 0 && seconds === 0; + + // Sync internal state with external value changes + useEffect(() => { + if (controlledValue !== undefined && controlledValue !== totalSeconds) { + const newHours = Math.floor(controlledValue / 3600); + const newMinutes = Math.floor((controlledValue % 3600) / 60); + const newSeconds = controlledValue % 60; + + setHours(newHours); + setMinutes(newMinutes); + setSeconds(newSeconds); + } + }, [controlledValue]); + + useEffect(() => { + onChange?.(totalSeconds); + }, [totalSeconds, onChange]); + + const handleHoursChange = (e: React.ChangeEvent) => { + const value = parseInt(e.target.value) || 0; + setHours(Math.max(0, value)); + }; + + const handleMinutesChange = (e: React.ChangeEvent) => { + const value = parseInt(e.target.value) || 0; + if (value >= 60) { + setHours((prev) => prev + Math.floor(value / 60)); + setMinutes(value % 60); + return; + } + + setMinutes(Math.max(0, Math.min(59, value))); + }; + + const handleSecondsChange = (e: React.ChangeEvent) => { + const value = parseInt(e.target.value) || 0; + if (value >= 60) { + setMinutes((prev) => { + const newMinutes = prev + Math.floor(value / 60); + if (newMinutes >= 60) { + setHours((prevHours) => prevHours + Math.floor(newMinutes / 60)); + return newMinutes % 60; + } + return newMinutes; + }); + setSeconds(value % 60); + return; + } + + setSeconds(Math.max(0, Math.min(59, value))); + }; + + const handleKeyDown = ( + e: React.KeyboardEvent, + nextRef?: React.RefObject, + prevRef?: React.RefObject + ) => { + if (e.key === "Tab") { + return; + } + + if (e.key === "ArrowRight" && nextRef) { + e.preventDefault(); + nextRef.current?.focus(); + nextRef.current?.select(); + return; + } + + if (e.key === "ArrowLeft" && prevRef) { + e.preventDefault(); + prevRef.current?.focus(); + prevRef.current?.select(); + return; + } + }; + + const clearDuration = () => { + setHours(0); + setMinutes(0); 
+ setSeconds(0); + hourRef.current?.focus(); + }; + + return ( +
+ + +
+
+ handleKeyDown(e, minuteRef)} + onFocus={(e) => e.target.select()} + type="number" + min={0} + inputMode="numeric" + /> + + h + +
+
+ handleKeyDown(e, secondRef, hourRef)} + onFocus={(e) => e.target.select()} + type="number" + min={0} + max={59} + inputMode="numeric" + /> + + m + +
+
+ handleKeyDown(e, undefined, minuteRef)} + onFocus={(e) => e.target.select()} + type="number" + min={0} + max={59} + inputMode="numeric" + /> + + s + +
+
+ + {showClearButton && ( + + )} +
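// Aside: a minimal controlled usage sketch for DurationPicker. It splits the incoming
// total (e.g. 3661s becomes 1h 1m 1s) across the three inputs and reports the recombined
// total through onChange. The import path and the surrounding field are assumptions:

import { useState } from "react";
import { DurationPicker } from "~/components/primitives/DurationPicker";

export function TimeoutField() {
  const [timeoutSeconds, setTimeoutSeconds] = useState(300); // 5 minutes

  return (
    <DurationPicker
      name="timeoutSeconds"
      value={timeoutSeconds}
      onChange={setTimeoutSeconds}
      variant="medium"
    />
  );
}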
+ ); +} diff --git a/apps/webapp/app/components/primitives/Input.tsx b/apps/webapp/app/components/primitives/Input.tsx index 7cb4b8a32d..3364e48bed 100644 --- a/apps/webapp/app/components/primitives/Input.tsx +++ b/apps/webapp/app/components/primitives/Input.tsx @@ -1,16 +1,13 @@ import * as React from "react"; import { useImperativeHandle, useRef } from "react"; import { cn } from "~/utils/cn"; -import { Icon, RenderIcon } from "./Icon"; +import { Icon, type RenderIcon } from "./Icon"; const containerBase = "has-[:focus-visible]:outline-none has-[:focus-visible]:ring-1 has-[:focus-visible]:ring-charcoal-650 has-[:focus-visible]:ring-offset-0 has-[:focus]:border-ring has-[:focus]:outline-none has-[:focus]:ring-1 has-[:focus]:ring-ring has-[:disabled]:cursor-not-allowed has-[:disabled]:opacity-50 ring-offset-background transition cursor-text"; const inputBase = - "h-full w-full text-text-bright bg-transparent file:border-0 file:bg-transparent file:text-base file:font-medium placeholder:text-muted-foreground focus-visible:outline-none focus-visible:ring-0 disabled:cursor-not-allowed outline-none ring-0 border-none"; - -const shortcutBase = - "grid h-fit place-content-center border border-dimmed/40 font-normal text-text-dimmed"; + "h-full w-full text-text-bright bg-transparent file:border-0 file:bg-transparent file:text-base file:font-medium placeholder:text-muted-foreground focus-visible:outline-none focus-visible:ring-0 disabled:cursor-not-allowed outline-none ring-0 border-none [&::-webkit-outer-spin-button]:appearance-none [&::-webkit-inner-spin-button]:appearance-none [&::-webkit-outer-spin-button]:m-0 [&::-webkit-inner-spin-button]:m-0 [&]:[-moz-appearance:textfield]"; const variants = { large: { @@ -18,52 +15,90 @@ const variants = { "px-1 w-full h-10 rounded-[3px] border border-charcoal-800 bg-charcoal-750 hover:border-charcoal-600 hover:bg-charcoal-650", input: "px-2 text-sm", iconSize: "size-4 ml-1", - shortcut: "mr-1 min-w-[22px] rounded-sm py-[3px] px-[5px] text-[0.6rem] select-none", + accessory: "pr-1", }, medium: { container: "px-1 h-8 w-full rounded border border-charcoal-800 bg-charcoal-750 hover:border-charcoal-600 hover:bg-charcoal-650", input: "px-1.5 rounded text-sm", iconSize: "size-4 ml-0.5", - shortcut: "min-w-[22px] rounded-sm py-[3px] px-[5px] text-[0.6rem]", + accessory: "pr-1", }, small: { container: "px-1 h-6 w-full rounded border border-charcoal-800 bg-charcoal-750 hover:border-charcoal-600 hover:bg-charcoal-650", input: "px-1 rounded text-xs", iconSize: "size-3 ml-0.5", - shortcut: "min-w-[22px] rounded-[2px] py-px px-[3px] text-[0.5rem]", + accessory: "pr-0.5", }, tertiary: { container: "px-1 h-6 w-full rounded hover:bg-charcoal-750", input: "px-1 rounded text-xs", iconSize: "size-3 ml-0.5", - shortcut: "min-w-[22px] rounded-[2px] py-px px-[3px] text-[0.5rem]", + accessory: "pr-0.5", + }, + "secondary-small": { + container: + "px-1 h-6 w-full rounded border border-charcoal-600 hover:border-charcoal-550 bg-grid-dimmed hover:bg-charcoal-650", + input: "px-1 rounded text-xs", + iconSize: "size-3 ml-0.5", + accessory: "pr-0.5", + }, + "outline/large": { + container: "px-1 h-10 w-full rounded border border-grid-bright hover:border-charcoal-550", + input: "px-2 rounded text-sm", + iconSize: "size-4 ml-1", + accessory: "pr-1", + }, + "outline/medium": { + container: "px-1 h-8 w-full rounded border border-grid-bright hover:border-charcoal-550", + input: "px-1 rounded text-sm", + iconSize: "size-4 ml-0.5", + accessory: "pr-1", + }, + "outline/small": { + container: 
"px-1 h-6 w-full rounded border border-grid-bright hover:border-charcoal-550", + input: "px-1 rounded text-xs", + iconSize: "size-3 ml-0.5", + accessory: "pr-0.5", }, }; export type InputProps = React.InputHTMLAttributes & { variant?: keyof typeof variants; icon?: RenderIcon; - shortcut?: string; + accessory?: React.ReactNode; fullWidth?: boolean; + containerClassName?: string; }; const Input = React.forwardRef( - ({ className, type, shortcut, fullWidth = true, variant = "medium", icon, ...props }, ref) => { + ( + { + className, + type, + accessory, + fullWidth = true, + variant = "medium", + icon, + containerClassName, + ...props + }, + ref + ) => { const innerRef = useRef(null); useImperativeHandle(ref, () => innerRef.current as HTMLInputElement); - const containerClassName = variants[variant].container; + const variantContainerClassName = variants[variant].container; const inputClassName = variants[variant].input; const iconClassName = variants[variant].iconSize; - const shortcutClassName = variants[variant].shortcut; return (
( ref={innerRef} {...props} /> - {shortcut &&
{shortcut}
} + {accessory &&
{accessory}
}
); } diff --git a/apps/webapp/app/components/primitives/InputNumberStepper.tsx b/apps/webapp/app/components/primitives/InputNumberStepper.tsx new file mode 100644 index 0000000000..f4aafd5cae --- /dev/null +++ b/apps/webapp/app/components/primitives/InputNumberStepper.tsx @@ -0,0 +1,220 @@ +import { MinusIcon, PlusIcon } from "@heroicons/react/20/solid"; +import { type ChangeEvent, useRef } from "react"; +import { cn } from "~/utils/cn"; + +type InputNumberStepperProps = Omit & { + step?: number; + min?: number; + max?: number; + round?: boolean; + controlSize?: "base" | "large"; +}; + +export function InputNumberStepper({ + value, + onChange, + step = 50, + min, + max, + round = true, + controlSize = "base", + name, + id, + disabled = false, + readOnly = false, + className, + placeholder = "Type a number", + ...props +}: InputNumberStepperProps) { + const inputRef = useRef(null); + + const handleStepUp = () => { + if (!inputRef.current || disabled) return; + + // If rounding is enabled, ensure we start from a rounded base before stepping + if (round) { + // If field is empty, treat as 0 (or min if provided) before stepping up + if (inputRef.current.value === "") { + inputRef.current.value = String(min ?? 0); + } else { + commitRoundedFromInput(); + } + } + inputRef.current.stepUp(); + const event = new Event("change", { bubbles: true }); + inputRef.current.dispatchEvent(event); + }; + + const handleStepDown = () => { + if (!inputRef.current || disabled) return; + + // If rounding is enabled, ensure we start from a rounded base before stepping + if (round) { + // If field is empty, treat as 0 (or min if provided) before stepping down + if (inputRef.current.value === "") { + inputRef.current.value = String(min ?? 0); + } else { + commitRoundedFromInput(); + } + } + inputRef.current.stepDown(); + const event = new Event("change", { bubbles: true }); + inputRef.current.dispatchEvent(event); + }; + + const numericValue = value === "" ? NaN : (value as number); + const isMinDisabled = min !== undefined && !Number.isNaN(numericValue) && numericValue <= min; + const isMaxDisabled = max !== undefined && !Number.isNaN(numericValue) && numericValue >= max; + + function clamp(val: number): number { + if (Number.isNaN(val)) return typeof value === "number" ? value : min ?? 0; + let next = val; + if (min !== undefined) next = Math.max(min, next); + if (max !== undefined) next = Math.min(max, next); + return next; + } + + function roundToStep(val: number): number { + if (step <= 0) return val; + const base = min ?? 0; + const shifted = val - base; + const quotient = shifted / step; + const floored = Math.floor(quotient); + const ceiled = Math.ceil(quotient); + const down = base + floored * step; + const up = base + ceiled * step; + const distDown = Math.abs(val - down); + const distUp = Math.abs(up - val); + return distUp < distDown ? 
up : down; + } + + function commitRoundedFromInput() { + if (!inputRef.current || disabled || readOnly) return; + const el = inputRef.current; + const raw = el.value; + if (raw === "") return; // do not coerce empty to 0; keep placeholder visible + const numeric = Number(raw); + if (Number.isNaN(numeric)) return; // ignore non-numeric + const rounded = clamp(roundToStep(numeric)); + if (String(rounded) === String(value)) return; + // Update the real input's value for immediate UI feedback + el.value = String(rounded); + // Invoke consumer onChange with the real element as target/currentTarget + onChange?.({ + target: el, + currentTarget: el, + } as unknown as ChangeEvent); + } + + const sizeStyles = { + base: { + container: "h-9", + input: "text-sm px-3", + button: "size-6", + icon: "size-3.5", + gap: "gap-1 pr-1.5", + }, + large: { + container: "h-11 rounded-md", + input: "text-base px-3.5", + button: "size-8", + icon: "size-5", + gap: "gap-[0.3125rem] pr-[0.3125rem]", + }, + } as const; + + const size = sizeStyles[controlSize]; + + return ( +
+ { + // Allow empty string to pass through so user can clear the field + if (e.currentTarget.value === "") { + // reflect emptiness in the input and notify consumer as empty + if (inputRef.current) inputRef.current.value = ""; + onChange?.({ + target: e.currentTarget, + currentTarget: e.currentTarget, + } as ChangeEvent); + return; + } + onChange?.(e); + }} + onBlur={(e) => { + // If blur is caused by clicking our step buttons, we prevent pointerdown + // so blur shouldn't fire. This is for safety in case of keyboard focus move. + if (round) commitRoundedFromInput(); + }} + onKeyDown={(e) => { + if (e.key === "Enter" && round) { + e.preventDefault(); + commitRoundedFromInput(); + } + }} + step={step} + min={min} + max={max} + disabled={disabled} + readOnly={readOnly} + className={cn( + "placeholder:text-muted-foreground h-full grow border-0 bg-transparent text-left text-text-bright outline-none ring-0 focus:border-0 focus:outline-none focus:ring-0 disabled:cursor-not-allowed", + size.input, + // Hide number input arrows + "[type=number]:border-0 [appearance:textfield] [&::-webkit-inner-spin-button]:appearance-none [&::-webkit-outer-spin-button]:appearance-none" + )} + {...props} + /> + +
+ + + +
+
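// Aside: roundToStep above snaps a typed value to the nearest multiple of `step` measured
// from `min`, with exact ties rounding down. A standalone restatement of that math with
// worked examples (the values are illustrative):

function roundToNearestStep(value: number, step: number, min = 0): number {
  if (step <= 0) return value;
  const shifted = value - min;
  const down = min + Math.floor(shifted / step) * step;
  const up = min + Math.ceil(shifted / step) * step;
  // Prefer the lower multiple unless the upper one is strictly closer.
  return Math.abs(up - value) < Math.abs(value - down) ? up : down;
}

roundToNearestStep(174, 50); // 150 (24 away, vs 26 up to 200)
roundToNearestStep(180, 50); // 200 (20 away, vs 30 down to 150)
roundToNearestStep(175, 50); // 150 (exact tie rounds down)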
+ ); +} diff --git a/apps/webapp/app/components/primitives/InputOTP.tsx b/apps/webapp/app/components/primitives/InputOTP.tsx new file mode 100644 index 0000000000..54ca070ac0 --- /dev/null +++ b/apps/webapp/app/components/primitives/InputOTP.tsx @@ -0,0 +1,110 @@ +"use client"; + +import * as React from "react"; +import { OTPInput, OTPInputContext } from "input-otp"; +import { MinusIcon } from "lucide-react"; + +import { cn } from "~/utils/cn"; + +const variants = { + default: { + container: "flex items-center gap-2 has-disabled:opacity-50", + group: "flex items-center", + slot: "data-[active=true]:border-ring data-[active=true]:ring-ring/50 data-[active=true]:aria-invalid:ring-destructive/20 dark:data-[active=true]:aria-invalid:ring-destructive/40 aria-invalid:border-destructive data-[active=true]:aria-invalid:border-destructive dark:bg-input/30 border-input relative flex size-9 items-center justify-center border-y border-r text-sm outline-none transition-all first:rounded-l-md first:border-l last:rounded-r-md data-[active=true]:z-10 data-[active=true]:ring-[3px]", + }, + large: { + container: "flex items-center gap-3 has-disabled:opacity-50", + group: "flex items-center gap-1", + slot: "data-[active=true]:border-ring data-[active=true]:ring-ring/50 data-[active=true]:aria-invalid:ring-destructive/20 dark:data-[active=true]:aria-invalid:ring-destructive/40 aria-invalid:border-destructive data-[active=true]:aria-invalid:border-destructive bg-charcoal-750 border-charcoal-700 hover:border-charcoal-600 hover:bg-charcoal-650 relative flex h-12 w-12 items-center justify-center border text-base outline-none transition-all rounded-md data-[active=true]:z-10 data-[active=true]:ring-[3px] data-[active=true]:border-indigo-500", + }, + minimal: { + container: "flex items-center gap-2 has-disabled:opacity-50", + group: "flex items-center", + slot: "data-[active=true]:border-ring data-[active=true]:ring-ring/50 border-transparent bg-transparent relative flex h-9 w-9 items-center justify-center border-b-2 border-b-charcoal-600 text-sm outline-none transition-all data-[active=true]:border-b-indigo-500 data-[active=true]:z-10", + }, +}; + +function InputOTP({ + className, + containerClassName, + variant = "default", + fullWidth = false, + ...props +}: React.ComponentProps & { + containerClassName?: string; + variant?: keyof typeof variants; + fullWidth?: boolean; +}) { + const variantStyles = variants[variant]; + + return ( + + ); +} + +function InputOTPGroup({ + className, + variant = "default", + fullWidth = false, + ...props +}: React.ComponentProps<"div"> & { + variant?: keyof typeof variants; + fullWidth?: boolean; +}) { + const variantStyles = variants[variant]; + + return ( +
+ ); +} + +function InputOTPSlot({ + index, + className, + variant = "default", + fullWidth = false, + ...props +}: React.ComponentProps<"div"> & { + index: number; + variant?: keyof typeof variants; + fullWidth?: boolean; +}) { + const inputOTPContext = React.useContext(OTPInputContext); + const { char, hasFakeCaret, isActive } = inputOTPContext?.slots[index] ?? {}; + const variantStyles = variants[variant]; + + return ( +
+ {char} + {hasFakeCaret && ( +
+
+
+ )} +
+ ); +} + +function InputOTPSeparator({ ...props }: React.ComponentProps<"div">) { + return ( +
+ +
+ ); +} + +export { InputOTP, InputOTPGroup, InputOTPSlot, InputOTPSeparator }; diff --git a/apps/webapp/app/components/primitives/Label.tsx b/apps/webapp/app/components/primitives/Label.tsx index 000b407911..a9f15f68e3 100644 --- a/apps/webapp/app/components/primitives/Label.tsx +++ b/apps/webapp/app/components/primitives/Label.tsx @@ -4,7 +4,7 @@ import { InfoIconTooltip, SimpleTooltip } from "./Tooltip"; const variants = { small: { - text: "font-sans text-sm font-normal text-text-bright leading-tight flex items-center gap-1", + text: "font-sans text-[0.8125rem] font-normal text-text-bright leading-tight flex items-center gap-1", }, medium: { text: "font-sans text-sm text-text-bright leading-tight flex items-center gap-1", diff --git a/apps/webapp/app/components/primitives/Pagination.tsx b/apps/webapp/app/components/primitives/Pagination.tsx index 20a1a93be2..f465083710 100644 --- a/apps/webapp/app/components/primitives/Pagination.tsx +++ b/apps/webapp/app/components/primitives/Pagination.tsx @@ -19,36 +19,75 @@ export function PaginationControls({ } return ( -
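// Aside: the InputOTP wrappers above style the `input-otp` library's OTPInput. A minimal
// 6-digit sketch using the new "large" variant; the import path and the onComplete handler
// are assumptions:

import { InputOTP, InputOTPGroup, InputOTPSlot } from "~/components/primitives/InputOTP";

export function SixDigitCodeInput({ onComplete }: { onComplete: (code: string) => void }) {
  return (
    <InputOTP maxLength={6} variant="large" onComplete={onComplete}>
      <InputOTPGroup variant="large">
        {[0, 1, 2, 3, 4, 5].map((index) => (
          <InputOTPSlot key={index} index={index} variant="large" />
        ))}
      </InputOTPGroup>
    </InputOTP>
  );
}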
} - variant={"minimal/small"} + variant={"secondary/small"} shortcut={shortcut} - tooltipTitle={"Filter runs"} + tooltipTitle={"Filter batches"} > Filter @@ -276,10 +281,12 @@ function AppliedStatusFilter() { }> } value={appliedSummary( statuses.map((v) => batchStatusTitle(v as BatchTaskRunStatus)) )} onRemove={() => del(["statuses", "cursor", "direction"])} + variant="secondary/small" /> } @@ -396,8 +403,10 @@ function AppliedBatchIdFilter() { }> } value={batchId} onRemove={() => del(["id", "cursor", "direction"])} + variant="secondary/small" /> } diff --git a/apps/webapp/app/components/runs/v3/BatchStatus.tsx b/apps/webapp/app/components/runs/v3/BatchStatus.tsx index c67b1b4016..2d6f83cc01 100644 --- a/apps/webapp/app/components/runs/v3/BatchStatus.tsx +++ b/apps/webapp/app/components/runs/v3/BatchStatus.tsx @@ -1,17 +1,27 @@ -import { CheckCircleIcon, XCircleIcon } from "@heroicons/react/20/solid"; -import { BatchTaskRunStatus } from "@trigger.dev/database"; +import { + CheckCircleIcon, + ExclamationTriangleIcon, + XCircleIcon, +} from "@heroicons/react/20/solid"; +import type { BatchTaskRunStatus } from "@trigger.dev/database"; import assertNever from "assert-never"; import { Spinner } from "~/components/primitives/Spinner"; import { cn } from "~/utils/cn"; -export const allBatchStatuses = ["PENDING", "COMPLETED", "ABORTED"] as const satisfies Readonly< - Array ->; +export const allBatchStatuses = [ + "PROCESSING", + "PENDING", + "COMPLETED", + "PARTIAL_FAILED", + "ABORTED", +] as const satisfies Readonly>; const descriptions: Record = { + PROCESSING: "The batch is being processed and runs are being created.", PENDING: "The batch has child runs that have not yet completed.", COMPLETED: "All the batch child runs have finished.", - ABORTED: "The batch was aborted because some child tasks could not be triggered.", + PARTIAL_FAILED: "Some runs failed to be created. 
Successfully created runs are still executing.", + ABORTED: "The batch was aborted because child tasks could not be triggered.", }; export function descriptionForBatchStatus(status: BatchTaskRunStatus): string { @@ -47,10 +57,14 @@ export function BatchStatusIcon({ className: string; }) { switch (status) { + case "PROCESSING": + return ; case "PENDING": return ; case "COMPLETED": return ; + case "PARTIAL_FAILED": + return ; case "ABORTED": return ; default: { @@ -61,10 +75,14 @@ export function BatchStatusIcon({ export function batchStatusColor(status: BatchTaskRunStatus): string { switch (status) { + case "PROCESSING": + return "text-blue-500"; case "PENDING": return "text-pending"; case "COMPLETED": return "text-success"; + case "PARTIAL_FAILED": + return "text-warning"; case "ABORTED": return "text-error"; default: { @@ -75,10 +93,14 @@ export function batchStatusColor(status: BatchTaskRunStatus): string { export function batchStatusTitle(status: BatchTaskRunStatus): string { switch (status) { + case "PROCESSING": + return "Processing"; case "PENDING": return "In progress"; case "COMPLETED": return "Completed"; + case "PARTIAL_FAILED": + return "Partial failure"; case "ABORTED": return "Aborted"; default: { diff --git a/apps/webapp/app/components/runs/v3/BulkAction.tsx b/apps/webapp/app/components/runs/v3/BulkAction.tsx index ab570b4fa1..3c25aa4409 100644 --- a/apps/webapp/app/components/runs/v3/BulkAction.tsx +++ b/apps/webapp/app/components/runs/v3/BulkAction.tsx @@ -1,27 +1,30 @@ -import { ArrowPathIcon, NoSymbolIcon } from "@heroicons/react/20/solid"; -import { BulkActionType } from "@trigger.dev/database"; +import { ArrowPathIcon, CheckCircleIcon, NoSymbolIcon } from "@heroicons/react/20/solid"; +import type { BulkActionStatus, BulkActionType } from "@trigger.dev/database"; import assertNever from "assert-never"; +import { Spinner } from "~/components/primitives/Spinner"; import { cn } from "~/utils/cn"; -export function BulkActionStatusCombo({ +export function BulkActionTypeCombo({ type, className, iconClassName, + labelClassName, }: { type: BulkActionType; className?: string; iconClassName?: string; + labelClassName?: string; }) { return ( - + ); } -export function BulkActionLabel({ type }: { type: BulkActionType }) { - return {bulkActionTitle(type)}; +export function BulkActionLabel({ type, className }: { type: BulkActionType; className?: string }) { + return {bulkActionTitle(type)}; } export function BulkActionIcon({ type, className }: { type: BulkActionType; className: string }) { @@ -71,3 +74,62 @@ export function bulkActionVerb(type: BulkActionType): string { } } } + +export function BulkActionStatusCombo({ + status, + className, + iconClassName, + labelClassName, +}: { + status: BulkActionStatus; + className?: string; + iconClassName?: string; + labelClassName?: string; +}) { + return ( + + + + + ); +} + +export function BulkActionStatusIcon({ + status, + className, +}: { + status: BulkActionStatus; + className: string; +}) { + switch (status) { + case "PENDING": + return ; + case "COMPLETED": + return ; + case "ABORTED": + return ; + default: { + assertNever(status); + } + } +} + +export function BulkActionStatusLabel({ + status, + className, +}: { + status: BulkActionStatus; + className?: string; +}) { + switch (status) { + case "PENDING": + return In progress; + case "COMPLETED": + return Completed; + case "ABORTED": + return Aborted; + default: { + assertNever(status); + } + } +} diff --git a/apps/webapp/app/components/runs/v3/DeploymentError.tsx 
b/apps/webapp/app/components/runs/v3/DeploymentError.tsx index 517f91789b..11ee62dfa3 100644 --- a/apps/webapp/app/components/runs/v3/DeploymentError.tsx +++ b/apps/webapp/app/components/runs/v3/DeploymentError.tsx @@ -9,7 +9,7 @@ type DeploymentErrorProps = { export function DeploymentError({ errorData }: DeploymentErrorProps) { return ( -
+
{errorData.message && {errorData.message}} {errorData.stack && ( @@ -18,6 +18,7 @@ export function DeploymentError({ errorData }: DeploymentErrorProps) { showLineNumbers={false} code={errorData.stack} maxLines={20} + showTextWrapping /> )} {errorData.stderr && ( @@ -28,6 +29,7 @@ export function DeploymentError({ errorData }: DeploymentErrorProps) { showLineNumbers={false} code={errorData.stderr} maxLines={20} + showTextWrapping /> )} diff --git a/apps/webapp/app/components/runs/v3/DeploymentStatus.tsx b/apps/webapp/app/components/runs/v3/DeploymentStatus.tsx index dbfaf12d0d..a2a6d199ab 100644 --- a/apps/webapp/app/components/runs/v3/DeploymentStatus.tsx +++ b/apps/webapp/app/components/runs/v3/DeploymentStatus.tsx @@ -2,9 +2,10 @@ import { CheckCircleIcon, ExclamationTriangleIcon, NoSymbolIcon, + RectangleStackIcon, XCircleIcon, } from "@heroicons/react/20/solid"; -import { WorkerDeploymentStatus } from "@trigger.dev/database"; +import type { WorkerDeploymentStatus } from "@trigger.dev/database"; import assertNever from "assert-never"; import { Spinner } from "~/components/primitives/Spinner"; import { cn } from "~/utils/cn"; @@ -49,6 +50,10 @@ export function DeploymentStatusIcon({ }) { switch (status) { case "PENDING": + return ( + + ); + case "INSTALLING": case "BUILDING": case "DEPLOYING": return ; @@ -73,6 +78,8 @@ export function DeploymentStatusIcon({ export function deploymentStatusClassNameColor(status: WorkerDeploymentStatus): string { switch (status) { case "PENDING": + return "text-charcoal-500"; + case "INSTALLING": case "BUILDING": case "DEPLOYING": return "text-pending"; @@ -92,7 +99,9 @@ export function deploymentStatusClassNameColor(status: WorkerDeploymentStatus): export function deploymentStatusTitle(status: WorkerDeploymentStatus, isBuilt: boolean): string { switch (status) { case "PENDING": - return "Pendingโ€ฆ"; + return "Queuedโ€ฆ"; + case "INSTALLING": + return "Installingโ€ฆ"; case "BUILDING": return "Buildingโ€ฆ"; case "DEPLOYING": @@ -121,17 +130,22 @@ export function deploymentStatusTitle(status: WorkerDeploymentStatus, isBuilt: b // PENDING and CANCELED are not used so are ommited from the UI export const deploymentStatuses: WorkerDeploymentStatus[] = [ + "PENDING", + "INSTALLING", "BUILDING", "DEPLOYING", "DEPLOYED", "FAILED", "TIMED_OUT", + "CANCELED", ]; export function deploymentStatusDescription(status: WorkerDeploymentStatus): string { switch (status) { case "PENDING": return "The deployment is queued and waiting to be processed."; + case "INSTALLING": + return "The project dependencies are being installed."; case "BUILDING": return "The code is being built and prepared for deployment."; case "DEPLOYING": diff --git a/apps/webapp/app/components/runs/v3/PacketDisplay.tsx b/apps/webapp/app/components/runs/v3/PacketDisplay.tsx index 4da733f2c0..24a9b66b67 100644 --- a/apps/webapp/app/components/runs/v3/PacketDisplay.tsx +++ b/apps/webapp/app/components/runs/v3/PacketDisplay.tsx @@ -1,7 +1,11 @@ import { CloudArrowDownIcon } from "@heroicons/react/20/solid"; import { CodeBlock } from "~/components/code/CodeBlock"; +import { InlineCode } from "~/components/code/InlineCode"; import { LinkButton } from "~/components/primitives/Buttons"; +import { Header3 } from "~/components/primitives/Headers"; import { Paragraph } from "~/components/primitives/Paragraph"; +import { TextLink } from "~/components/primitives/TextLink"; +import { docsPath } from "~/utils/pathBuilder"; export function PacketDisplay({ data, @@ -15,13 +19,26 @@ export function PacketDisplay({ 
switch (dataType) { case "application/store": { return ( -
- - {title} +
+ {title} + + This {title.toLowerCase()} exceeded the size limit and was automatically offloaded to + object storage. You can retrieve it using{" "} + runs.retrieve or download it directly + below. Learn more + . - - Download - +
+ + Download {title.toLowerCase()} + +
); } @@ -33,6 +50,7 @@ export function PacketDisplay({ code={data} maxLines={20} showLineNumbers={false} + showTextWrapping /> ); } diff --git a/apps/webapp/app/components/runs/v3/ReplayRunDialog.tsx b/apps/webapp/app/components/runs/v3/ReplayRunDialog.tsx index 207217504d..83490aee11 100644 --- a/apps/webapp/app/components/runs/v3/ReplayRunDialog.tsx +++ b/apps/webapp/app/components/runs/v3/ReplayRunDialog.tsx @@ -1,18 +1,41 @@ +import { conform, useForm } from "@conform-to/react"; +import { parse } from "@conform-to/zod"; import { DialogClose } from "@radix-ui/react-dialog"; -import { Form, useNavigation, useSubmit } from "@remix-run/react"; -import { useCallback, useEffect, useRef } from "react"; +import { Form, useActionData, useNavigation, useParams, useSubmit } from "@remix-run/react"; +import { useCallback, useEffect, useMemo, useRef, useState } from "react"; import { type UseDataFunctionReturn, useTypedFetcher } from "remix-typedjson"; +import { TaskIcon } from "~/assets/icons/TaskIcon"; import { JSONEditor } from "~/components/code/JSONEditor"; import { EnvironmentCombo } from "~/components/environments/EnvironmentLabel"; import { Button } from "~/components/primitives/Buttons"; import { DialogContent, DialogHeader } from "~/components/primitives/Dialog"; -import { Header3 } from "~/components/primitives/Headers"; +import { DurationPicker } from "~/components/primitives/DurationPicker"; +import { Fieldset } from "~/components/primitives/Fieldset"; +import { FormError } from "~/components/primitives/FormError"; +import { Hint } from "~/components/primitives/Hint"; +import { Input } from "~/components/primitives/Input"; import { InputGroup } from "~/components/primitives/InputGroup"; import { Label } from "~/components/primitives/Label"; import { Paragraph } from "~/components/primitives/Paragraph"; +import { type loader as queuesLoader } from "~/routes/resources.orgs.$organizationSlug.projects.$projectParam.env.$envParam.queues"; +import { + ResizableHandle, + ResizablePanel, + ResizablePanelGroup, +} from "~/components/primitives/Resizable"; import { Select, SelectItem } from "~/components/primitives/Select"; import { Spinner, SpinnerWhite } from "~/components/primitives/Spinner"; +import { TabButton, TabContainer } from "~/components/primitives/Tabs"; +import { TextLink } from "~/components/primitives/TextLink"; import { type loader } from "~/routes/resources.taskruns.$runParam.replay"; +import { docsPath } from "~/utils/pathBuilder"; +import { ReplayRunData } from "~/v3/replayTask"; +import { RectangleStackIcon } from "@heroicons/react/20/solid"; +import { Badge } from "~/components/primitives/Badge"; +import { RunTagInput } from "./RunTagInput"; +import { MachinePresetName } from "@trigger.dev/core/v3"; +import { InfoIconTooltip } from "~/components/primitives/Tooltip"; +import { divide } from "effect/Duration"; type ReplayRunDialogProps = { runFriendlyId: string; @@ -21,154 +44,528 @@ type ReplayRunDialogProps = { export function ReplayRunDialog({ runFriendlyId, failedRedirect }: ReplayRunDialogProps) { return ( - + ); } function ReplayContent({ runFriendlyId, failedRedirect }: ReplayRunDialogProps) { - const fetcher = useTypedFetcher(); - const isLoading = fetcher.state === "loading"; + const replayDataFetcher = useTypedFetcher(); + const isLoading = replayDataFetcher.state === "loading"; + const queueFetcher = useTypedFetcher(); + const [environmentIdOverride, setEnvironmentIdOverride] = useState(undefined); + + useEffect(() => { + const searchParams = new URLSearchParams(); 
+ if (environmentIdOverride) { + searchParams.set("environmentIdOverride", environmentIdOverride); + } + + replayDataFetcher.load( + `/resources/taskruns/${runFriendlyId}/replay?${searchParams.toString()}` + ); + }, [runFriendlyId, environmentIdOverride]); + + const params = useParams(); useEffect(() => { - fetcher.load(`/resources/taskruns/${runFriendlyId}/replay`); - }, [runFriendlyId]); + if (params.organizationSlug && params.projectParam && params.envParam) { + const searchParams = new URLSearchParams(); + searchParams.set("type", "custom"); + searchParams.set("per_page", "100"); + + let envSlug = params.envParam; + + if (environmentIdOverride) { + const environmentOverride = replayDataFetcher.data?.environments.find( + (env) => env.id === environmentIdOverride + ); + envSlug = environmentOverride?.slug ?? envSlug; + } + + queueFetcher.load( + `/resources/orgs/${params.organizationSlug}/projects/${ + params.projectParam + }/env/${envSlug}/queues?${searchParams.toString()}` + ); + } + }, [params.organizationSlug, params.projectParam, params.envParam, environmentIdOverride]); + + const customQueues = useMemo(() => { + return queueFetcher.data?.queues ?? []; + }, [queueFetcher.data?.queues]); return ( - <> - Replay this run - {isLoading ? ( -
+
+ Replay this run + {isLoading && !replayDataFetcher.data ? ( +
- ) : fetcher.data ? ( + ) : replayDataFetcher.data ? ( ) : ( <>Failed to get run data )} - +
); } +const startingJson = "{\n\n}"; +const machinePresets = Object.values(MachinePresetName.enum); + function ReplayForm({ - payload, - payloadType, - environment, - environments, failedRedirect, runFriendlyId, -}: UseDataFunctionReturn & { failedRedirect: string; runFriendlyId: string }) { + replayData, + customQueues, + environmentIdOverride, + setEnvironmentIdOverride, +}: { + failedRedirect: string; + runFriendlyId: string; + replayData: UseDataFunctionReturn; + customQueues: UseDataFunctionReturn["queues"]; + environmentIdOverride: string | undefined; + setEnvironmentIdOverride: (environment: string) => void; +}) { const navigation = useNavigation(); const submit = useSubmit(); - const currentJson = useRef(payload); + + const [defaultPayloadJson, setDefaultPayloadJson] = useState( + replayData.payload ?? startingJson + ); + const setPayload = useCallback((code: string) => { + setDefaultPayloadJson(code); + }, []); + const currentPayloadJson = useRef(replayData.payload ?? startingJson); + + const [defaultMetadataJson, setDefaultMetadataJson] = useState( + replayData.metadata ?? startingJson + ); + const setMetadata = useCallback((code: string) => { + setDefaultMetadataJson(code); + }, []); + const currentMetadataJson = useRef(replayData.metadata ?? startingJson); + const formAction = `/resources/taskruns/${runFriendlyId}/replay`; + const isSubmitting = navigation.formAction === formAction; const editablePayload = - payloadType === "application/json" || payloadType === "application/super+json"; + replayData.payloadType === "application/json" || + replayData.payloadType === "application/super+json"; + + const [tab, setTab] = useState<"payload" | "metadata">(editablePayload ? "payload" : "metadata"); + + const { defaultTaskQueue } = replayData; - const submitForm = useCallback( - (e: React.FormEvent) => { - const formData = new FormData(e.currentTarget); - const data: Record = { - environment: formData.get("environment") as string, - failedRedirect: formData.get("failedRedirect") as string, - }; + const queues = + defaultTaskQueue && !customQueues.some((q) => q.id === defaultTaskQueue.id) + ? [defaultTaskQueue, ...customQueues] + : customQueues; + const queueItems = queues.map((q) => ({ + value: q.type === "task" ? `task/${q.name}` : q.name, + label: q.name, + type: q.type, + paused: q.paused, + })); + + const lastSubmission = useActionData(); + const [ + form, + { + environment, + payload, + metadata, + delaySeconds, + ttlSeconds, + idempotencyKey, + idempotencyKeyTTLSeconds, + queue, + concurrencyKey, + maxAttempts, + maxDurationSeconds, + tags, + version, + machine, + }, + ] = useForm({ + id: "replay-task", + lastSubmission: lastSubmission as any, + onSubmit(event, { formData }) { + event.preventDefault(); if (editablePayload) { - data.payload = currentJson.current; + formData.set(payload.name, currentPayloadJson.current); } + formData.set(metadata.name, currentMetadataJson.current); - submit(data, { - action: formAction, - method: "post", - }); - e.preventDefault(); + submit(formData, { method: "POST", action: formAction }); }, - [currentJson] - ); + onValidate({ formData }) { + return parse(formData, { schema: ReplayRunData }); + }, + }); return ( -
submitForm(e)} className="pt-2"> - {editablePayload ? ( - <> - - Replaying will create a new run using the same or modified payload, executing against - the latest version in your selected environment. - - Payload -
+ + + + + Replaying will create a new run in the selected environment. You can modify the payload, + metadata and run options. + + + +
{ - currentJson.current = v; + if (tab === "payload") { + currentPayloadJson.current = v; + setPayload(v); + } else { + currentMetadataJson.current = v; + setMetadata(v); + } }} - showClearButton={false} - showCopyButton={false} height="100%" min-height="100%" max-height="100%" + additionalActions={ + +
+
+ { + setTab("payload"); + }} + > + Payload + + {!editablePayload && ( + + Payload is not editable for runs with{" "} + + large payloads. + + + } + /> + )} +
+ { + setTab("metadata"); + }} + > + Metadata + +
+
+ } />
- - ) : null} - - - - - -
+ + + +
+
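+ {/* Each option below corresponds to a field validated by the ReplayRunData schema: machine, version, queue, tags, maxAttempts, maxDurationSeconds, idempotencyKey (plus its TTL), concurrencyKey, delaySeconds and ttlSeconds. */}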
+ + Options enable you to control the execution behavior of your task.{" "} + Read the docs. + + + + + Overrides the machine preset. + {machine.error} + + + + + {replayData.disableVersionSelection ? ( + Only the latest version is available in the development environment. + ) : ( + Runs task on a specific version. + )} + {version.error} + + + + {replayData.allowArbitraryQueues ? ( + + ) : ( + + )} + Assign run to a specific queue. + {queue.error} + + + + + Add tags to easily filter runs. + {tags.error} + + + + { + // only allow entering integers > 1 + if (["-", "+", ".", "e", "E"].includes(e.key)) { + e.preventDefault(); + } + }} + onBlur={(e) => { + const value = parseInt(e.target.value); + if (value < 1 && e.target.value !== "") { + e.target.value = "1"; + } + }} + /> + Retries failed runs up to the specified number of attempts. + {maxAttempts.error} + + + + + Overrides the maximum compute time limit for the run. + {maxDurationSeconds.error} + + + + + {idempotencyKey.error} + + Specify an idempotency key to ensure that a task is only triggered once with the + same key. + + + + + + Keys expire after 30 days by default. + + {idempotencyKeyTTLSeconds.error} + + + + + + + Limits concurrency by creating a separate queue for each value of the key. + + {concurrencyKey.error} + + + + + Delays run by a specific duration. + {delaySeconds.error} + + + + + Expires the run if it hasn't started within the TTL. + {ttlSeconds.error} + + {form.error} +
+
+
+ + +
- +
+ + + + + +
); diff --git a/apps/webapp/app/components/runs/v3/RetryDeploymentIndexingDialog.tsx b/apps/webapp/app/components/runs/v3/RetryDeploymentIndexingDialog.tsx deleted file mode 100644 index 8eb85f9946..0000000000 --- a/apps/webapp/app/components/runs/v3/RetryDeploymentIndexingDialog.tsx +++ /dev/null @@ -1,60 +0,0 @@ -import { ArrowPathIcon } from "@heroicons/react/20/solid"; -import { DialogClose } from "@radix-ui/react-dialog"; -import { Form, useNavigation } from "@remix-run/react"; -import { Button } from "~/components/primitives/Buttons"; -import { - DialogContent, - DialogDescription, - DialogFooter, - DialogHeader, -} from "~/components/primitives/Dialog"; -import { SpinnerWhite } from "~/components/primitives/Spinner"; - -type RetryDeploymentIndexingDialogProps = { - projectId: string; - deploymentShortCode: string; - redirectPath: string; -}; - -export function RetryDeploymentIndexingDialog({ - projectId, - deploymentShortCode, - redirectPath, -}: RetryDeploymentIndexingDialogProps) { - const navigation = useNavigation(); - - const formAction = `/resources/${projectId}/deployments/${deploymentShortCode}/retry-indexing`; - const isLoading = navigation.formAction === formAction; - - return ( - - Retry indexing this deployment? - - Retrying can be useful if indexing failed due to missing environment variables. Make sure - you set them before retrying. In most other cases, indexing will keep failing until you fix - any errors and re-deploy. - - - - - -
- -
-
-
- ); -} diff --git a/apps/webapp/app/components/runs/v3/RollbackDeploymentDialog.tsx b/apps/webapp/app/components/runs/v3/RollbackDeploymentDialog.tsx deleted file mode 100644 index 50df478098..0000000000 --- a/apps/webapp/app/components/runs/v3/RollbackDeploymentDialog.tsx +++ /dev/null @@ -1,102 +0,0 @@ -import { ArrowPathIcon } from "@heroicons/react/20/solid"; -import { DialogClose } from "@radix-ui/react-dialog"; -import { Form, useNavigation } from "@remix-run/react"; -import { Button } from "~/components/primitives/Buttons"; -import { - DialogContent, - DialogDescription, - DialogFooter, - DialogHeader, -} from "~/components/primitives/Dialog"; -import { SpinnerWhite } from "~/components/primitives/Spinner"; - -type RollbackDeploymentDialogProps = { - projectId: string; - deploymentShortCode: string; - redirectPath: string; -}; - -export function RollbackDeploymentDialog({ - projectId, - deploymentShortCode, - redirectPath, -}: RollbackDeploymentDialogProps) { - const navigation = useNavigation(); - - const formAction = `/resources/${projectId}/deployments/${deploymentShortCode}/rollback`; - const isLoading = navigation.formAction === formAction; - - return ( - - Rollback to this deployment? - - This deployment will become the default for all future runs. Tasks triggered but not - included in this deploy will remain queued until you roll back to or create a new deployment - with these tasks included. - - - - - -
- -
-
-
- ); -} - -export function PromoteDeploymentDialog({ - projectId, - deploymentShortCode, - redirectPath, -}: RollbackDeploymentDialogProps) { - const navigation = useNavigation(); - - const formAction = `/resources/${projectId}/deployments/${deploymentShortCode}/promote`; - const isLoading = navigation.formAction === formAction; - - return ( - - Promote this deployment? - - This deployment will become the default for all future runs not explicitly tied to a - specific deployment. - - - - - -
- -
-
-
- ); -} diff --git a/apps/webapp/app/components/runs/v3/RunFilters.tsx b/apps/webapp/app/components/runs/v3/RunFilters.tsx index 393acb7616..297d95be0b 100644 --- a/apps/webapp/app/components/runs/v3/RunFilters.tsx +++ b/apps/webapp/app/components/runs/v3/RunFilters.tsx @@ -1,20 +1,32 @@ import * as Ariakit from "@ariakit/react"; import { + CalendarIcon, ClockIcon, FingerPrintIcon, + RectangleStackIcon, Squares2X2Icon, TagIcon, - TrashIcon, + XMarkIcon, } from "@heroicons/react/20/solid"; import { Form, useFetcher } from "@remix-run/react"; +import { IconRotateClockwise2, IconToggleLeft } from "@tabler/icons-react"; +import { MachinePresetName } from "@trigger.dev/core/v3"; import type { BulkActionType, TaskRunStatus, TaskTriggerSource } from "@trigger.dev/database"; -import { ListChecks, ListFilterIcon } from "lucide-react"; +import { ListFilterIcon } from "lucide-react"; import { matchSorter } from "match-sorter"; import { type ReactNode, useCallback, useEffect, useMemo, useState } from "react"; import { z } from "zod"; +import { ListCheckedIcon } from "~/assets/icons/ListCheckedIcon"; +import { MachineDefaultIcon } from "~/assets/icons/MachineIcon"; import { StatusIcon } from "~/assets/icons/StatusIcon"; import { TaskIcon } from "~/assets/icons/TaskIcon"; +import { + formatMachinePresetName, + MachineLabelCombo, + machines, +} from "~/components/MachineLabelCombo"; import { AppliedFilter } from "~/components/primitives/AppliedFilter"; +import { Badge } from "~/components/primitives/Badge"; import { DateTime } from "~/components/primitives/DateTime"; import { FormError } from "~/components/primitives/FormError"; import { Input } from "~/components/primitives/Input"; @@ -38,13 +50,19 @@ import { TooltipProvider, TooltipTrigger, } from "~/components/primitives/Tooltip"; +import { useDebounceEffect } from "~/hooks/useDebounce"; +import { useEnvironment } from "~/hooks/useEnvironment"; import { useOptimisticLocation } from "~/hooks/useOptimisticLocation"; +import { useOrganization } from "~/hooks/useOrganizations"; import { useProject } from "~/hooks/useProject"; import { useSearchParams } from "~/hooks/useSearchParam"; -import { type loader as tagsLoader } from "~/routes/resources.projects.$projectParam.runs.tags"; +import { type loader as queuesLoader } from "~/routes/resources.orgs.$organizationSlug.projects.$projectParam.env.$envParam.queues"; +import { type loader as versionsLoader } from "~/routes/resources.orgs.$organizationSlug.projects.$projectParam.env.$envParam.versions"; +import { type loader as tagsLoader } from "~/routes/resources.environments.$envId.runs.tags"; import { Button } from "../../primitives/Buttons"; -import { BulkActionStatusCombo } from "./BulkAction"; -import { appliedSummary, FilterMenuProvider, TimeFilter } from "./SharedFilters"; +import { BulkActionTypeCombo } from "./BulkAction"; +import { appliedSummary, FilterMenuProvider, TimeFilter, timeFilters } from "./SharedFilters"; +import { AIFilterInput } from "./AIFilterInput"; import { allTaskRunStatuses, descriptionForTaskRunStatus, @@ -54,42 +72,247 @@ import { } from "./TaskRunStatus"; import { TaskTriggerSourceIcon } from "./TaskTriggerSource"; -export const TaskAttemptStatus = z.enum(allTaskRunStatuses); +export const RunStatus = z.enum(allTaskRunStatuses); + +const StringOrStringArray = z.preprocess((value) => { + if (typeof value === "string") { + if (value.length > 0) { + return [value]; + } + + return undefined; + } + + if (Array.isArray(value)) { + return value.filter((v) => typeof v === "string" && 
v.length > 0); + } + + return undefined; +}, z.string().array().optional()); + +export const MachinePresetOrMachinePresetArray = z.preprocess((value) => { + if (typeof value === "string") { + if (value.length > 0) { + const parsed = MachinePresetName.safeParse(value); + return parsed.success ? [parsed.data] : undefined; + } + + return undefined; + } + + if (Array.isArray(value)) { + return value + .filter((v) => typeof v === "string" && v.length > 0) + .map((v) => MachinePresetName.safeParse(v)) + .filter((result) => result.success) + .map((result) => result.data); + } + + return undefined; +}, MachinePresetName.array().optional()); export const TaskRunListSearchFilters = z.object({ - cursor: z.string().optional(), - direction: z.enum(["forward", "backward"]).optional(), - environments: z.preprocess( - (value) => (typeof value === "string" ? [value] : value), - z.string().array().optional() + cursor: z.string().optional().describe("Cursor for pagination - used internally for navigation"), + direction: z + .enum(["forward", "backward"]) + .optional() + .describe("Pagination direction - forward or backward. Used internally for navigation"), + environments: StringOrStringArray.describe( + "Environment names to filter by (DEVELOPMENT, STAGING, PREVIEW, PRODUCTION)" ), - tasks: z.preprocess( - (value) => (typeof value === "string" ? [value] : value), - z.string().array().optional() + tasks: StringOrStringArray.describe( + "Task identifiers to filter by (these are user-defined names)" ), - versions: z.preprocess( - (value) => (typeof value === "string" ? [value] : value), - z.string().array().optional() + versions: StringOrStringArray.describe( + "Version identifiers to filter by (these are in this format 20250718.1). Needs to be looked up." ), - statuses: z.preprocess( - (value) => (typeof value === "string" ? [value] : value), - TaskAttemptStatus.array().optional() - ), - tags: z.preprocess( - (value) => (typeof value === "string" ? [value] : value), - z.string().array().optional() + statuses: z + .preprocess((value) => { + if (typeof value === "string") { + if (value.length > 0) { + return [value]; + } + + return undefined; + } + + if (Array.isArray(value)) { + return value.filter((v) => typeof v === "string" && v.length > 0); + } + + return undefined; + }, RunStatus.array().optional()) + .describe(`Run statuses to filter by (${filterableTaskRunStatuses.join(", ")})`), + tags: StringOrStringArray.describe("Tag names to filter by (these are user-defined names)"), + bulkId: z + .string() + .optional() + .describe("Bulk action ID to filter by - shows runs from a specific bulk operation"), + period: z + .preprocess((value) => (value === "all" ? undefined : value), z.string().optional()) + .describe("Time period string (e.g., '1h', '7d', '30d', '1y') for relative time filtering"), + from: z.coerce + .number() + .optional() + .describe("Unix timestamp for start of time range - absolute time filtering"), + to: z.coerce + .number() + .optional() + .describe("Unix timestamp for end of time range - absolute time filtering"), + rootOnly: z.coerce + .boolean() + .optional() + .describe("Show only root runs (not child runs) - set to true to exclude sub-runs"), + batchId: z + .string() + .optional() + .describe( + "Batch ID to filter by - shows runs from a specific batch operation. They start with batch_" + ), + runId: StringOrStringArray.describe("Specific run IDs to filter by. 
They start with run_"), + scheduleId: z + .string() + .optional() + .describe( + "Schedule ID to filter by - shows runs from a specific schedule. They start with sched_" + ), + queues: StringOrStringArray.describe("Queue names to filter by (these are user-defined names)"), + machines: MachinePresetOrMachinePresetArray.describe( + `Machine presets to filter by (${machines.join(", ")})` ), - bulkId: z.string().optional(), - period: z.preprocess((value) => (value === "all" ? undefined : value), z.string().optional()), - from: z.coerce.number().optional(), - to: z.coerce.number().optional(), - rootOnly: z.coerce.boolean().optional(), - batchId: z.string().optional(), - runId: z.string().optional(), - scheduleId: z.string().optional(), }); export type TaskRunListSearchFilters = z.infer; +export type TaskRunListSearchFilterKey = keyof TaskRunListSearchFilters; + +export function filterTitle(filterKey: string) { + switch (filterKey) { + case "cursor": + return "Cursor"; + case "direction": + return "Direction"; + case "statuses": + return "Status"; + case "tasks": + return "Tasks"; + case "tags": + return "Tags"; + case "bulkId": + return "Bulk action"; + case "period": + return "Period"; + case "from": + return "From"; + case "to": + return "To"; + case "rootOnly": + return "Root only"; + case "batchId": + return "Batch ID"; + case "runId": + return "Run ID"; + case "scheduleId": + return "Schedule ID"; + case "queues": + return "Queues"; + case "machines": + return "Machine"; + case "versions": + return "Version"; + default: + return filterKey; + } +} + +export function filterIcon(filterKey: string): ReactNode | undefined { + switch (filterKey) { + case "cursor": + case "direction": + return undefined; + case "statuses": + return ; + case "tasks": + return ; + case "tags": + return ; + case "bulkId": + return ; + case "period": + return ; + case "from": + return ; + case "to": + return ; + case "rootOnly": + return ; + case "batchId": + return ; + case "runId": + return ; + case "scheduleId": + return ; + case "queues": + return ; + case "machines": + return ; + case "versions": + return ; + default: + return undefined; + } +} + +export function getRunFiltersFromSearchParams( + searchParams: URLSearchParams +): TaskRunListSearchFilters { + const params = { + cursor: searchParams.get("cursor") ?? undefined, + direction: searchParams.get("direction") ?? undefined, + statuses: + searchParams.getAll("statuses").filter((v) => v.length > 0).length > 0 + ? searchParams.getAll("statuses") + : undefined, + tasks: + searchParams.getAll("tasks").filter((v) => v.length > 0).length > 0 + ? searchParams.getAll("tasks") + : undefined, + period: searchParams.get("period") ?? undefined, + bulkId: searchParams.get("bulkId") ?? undefined, + tags: + searchParams.getAll("tags").filter((v) => v.length > 0).length > 0 + ? searchParams.getAll("tags") + : undefined, + from: searchParams.get("from") ?? undefined, + to: searchParams.get("to") ?? undefined, + rootOnly: searchParams.has("rootOnly") ? searchParams.get("rootOnly") === "true" : undefined, + runId: + searchParams.getAll("runId").filter((v) => v.length > 0).length > 0 + ? searchParams.getAll("runId") + : undefined, + batchId: searchParams.get("batchId") ?? undefined, + scheduleId: searchParams.get("scheduleId") ?? undefined, + queues: + searchParams.getAll("queues").filter((v) => v.length > 0).length > 0 + ? searchParams.getAll("queues") + : undefined, + machines: + searchParams.getAll("machines").filter((v) => v.length > 0).length > 0 + ? 
searchParams.getAll("machines") + : undefined, + versions: + searchParams.getAll("versions").filter((v) => v.length > 0).length > 0 + ? searchParams.getAll("versions") + : undefined, + }; + + const parsed = TaskRunListSearchFilters.safeParse(params); + + if (!parsed.success) { + return {}; + } + + return parsed.data; +} type RunFiltersProps = { possibleTasks: { slug: string; triggerSource: TaskTriggerSource }[]; @@ -97,6 +320,7 @@ type RunFiltersProps = { id: string; type: BulkActionType; createdAt: Date; + name: string; }[]; rootOnlyDefault: boolean; hasFilters: boolean; @@ -112,11 +336,15 @@ export function RunsFilters(props: RunFiltersProps) { searchParams.has("tags") || searchParams.has("batchId") || searchParams.has("runId") || - searchParams.has("scheduleId"); + searchParams.has("scheduleId") || + searchParams.has("queues") || + searchParams.has("machines") || + searchParams.has("versions"); return (
+ @@ -125,9 +353,7 @@ export function RunsFilters(props: RunFiltersProps) { {searchParams.has("rootOnly") && ( )} - +
@@ -138,14 +364,17 @@ const filterTypes = [ { name: "statuses", title: "Status", - icon: , + icon: , }, { name: "tasks", title: "Tasks", icon: }, { name: "tags", title: "Tags", icon: }, + { name: "versions", title: "Versions", icon: }, + { name: "queues", title: "Queues", icon: }, + { name: "machines", title: "Machines", icon: }, { name: "run", title: "Run ID", icon: }, { name: "batch", title: "Batch ID", icon: }, { name: "schedule", title: "Schedule ID", icon: }, - { name: "bulk", title: "Bulk action", icon: }, + { name: "bulk", title: "Bulk action", icon: }, ] as const; type FilterType = (typeof filterTypes)[number]["name"]; @@ -162,11 +391,12 @@ function FilterMenu(props: RunFiltersProps) {
} - variant={"tertiary/small"} + variant={"secondary/small"} shortcut={shortcut} tooltipTitle={"Filter runs"} + className="pr-0.5" > - Filter + <> ); @@ -192,6 +422,9 @@ function AppliedFilters({ possibleTasks, bulkActions }: RunFiltersProps) { + + + @@ -220,12 +453,18 @@ function Menu(props: MenuProps) { return props.setFilterType(undefined)} {...props} />; case "tags": return props.setFilterType(undefined)} {...props} />; + case "queues": + return props.setFilterType(undefined)} {...props} />; + case "machines": + return props.setFilterType(undefined)} {...props} />; case "run": return props.setFilterType(undefined)} {...props} />; case "batch": return props.setFilterType(undefined)} {...props} />; case "schedule": return props.setFilterType(undefined)} {...props} />; + case "versions": + return props.setFilterType(undefined)} {...props} />; } } @@ -334,7 +573,7 @@ function AppliedStatusFilter() { const { values, del } = useSearchParams(); const statuses = values("statuses"); - if (statuses.length === 0) { + if (statuses.length === 0 || statuses.every((v) => v === "")) { return null; } @@ -346,8 +585,10 @@ function AppliedStatusFilter() { }> runStatusTitle(v as TaskRunStatus)))} onRemove={() => del(["statuses", "cursor", "direction"])} + variant="secondary/small" /> } @@ -421,7 +662,7 @@ function TasksDropdown({ function AppliedTaskFilter({ possibleTasks }: Pick) { const { values, del } = useSearchParams(); - if (values("tasks").length === 0) { + if (values("tasks").length === 0 || values("tasks").every((v) => v === "")) { return null; } @@ -433,6 +674,7 @@ function AppliedTaskFilter({ possibleTasks }: Pick}> { const task = possibleTasks.find((task) => task.slug === v); @@ -440,6 +682,7 @@ function AppliedTaskFilter({ possibleTasks }: Pick del(["tasks", "cursor", "direction"])} + variant="secondary/small" /> } @@ -485,7 +728,7 @@ function BulkActionsDropdown({ {trigger} { if (onClose) { onClose(); @@ -498,11 +741,20 @@ function BulkActionsDropdown({ None - {filtered.map((item, index) => ( - -
- - + {filtered.map((item) => ( + +
+ + {item.name} + +
+ + +
))} @@ -531,8 +783,10 @@ function AppliedBulkActionsFilter({ bulkActions }: Pick}> del(["bulkId", "cursor", "direction"])} + variant="secondary/small" /> } @@ -556,45 +810,63 @@ function TagsDropdown({ searchValue: string; onClose?: () => void; }) { - const project = useProject(); - const { values, replace } = useSearchParams(); + const environment = useEnvironment(); + const { values, value, replace } = useSearchParams(); const handleChange = (values: string[]) => { clearSearchValue(); replace({ - tags: values, + tags: values.length > 0 ? values : undefined, cursor: undefined, direction: undefined, }); }; + const { period, from, to } = timeFilters({ + period: value("period"), + from: value("from"), + to: value("to"), + }); + + const tagValues = values("tags").filter((v) => v !== ""); + const selected = tagValues.length > 0 ? tagValues : undefined; + const fetcher = useFetcher(); useEffect(() => { const searchParams = new URLSearchParams(); if (searchValue) { - searchParams.set("name", encodeURIComponent(searchValue)); + searchParams.set("name", searchValue); } - fetcher.load(`/resources/projects/${project.slug}/runs/tags?${searchParams}`); - }, [searchValue]); + if (period) { + searchParams.set("period", period); + } + if (from) { + searchParams.set("from", from.getTime().toString()); + } + if (to) { + searchParams.set("to", to.getTime().toString()); + } + fetcher.load(`/resources/environments/${environment.id}/runs/tags?${searchParams}`); + }, [environment.id, searchValue, period, from?.getTime(), to?.getTime()]); const filtered = useMemo(() => { let items: string[] = []; if (searchValue === "") { - items = values("tags"); + items = [...(selected ?? [])]; } if (fetcher.data === undefined) { return matchSorter(items, searchValue); } - items.push(...fetcher.data.tags.map((t) => t.name)); + items.push(...fetcher.data.tags); return matchSorter(Array.from(new Set(items)), searchValue); - }, [searchValue, fetcher.data]); + }, [searchValue, fetcher.data, selected]); return ( - + {trigger} v === "")) { return null; } @@ -650,8 +922,422 @@ function AppliedTagsFilter() { }> del(["tags", "cursor", "direction"])} + variant="secondary/small" + /> + + } + searchValue={search} + clearSearchValue={() => setSearch("")} + /> + )} + + ); +} + +function QueuesDropdown({ + trigger, + clearSearchValue, + searchValue, + onClose, +}: { + trigger: ReactNode; + clearSearchValue: () => void; + searchValue: string; + onClose?: () => void; +}) { + const organization = useOrganization(); + const project = useProject(); + const environment = useEnvironment(); + const { values, replace } = useSearchParams(); + + const handleChange = (values: string[]) => { + clearSearchValue(); + replace({ + queues: values.length > 0 ? values : undefined, + cursor: undefined, + direction: undefined, + }); + }; + + const queueValues = values("queues").filter((v) => v !== ""); + const selected = queueValues.length > 0 ? queueValues : undefined; + + const fetcher = useFetcher(); + + useDebounceEffect( + searchValue, + (s) => { + const searchParams = new URLSearchParams(); + searchParams.set("per_page", "25"); + if (searchValue) { + searchParams.set("query", s); + } + fetcher.load( + `/resources/orgs/${organization.slug}/projects/${project.slug}/env/${ + environment.slug + }/queues?${searchParams.toString()}` + ); + }, + 250 + ); + + const filtered = useMemo(() => { + let items: { name: string; type: "custom" | "task"; value: string }[] = []; + + for (const queueName of selected ?? 
[]) { + const queueItem = fetcher.data?.queues.find((q) => q.name === queueName); + if (!queueItem) { + if (queueName.startsWith("task/")) { + items.push({ + name: queueName.replace("task/", ""), + type: "task", + value: queueName, + }); + } else { + items.push({ + name: queueName, + type: "custom", + value: queueName, + }); + } + } + } + + if (fetcher.data === undefined) { + return matchSorter(items, searchValue); + } + + items.push( + ...fetcher.data.queues.map((q) => ({ + name: q.name, + type: q.type, + value: q.type === "task" ? `task/${q.name}` : q.name, + })) + ); + + return matchSorter(Array.from(new Set(items)), searchValue, { + keys: ["name"], + }); + }, [searchValue, fetcher.data]); + + return ( + + {trigger} + { + if (onClose) { + onClose(); + return false; + } + + return true; + }} + > + ( +
+ + {fetcher.state === "loading" && } +
+ )} + /> + + {filtered.length > 0 + ? filtered.map((queue) => ( + + ) : ( + + ) + } + > + {queue.name} + + )) + : null} + {filtered.length === 0 && fetcher.state !== "loading" && ( + No queues found + )} + +
+
+ ); +} + +function AppliedQueuesFilter() { + const { values, del } = useSearchParams(); + + const queues = values("queues"); + + if (queues.length === 0 || queues.every((v) => v === "")) { + return null; + } + + return ( + + {(search, setSearch) => ( + }> + v.replace("task/", "")))} + onRemove={() => del(["queues", "cursor", "direction"])} + variant="secondary/small" + /> + + } + searchValue={search} + clearSearchValue={() => setSearch("")} + /> + )} + + ); +} + +function MachinesDropdown({ + trigger, + clearSearchValue, + searchValue, + onClose, +}: { + trigger: ReactNode; + clearSearchValue: () => void; + searchValue: string; + onClose?: () => void; +}) { + const { values, replace } = useSearchParams(); + + const handleChange = (values: string[]) => { + clearSearchValue(); + replace({ machines: values, cursor: undefined, direction: undefined }); + }; + + const filtered = useMemo(() => { + if (searchValue === "") { + return machines; + } + return matchSorter(machines, searchValue); + }, [searchValue]); + + return ( + + {trigger} + { + if (onClose) { + onClose(); + return false; + } + + return true; + }} + > + + + {filtered.map((item, index) => ( + + + + ))} + + + + ); +} + +function AppliedMachinesFilter() { + const { values, del } = useSearchParams(); + const machines = values("machines"); + + if (machines.length === 0 || machines.every((v) => v === "")) { + return null; + } + + return ( + + {(search, setSearch) => ( + }> + { + const parsed = MachinePresetName.safeParse(v); + if (!parsed.success) { + return v; + } + return formatMachinePresetName(parsed.data); + }) + )} + onRemove={() => del(["machines", "cursor", "direction"])} + variant="secondary/small" + /> + + } + searchValue={search} + clearSearchValue={() => setSearch("")} + /> + )} + + ); +} + +function VersionsDropdown({ + trigger, + clearSearchValue, + searchValue, + onClose, +}: { + trigger: ReactNode; + clearSearchValue: () => void; + searchValue: string; + onClose?: () => void; +}) { + const organization = useOrganization(); + const project = useProject(); + const environment = useEnvironment(); + const { values, replace } = useSearchParams(); + + const handleChange = (values: string[]) => { + clearSearchValue(); + replace({ + versions: values.length > 0 ? values : undefined, + cursor: undefined, + direction: undefined, + }); + }; + + const versionValues = values("versions").filter((v) => v !== ""); + const selected = versionValues.length > 0 ? versionValues : undefined; + + const fetcher = useFetcher(); + + useDebounceEffect( + searchValue, + (s) => { + const searchParams = new URLSearchParams(); + if (searchValue) { + searchParams.set("query", s); + } + fetcher.load( + `/resources/orgs/${organization.slug}/projects/${project.slug}/env/${ + environment.slug + }/versions?${searchParams.toString()}` + ); + }, + 250 + ); + + const filtered = useMemo(() => { + let items: { version: string; isCurrent: boolean }[] = []; + + for (const version of selected ?? 
[]) { + const versionItem = fetcher.data?.versions.find((v) => v.version === version); + if (!versionItem) { + items.push({ + version, + isCurrent: false, + }); + } + } + + if (fetcher.data === undefined) { + return matchSorter(items, searchValue); + } + + items.push(...fetcher.data.versions); + + if (searchValue === "") { + return items; + } + + return matchSorter(Array.from(new Set(items)), searchValue, { + keys: ["version"], + }); + }, [searchValue, fetcher.data]); + + return ( + + {trigger} + { + if (onClose) { + onClose(); + return false; + } + + return true; + }} + > + ( +
+ + {fetcher.state === "loading" && } +
+ )} + /> + + {filtered.length > 0 + ? filtered.map((version) => ( + + + {version.version} + {version.isCurrent ? Current : null} + + + )) + : null} + {filtered.length === 0 && fetcher.state !== "loading" && ( + No versions found + )} + +
+
+ ); +} + +function AppliedVersionsFilter() { + const { values, del } = useSearchParams(); + + const versions = values("versions"); + + if (versions.length === 0 || versions.every((v) => v === "")) { + return null; + } + + return ( + + {(search, setSearch) => ( + }> + del(["versions", "cursor", "direction"])} + variant="secondary/small" /> } @@ -678,10 +1364,9 @@ function RootOnlyToggle({ defaultValue }: { defaultValue: boolean }) { return ( { replace({ rootOnly: checked ? "true" : "false", @@ -796,8 +1481,10 @@ function AppliedRunIdFilter() { }> del(["runId", "cursor", "direction"])} + variant="secondary/small" /> } @@ -914,8 +1601,10 @@ function AppliedBatchIdFilter() { }> del(["batchId", "cursor", "direction"])} + variant="secondary/small" /> } @@ -1032,8 +1721,10 @@ function AppliedScheduleIdFilter() { }> del(["scheduleId", "cursor", "direction"])} + variant="secondary/small" /> } diff --git a/apps/webapp/app/components/runs/v3/RunIcon.tsx b/apps/webapp/app/components/runs/v3/RunIcon.tsx index fd277997af..615def59cd 100644 --- a/apps/webapp/app/components/runs/v3/RunIcon.tsx +++ b/apps/webapp/app/components/runs/v3/RunIcon.tsx @@ -20,6 +20,7 @@ import { TriggerIcon } from "~/assets/icons/TriggerIcon"; import { PythonLogoIcon } from "~/assets/icons/PythonLogoIcon"; import { TraceIcon } from "~/assets/icons/TraceIcon"; import { WaitpointTokenIcon } from "~/assets/icons/WaitpointTokenIcon"; +import { StreamsIcon } from "~/assets/icons/StreamsIcon"; type TaskIconProps = { name: string | undefined; @@ -97,6 +98,7 @@ export function RunIcon({ name, className, spanName }: TaskIconProps) { return ; case "task-hook-init": case "task-hook-onStart": + case "task-hook-onStartAttempt": case "task-hook-onSuccess": case "task-hook-onWait": case "task-hook-onResume": @@ -107,6 +109,8 @@ export function RunIcon({ name, className, spanName }: TaskIconProps) { case "task-hook-onFailure": case "task-hook-catchError": return ; + case "streams": + return ; } return ; diff --git a/apps/webapp/app/components/runs/v3/RunTag.tsx b/apps/webapp/app/components/runs/v3/RunTag.tsx index c7aab7cb09..14baeca1a3 100644 --- a/apps/webapp/app/components/runs/v3/RunTag.tsx +++ b/apps/webapp/app/components/runs/v3/RunTag.tsx @@ -3,11 +3,21 @@ import tagLeftPath from "./tag-left.svg"; import { SimpleTooltip } from "~/components/primitives/Tooltip"; import { Link } from "@remix-run/react"; import { cn } from "~/utils/cn"; -import { ClipboardCheckIcon, ClipboardIcon } from "lucide-react"; +import { ClipboardCheckIcon, ClipboardIcon, XIcon } from "lucide-react"; type Tag = string | { key: string; value: string }; -export function RunTag({ tag, to, tooltip }: { tag: string; to?: string; tooltip?: string }) { +export function RunTag({ + tag, + to, + tooltip, + action = { type: "copy" }, +}: { + tag: string; + action?: { type: "copy" } | { type: "delete"; onDelete: (tag: string) => void }; + to?: string; + tooltip?: string; +}) { const tagResult = useMemo(() => splitTag(tag), [tag]); const [isHovered, setIsHovered] = useState(false); @@ -57,7 +67,11 @@ export function RunTag({ tag, to, tooltip }: { tag: string; to?: string; tooltip return (
setIsHovered(false)}> {tagContent} - + {action.type === "delete" ? ( + + ) : ( + + )}
); } @@ -105,6 +119,45 @@ function CopyButton({ textToCopy, isHovered }: { textToCopy: string; isHovered: ); } +function DeleteButton({ + tag, + onDelete, + isHovered, +}: { + tag: string; + onDelete: (tag: string) => void; + isHovered: boolean; +}) { + const handleDelete = useCallback( + (e: React.MouseEvent) => { + e.preventDefault(); + e.stopPropagation(); + onDelete(tag); + }, + [tag, onDelete] + ); + + return ( + e.stopPropagation()} + className={cn( + "absolute -right-6 top-0 z-10 size-6 items-center justify-center rounded-r-sm border-y border-r border-charcoal-650 bg-charcoal-750", + isHovered ? "flex" : "hidden", + "text-text-dimmed hover:border-charcoal-600 hover:bg-charcoal-700 hover:text-rose-400" + )} + > + + + } + content="Remove tag" + disableHoverableContent + /> + ); +} + /** Takes a string and turns it into a tag * * If the string has 12 or fewer alpha characters followed by an underscore or colon then we return an object with a key and value diff --git a/apps/webapp/app/components/runs/v3/RunTagInput.tsx b/apps/webapp/app/components/runs/v3/RunTagInput.tsx new file mode 100644 index 0000000000..25d818f402 --- /dev/null +++ b/apps/webapp/app/components/runs/v3/RunTagInput.tsx @@ -0,0 +1,133 @@ +import { useCallback, useState, useEffect, type KeyboardEvent } from "react"; +import { AnimatePresence, motion } from "framer-motion"; +import { Input } from "~/components/primitives/Input"; +import { RunTag } from "./RunTag"; + +interface TagInputProps { + id?: string; // used for the hidden input for form submission + name?: string; // used for the hidden input for form submission + defaultTags?: string[]; + tags?: string[]; + placeholder?: string; + variant?: "small" | "medium"; + maxTags?: number; + maxTagLength?: number; + onTagsChange?: (tags: string[]) => void; +} + +export function RunTagInput({ + id, + name, + defaultTags = [], + tags: controlledTags, + placeholder = "Type and press Enter to add tags", + variant = "small", + maxTags = 10, + maxTagLength = 128, + onTagsChange, +}: TagInputProps) { + // Use controlled tags if provided, otherwise use default + const initialTags = controlledTags ?? defaultTags; + + const [tags, setTags] = useState(initialTags); + const [inputValue, setInputValue] = useState(""); + + // Sync internal state with external tag changes + useEffect(() => { + if (controlledTags !== undefined) { + setTags(controlledTags); + } + }, [controlledTags]); + + const addTag = useCallback( + (tagText: string) => { + const trimmedTag = tagText.trim(); + if (trimmedTag && !tags.includes(trimmedTag) && tags.length < maxTags) { + const newTags = [...tags, trimmedTag]; + setTags(newTags); + onTagsChange?.(newTags); + } + setInputValue(""); + }, + [tags, onTagsChange, maxTags] + ); + + const removeTag = useCallback( + (tagToRemove: string) => { + const newTags = tags.filter((tag) => tag !== tagToRemove); + setTags(newTags); + onTagsChange?.(newTags); + }, + [tags, onTagsChange] + ); + + const handleKeyDown = useCallback( + (e: KeyboardEvent) => { + if (e.key === "Enter") { + e.preventDefault(); + addTag(inputValue); + } else if (e.key === "Backspace" && inputValue === "" && tags.length > 0) { + removeTag(tags[tags.length - 1]); + } else if (e.key === ",") { + e.preventDefault(); + } + }, + [inputValue, addTag, removeTag, tags] + ); + + const maxTagsReached = tags.length >= maxTags; + + return ( +
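+ {/* The id/name props are used for a hidden input so the current tags are included on form submission; the visible Input adds a tag on Enter and removes the last tag on Backspace when empty. */}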
+ + + setInputValue(e.target.value)} + onKeyDown={handleKeyDown} + placeholder={maxTagsReached ? `A maximum of ${maxTags} tags is allowed` : placeholder} + variant={variant} + disabled={maxTagsReached} + maxLength={maxTagLength} + /> + + {tags.length > 0 && ( +
+ + {tags.map((tag, i) => ( + + + + ))} + +
+ )} +
+ ); +} diff --git a/apps/webapp/app/components/runs/v3/ScheduleFilters.tsx b/apps/webapp/app/components/runs/v3/ScheduleFilters.tsx index 4dd0e7f2ff..3a30e0cb37 100644 --- a/apps/webapp/app/components/runs/v3/ScheduleFilters.tsx +++ b/apps/webapp/app/components/runs/v3/ScheduleFilters.tsx @@ -1,6 +1,5 @@ import { MagnifyingGlassIcon, XMarkIcon } from "@heroicons/react/20/solid"; import { useNavigate } from "@remix-run/react"; -import { type RuntimeEnvironment } from "@trigger.dev/database"; import { useCallback } from "react"; import { z } from "zod"; import { Input } from "~/components/primitives/Input"; diff --git a/apps/webapp/app/components/runs/v3/SharedFilters.tsx b/apps/webapp/app/components/runs/v3/SharedFilters.tsx index 5b7478d6a1..14edfa3c7d 100644 --- a/apps/webapp/app/components/runs/v3/SharedFilters.tsx +++ b/apps/webapp/app/components/runs/v3/SharedFilters.tsx @@ -10,6 +10,7 @@ import { Label } from "~/components/primitives/Label"; import { ComboboxProvider, SelectPopover, SelectProvider } from "~/components/primitives/Select"; import { useSearchParams } from "~/hooks/useSearchParam"; import { Button } from "../../primitives/Buttons"; +import { filterIcon } from "./RunFilters"; export type DisplayableEnvironment = Pick & { userName?: string; @@ -100,6 +101,8 @@ if (!defaultPeriodMs) { throw new Error("Invalid default period"); } +type TimeRangeType = "period" | "range" | "from" | "to"; + export const timeFilters = ({ period, from, @@ -108,16 +111,27 @@ export const timeFilters = ({ period?: string; from?: string | number; to?: string | number; -}): { period?: string; from?: Date; to?: Date; isDefault: boolean } => { +}): { + period?: string; + from?: Date; + to?: Date; + isDefault: boolean; + rangeType: TimeRangeType; + label: string; + valueLabel: ReactNode; +} => { if (period) { - return { period, isDefault: period === defaultPeriod }; + return { period, isDefault: period === defaultPeriod, ...timeFilterRenderValues({ period }) }; } if (from && to) { + const fromDate = typeof from === "string" ? dateFromString(from) : new Date(from); + const toDate = typeof to === "string" ? dateFromString(to) : new Date(to); return { - from: typeof from === "string" ? dateFromString(from) : new Date(from), - to: typeof to === "string" ? dateFromString(to) : new Date(to), + from: fromDate, + to: toDate, isDefault: false, + ...timeFilterRenderValues({ from: fromDate, to: toDate }), }; } @@ -127,6 +141,7 @@ export const timeFilters = ({ return { from: fromDate, isDefault: false, + ...timeFilterRenderValues({ from: fromDate }), }; } @@ -136,25 +151,28 @@ export const timeFilters = ({ return { to: toDate, isDefault: false, + ...timeFilterRenderValues({ to: toDate }), }; } return { period: defaultPeriod, isDefault: true, + ...timeFilterRenderValues({ period: defaultPeriod }), }; }; -export function TimeFilter() { - const { value, del } = useSearchParams(); - - const { period, from, to } = timeFilters({ - period: value("period"), - from: value("from"), - to: value("to"), - }); +export function timeFilterRenderValues({ + from, + to, + period, +}: { + from?: Date; + to?: Date; + period?: string; +}) { + const rangeType: TimeRangeType = from && to ? "range" : from ? "from" : to ? "to" : "period"; - const rangeType = from && to ? "range" : from ? "from" : to ? "to" : "period"; let valueLabel: ReactNode; switch (rangeType) { case "period": @@ -183,13 +201,31 @@ export function TimeFilter() { ? 
"Created after" : "Created before"; + return { label, valueLabel, rangeType }; +} + +export function TimeFilter() { + const { value, del } = useSearchParams(); + + const { period, from, to, label, valueLabel } = timeFilters({ + period: value("period"), + from: value("from"), + to: value("to"), + }); + return ( {() => ( }> - + } period={period} @@ -229,20 +265,23 @@ export function TimeDropdown({ setOpen(false); }, [fromValue, toValue, replace]); - const handlePeriodClick = useCallback((period: string) => { - setFromValue(undefined); - setToValue(undefined); + const handlePeriodClick = useCallback( + (period: string) => { + replace({ + period, + cursor: undefined, + direction: undefined, + from: undefined, + to: undefined, + }); - replace({ - period: period, - cursor: undefined, - direction: undefined, - from: undefined, - to: undefined, - }); + setFromValue(undefined); + setToValue(undefined); - setOpen(false); - }, []); + setOpen(false); + }, + [replace] + ); return ( @@ -266,8 +305,12 @@ export function TimeDropdown({ ? "border-indigo-500 group-hover/button:border-indigo-500" : undefined } - onClick={() => handlePeriodClick(p.value)} + onClick={(e) => { + e.preventDefault(); + handlePeriodClick(p.value); + }} fullWidth + type="button" > {p.label} @@ -307,11 +350,13 @@ export function TimeDropdown({
@@ -323,7 +368,11 @@ export function TimeDropdown({ enabledOnInputElements: true, }} disabled={!fromValue && !toValue} - onClick={() => apply()} + onClick={(e) => { + e.preventDefault(); + apply(); + }} + type="button" > Apply @@ -347,7 +396,7 @@ export function appliedSummary(values: string[], maxValues = 3) { return values.join(", "); } -function dateFromString(value: string | undefined | null): Date | undefined { +export function dateFromString(value: string | undefined | null): Date | undefined { if (!value) return; //is it an int? diff --git a/apps/webapp/app/components/runs/v3/TaskRunStatus.tsx b/apps/webapp/app/components/runs/v3/TaskRunStatus.tsx index fd2143ecb8..ae5a6b28af 100644 --- a/apps/webapp/app/components/runs/v3/TaskRunStatus.tsx +++ b/apps/webapp/app/components/runs/v3/TaskRunStatus.tsx @@ -11,7 +11,7 @@ import { TrashIcon, XCircleIcon, } from "@heroicons/react/20/solid"; -import { type TaskRunStatus } from "@trigger.dev/database"; +import type { TaskRunStatus } from "@trigger.dev/database"; import assertNever from "assert-never"; import { HourglassIcon } from "lucide-react"; import { TimedOutIcon } from "~/assets/icons/TimedOutIcon"; @@ -24,12 +24,13 @@ export const allTaskRunStatuses = [ "WAITING_FOR_DEPLOY", "PENDING_VERSION", "PENDING", + "DEQUEUED", "EXECUTING", "RETRYING_AFTER_FAILURE", "WAITING_TO_RESUME", "COMPLETED_SUCCESSFULLY", - "CANCELED", "COMPLETED_WITH_ERRORS", + "CANCELED", "TIMED_OUT", "CRASHED", "PAUSED", @@ -42,16 +43,15 @@ export const filterableTaskRunStatuses = [ "PENDING_VERSION", "DELAYED", "PENDING", - "WAITING_TO_RESUME", + "DEQUEUED", "EXECUTING", - "RETRYING_AFTER_FAILURE", + "WAITING_TO_RESUME", "COMPLETED_SUCCESSFULLY", - "CANCELED", "COMPLETED_WITH_ERRORS", "TIMED_OUT", "CRASHED", - "INTERRUPTED", "SYSTEM_FAILURE", + "CANCELED", "EXPIRED", ] as const satisfies Readonly>; @@ -60,6 +60,7 @@ const taskRunStatusDescriptions: Record = { PENDING: "Task is waiting to be executed.", PENDING_VERSION: "Run cannot execute until a version includes the task and queue.", WAITING_FOR_DEPLOY: "Run cannot execute until a version includes the task and queue.", + DEQUEUED: "Task has been dequeued from the queue but is not yet executing.", EXECUTING: "Task is currently being executed.", RETRYING_AFTER_FAILURE: "Task is being reattempted after a failure.", WAITING_TO_RESUME: `You have used a "wait" function. 
When the wait is complete, the task will resume execution.`, @@ -82,6 +83,7 @@ export const QUEUED_STATUSES = [ ] satisfies TaskRunStatus[]; export const RUNNING_STATUSES = [ + "DEQUEUED", "EXECUTING", "RETRYING_AFTER_FAILURE", "WAITING_TO_RESUME", @@ -164,6 +166,8 @@ export function TaskRunStatusIcon({ case "PENDING_VERSION": case "WAITING_FOR_DEPLOY": return ; + case "DEQUEUED": + return ; case "EXECUTING": return ; case "WAITING_TO_RESUME": @@ -205,6 +209,7 @@ export function runStatusClassNameColor(status: TaskRunStatus): string { return "text-amber-500"; case "EXECUTING": case "RETRYING_AFTER_FAILURE": + case "DEQUEUED": return "text-pending"; case "WAITING_TO_RESUME": return "text-charcoal-500"; @@ -240,6 +245,8 @@ export function runStatusTitle(status: TaskRunStatus): string { case "PENDING_VERSION": case "WAITING_FOR_DEPLOY": return "Pending version"; + case "DEQUEUED": + return "Dequeued"; case "EXECUTING": return "Executing"; case "WAITING_TO_RESUME": diff --git a/apps/webapp/app/components/runs/v3/TaskRunsTable.tsx b/apps/webapp/app/components/runs/v3/TaskRunsTable.tsx index c85963edcb..14cdf5a67b 100644 --- a/apps/webapp/app/components/runs/v3/TaskRunsTable.tsx +++ b/apps/webapp/app/components/runs/v3/TaskRunsTable.tsx @@ -10,6 +10,9 @@ import { BeakerIcon, BookOpenIcon, CheckIcon } from "@heroicons/react/24/solid"; import { useLocation } from "@remix-run/react"; import { formatDuration, formatDurationMilliseconds } from "@trigger.dev/core/v3"; import { useCallback, useRef } from "react"; +import { TaskIconSmall } from "~/assets/icons/TaskIcon"; +import { MachineLabelCombo } from "~/components/MachineLabelCombo"; +import { MachineTooltipInfo } from "~/components/MachineTooltipInfo"; import { Badge } from "~/components/primitives/Badge"; import { Button, LinkButton } from "~/components/primitives/Buttons"; import { Checkbox } from "~/components/primitives/Checkbox"; @@ -18,16 +21,17 @@ import { Header3 } from "~/components/primitives/Headers"; import { PopoverMenuItem } from "~/components/primitives/Popover"; import { useSelectedItems } from "~/components/primitives/SelectedItemsProvider"; import { SimpleTooltip } from "~/components/primitives/Tooltip"; +import { TruncatedCopyableValue } from "~/components/primitives/TruncatedCopyableValue"; +import { useEnvironment } from "~/hooks/useEnvironment"; import { useFeatures } from "~/hooks/useFeatures"; import { useOrganization } from "~/hooks/useOrganizations"; import { useProject } from "~/hooks/useProject"; -import { useUser } from "~/hooks/useUser"; import { - type RunListAppliedFilters, - type RunListItem, -} from "~/presenters/v3/RunListPresenter.server"; -import { formatCurrencyAccurate, formatNumber } from "~/utils/numberFormatter"; -import { docsPath, v3RunSpanPath, v3TestPath } from "~/utils/pathBuilder"; + type NextRunListAppliedFilters, + type NextRunListItem, +} from "~/presenters/v3/NextRunListPresenter.server"; +import { formatCurrencyAccurate } from "~/utils/numberFormatter"; +import { docsPath, v3RunSpanPath, v3TestPath,v3TestTaskPath } from "~/utils/pathBuilder"; import { DateTime } from "../../primitives/DateTime"; import { Paragraph } from "../../primitives/Paragraph"; import { Spinner } from "../../primitives/Spinner"; @@ -51,19 +55,20 @@ import { filterableTaskRunStatuses, TaskRunStatusCombo, } from "./TaskRunStatus"; -import { useEnvironment } from "~/hooks/useEnvironment"; -import { CopyableText } from "~/components/primitives/CopyableText"; -import { ClipboardField } from 
"~/components/primitives/ClipboardField"; +import { useOptimisticLocation } from "~/hooks/useOptimisticLocation"; +import { useSearchParams } from "~/hooks/useSearchParam"; type RunsTableProps = { total: number; hasFilters: boolean; - filters: RunListAppliedFilters; + filters: NextRunListAppliedFilters; showJob?: boolean; - runs: RunListItem[]; + runs: NextRunListItem[]; + rootOnlyDefault?: boolean; isLoading?: boolean; allowSelection?: boolean; variant?: TableVariant; + disableAdjacentRows?: boolean; }; export function TaskRunsTable({ @@ -71,16 +76,23 @@ export function TaskRunsTable({ hasFilters, filters, runs, + rootOnlyDefault, + disableAdjacentRows = false, isLoading = false, allowSelection = false, variant = "dimmed", }: RunsTableProps) { const organization = useOrganization(); const project = useProject(); - const environment = useEnvironment(); const checkboxes = useRef<(HTMLInputElement | null)[]>([]); - const { selectedItems, has, hasAll, select, deselect, toggle } = useSelectedItems(allowSelection); + const { has, hasAll, select, deselect, toggle } = useSelectedItems(allowSelection); const { isManagedCloud } = useFeatures(); + const { value } = useSearchParams(); + const location = useOptimisticLocation(); + const rootOnly = value("rootOnly") ? `` : `rootOnly=${rootOnlyDefault}`; + const search = rootOnly ? `${rootOnly}&${location.search}` : location.search; + /** TableState has to be encoded as a separate URI component, so it's merged under one, 'tableState' param */ + const tableStateParam = disableAdjacentRows ? '' : encodeURIComponent(search); const showCompute = isManagedCloud; @@ -93,7 +105,7 @@ export function TaskRunsTable({ if (event.shiftKey) { const oldItem = runs.at(index - 1); const newItem = runs.at(index - 2); - const itemsIds = [oldItem?.id, newItem?.id].filter(Boolean); + const itemsIds = [oldItem?.friendlyId, newItem?.friendlyId].filter(Boolean); select(itemsIds); } } else if (event.key === "ArrowDown" && index < checkboxes.current.length - 1) { @@ -102,7 +114,7 @@ export function TaskRunsTable({ if (event.shiftKey) { const oldItem = runs.at(index - 1); const newItem = runs.at(index); - const itemsIds = [oldItem?.id, newItem?.id].filter(Boolean); + const itemsIds = [oldItem?.friendlyId, newItem?.friendlyId].filter(Boolean); select(itemsIds); } } @@ -118,9 +130,9 @@ export function TaskRunsTable({ {runs.length > 0 && ( r.id))} + checked={hasAll(runs.map((r) => r.friendlyId))} onChange={(element) => { - const ids = runs.map((r) => r.id); + const ids = runs.map((r) => r.friendlyId); const checked = element.currentTarget.checked; if (checked) { select(ids); @@ -203,6 +215,10 @@ export function TaskRunsTable({ Compute )} + }> + Machine + + Queue Test Created at ) : ( runs.map((run, index) => { + const searchParams = new URLSearchParams(); + if (tableStateParam) { + searchParams.set("tableState", tableStateParam); + } const path = v3RunSpanPath(organization, project, run.environment, run, { spanId: run.spanId, - }); + }, searchParams); return ( {allowSelection && ( { - toggle(run.id); + checked={has(run.friendlyId)} + onChange={() => { + toggle(run.friendlyId); }} ref={(r) => { checkboxes.current[index + 1] = r; @@ -309,20 +329,7 @@ export function TaskRunsTable({ )} - - - - } - asChild - disableHoverableContent - /> + @@ -390,6 +397,25 @@ export function TaskRunsTable({ : "โ€“"} )} + + + + + + {run.queue.type === "task" ? 
( + } + content={`This queue was automatically created from your "${run.queue.name}" task`} + /> + ) : ( + } + content={`This is a custom queue you added in your code.`} + /> + )} + {run.queue.name} + + {run.isTest ? : "โ€“"} @@ -423,7 +449,7 @@ export function TaskRunsTable({ ); } -function RunActionsCell({ run, path }: { run: RunListItem; path: string }) { +function RunActionsCell({ run, path }: { run: NextRunListItem; path: string }) { const location = useLocation(); if (!run.isCancellable && !run.isReplayable) return {""}; @@ -555,6 +581,8 @@ function BlankState({ isLoading, filters }: Pick; const { tasks, from, to, ...otherFilters } = filters; + const singleTaskFromFilters = filters.tasks.length === 1 ? filters.tasks[0] : null; + const testPath = singleTaskFromFilters ? v3TestTaskPath(organization, project, environment, {taskIdentifier: singleTaskFromFilters}) : v3TestPath(organization, project, environment); if ( filters.tasks.length === 1 && @@ -569,7 +597,7 @@ function BlankState({ isLoading, filters }: Pick
Run a test diff --git a/apps/webapp/app/components/runs/v3/TaskTriggerSource.tsx b/apps/webapp/app/components/runs/v3/TaskTriggerSource.tsx index 0dfc5f10e0..8d81e2f36c 100644 --- a/apps/webapp/app/components/runs/v3/TaskTriggerSource.tsx +++ b/apps/webapp/app/components/runs/v3/TaskTriggerSource.tsx @@ -1,5 +1,5 @@ import { ClockIcon } from "@heroicons/react/20/solid"; -import { type TaskTriggerSource } from "@trigger.dev/database"; +import type { TaskTriggerSource } from "@trigger.dev/database"; import { TaskIconSmall } from "~/assets/icons/TaskIcon"; import { cn } from "~/utils/cn"; diff --git a/apps/webapp/app/components/runs/v3/WaitpointTokenFilters.tsx b/apps/webapp/app/components/runs/v3/WaitpointTokenFilters.tsx index 7c64647628..ae41639414 100644 --- a/apps/webapp/app/components/runs/v3/WaitpointTokenFilters.tsx +++ b/apps/webapp/app/components/runs/v3/WaitpointTokenFilters.tsx @@ -75,9 +75,7 @@ export function WaitpointTokenFilters(props: WaitpointTokenFiltersProps) { {hasFilters && (
- +
@@ -109,7 +107,7 @@ function FilterMenu() {
} - variant={"minimal/small"} + variant={"secondary/small"} shortcut={shortcut} tooltipTitle={"Filter runs"} > @@ -285,10 +283,12 @@ function AppliedStatusFilter() { }> } value={appliedSummary( statuses.map((v) => waitpointStatusTitle(v as WaitpointTokenStatus)) )} onRemove={() => del(["statuses", "cursor", "direction"])} + variant="secondary/small" /> } @@ -330,7 +330,7 @@ function TagsDropdown({ useEffect(() => { const searchParams = new URLSearchParams(); if (searchValue) { - searchParams.set("name", encodeURIComponent(searchValue)); + searchParams.set("name", searchValue); } fetcher.load( `/resources/orgs/${organization.slug}/projects/${project.slug}/env/${environment.slug}/waitpoints/tags?${searchParams}` @@ -409,8 +409,10 @@ function AppliedTagsFilter() { }> } value={appliedSummary(values("tags"))} onRemove={() => del(["tags", "cursor", "direction"])} + variant="secondary/small" /> } @@ -527,8 +529,10 @@ function AppliedWaitpointIdFilter() { }> } value={id} onRemove={() => del(["id", "cursor", "direction"])} + variant="secondary/small" /> } @@ -594,7 +598,7 @@ function IdempotencyKeyDropdown({
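// --- Editorial aside (not part of the diff) ---------------------------------
// Why the TagsDropdown hunk above drops the encodeURIComponent() wrapper:
// URLSearchParams percent-encodes values when it is serialized, so pre-encoding the
// value encodes it twice and the server receives the escaped form instead of the raw
// tag name. The values below are illustrative.
const params = new URLSearchParams();
params.set("name", "my tag/1");
const encodedOnce = params.toString(); // "name=my+tag%2F1" — decodes back to "my tag/1"

const preEncoded = new URLSearchParams();
preEncoded.set("name", encodeURIComponent("my tag/1"));
const encodedTwice = preEncoded.toString(); // "name=my%2520tag%252F1" — the server now sees "my%20tag%2F1"
// -----------------------------------------------------------------------------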
setIdempotencyKey(e.target.value)} variant="small" @@ -643,8 +647,10 @@ function AppliedIdempotencyKeyFilter() { }> } value={idempotencyKey} onRemove={() => del(["idempotencyKey", "cursor", "direction"])} + variant="secondary/small" /> } diff --git a/apps/webapp/app/database-types.ts b/apps/webapp/app/database-types.ts index b6d47cd9ac..3305dc67d5 100644 --- a/apps/webapp/app/database-types.ts +++ b/apps/webapp/app/database-types.ts @@ -30,6 +30,7 @@ export const TaskRunStatus = { PENDING: "PENDING", PENDING_VERSION: "PENDING_VERSION", WAITING_FOR_DEPLOY: "WAITING_FOR_DEPLOY", + DEQUEUED: "DEQUEUED", EXECUTING: "EXECUTING", WAITING_TO_RESUME: "WAITING_TO_RESUME", RETRYING_AFTER_FAILURE: "RETRYING_AFTER_FAILURE", diff --git a/apps/webapp/app/db.server.ts b/apps/webapp/app/db.server.ts index 969f3f0276..47b67a1a40 100644 --- a/apps/webapp/app/db.server.ts +++ b/apps/webapp/app/db.server.ts @@ -1,10 +1,11 @@ import { Prisma, PrismaClient, - PrismaClientOrTransaction, - PrismaReplicaClient, - PrismaTransactionClient, - PrismaTransactionOptions, + $transaction as transac, + type PrismaClientOrTransaction, + type PrismaReplicaClient, + type PrismaTransactionClient, + type PrismaTransactionOptions, } from "@trigger.dev/database"; import invariant from "tiny-invariant"; import { z } from "zod"; @@ -12,9 +13,9 @@ import { env } from "./env.server"; import { logger } from "./services/logger.server"; import { isValidDatabaseUrl } from "./utils/db"; import { singleton } from "./utils/singleton"; -import { $transaction as transac } from "@trigger.dev/database"; import { startActiveSpan } from "./v3/tracer.server"; import { Span } from "@opentelemetry/api"; +import { queryPerformanceMonitor } from "./utils/queryPerformanceMonitor.server"; export type { PrismaTransactionClient, @@ -122,28 +123,99 @@ function getClient() { url: databaseUrl.href, }, }, - // @ts-expect-error log: [ + // events { - emit: "stdout", + emit: "event", level: "error", }, { - emit: "stdout", + emit: "event", level: "info", }, { - emit: "stdout", + emit: "event", level: "warn", }, - ].concat( - process.env.VERBOSE_PRISMA_LOGS === "1" + // stdout + ...((process.env.PRISMA_LOG_TO_STDOUT === "1" ? [ - { emit: "event", level: "query" }, - { emit: "stdout", level: "query" }, + { + emit: "stdout", + level: "error", + }, + { + emit: "stdout", + level: "info", + }, + { + emit: "stdout", + level: "warn", + }, ] - : [] - ), + : []) satisfies Prisma.LogDefinition[]), + // Query performance monitoring + ...((process.env.VERBOSE_PRISMA_LOGS === "1" || + process.env.VERY_SLOW_QUERY_THRESHOLD_MS !== undefined + ? [ + { + emit: "event", + level: "query", + }, + ] + : []) satisfies Prisma.LogDefinition[]), + // verbose + ...((process.env.VERBOSE_PRISMA_LOGS === "1" + ? 
[ + { + emit: "stdout", + level: "query", + }, + ] + : []) satisfies Prisma.LogDefinition[]), + ], + }); + + // Only use structured logging if we're not already logging to stdout + if (process.env.PRISMA_LOG_TO_STDOUT !== "1") { + client.$on("info", (log) => { + logger.info("PrismaClient info", { + clientType: "writer", + event: { + timestamp: log.timestamp, + message: log.message, + target: log.target, + }, + }); + }); + + client.$on("warn", (log) => { + logger.warn("PrismaClient warn", { + clientType: "writer", + event: { + timestamp: log.timestamp, + message: log.message, + target: log.target, + }, + }); + }); + + client.$on("error", (log) => { + logger.error("PrismaClient error", { + clientType: "writer", + event: { + timestamp: log.timestamp, + message: log.message, + target: log.target, + }, + ignoreError: true, + }); + }); + } + + // Add query performance monitoring + client.$on("query", (log) => { + queryPerformanceMonitor.onQuery("writer", log); }); // connect eagerly @@ -174,28 +246,98 @@ function getReplicaClient() { url: replicaUrl.href, }, }, - // @ts-expect-error log: [ + // events { - emit: "stdout", + emit: "event", level: "error", }, { - emit: "stdout", + emit: "event", level: "info", }, { - emit: "stdout", + emit: "event", level: "warn", }, - ].concat( - process.env.VERBOSE_PRISMA_LOGS === "1" + // stdout + ...((process.env.PRISMA_LOG_TO_STDOUT === "1" ? [ - { emit: "event", level: "query" }, - { emit: "stdout", level: "query" }, + { + emit: "stdout", + level: "error", + }, + { + emit: "stdout", + level: "info", + }, + { + emit: "stdout", + level: "warn", + }, ] - : [] - ), + : []) satisfies Prisma.LogDefinition[]), + // Query performance monitoring + ...((process.env.VERBOSE_PRISMA_LOGS === "1" || + process.env.VERY_SLOW_QUERY_THRESHOLD_MS !== undefined + ? [ + { + emit: "event", + level: "query", + }, + ] + : []) satisfies Prisma.LogDefinition[]), + // verbose + ...((process.env.VERBOSE_PRISMA_LOGS === "1" + ? 
[ + { + emit: "stdout", + level: "query", + }, + ] + : []) satisfies Prisma.LogDefinition[]), + ], + }); + + // Only use structured logging if we're not already logging to stdout + if (process.env.PRISMA_LOG_TO_STDOUT !== "1") { + replicaClient.$on("info", (log) => { + logger.info("PrismaClient info", { + clientType: "reader", + event: { + timestamp: log.timestamp, + message: log.message, + target: log.target, + }, + }); + }); + + replicaClient.$on("warn", (log) => { + logger.warn("PrismaClient warn", { + clientType: "reader", + event: { + timestamp: log.timestamp, + message: log.message, + target: log.target, + }, + }); + }); + + replicaClient.$on("error", (log) => { + logger.error("PrismaClient error", { + clientType: "reader", + event: { + timestamp: log.timestamp, + message: log.message, + target: log.target, + }, + }); + }); + } + + // Add query performance monitoring for replica client + replicaClient.$on("query", (log) => { + queryPerformanceMonitor.onQuery("replica", log); }); // connect eagerly diff --git a/apps/webapp/app/entry.server.tsx b/apps/webapp/app/entry.server.tsx index d05fabd90b..4ee4f252a3 100644 --- a/apps/webapp/app/entry.server.tsx +++ b/apps/webapp/app/entry.server.tsx @@ -1,21 +1,28 @@ -import { - createReadableStreamFromReadable, - type DataFunctionArgs, - type EntryContext, -} from "@remix-run/node"; // or cloudflare/deno +import { createReadableStreamFromReadable, type EntryContext } from "@remix-run/node"; // or cloudflare/deno import { RemixServer } from "@remix-run/react"; +import { wrapHandleErrorWithSentry } from "@sentry/remix"; import { parseAcceptLanguage } from "intl-parse-accept-language"; import isbot from "isbot"; import { renderToPipeableStream } from "react-dom/server"; import { PassThrough } from "stream"; import * as Worker from "~/services/worker.server"; +import { bootstrap } from "./bootstrap"; import { LocaleContextProvider } from "./components/primitives/LocaleProvider"; import { OperatingSystemContextProvider, OperatingSystemPlatform, } from "./components/primitives/OperatingSystemProvider"; +import { Prisma } from "./db.server"; +import { env } from "./env.server"; +import { eventLoopMonitor } from "./eventLoopMonitor.server"; +import { logger } from "./services/logger.server"; +import { resourceMonitor } from "./services/resourceMonitor.server"; import { singleton } from "./utils/singleton"; -import { bootstrap } from "./bootstrap"; +import { remoteBuildsEnabled } from "./v3/remoteImageBuilder.server"; +import { + registerRunEngineEventBusHandlers, + setupBatchQueueCallbacks, +} from "./v3/runEngineHandlers.server"; const ABORT_DELAY = 30000; @@ -170,9 +177,21 @@ function handleBrowserRequest( }); } -export function handleError(error: unknown, { request, params, context }: DataFunctionArgs) { - logError(error, request); -} +export const handleError = wrapHandleErrorWithSentry((error, { request }) => { + if (request instanceof Request) { + logger.debug("Error in handleError", { + error, + request: { + url: request.url, + method: request.method, + }, + }); + } else { + logger.debug("Error in handleError", { + error, + }); + } +}); Worker.init().catch((error) => { logError(error); @@ -215,18 +234,13 @@ process.on("uncaughtException", (error, origin) => { }); singleton("RunEngineEventBusHandlers", registerRunEngineEventBusHandlers); +singleton("SetupBatchQueueCallbacks", setupBatchQueueCallbacks); export { apiRateLimiter } from "./services/apiRateLimit.server"; export { engineRateLimiter } from "./services/engineRateLimit.server"; +export { 
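// --- Editorial aside (not part of the diff) ---------------------------------
// A minimal sketch of the Prisma logging pattern that db.server.ts (above) switches
// to: log levels are emitted as events rather than written straight to stdout, and
// the events are forwarded to a structured logger plus a slow-query check. The
// package import, threshold, and console logger here are assumptions for illustration.
import { PrismaClient } from "@prisma/client";

const prisma = new PrismaClient({
  log: [
    { emit: "event", level: "query" },
    { emit: "event", level: "warn" },
    { emit: "event", level: "error" },
  ],
});

const SLOW_QUERY_THRESHOLD_MS = 500; // assumed threshold for illustration

prisma.$on("query", (event) => {
  // event.duration is the query time in milliseconds
  if (event.duration >= SLOW_QUERY_THRESHOLD_MS) {
    console.warn("slow query", { durationMs: event.duration, query: event.query });
  }
});

prisma.$on("error", (event) => {
  console.error("prisma error", { message: event.message, target: event.target });
});
// -----------------------------------------------------------------------------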
runWithHttpContext } from "./services/httpAsyncStorage.server"; export { socketIo } from "./v3/handleSocketIo.server"; export { wss } from "./v3/handleWebsockets.server"; -export { runWithHttpContext } from "./services/httpAsyncStorage.server"; -import { eventLoopMonitor } from "./eventLoopMonitor.server"; -import { env } from "./env.server"; -import { logger } from "./services/logger.server"; -import { Prisma } from "./db.server"; -import { registerRunEngineEventBusHandlers } from "./v3/runEngineHandlers.server"; -import { remoteBuildsEnabled } from "./v3/remoteImageBuilder.server"; if (env.EVENT_LOOP_MONITOR_ENABLED === "1") { eventLoopMonitor.enable(); @@ -237,3 +251,7 @@ if (remoteBuildsEnabled()) { } else { console.log("🏗️ Local builds enabled"); } + +if (env.RESOURCE_MONITOR_ENABLED === "1") { + resourceMonitor.startMonitoring(1000); +} diff --git a/apps/webapp/app/env.server.ts b/apps/webapp/app/env.server.ts index 3297616866..1cc0db0bf0 100644 --- a/apps/webapp/app/env.server.ts +++ b/apps/webapp/app/env.server.ts @@ -1,767 +1,1285 @@ import { z } from "zod"; +import { BoolEnv } from "./utils/boolEnv"; import { isValidDatabaseUrl } from "./utils/db"; import { isValidRegex } from "./utils/regex"; -import { BoolEnv } from "./utils/boolEnv"; -const EnvironmentSchema = z.object({ - NODE_ENV: z.union([z.literal("development"), z.literal("production"), z.literal("test")]), - DATABASE_URL: z - .string() - .refine( - isValidDatabaseUrl, - "DATABASE_URL is invalid, for details please check the additional output above this message."
- ), - DATABASE_READ_REPLICA_URL: z.string().optional(), - SESSION_SECRET: z.string(), - MAGIC_LINK_SECRET: z.string(), - ENCRYPTION_KEY: z.string(), - WHITELISTED_EMAILS: z - .string() - .refine(isValidRegex, "WHITELISTED_EMAILS must be a valid regex.") - .optional(), - ADMIN_EMAILS: z.string().refine(isValidRegex, "ADMIN_EMAILS must be a valid regex.").optional(), - REMIX_APP_PORT: z.string().optional(), - LOGIN_ORIGIN: z.string().default("http://localhost:3030"), - APP_ORIGIN: z.string().default("http://localhost:3030"), - API_ORIGIN: z.string().optional(), - STREAM_ORIGIN: z.string().optional(), - ELECTRIC_ORIGIN: z.string().default("http://localhost:3060"), - // A comma separated list of electric origins to shard into different electric instances by environmentId - // example: "http://localhost:3060,http://localhost:3061,http://localhost:3062" - ELECTRIC_ORIGIN_SHARDS: z.string().optional(), - APP_ENV: z.string().default(process.env.NODE_ENV), - SERVICE_NAME: z.string().default("trigger.dev webapp"), - POSTHOG_PROJECT_KEY: z.string().default("phc_LFH7kJiGhdIlnO22hTAKgHpaKhpM8gkzWAFvHmf5vfS"), - TRIGGER_TELEMETRY_DISABLED: z.string().optional(), - AUTH_GITHUB_CLIENT_ID: z.string().optional(), - AUTH_GITHUB_CLIENT_SECRET: z.string().optional(), - EMAIL_TRANSPORT: z.enum(["resend", "smtp", "aws-ses"]).optional(), - FROM_EMAIL: z.string().optional(), - REPLY_TO_EMAIL: z.string().optional(), - RESEND_API_KEY: z.string().optional(), - SMTP_HOST: z.string().optional(), - SMTP_PORT: z.coerce.number().optional(), - SMTP_SECURE: BoolEnv.optional(), - SMTP_USER: z.string().optional(), - SMTP_PASSWORD: z.string().optional(), - - PLAIN_API_KEY: z.string().optional(), - WORKER_SCHEMA: z.string().default("graphile_worker"), - WORKER_CONCURRENCY: z.coerce.number().int().default(10), - WORKER_POLL_INTERVAL: z.coerce.number().int().default(1000), - WORKER_ENABLED: z.string().default("true"), - GRACEFUL_SHUTDOWN_TIMEOUT: z.coerce.number().int().default(60000), - DISABLE_SSE: z.string().optional(), - OPENAI_API_KEY: z.string().optional(), - - // Redis options - REDIS_HOST: z.string().optional(), - REDIS_READER_HOST: z.string().optional(), - REDIS_READER_PORT: z.coerce.number().optional(), - REDIS_PORT: z.coerce.number().optional(), - REDIS_USERNAME: z.string().optional(), - REDIS_PASSWORD: z.string().optional(), - REDIS_TLS_DISABLED: z.string().optional(), - - RATE_LIMIT_REDIS_HOST: z - .string() - .optional() - .transform((v) => v ?? process.env.REDIS_HOST), - RATE_LIMIT_REDIS_READER_HOST: z - .string() - .optional() - .transform((v) => v ?? process.env.REDIS_READER_HOST), - RATE_LIMIT_REDIS_READER_PORT: z.coerce - .number() - .optional() - .transform( - (v) => - v ?? (process.env.REDIS_READER_PORT ? parseInt(process.env.REDIS_READER_PORT) : undefined) - ), - RATE_LIMIT_REDIS_PORT: z.coerce - .number() - .optional() - .transform((v) => v ?? (process.env.REDIS_PORT ? parseInt(process.env.REDIS_PORT) : undefined)), - RATE_LIMIT_REDIS_USERNAME: z - .string() - .optional() - .transform((v) => v ?? process.env.REDIS_USERNAME), - RATE_LIMIT_REDIS_PASSWORD: z - .string() - .optional() - .transform((v) => v ?? process.env.REDIS_PASSWORD), - RATE_LIMIT_REDIS_TLS_DISABLED: z.string().default(process.env.REDIS_TLS_DISABLED ?? "false"), - RATE_LIMIT_REDIS_CLUSTER_MODE_ENABLED: z.string().default("0"), - - CACHE_REDIS_HOST: z - .string() - .optional() - .transform((v) => v ?? process.env.REDIS_HOST), - CACHE_REDIS_READER_HOST: z - .string() - .optional() - .transform((v) => v ?? 
process.env.REDIS_READER_HOST), - CACHE_REDIS_READER_PORT: z.coerce - .number() - .optional() - .transform( - (v) => - v ?? (process.env.REDIS_READER_PORT ? parseInt(process.env.REDIS_READER_PORT) : undefined) - ), - CACHE_REDIS_PORT: z.coerce - .number() - .optional() - .transform((v) => v ?? (process.env.REDIS_PORT ? parseInt(process.env.REDIS_PORT) : undefined)), - CACHE_REDIS_USERNAME: z - .string() - .optional() - .transform((v) => v ?? process.env.REDIS_USERNAME), - CACHE_REDIS_PASSWORD: z - .string() - .optional() - .transform((v) => v ?? process.env.REDIS_PASSWORD), - CACHE_REDIS_TLS_DISABLED: z.string().default(process.env.REDIS_TLS_DISABLED ?? "false"), - CACHE_REDIS_CLUSTER_MODE_ENABLED: z.string().default("0"), - - REALTIME_STREAMS_REDIS_HOST: z - .string() - .optional() - .transform((v) => v ?? process.env.REDIS_HOST), - REALTIME_STREAMS_REDIS_READER_HOST: z - .string() - .optional() - .transform((v) => v ?? process.env.REDIS_READER_HOST), - REALTIME_STREAMS_REDIS_READER_PORT: z.coerce - .number() - .optional() - .transform( - (v) => - v ?? (process.env.REDIS_READER_PORT ? parseInt(process.env.REDIS_READER_PORT) : undefined) - ), - REALTIME_STREAMS_REDIS_PORT: z.coerce - .number() - .optional() - .transform((v) => v ?? (process.env.REDIS_PORT ? parseInt(process.env.REDIS_PORT) : undefined)), - REALTIME_STREAMS_REDIS_USERNAME: z - .string() - .optional() - .transform((v) => v ?? process.env.REDIS_USERNAME), - REALTIME_STREAMS_REDIS_PASSWORD: z - .string() - .optional() - .transform((v) => v ?? process.env.REDIS_PASSWORD), - REALTIME_STREAMS_REDIS_TLS_DISABLED: z - .string() - .default(process.env.REDIS_TLS_DISABLED ?? "false"), - REALTIME_STREAMS_REDIS_CLUSTER_MODE_ENABLED: z.string().default("0"), - - REALTIME_MAXIMUM_CREATED_AT_FILTER_AGE_IN_MS: z.coerce - .number() - .int() - .default(24 * 60 * 60 * 1000), // 1 day in milliseconds - - PUBSUB_REDIS_HOST: z - .string() - .optional() - .transform((v) => v ?? process.env.REDIS_HOST), - PUBSUB_REDIS_READER_HOST: z - .string() - .optional() - .transform((v) => v ?? process.env.REDIS_READER_HOST), - PUBSUB_REDIS_READER_PORT: z.coerce - .number() - .optional() - .transform( - (v) => - v ?? (process.env.REDIS_READER_PORT ? parseInt(process.env.REDIS_READER_PORT) : undefined) - ), - PUBSUB_REDIS_PORT: z.coerce - .number() - .optional() - .transform((v) => v ?? (process.env.REDIS_PORT ? parseInt(process.env.REDIS_PORT) : undefined)), - PUBSUB_REDIS_USERNAME: z - .string() - .optional() - .transform((v) => v ?? process.env.REDIS_USERNAME), - PUBSUB_REDIS_PASSWORD: z - .string() - .optional() - .transform((v) => v ?? process.env.REDIS_PASSWORD), - PUBSUB_REDIS_TLS_DISABLED: z.string().default(process.env.REDIS_TLS_DISABLED ?? 
"false"), - PUBSUB_REDIS_CLUSTER_MODE_ENABLED: z.string().default("0"), - - DEFAULT_ENV_EXECUTION_CONCURRENCY_LIMIT: z.coerce.number().int().default(100), - DEFAULT_ORG_EXECUTION_CONCURRENCY_LIMIT: z.coerce.number().int().default(300), - DEFAULT_DEV_ENV_EXECUTION_ATTEMPTS: z.coerce.number().int().positive().default(1), - - //API Rate limiting - /** - * @example "60s" - * @example "1m" - * @example "1h" - * @example "1d" - * @example "1000ms" - * @example "1000s" - */ - API_RATE_LIMIT_REFILL_INTERVAL: z.string().default("10s"), // refill 250 tokens every 10 seconds - API_RATE_LIMIT_MAX: z.coerce.number().int().default(750), // allow bursts of 750 requests - API_RATE_LIMIT_REFILL_RATE: z.coerce.number().int().default(250), // refix 250 tokens every 10 seconds - API_RATE_LIMIT_REQUEST_LOGS_ENABLED: z.string().default("0"), - API_RATE_LIMIT_REJECTION_LOGS_ENABLED: z.string().default("1"), - API_RATE_LIMIT_LIMITER_LOGS_ENABLED: z.string().default("0"), - - API_RATE_LIMIT_JWT_WINDOW: z.string().default("1m"), - API_RATE_LIMIT_JWT_TOKENS: z.coerce.number().int().default(60), - - //v3 - PROVIDER_SECRET: z.string().default("provider-secret"), - COORDINATOR_SECRET: z.string().default("coordinator-secret"), - DEPOT_TOKEN: z.string().optional(), - DEPOT_ORG_ID: z.string().optional(), - DEPOT_REGION: z.string().default("us-east-1"), - DEPLOY_REGISTRY_HOST: z.string().min(1), - DEPLOY_REGISTRY_USERNAME: z.string().optional(), - DEPLOY_REGISTRY_PASSWORD: z.string().optional(), - DEPLOY_REGISTRY_NAMESPACE: z.string().min(1).default("trigger"), - DEPLOY_IMAGE_PLATFORM: z.string().default("linux/amd64"), - DEPLOY_TIMEOUT_MS: z.coerce - .number() - .int() - .default(60 * 1000 * 8), // 8 minutes - OBJECT_STORE_BASE_URL: z.string().optional(), - OBJECT_STORE_ACCESS_KEY_ID: z.string().optional(), - OBJECT_STORE_SECRET_ACCESS_KEY: z.string().optional(), - OBJECT_STORE_REGION: z.string().optional(), - OBJECT_STORE_SERVICE: z.string().default("s3"), - EVENTS_BATCH_SIZE: z.coerce.number().int().default(100), - EVENTS_BATCH_INTERVAL: z.coerce.number().int().default(1000), - EVENTS_DEFAULT_LOG_RETENTION: z.coerce.number().int().default(7), - SHARED_QUEUE_CONSUMER_POOL_SIZE: z.coerce.number().int().default(10), - SHARED_QUEUE_CONSUMER_INTERVAL_MS: z.coerce.number().int().default(100), - SHARED_QUEUE_CONSUMER_NEXT_TICK_INTERVAL_MS: z.coerce.number().int().default(100), - SHARED_QUEUE_CONSUMER_EMIT_RESUME_DEPENDENCY_TIMEOUT_MS: z.coerce.number().int().default(1000), - SHARED_QUEUE_CONSUMER_RESOLVE_PAYLOADS_BATCH_SIZE: z.coerce.number().int().default(25), - - MANAGED_WORKER_SECRET: z.string().default("managed-secret"), - - // Development OTEL environment variables - DEV_OTEL_EXPORTER_OTLP_ENDPOINT: z.string().optional(), - // If this is set to 1, then the below variables are used to configure the batch processor for spans and logs - DEV_OTEL_BATCH_PROCESSING_ENABLED: z.string().default("0"), - DEV_OTEL_SPAN_MAX_EXPORT_BATCH_SIZE: z.string().default("64"), - DEV_OTEL_SPAN_SCHEDULED_DELAY_MILLIS: z.string().default("200"), - DEV_OTEL_SPAN_EXPORT_TIMEOUT_MILLIS: z.string().default("30000"), - DEV_OTEL_SPAN_MAX_QUEUE_SIZE: z.string().default("512"), - DEV_OTEL_LOG_MAX_EXPORT_BATCH_SIZE: z.string().default("64"), - DEV_OTEL_LOG_SCHEDULED_DELAY_MILLIS: z.string().default("200"), - DEV_OTEL_LOG_EXPORT_TIMEOUT_MILLIS: z.string().default("30000"), - DEV_OTEL_LOG_MAX_QUEUE_SIZE: z.string().default("512"), - - PROD_OTEL_BATCH_PROCESSING_ENABLED: z.string().default("0"), - PROD_OTEL_SPAN_MAX_EXPORT_BATCH_SIZE: 
z.string().default("64"), - PROD_OTEL_SPAN_SCHEDULED_DELAY_MILLIS: z.string().default("200"), - PROD_OTEL_SPAN_EXPORT_TIMEOUT_MILLIS: z.string().default("30000"), - PROD_OTEL_SPAN_MAX_QUEUE_SIZE: z.string().default("512"), - PROD_OTEL_LOG_MAX_EXPORT_BATCH_SIZE: z.string().default("64"), - PROD_OTEL_LOG_SCHEDULED_DELAY_MILLIS: z.string().default("200"), - PROD_OTEL_LOG_EXPORT_TIMEOUT_MILLIS: z.string().default("30000"), - PROD_OTEL_LOG_MAX_QUEUE_SIZE: z.string().default("512"), - - CHECKPOINT_THRESHOLD_IN_MS: z.coerce.number().int().default(30000), - - // Internal OTEL environment variables - INTERNAL_OTEL_TRACE_EXPORTER_URL: z.string().optional(), - INTERNAL_OTEL_TRACE_EXPORTER_AUTH_HEADERS: z.string().optional(), - INTERNAL_OTEL_TRACE_LOGGING_ENABLED: z.string().default("1"), - // this means 1/20 traces or 5% of traces will be sampled (sampled = recorded) - INTERNAL_OTEL_TRACE_SAMPLING_RATE: z.string().default("20"), - INTERNAL_OTEL_TRACE_INSTRUMENT_PRISMA_ENABLED: z.string().default("0"), - INTERNAL_OTEL_TRACE_DISABLED: z.string().default("0"), - - INTERNAL_OTEL_LOG_EXPORTER_URL: z.string().optional(), - INTERNAL_OTEL_METRIC_EXPORTER_URL: z.string().optional(), - INTERNAL_OTEL_METRIC_EXPORTER_AUTH_HEADERS: z.string().optional(), - INTERNAL_OTEL_METRIC_EXPORTER_ENABLED: z.string().default("0"), - INTERNAL_OTEL_METRIC_EXPORTER_INTERVAL_MS: z.coerce.number().int().default(30_000), - - ORG_SLACK_INTEGRATION_CLIENT_ID: z.string().optional(), - ORG_SLACK_INTEGRATION_CLIENT_SECRET: z.string().optional(), - - /** These enable the alerts feature in v3 */ - ALERT_EMAIL_TRANSPORT: z.enum(["resend", "smtp", "aws-ses"]).optional(), - ALERT_FROM_EMAIL: z.string().optional(), - ALERT_REPLY_TO_EMAIL: z.string().optional(), - ALERT_RESEND_API_KEY: z.string().optional(), - ALERT_SMTP_HOST: z.string().optional(), - ALERT_SMTP_PORT: z.coerce.number().optional(), - ALERT_SMTP_SECURE: BoolEnv.optional(), - ALERT_SMTP_USER: z.string().optional(), - ALERT_SMTP_PASSWORD: z.string().optional(), - ALERT_RATE_LIMITER_EMISSION_INTERVAL: z.coerce.number().int().default(2_500), - ALERT_RATE_LIMITER_BURST_TOLERANCE: z.coerce.number().int().default(10_000), - ALERT_RATE_LIMITER_REDIS_HOST: z - .string() - .optional() - .transform((v) => v ?? process.env.REDIS_HOST), - ALERT_RATE_LIMITER_REDIS_READER_HOST: z - .string() - .optional() - .transform((v) => v ?? process.env.REDIS_READER_HOST), - ALERT_RATE_LIMITER_REDIS_READER_PORT: z.coerce - .number() - .optional() - .transform( - (v) => - v ?? (process.env.REDIS_READER_PORT ? parseInt(process.env.REDIS_READER_PORT) : undefined) - ), - ALERT_RATE_LIMITER_REDIS_PORT: z.coerce - .number() - .optional() - .transform((v) => v ?? (process.env.REDIS_PORT ? parseInt(process.env.REDIS_PORT) : undefined)), - ALERT_RATE_LIMITER_REDIS_USERNAME: z - .string() - .optional() - .transform((v) => v ?? process.env.REDIS_USERNAME), - ALERT_RATE_LIMITER_REDIS_PASSWORD: z - .string() - .optional() - .transform((v) => v ?? process.env.REDIS_PASSWORD), - ALERT_RATE_LIMITER_REDIS_TLS_DISABLED: z - .string() - .default(process.env.REDIS_TLS_DISABLED ?? 
"false"), - ALERT_RATE_LIMITER_REDIS_CLUSTER_MODE_ENABLED: z.string().default("0"), - - LOOPS_API_KEY: z.string().optional(), - MARQS_DISABLE_REBALANCING: BoolEnv.default(false), - MARQS_VISIBILITY_TIMEOUT_MS: z.coerce - .number() - .int() - .default(60 * 1000 * 15), - MARQS_SHARED_QUEUE_LIMIT: z.coerce.number().int().default(1000), - MARQS_MAXIMUM_QUEUE_PER_ENV_COUNT: z.coerce.number().int().default(50), - MARQS_DEV_QUEUE_LIMIT: z.coerce.number().int().default(1000), - MARQS_MAXIMUM_NACK_COUNT: z.coerce.number().int().default(64), - MARQS_CONCURRENCY_LIMIT_BIAS: z.coerce.number().default(0.75), - MARQS_AVAILABLE_CAPACITY_BIAS: z.coerce.number().default(0.3), - MARQS_QUEUE_AGE_RANDOMIZATION_BIAS: z.coerce.number().default(0.25), - MARQS_REUSE_SNAPSHOT_COUNT: z.coerce.number().int().default(0), - MARQS_MAXIMUM_ENV_COUNT: z.coerce.number().int().optional(), - - PROD_TASK_HEARTBEAT_INTERVAL_MS: z.coerce.number().int().optional(), - - VERBOSE_GRAPHILE_LOGGING: z.string().default("false"), - V2_MARQS_ENABLED: z.string().default("0"), - V2_MARQS_CONSUMER_POOL_ENABLED: z.string().default("0"), - V2_MARQS_CONSUMER_POOL_SIZE: z.coerce.number().int().default(10), - V2_MARQS_CONSUMER_POLL_INTERVAL_MS: z.coerce.number().int().default(1000), - V2_MARQS_QUEUE_SELECTION_COUNT: z.coerce.number().int().default(36), - V2_MARQS_VISIBILITY_TIMEOUT_MS: z.coerce - .number() - .int() - .default(60 * 1000 * 15), - V2_MARQS_DEFAULT_ENV_CONCURRENCY: z.coerce.number().int().default(100), - V2_MARQS_VERBOSE: z.string().default("0"), - V3_MARQS_CONCURRENCY_MONITOR_ENABLED: z.string().default("0"), - V2_MARQS_CONCURRENCY_MONITOR_ENABLED: z.string().default("0"), - /* Usage settings */ - USAGE_EVENT_URL: z.string().optional(), - PROD_USAGE_HEARTBEAT_INTERVAL_MS: z.coerce.number().int().optional(), - - CENTS_PER_RUN: z.coerce.number().default(0), - - EVENT_LOOP_MONITOR_ENABLED: z.string().default("1"), - MAXIMUM_LIVE_RELOADING_EVENTS: z.coerce.number().int().default(1000), - MAXIMUM_TRACE_SUMMARY_VIEW_COUNT: z.coerce.number().int().default(25_000), - TASK_PAYLOAD_OFFLOAD_THRESHOLD: z.coerce.number().int().default(524_288), // 512KB - TASK_PAYLOAD_MAXIMUM_SIZE: z.coerce.number().int().default(3_145_728), // 3MB - BATCH_TASK_PAYLOAD_MAXIMUM_SIZE: z.coerce.number().int().default(1_000_000), // 1MB - TASK_RUN_METADATA_MAXIMUM_SIZE: z.coerce.number().int().default(262_144), // 256KB - - MAXIMUM_DEV_QUEUE_SIZE: z.coerce.number().int().optional(), - MAXIMUM_DEPLOYED_QUEUE_SIZE: z.coerce.number().int().optional(), - MAX_BATCH_V2_TRIGGER_ITEMS: z.coerce.number().int().default(500), - MAX_BATCH_AND_WAIT_V2_TRIGGER_ITEMS: z.coerce.number().int().default(500), - - REALTIME_STREAM_VERSION: z.enum(["v1", "v2"]).default("v1"), - REALTIME_STREAM_MAX_LENGTH: z.coerce.number().int().default(1000), - REALTIME_STREAM_TTL: z.coerce - .number() - .int() - .default(60 * 60 * 24), // 1 day in seconds - BATCH_METADATA_OPERATIONS_FLUSH_INTERVAL_MS: z.coerce.number().int().default(1000), - BATCH_METADATA_OPERATIONS_FLUSH_ENABLED: z.string().default("1"), - BATCH_METADATA_OPERATIONS_FLUSH_LOGGING_ENABLED: z.string().default("1"), - - // Run Engine 2.0 - RUN_ENGINE_WORKER_COUNT: z.coerce.number().int().default(4), - RUN_ENGINE_TASKS_PER_WORKER: z.coerce.number().int().default(10), - RUN_ENGINE_WORKER_CONCURRENCY_LIMIT: z.coerce.number().int().default(10), - RUN_ENGINE_WORKER_POLL_INTERVAL: z.coerce.number().int().default(100), - RUN_ENGINE_WORKER_IMMEDIATE_POLL_INTERVAL: z.coerce.number().int().default(100), - RUN_ENGINE_TIMEOUT_PENDING_EXECUTING: 
z.coerce.number().int().default(60_000), - RUN_ENGINE_TIMEOUT_PENDING_CANCEL: z.coerce.number().int().default(60_000), - RUN_ENGINE_TIMEOUT_EXECUTING: z.coerce.number().int().default(60_000), - RUN_ENGINE_TIMEOUT_EXECUTING_WITH_WAITPOINTS: z.coerce.number().int().default(60_000), - RUN_ENGINE_TIMEOUT_SUSPENDED: z.coerce - .number() - .int() - .default(60_000 * 10), - RUN_ENGINE_DEBUG_WORKER_NOTIFICATIONS: BoolEnv.default(false), - RUN_ENGINE_PARENT_QUEUE_LIMIT: z.coerce.number().int().default(1000), - RUN_ENGINE_CONCURRENCY_LIMIT_BIAS: z.coerce.number().default(0.75), - RUN_ENGINE_AVAILABLE_CAPACITY_BIAS: z.coerce.number().default(0.3), - RUN_ENGINE_QUEUE_AGE_RANDOMIZATION_BIAS: z.coerce.number().default(0.25), - RUN_ENGINE_REUSE_SNAPSHOT_COUNT: z.coerce.number().int().default(0), - RUN_ENGINE_MAXIMUM_ENV_COUNT: z.coerce.number().int().optional(), - RUN_ENGINE_RUN_QUEUE_SHARD_COUNT: z.coerce.number().int().default(4), - RUN_ENGINE_WORKER_SHUTDOWN_TIMEOUT_MS: z.coerce.number().int().default(60_000), - RUN_ENGINE_RETRY_WARM_START_THRESHOLD_MS: z.coerce.number().int().default(30_000), - RUN_ENGINE_PROCESS_WORKER_QUEUE_DEBOUNCE_MS: z.coerce.number().int().default(200), - RUN_ENGINE_DEQUEUE_BLOCKING_TIMEOUT_SECONDS: z.coerce.number().int().default(10), - RUN_ENGINE_MASTER_QUEUE_CONSUMERS_INTERVAL_MS: z.coerce.number().int().default(500), - - RUN_ENGINE_WORKER_REDIS_HOST: z - .string() - .optional() - .transform((v) => v ?? process.env.REDIS_HOST), - RUN_ENGINE_WORKER_REDIS_READER_HOST: z - .string() - .optional() - .transform((v) => v ?? process.env.REDIS_READER_HOST), - RUN_ENGINE_WORKER_REDIS_READER_PORT: z.coerce - .number() - .optional() - .transform( - (v) => - v ?? (process.env.REDIS_READER_PORT ? parseInt(process.env.REDIS_READER_PORT) : undefined) - ), - RUN_ENGINE_WORKER_REDIS_PORT: z.coerce - .number() - .optional() - .transform((v) => v ?? (process.env.REDIS_PORT ? parseInt(process.env.REDIS_PORT) : undefined)), - RUN_ENGINE_WORKER_REDIS_USERNAME: z - .string() - .optional() - .transform((v) => v ?? process.env.REDIS_USERNAME), - RUN_ENGINE_WORKER_REDIS_PASSWORD: z - .string() - .optional() - .transform((v) => v ?? process.env.REDIS_PASSWORD), - RUN_ENGINE_WORKER_REDIS_TLS_DISABLED: z - .string() - .default(process.env.REDIS_TLS_DISABLED ?? "false"), - - RUN_ENGINE_RUN_QUEUE_REDIS_HOST: z - .string() - .optional() - .transform((v) => v ?? process.env.REDIS_HOST), - RUN_ENGINE_RUN_QUEUE_REDIS_READER_HOST: z - .string() - .optional() - .transform((v) => v ?? process.env.REDIS_READER_HOST), - RUN_ENGINE_RUN_QUEUE_REDIS_READER_PORT: z.coerce - .number() - .optional() - .transform( - (v) => - v ?? (process.env.REDIS_READER_PORT ? parseInt(process.env.REDIS_READER_PORT) : undefined) - ), - RUN_ENGINE_RUN_QUEUE_REDIS_PORT: z.coerce - .number() - .optional() - .transform((v) => v ?? (process.env.REDIS_PORT ? parseInt(process.env.REDIS_PORT) : undefined)), - RUN_ENGINE_RUN_QUEUE_REDIS_USERNAME: z - .string() - .optional() - .transform((v) => v ?? process.env.REDIS_USERNAME), - RUN_ENGINE_RUN_QUEUE_REDIS_PASSWORD: z - .string() - .optional() - .transform((v) => v ?? process.env.REDIS_PASSWORD), - RUN_ENGINE_RUN_QUEUE_REDIS_TLS_DISABLED: z - .string() - .default(process.env.REDIS_TLS_DISABLED ?? "false"), - - RUN_ENGINE_RUN_LOCK_REDIS_HOST: z - .string() - .optional() - .transform((v) => v ?? process.env.REDIS_HOST), - RUN_ENGINE_RUN_LOCK_REDIS_READER_HOST: z - .string() - .optional() - .transform((v) => v ?? 
process.env.REDIS_READER_HOST), - RUN_ENGINE_RUN_LOCK_REDIS_READER_PORT: z.coerce - .number() - .optional() - .transform( - (v) => - v ?? (process.env.REDIS_READER_PORT ? parseInt(process.env.REDIS_READER_PORT) : undefined) - ), - RUN_ENGINE_RUN_LOCK_REDIS_PORT: z.coerce - .number() - .optional() - .transform((v) => v ?? (process.env.REDIS_PORT ? parseInt(process.env.REDIS_PORT) : undefined)), - RUN_ENGINE_RUN_LOCK_REDIS_USERNAME: z - .string() - .optional() - .transform((v) => v ?? process.env.REDIS_USERNAME), - RUN_ENGINE_RUN_LOCK_REDIS_PASSWORD: z - .string() - .optional() - .transform((v) => v ?? process.env.REDIS_PASSWORD), - RUN_ENGINE_RUN_LOCK_REDIS_TLS_DISABLED: z - .string() - .default(process.env.REDIS_TLS_DISABLED ?? "false"), - - RUN_ENGINE_DEV_PRESENCE_REDIS_HOST: z - .string() - .optional() - .transform((v) => v ?? process.env.REDIS_HOST), - RUN_ENGINE_DEV_PRESENCE_REDIS_READER_HOST: z - .string() - .optional() - .transform((v) => v ?? process.env.REDIS_READER_HOST), - RUN_ENGINE_DEV_PRESENCE_REDIS_READER_PORT: z.coerce - .number() - .optional() - .transform( - (v) => - v ?? (process.env.REDIS_READER_PORT ? parseInt(process.env.REDIS_READER_PORT) : undefined) - ), - RUN_ENGINE_DEV_PRESENCE_REDIS_PORT: z.coerce - .number() - .optional() - .transform((v) => v ?? (process.env.REDIS_PORT ? parseInt(process.env.REDIS_PORT) : undefined)), - RUN_ENGINE_DEV_PRESENCE_REDIS_USERNAME: z - .string() - .optional() - .transform((v) => v ?? process.env.REDIS_USERNAME), - RUN_ENGINE_DEV_PRESENCE_REDIS_PASSWORD: z - .string() - .optional() - .transform((v) => v ?? process.env.REDIS_PASSWORD), - RUN_ENGINE_DEV_PRESENCE_REDIS_TLS_DISABLED: z - .string() - .default(process.env.REDIS_TLS_DISABLED ?? "false"), - - //API Rate limiting - /** - * @example "60s" - * @example "1m" - * @example "1h" - * @example "1d" - * @example "1000ms" - * @example "1000s" - */ - RUN_ENGINE_RATE_LIMIT_REFILL_INTERVAL: z.string().default("10s"), // refill 250 tokens every 10 seconds - RUN_ENGINE_RATE_LIMIT_MAX: z.coerce.number().int().default(1200), // allow bursts of 750 requests - RUN_ENGINE_RATE_LIMIT_REFILL_RATE: z.coerce.number().int().default(400), // refix 250 tokens every 10 seconds - RUN_ENGINE_RATE_LIMIT_REQUEST_LOGS_ENABLED: z.string().default("0"), - RUN_ENGINE_RATE_LIMIT_REJECTION_LOGS_ENABLED: z.string().default("1"), - RUN_ENGINE_RATE_LIMIT_LIMITER_LOGS_ENABLED: z.string().default("0"), - - RUN_ENGINE_RELEASE_CONCURRENCY_ENABLED: z.string().default("0"), - RUN_ENGINE_RELEASE_CONCURRENCY_DISABLE_CONSUMERS: z.string().default("0"), - RUN_ENGINE_RELEASE_CONCURRENCY_MAX_TOKENS_RATIO: z.coerce.number().default(1), - RUN_ENGINE_RELEASE_CONCURRENCY_RELEASINGS_MAX_AGE: z.coerce - .number() - .int() - .default(60_000 * 30), - RUN_ENGINE_RELEASE_CONCURRENCY_RELEASINGS_POLL_INTERVAL: z.coerce.number().int().default(60_000), - RUN_ENGINE_RELEASE_CONCURRENCY_MAX_RETRIES: z.coerce.number().int().default(3), - RUN_ENGINE_RELEASE_CONCURRENCY_CONSUMERS_COUNT: z.coerce.number().int().default(1), - RUN_ENGINE_RELEASE_CONCURRENCY_POLL_INTERVAL: z.coerce.number().int().default(500), - RUN_ENGINE_RELEASE_CONCURRENCY_BATCH_SIZE: z.coerce.number().int().default(10), - - RUN_ENGINE_WORKER_ENABLED: z.string().default("1"), - RUN_ENGINE_WORKER_LOG_LEVEL: z.enum(["log", "error", "warn", "info", "debug"]).default("info"), - - /** How long should the presence ttl last */ - DEV_PRESENCE_SSE_TIMEOUT: z.coerce.number().int().default(30_000), - DEV_PRESENCE_TTL_MS: z.coerce.number().int().default(5_000), - DEV_PRESENCE_POLL_MS: 
z.coerce.number().int().default(1_000), - /** How many ms to wait until dequeuing again, if there was a run last time */ - DEV_DEQUEUE_INTERVAL_WITH_RUN: z.coerce.number().int().default(250), - /** How many ms to wait until dequeuing again, if there was no run last time */ - DEV_DEQUEUE_INTERVAL_WITHOUT_RUN: z.coerce.number().int().default(1_000), - /** The max number of runs per API call that we'll dequeue in DEV */ - DEV_DEQUEUE_MAX_RUNS_PER_PULL: z.coerce.number().int().default(10), - - /** The maximum concurrent local run processes executing at once in dev */ - DEV_MAX_CONCURRENT_RUNS: z.coerce.number().int().default(25), - - /** The CLI should connect to this for dev runs */ - DEV_ENGINE_URL: z.string().default(process.env.APP_ORIGIN ?? "http://localhost:3030"), - - LEGACY_RUN_ENGINE_WORKER_ENABLED: z.string().default(process.env.WORKER_ENABLED ?? "true"), - LEGACY_RUN_ENGINE_WORKER_CONCURRENCY_WORKERS: z.coerce.number().int().default(2), - LEGACY_RUN_ENGINE_WORKER_CONCURRENCY_TASKS_PER_WORKER: z.coerce.number().int().default(1), - LEGACY_RUN_ENGINE_WORKER_POLL_INTERVAL: z.coerce.number().int().default(1000), - LEGACY_RUN_ENGINE_WORKER_IMMEDIATE_POLL_INTERVAL: z.coerce.number().int().default(50), - LEGACY_RUN_ENGINE_WORKER_CONCURRENCY_LIMIT: z.coerce.number().int().default(100), - LEGACY_RUN_ENGINE_WORKER_SHUTDOWN_TIMEOUT_MS: z.coerce.number().int().default(60_000), - - LEGACY_RUN_ENGINE_WORKER_REDIS_HOST: z - .string() - .optional() - .transform((v) => v ?? process.env.REDIS_HOST), - LEGACY_RUN_ENGINE_WORKER_REDIS_READER_HOST: z - .string() - .optional() - .transform((v) => v ?? process.env.REDIS_READER_HOST), - LEGACY_RUN_ENGINE_WORKER_REDIS_READER_PORT: z.coerce - .number() - .optional() - .transform( - (v) => - v ?? (process.env.REDIS_READER_PORT ? parseInt(process.env.REDIS_READER_PORT) : undefined) - ), - LEGACY_RUN_ENGINE_WORKER_REDIS_PORT: z.coerce - .number() - .optional() - .transform((v) => v ?? (process.env.REDIS_PORT ? parseInt(process.env.REDIS_PORT) : undefined)), - LEGACY_RUN_ENGINE_WORKER_REDIS_USERNAME: z - .string() - .optional() - .transform((v) => v ?? process.env.REDIS_USERNAME), - LEGACY_RUN_ENGINE_WORKER_REDIS_PASSWORD: z - .string() - .optional() - .transform((v) => v ?? process.env.REDIS_PASSWORD), - LEGACY_RUN_ENGINE_WORKER_REDIS_TLS_DISABLED: z - .string() - .default(process.env.REDIS_TLS_DISABLED ?? "false"), - LEGACY_RUN_ENGINE_WORKER_REDIS_CLUSTER_MODE_ENABLED: z.string().default("0"), - - LEGACY_RUN_ENGINE_WAITING_FOR_DEPLOY_BATCH_SIZE: z.coerce.number().int().default(100), - LEGACY_RUN_ENGINE_WAITING_FOR_DEPLOY_BATCH_STAGGER_MS: z.coerce.number().int().default(1_000), - - COMMON_WORKER_ENABLED: z.string().default(process.env.WORKER_ENABLED ?? "true"), - COMMON_WORKER_CONCURRENCY_WORKERS: z.coerce.number().int().default(2), - COMMON_WORKER_CONCURRENCY_TASKS_PER_WORKER: z.coerce.number().int().default(10), - COMMON_WORKER_POLL_INTERVAL: z.coerce.number().int().default(1000), - COMMON_WORKER_IMMEDIATE_POLL_INTERVAL: z.coerce.number().int().default(50), - COMMON_WORKER_CONCURRENCY_LIMIT: z.coerce.number().int().default(100), - COMMON_WORKER_SHUTDOWN_TIMEOUT_MS: z.coerce.number().int().default(60_000), - - COMMON_WORKER_REDIS_HOST: z - .string() - .optional() - .transform((v) => v ?? process.env.REDIS_HOST), - COMMON_WORKER_REDIS_READER_HOST: z - .string() - .optional() - .transform((v) => v ?? process.env.REDIS_READER_HOST), - COMMON_WORKER_REDIS_READER_PORT: z.coerce - .number() - .optional() - .transform( - (v) => - v ?? (process.env.REDIS_READER_PORT ? 
parseInt(process.env.REDIS_READER_PORT) : undefined) - ), - COMMON_WORKER_REDIS_PORT: z.coerce - .number() - .optional() - .transform((v) => v ?? (process.env.REDIS_PORT ? parseInt(process.env.REDIS_PORT) : undefined)), - COMMON_WORKER_REDIS_USERNAME: z - .string() - .optional() - .transform((v) => v ?? process.env.REDIS_USERNAME), - COMMON_WORKER_REDIS_PASSWORD: z - .string() - .optional() - .transform((v) => v ?? process.env.REDIS_PASSWORD), - COMMON_WORKER_REDIS_TLS_DISABLED: z.string().default(process.env.REDIS_TLS_DISABLED ?? "false"), - COMMON_WORKER_REDIS_CLUSTER_MODE_ENABLED: z.string().default("0"), - - TASK_EVENT_PARTITIONING_ENABLED: z.string().default("0"), - TASK_EVENT_PARTITIONED_WINDOW_IN_SECONDS: z.coerce.number().int().default(60), // 1 minute - - QUEUE_SSE_AUTORELOAD_INTERVAL_MS: z.coerce.number().int().default(5_000), - QUEUE_SSE_AUTORELOAD_TIMEOUT_MS: z.coerce.number().int().default(60_000), - - SLACK_BOT_TOKEN: z.string().optional(), - SLACK_SIGNUP_REASON_CHANNEL_ID: z.string().optional(), - - // kapa.ai - KAPA_AI_WEBSITE_ID: z.string().optional(), - - // BetterStack - BETTERSTACK_API_KEY: z.string().optional(), - BETTERSTACK_STATUS_PAGE_ID: z.string().optional(), - - RUN_REPLICATION_REDIS_HOST: z - .string() - .optional() - .transform((v) => v ?? process.env.REDIS_HOST), - RUN_REPLICATION_REDIS_READER_HOST: z - .string() - .optional() - .transform((v) => v ?? process.env.REDIS_READER_HOST), - RUN_REPLICATION_REDIS_READER_PORT: z.coerce - .number() - .optional() - .transform( - (v) => - v ?? (process.env.REDIS_READER_PORT ? parseInt(process.env.REDIS_READER_PORT) : undefined) - ), - RUN_REPLICATION_REDIS_PORT: z.coerce - .number() - .optional() - .transform((v) => v ?? (process.env.REDIS_PORT ? parseInt(process.env.REDIS_PORT) : undefined)), - RUN_REPLICATION_REDIS_USERNAME: z - .string() - .optional() - .transform((v) => v ?? process.env.REDIS_USERNAME), - RUN_REPLICATION_REDIS_PASSWORD: z - .string() - .optional() - .transform((v) => v ?? process.env.REDIS_PASSWORD), - RUN_REPLICATION_REDIS_TLS_DISABLED: z.string().default(process.env.REDIS_TLS_DISABLED ?? 
"false"), - - RUN_REPLICATION_CLICKHOUSE_URL: z.string().optional(), - RUN_REPLICATION_ENABLED: z.string().default("0"), - RUN_REPLICATION_SLOT_NAME: z.string().default("task_runs_to_clickhouse_v1"), - RUN_REPLICATION_PUBLICATION_NAME: z.string().default("task_runs_to_clickhouse_v1_publication"), - RUN_REPLICATION_MAX_FLUSH_CONCURRENCY: z.coerce.number().int().default(100), - RUN_REPLICATION_FLUSH_INTERVAL_MS: z.coerce.number().int().default(1000), - RUN_REPLICATION_FLUSH_BATCH_SIZE: z.coerce.number().int().default(100), - RUN_REPLICATION_LEADER_LOCK_TIMEOUT_MS: z.coerce.number().int().default(30_000), - RUN_REPLICATION_LEADER_LOCK_EXTEND_INTERVAL_MS: z.coerce.number().int().default(10_000), - RUN_REPLICATION_ACK_INTERVAL_SECONDS: z.coerce.number().int().default(10), - RUN_REPLICATION_LOG_LEVEL: z.enum(["log", "error", "warn", "info", "debug"]).default("info"), - RUN_REPLICATION_LEADER_LOCK_ADDITIONAL_TIME_MS: z.coerce.number().int().default(10_000), - RUN_REPLICATION_LEADER_LOCK_RETRY_INTERVAL_MS: z.coerce.number().int().default(500), - RUN_REPLICATION_WAIT_FOR_ASYNC_INSERT: z.string().default("0"), - RUN_REPLICATION_KEEP_ALIVE_ENABLED: z.string().default("1"), - RUN_REPLICATION_KEEP_ALIVE_IDLE_SOCKET_TTL_MS: z.coerce.number().int().optional(), - RUN_REPLICATION_MAX_OPEN_CONNECTIONS: z.coerce.number().int().default(10), - - // Clickhouse - CLICKHOUSE_URL: z.string().optional(), - CLICKHOUSE_KEEP_ALIVE_ENABLED: z.string().default("1"), - CLICKHOUSE_KEEP_ALIVE_IDLE_SOCKET_TTL_MS: z.coerce.number().int().optional(), - CLICKHOUSE_MAX_OPEN_CONNECTIONS: z.coerce.number().int().default(10), - CLICKHOUSE_LOG_LEVEL: z.enum(["log", "error", "warn", "info", "debug"]).default("info"), - CLICKHOUSE_COMPRESSION_REQUEST: z.string().default("1"), - - // Bootstrap - TRIGGER_BOOTSTRAP_ENABLED: z.string().default("0"), - TRIGGER_BOOTSTRAP_WORKER_GROUP_NAME: z.string().optional(), - TRIGGER_BOOTSTRAP_WORKER_TOKEN_PATH: z.string().optional(), - - // Machine presets - MACHINE_PRESETS_OVERRIDE_PATH: z.string().optional(), -}); +const GithubAppEnvSchema = z.preprocess( + (val) => { + const obj = val as any; + if (!obj || !obj.GITHUB_APP_ENABLED) { + return { ...obj, GITHUB_APP_ENABLED: "0" }; + } + return obj; + }, + z.discriminatedUnion("GITHUB_APP_ENABLED", [ + z.object({ + GITHUB_APP_ENABLED: z.literal("1"), + GITHUB_APP_ID: z.string(), + GITHUB_APP_PRIVATE_KEY: z.string(), + GITHUB_APP_WEBHOOK_SECRET: z.string(), + GITHUB_APP_SLUG: z.string(), + }), + z.object({ + GITHUB_APP_ENABLED: z.literal("0"), + }), + ]) +); + +// eventually we can make all S2 env vars required once the S2 OSS version is out +const S2EnvSchema = z.preprocess( + (val) => { + const obj = val as any; + if (!obj || !obj.S2_ENABLED) { + return { ...obj, S2_ENABLED: "0" }; + } + return obj; + }, + z.discriminatedUnion("S2_ENABLED", [ + z.object({ + S2_ENABLED: z.literal("1"), + S2_ACCESS_TOKEN: z.string(), + S2_DEPLOYMENT_LOGS_BASIN_NAME: z.string(), + }), + z.object({ + S2_ENABLED: z.literal("0"), + }), + ]) +); + +const EnvironmentSchema = z + .object({ + NODE_ENV: z.union([z.literal("development"), z.literal("production"), z.literal("test")]), + DATABASE_URL: z + .string() + .refine( + isValidDatabaseUrl, + "DATABASE_URL is invalid, for details please check the additional output above this message." 
+ ), + DATABASE_CONNECTION_LIMIT: z.coerce.number().int().default(10), + DATABASE_POOL_TIMEOUT: z.coerce.number().int().default(60), + DATABASE_CONNECTION_TIMEOUT: z.coerce.number().int().default(20), + DIRECT_URL: z + .string() + .refine( + isValidDatabaseUrl, + "DIRECT_URL is invalid, for details please check the additional output above this message." + ), + DATABASE_READ_REPLICA_URL: z.string().optional(), + SESSION_SECRET: z.string(), + MAGIC_LINK_SECRET: z.string(), + ENCRYPTION_KEY: z + .string() + .refine( + (val) => Buffer.from(val, "utf8").length === 32, + "ENCRYPTION_KEY must be exactly 32 bytes" + ), + WHITELISTED_EMAILS: z + .string() + .refine(isValidRegex, "WHITELISTED_EMAILS must be a valid regex.") + .optional(), + ADMIN_EMAILS: z.string().refine(isValidRegex, "ADMIN_EMAILS must be a valid regex.").optional(), + REMIX_APP_PORT: z.string().optional(), + LOGIN_ORIGIN: z.string().default("http://localhost:3030"), + LOGIN_RATE_LIMITS_ENABLED: BoolEnv.default(true), + APP_ORIGIN: z.string().default("http://localhost:3030"), + API_ORIGIN: z.string().optional(), + STREAM_ORIGIN: z.string().optional(), + ELECTRIC_ORIGIN: z.string().default("http://localhost:3060"), + // A comma separated list of electric origins to shard into different electric instances by environmentId + // example: "http://localhost:3060,http://localhost:3061,http://localhost:3062" + ELECTRIC_ORIGIN_SHARDS: z.string().optional(), + APP_ENV: z.string().default(process.env.NODE_ENV), + SERVICE_NAME: z.string().default("trigger.dev webapp"), + POSTHOG_PROJECT_KEY: z.string().default("phc_LFH7kJiGhdIlnO22hTAKgHpaKhpM8gkzWAFvHmf5vfS"), + TRIGGER_TELEMETRY_DISABLED: z.string().optional(), + AUTH_GITHUB_CLIENT_ID: z.string().optional(), + AUTH_GITHUB_CLIENT_SECRET: z.string().optional(), + AUTH_GOOGLE_CLIENT_ID: z.string().optional(), + AUTH_GOOGLE_CLIENT_SECRET: z.string().optional(), + EMAIL_TRANSPORT: z.enum(["resend", "smtp", "aws-ses"]).optional(), + FROM_EMAIL: z.string().optional(), + REPLY_TO_EMAIL: z.string().optional(), + RESEND_API_KEY: z.string().optional(), + SMTP_HOST: z.string().optional(), + SMTP_PORT: z.coerce.number().optional(), + SMTP_SECURE: BoolEnv.optional(), + SMTP_USER: z.string().optional(), + SMTP_PASSWORD: z.string().optional(), + + PLAIN_API_KEY: z.string().optional(), + WORKER_SCHEMA: z.string().default("graphile_worker"), + WORKER_CONCURRENCY: z.coerce.number().int().default(10), + WORKER_POLL_INTERVAL: z.coerce.number().int().default(1000), + WORKER_ENABLED: z.string().default("true"), + GRACEFUL_SHUTDOWN_TIMEOUT: z.coerce.number().int().default(60000), + DISABLE_SSE: z.string().optional(), + OPENAI_API_KEY: z.string().optional(), + + // Redis options + REDIS_HOST: z.string().optional(), + REDIS_READER_HOST: z.string().optional(), + REDIS_READER_PORT: z.coerce.number().optional(), + REDIS_PORT: z.coerce.number().optional(), + REDIS_USERNAME: z.string().optional(), + REDIS_PASSWORD: z.string().optional(), + REDIS_TLS_DISABLED: z.string().optional(), + + RATE_LIMIT_REDIS_HOST: z + .string() + .optional() + .transform((v) => v ?? process.env.REDIS_HOST), + RATE_LIMIT_REDIS_READER_HOST: z + .string() + .optional() + .transform((v) => v ?? process.env.REDIS_READER_HOST), + RATE_LIMIT_REDIS_READER_PORT: z.coerce + .number() + .optional() + .transform( + (v) => + v ?? (process.env.REDIS_READER_PORT ? parseInt(process.env.REDIS_READER_PORT) : undefined) + ), + RATE_LIMIT_REDIS_PORT: z.coerce + .number() + .optional() + .transform( + (v) => v ?? (process.env.REDIS_PORT ? 
parseInt(process.env.REDIS_PORT) : undefined) + ), + RATE_LIMIT_REDIS_USERNAME: z + .string() + .optional() + .transform((v) => v ?? process.env.REDIS_USERNAME), + RATE_LIMIT_REDIS_PASSWORD: z + .string() + .optional() + .transform((v) => v ?? process.env.REDIS_PASSWORD), + RATE_LIMIT_REDIS_TLS_DISABLED: z.string().default(process.env.REDIS_TLS_DISABLED ?? "false"), + RATE_LIMIT_REDIS_CLUSTER_MODE_ENABLED: z.string().default("0"), + + CACHE_REDIS_HOST: z + .string() + .optional() + .transform((v) => v ?? process.env.REDIS_HOST), + CACHE_REDIS_READER_HOST: z + .string() + .optional() + .transform((v) => v ?? process.env.REDIS_READER_HOST), + CACHE_REDIS_READER_PORT: z.coerce + .number() + .optional() + .transform( + (v) => + v ?? (process.env.REDIS_READER_PORT ? parseInt(process.env.REDIS_READER_PORT) : undefined) + ), + CACHE_REDIS_PORT: z.coerce + .number() + .optional() + .transform( + (v) => v ?? (process.env.REDIS_PORT ? parseInt(process.env.REDIS_PORT) : undefined) + ), + CACHE_REDIS_USERNAME: z + .string() + .optional() + .transform((v) => v ?? process.env.REDIS_USERNAME), + CACHE_REDIS_PASSWORD: z + .string() + .optional() + .transform((v) => v ?? process.env.REDIS_PASSWORD), + CACHE_REDIS_TLS_DISABLED: z.string().default(process.env.REDIS_TLS_DISABLED ?? "false"), + CACHE_REDIS_CLUSTER_MODE_ENABLED: z.string().default("0"), + + REALTIME_STREAMS_REDIS_HOST: z + .string() + .optional() + .transform((v) => v ?? process.env.REDIS_HOST), + REALTIME_STREAMS_REDIS_READER_HOST: z + .string() + .optional() + .transform((v) => v ?? process.env.REDIS_READER_HOST), + REALTIME_STREAMS_REDIS_READER_PORT: z.coerce + .number() + .optional() + .transform( + (v) => + v ?? (process.env.REDIS_READER_PORT ? parseInt(process.env.REDIS_READER_PORT) : undefined) + ), + REALTIME_STREAMS_REDIS_PORT: z.coerce + .number() + .optional() + .transform( + (v) => v ?? (process.env.REDIS_PORT ? parseInt(process.env.REDIS_PORT) : undefined) + ), + REALTIME_STREAMS_REDIS_USERNAME: z + .string() + .optional() + .transform((v) => v ?? process.env.REDIS_USERNAME), + REALTIME_STREAMS_REDIS_PASSWORD: z + .string() + .optional() + .transform((v) => v ?? process.env.REDIS_PASSWORD), + REALTIME_STREAMS_REDIS_TLS_DISABLED: z + .string() + .default(process.env.REDIS_TLS_DISABLED ?? "false"), + REALTIME_STREAMS_REDIS_CLUSTER_MODE_ENABLED: z.string().default("0"), + REALTIME_STREAMS_INACTIVITY_TIMEOUT_MS: z.coerce.number().int().default(60000), // 1 minute + + REALTIME_MAXIMUM_CREATED_AT_FILTER_AGE_IN_MS: z.coerce + .number() + .int() + .default(24 * 60 * 60 * 1000), // 1 day in milliseconds + + PUBSUB_REDIS_HOST: z + .string() + .optional() + .transform((v) => v ?? process.env.REDIS_HOST), + PUBSUB_REDIS_READER_HOST: z + .string() + .optional() + .transform((v) => v ?? process.env.REDIS_READER_HOST), + PUBSUB_REDIS_READER_PORT: z.coerce + .number() + .optional() + .transform( + (v) => + v ?? (process.env.REDIS_READER_PORT ? parseInt(process.env.REDIS_READER_PORT) : undefined) + ), + PUBSUB_REDIS_PORT: z.coerce + .number() + .optional() + .transform( + (v) => v ?? (process.env.REDIS_PORT ? parseInt(process.env.REDIS_PORT) : undefined) + ), + PUBSUB_REDIS_USERNAME: z + .string() + .optional() + .transform((v) => v ?? process.env.REDIS_USERNAME), + PUBSUB_REDIS_PASSWORD: z + .string() + .optional() + .transform((v) => v ?? process.env.REDIS_PASSWORD), + PUBSUB_REDIS_TLS_DISABLED: z.string().default(process.env.REDIS_TLS_DISABLED ?? 
"false"), + PUBSUB_REDIS_CLUSTER_MODE_ENABLED: z.string().default("0"), + + DEFAULT_ENV_EXECUTION_CONCURRENCY_LIMIT: z.coerce.number().int().default(100), + DEFAULT_ENV_EXECUTION_CONCURRENCY_BURST_FACTOR: z.coerce.number().default(1.0), + DEFAULT_ORG_EXECUTION_CONCURRENCY_LIMIT: z.coerce.number().int().default(300), + DEFAULT_DEV_ENV_EXECUTION_ATTEMPTS: z.coerce.number().int().positive().default(1), + + //API Rate limiting + /** + * @example "60s" + * @example "1m" + * @example "1h" + * @example "1d" + * @example "1000ms" + * @example "1000s" + */ + API_RATE_LIMIT_REFILL_INTERVAL: z.string().default("10s"), // refill 250 tokens every 10 seconds + API_RATE_LIMIT_MAX: z.coerce.number().int().default(750), // allow bursts of 750 requests + API_RATE_LIMIT_REFILL_RATE: z.coerce.number().int().default(250), // refix 250 tokens every 10 seconds + API_RATE_LIMIT_REQUEST_LOGS_ENABLED: z.string().default("0"), + API_RATE_LIMIT_REJECTION_LOGS_ENABLED: z.string().default("1"), + API_RATE_LIMIT_LIMITER_LOGS_ENABLED: z.string().default("0"), + + API_RATE_LIMIT_JWT_WINDOW: z.string().default("1m"), + API_RATE_LIMIT_JWT_TOKENS: z.coerce.number().int().default(60), + + //v3 + PROVIDER_SECRET: z.string().default("provider-secret"), + COORDINATOR_SECRET: z.string().default("coordinator-secret"), + DEPOT_TOKEN: z.string().optional(), + DEPOT_ORG_ID: z.string().optional(), + DEPOT_REGION: z.string().default("us-east-1"), + + // Deployment registry (v3) + DEPLOY_REGISTRY_HOST: z.string().min(1), + DEPLOY_REGISTRY_USERNAME: z.string().optional(), + DEPLOY_REGISTRY_PASSWORD: z.string().optional(), + DEPLOY_REGISTRY_NAMESPACE: z.string().min(1).default("trigger"), + DEPLOY_REGISTRY_ECR_TAGS: z.string().optional(), // csv, for example: "key1=value1,key2=value2" + DEPLOY_REGISTRY_ECR_ASSUME_ROLE_ARN: z.string().optional(), + DEPLOY_REGISTRY_ECR_ASSUME_ROLE_EXTERNAL_ID: z.string().optional(), + + // Deployment registry (v4) - falls back to v3 registry if not specified + V4_DEPLOY_REGISTRY_HOST: z + .string() + .optional() + .transform((v) => v ?? process.env.DEPLOY_REGISTRY_HOST) + .pipe(z.string().min(1)), // Ensure final type is required string + V4_DEPLOY_REGISTRY_USERNAME: z + .string() + .optional() + .transform((v) => v ?? process.env.DEPLOY_REGISTRY_USERNAME), + V4_DEPLOY_REGISTRY_PASSWORD: z + .string() + .optional() + .transform((v) => v ?? process.env.DEPLOY_REGISTRY_PASSWORD), + V4_DEPLOY_REGISTRY_NAMESPACE: z + .string() + .optional() + .transform((v) => v ?? process.env.DEPLOY_REGISTRY_NAMESPACE) + .pipe(z.string().min(1).default("trigger")), // Ensure final type is required string + V4_DEPLOY_REGISTRY_ECR_TAGS: z + .string() + .optional() + .transform((v) => v ?? process.env.DEPLOY_REGISTRY_ECR_TAGS), + V4_DEPLOY_REGISTRY_ECR_ASSUME_ROLE_ARN: z + .string() + .optional() + .transform((v) => v ?? process.env.DEPLOY_REGISTRY_ECR_ASSUME_ROLE_ARN), + V4_DEPLOY_REGISTRY_ECR_ASSUME_ROLE_EXTERNAL_ID: z + .string() + .optional() + .transform((v) => v ?? 
process.env.DEPLOY_REGISTRY_ECR_ASSUME_ROLE_EXTERNAL_ID), + + DEPLOY_IMAGE_PLATFORM: z.string().default("linux/amd64"), + DEPLOY_TIMEOUT_MS: z.coerce + .number() + .int() + .default(60 * 1000 * 8), // 8 minutes + DEPLOY_QUEUE_TIMEOUT_MS: z.coerce + .number() + .int() + .default(60 * 1000 * 15), // 15 minutes + + OBJECT_STORE_BASE_URL: z.string().optional(), + OBJECT_STORE_ACCESS_KEY_ID: z.string().optional(), + OBJECT_STORE_SECRET_ACCESS_KEY: z.string().optional(), + OBJECT_STORE_REGION: z.string().optional(), + OBJECT_STORE_SERVICE: z.string().default("s3"), + + ARTIFACTS_OBJECT_STORE_BUCKET: z.string().optional(), + ARTIFACTS_OBJECT_STORE_BASE_URL: z.string().optional(), + ARTIFACTS_OBJECT_STORE_ACCESS_KEY_ID: z.string().optional(), + ARTIFACTS_OBJECT_STORE_SECRET_ACCESS_KEY: z.string().optional(), + ARTIFACTS_OBJECT_STORE_REGION: z.string().optional(), + EVENTS_BATCH_SIZE: z.coerce.number().int().default(100), + EVENTS_BATCH_INTERVAL: z.coerce.number().int().default(1000), + EVENTS_DEFAULT_LOG_RETENTION: z.coerce.number().int().default(7), + EVENTS_MIN_CONCURRENCY: z.coerce.number().int().default(1), + EVENTS_MAX_CONCURRENCY: z.coerce.number().int().default(10), + EVENTS_MAX_BATCH_SIZE: z.coerce.number().int().default(500), + EVENTS_MEMORY_PRESSURE_THRESHOLD: z.coerce.number().int().default(5000), + EVENTS_LOAD_SHEDDING_THRESHOLD: z.coerce.number().int().default(100000), + EVENTS_LOAD_SHEDDING_ENABLED: z.string().default("1"), + SHARED_QUEUE_CONSUMER_POOL_SIZE: z.coerce.number().int().default(10), + SHARED_QUEUE_CONSUMER_INTERVAL_MS: z.coerce.number().int().default(100), + SHARED_QUEUE_CONSUMER_NEXT_TICK_INTERVAL_MS: z.coerce.number().int().default(100), + SHARED_QUEUE_CONSUMER_EMIT_RESUME_DEPENDENCY_TIMEOUT_MS: z.coerce.number().int().default(1000), + SHARED_QUEUE_CONSUMER_RESOLVE_PAYLOADS_BATCH_SIZE: z.coerce.number().int().default(25), + + MANAGED_WORKER_SECRET: z.string().default("managed-secret"), + + // Development OTEL environment variables + DEV_OTEL_EXPORTER_OTLP_ENDPOINT: z.string().optional(), + // If this is set to 1, then the below variables are used to configure the batch processor for spans and logs + DEV_OTEL_BATCH_PROCESSING_ENABLED: z.string().default("0"), + DEV_OTEL_SPAN_MAX_EXPORT_BATCH_SIZE: z.string().default("64"), + DEV_OTEL_SPAN_SCHEDULED_DELAY_MILLIS: z.string().default("200"), + DEV_OTEL_SPAN_EXPORT_TIMEOUT_MILLIS: z.string().default("30000"), + DEV_OTEL_SPAN_MAX_QUEUE_SIZE: z.string().default("512"), + DEV_OTEL_LOG_MAX_EXPORT_BATCH_SIZE: z.string().default("64"), + DEV_OTEL_LOG_SCHEDULED_DELAY_MILLIS: z.string().default("200"), + DEV_OTEL_LOG_EXPORT_TIMEOUT_MILLIS: z.string().default("30000"), + DEV_OTEL_LOG_MAX_QUEUE_SIZE: z.string().default("512"), + + PROD_OTEL_BATCH_PROCESSING_ENABLED: z.string().default("0"), + PROD_OTEL_SPAN_MAX_EXPORT_BATCH_SIZE: z.string().default("64"), + PROD_OTEL_SPAN_SCHEDULED_DELAY_MILLIS: z.string().default("200"), + PROD_OTEL_SPAN_EXPORT_TIMEOUT_MILLIS: z.string().default("30000"), + PROD_OTEL_SPAN_MAX_QUEUE_SIZE: z.string().default("512"), + PROD_OTEL_LOG_MAX_EXPORT_BATCH_SIZE: z.string().default("64"), + PROD_OTEL_LOG_SCHEDULED_DELAY_MILLIS: z.string().default("200"), + PROD_OTEL_LOG_EXPORT_TIMEOUT_MILLIS: z.string().default("30000"), + PROD_OTEL_LOG_MAX_QUEUE_SIZE: z.string().default("512"), + + TRIGGER_OTEL_SPAN_ATTRIBUTE_COUNT_LIMIT: z.string().default("1024"), + TRIGGER_OTEL_LOG_ATTRIBUTE_COUNT_LIMIT: z.string().default("1024"), + TRIGGER_OTEL_SPAN_ATTRIBUTE_VALUE_LENGTH_LIMIT: z.string().default("131072"), + 
TRIGGER_OTEL_LOG_ATTRIBUTE_VALUE_LENGTH_LIMIT: z.string().default("131072"), + TRIGGER_OTEL_SPAN_EVENT_COUNT_LIMIT: z.string().default("10"), + TRIGGER_OTEL_LINK_COUNT_LIMIT: z.string().default("2"), + TRIGGER_OTEL_ATTRIBUTE_PER_LINK_COUNT_LIMIT: z.string().default("10"), + TRIGGER_OTEL_ATTRIBUTE_PER_EVENT_COUNT_LIMIT: z.string().default("10"), + + CHECKPOINT_THRESHOLD_IN_MS: z.coerce.number().int().default(30000), + + // Internal OTEL environment variables + INTERNAL_OTEL_TRACE_EXPORTER_URL: z.string().optional(), + INTERNAL_OTEL_TRACE_EXPORTER_AUTH_HEADERS: z.string().optional(), + INTERNAL_OTEL_TRACE_LOGGING_ENABLED: z.string().default("1"), + // this means 1/20 traces or 5% of traces will be sampled (sampled = recorded) + INTERNAL_OTEL_TRACE_SAMPLING_RATE: z.string().default("20"), + INTERNAL_OTEL_TRACE_INSTRUMENT_PRISMA_ENABLED: z.string().default("0"), + INTERNAL_OTEL_TRACE_DISABLED: z.string().default("0"), + + INTERNAL_OTEL_LOG_EXPORTER_URL: z.string().optional(), + INTERNAL_OTEL_METRIC_EXPORTER_URL: z.string().optional(), + INTERNAL_OTEL_METRIC_EXPORTER_AUTH_HEADERS: z.string().optional(), + INTERNAL_OTEL_METRIC_EXPORTER_ENABLED: z.string().default("0"), + INTERNAL_OTEL_METRIC_EXPORTER_INTERVAL_MS: z.coerce.number().int().default(30_000), + INTERNAL_OTEL_HOST_METRICS_ENABLED: BoolEnv.default(true), + INTERNAL_OTEL_NODEJS_METRICS_ENABLED: BoolEnv.default(true), + INTERNAL_OTEL_ADDITIONAL_DETECTORS_ENABLED: BoolEnv.default(true), + + ORG_SLACK_INTEGRATION_CLIENT_ID: z.string().optional(), + ORG_SLACK_INTEGRATION_CLIENT_SECRET: z.string().optional(), + + /** These enable the alerts feature in v3 */ + ALERT_EMAIL_TRANSPORT: z.enum(["resend", "smtp", "aws-ses"]).optional(), + ALERT_FROM_EMAIL: z.string().optional(), + ALERT_REPLY_TO_EMAIL: z.string().optional(), + ALERT_RESEND_API_KEY: z.string().optional(), + ALERT_SMTP_HOST: z.string().optional(), + ALERT_SMTP_PORT: z.coerce.number().optional(), + ALERT_SMTP_SECURE: BoolEnv.optional(), + ALERT_SMTP_USER: z.string().optional(), + ALERT_SMTP_PASSWORD: z.string().optional(), + ALERT_RATE_LIMITER_EMISSION_INTERVAL: z.coerce.number().int().default(2_500), + ALERT_RATE_LIMITER_BURST_TOLERANCE: z.coerce.number().int().default(10_000), + ALERT_RATE_LIMITER_REDIS_HOST: z + .string() + .optional() + .transform((v) => v ?? process.env.REDIS_HOST), + ALERT_RATE_LIMITER_REDIS_READER_HOST: z + .string() + .optional() + .transform((v) => v ?? process.env.REDIS_READER_HOST), + ALERT_RATE_LIMITER_REDIS_READER_PORT: z.coerce + .number() + .optional() + .transform( + (v) => + v ?? (process.env.REDIS_READER_PORT ? parseInt(process.env.REDIS_READER_PORT) : undefined) + ), + ALERT_RATE_LIMITER_REDIS_PORT: z.coerce + .number() + .optional() + .transform( + (v) => v ?? (process.env.REDIS_PORT ? parseInt(process.env.REDIS_PORT) : undefined) + ), + ALERT_RATE_LIMITER_REDIS_USERNAME: z + .string() + .optional() + .transform((v) => v ?? process.env.REDIS_USERNAME), + ALERT_RATE_LIMITER_REDIS_PASSWORD: z + .string() + .optional() + .transform((v) => v ?? process.env.REDIS_PASSWORD), + ALERT_RATE_LIMITER_REDIS_TLS_DISABLED: z + .string() + .default(process.env.REDIS_TLS_DISABLED ?? 
"false"), + ALERT_RATE_LIMITER_REDIS_CLUSTER_MODE_ENABLED: z.string().default("0"), + + LOOPS_API_KEY: z.string().optional(), + MARQS_DISABLE_REBALANCING: BoolEnv.default(false), + MARQS_VISIBILITY_TIMEOUT_MS: z.coerce + .number() + .int() + .default(60 * 1000 * 15), + MARQS_SHARED_QUEUE_LIMIT: z.coerce.number().int().default(1000), + MARQS_MAXIMUM_QUEUE_PER_ENV_COUNT: z.coerce.number().int().default(50), + MARQS_DEV_QUEUE_LIMIT: z.coerce.number().int().default(1000), + MARQS_MAXIMUM_NACK_COUNT: z.coerce.number().int().default(64), + MARQS_CONCURRENCY_LIMIT_BIAS: z.coerce.number().default(0.75), + MARQS_AVAILABLE_CAPACITY_BIAS: z.coerce.number().default(0.3), + MARQS_QUEUE_AGE_RANDOMIZATION_BIAS: z.coerce.number().default(0.25), + MARQS_REUSE_SNAPSHOT_COUNT: z.coerce.number().int().default(0), + MARQS_MAXIMUM_ENV_COUNT: z.coerce.number().int().optional(), + MARQS_SHARED_WORKER_QUEUE_CONSUMER_INTERVAL_MS: z.coerce.number().int().default(250), + MARQS_SHARED_WORKER_QUEUE_MAX_MESSAGE_COUNT: z.coerce.number().int().default(10), + + MARQS_SHARED_WORKER_QUEUE_EAGER_DEQUEUE_ENABLED: z.string().default("0"), + MARQS_WORKER_ENABLED: z.string().default("0"), + MARQS_WORKER_COUNT: z.coerce.number().int().default(2), + MARQS_WORKER_CONCURRENCY_LIMIT: z.coerce.number().int().default(50), + MARQS_WORKER_CONCURRENCY_TASKS_PER_WORKER: z.coerce.number().int().default(5), + MARQS_WORKER_POLL_INTERVAL_MS: z.coerce.number().int().default(100), + MARQS_WORKER_IMMEDIATE_POLL_INTERVAL_MS: z.coerce.number().int().default(100), + MARQS_WORKER_SHUTDOWN_TIMEOUT_MS: z.coerce.number().int().default(60_000), + MARQS_SHARED_WORKER_QUEUE_COOLOFF_COUNT_THRESHOLD: z.coerce.number().int().default(10), + MARQS_SHARED_WORKER_QUEUE_COOLOFF_PERIOD_MS: z.coerce.number().int().default(5_000), + + PROD_TASK_HEARTBEAT_INTERVAL_MS: z.coerce.number().int().optional(), + + VERBOSE_GRAPHILE_LOGGING: z.string().default("false"), + V2_MARQS_ENABLED: z.string().default("0"), + V2_MARQS_CONSUMER_POOL_ENABLED: z.string().default("0"), + V2_MARQS_CONSUMER_POOL_SIZE: z.coerce.number().int().default(10), + V2_MARQS_CONSUMER_POLL_INTERVAL_MS: z.coerce.number().int().default(1000), + V2_MARQS_QUEUE_SELECTION_COUNT: z.coerce.number().int().default(36), + V2_MARQS_VISIBILITY_TIMEOUT_MS: z.coerce + .number() + .int() + .default(60 * 1000 * 15), + V2_MARQS_DEFAULT_ENV_CONCURRENCY: z.coerce.number().int().default(100), + V2_MARQS_VERBOSE: z.string().default("0"), + V3_MARQS_CONCURRENCY_MONITOR_ENABLED: z.string().default("0"), + V2_MARQS_CONCURRENCY_MONITOR_ENABLED: z.string().default("0"), + /* Usage settings */ + USAGE_EVENT_URL: z.string().optional(), + PROD_USAGE_HEARTBEAT_INTERVAL_MS: z.coerce.number().int().optional(), + + CENTS_PER_RUN: z.coerce.number().default(0), + + EVENT_LOOP_MONITOR_ENABLED: z.string().default("1"), + RESOURCE_MONITOR_ENABLED: z.string().default("0"), + MAXIMUM_LIVE_RELOADING_EVENTS: z.coerce.number().int().default(1000), + MAXIMUM_TRACE_SUMMARY_VIEW_COUNT: z.coerce.number().int().default(25_000), + MAXIMUM_TRACE_DETAILED_SUMMARY_VIEW_COUNT: z.coerce.number().int().default(10_000), + TASK_PAYLOAD_OFFLOAD_THRESHOLD: z.coerce.number().int().default(524_288), // 512KB + BATCH_PAYLOAD_OFFLOAD_THRESHOLD: z.coerce.number().int().optional(), // Defaults to TASK_PAYLOAD_OFFLOAD_THRESHOLD if not set + TASK_PAYLOAD_MAXIMUM_SIZE: z.coerce.number().int().default(3_145_728), // 3MB + BATCH_TASK_PAYLOAD_MAXIMUM_SIZE: z.coerce.number().int().default(1_000_000), // 1MB + TASK_RUN_METADATA_MAXIMUM_SIZE: 
z.coerce.number().int().default(262_144), // 256KB + + MAXIMUM_DEV_QUEUE_SIZE: z.coerce.number().int().optional(), + MAXIMUM_DEPLOYED_QUEUE_SIZE: z.coerce.number().int().optional(), + MAX_BATCH_V2_TRIGGER_ITEMS: z.coerce.number().int().default(500), + MAX_BATCH_AND_WAIT_V2_TRIGGER_ITEMS: z.coerce.number().int().default(500), + + // 2-phase batch API settings + STREAMING_BATCH_MAX_ITEMS: z.coerce.number().int().default(1_000), // Max items in streaming batch + STREAMING_BATCH_ITEM_MAXIMUM_SIZE: z.coerce.number().int().default(3_145_728), + BATCH_RATE_LIMIT_REFILL_RATE: z.coerce.number().int().default(100), + BATCH_RATE_LIMIT_MAX: z.coerce.number().int().default(1200), + BATCH_RATE_LIMIT_REFILL_INTERVAL: z.string().default("10s"), + BATCH_CONCURRENCY_LIMIT_DEFAULT: z.coerce.number().int().default(1), + + REALTIME_STREAM_VERSION: z.enum(["v1", "v2"]).default("v1"), + REALTIME_STREAM_MAX_LENGTH: z.coerce.number().int().default(1000), + REALTIME_STREAM_TTL: z.coerce + .number() + .int() + .default(60 * 60 * 24), // 1 day in seconds + BATCH_METADATA_OPERATIONS_FLUSH_INTERVAL_MS: z.coerce.number().int().default(1000), + BATCH_METADATA_OPERATIONS_FLUSH_ENABLED: z.string().default("1"), + BATCH_METADATA_OPERATIONS_FLUSH_LOGGING_ENABLED: z.string().default("1"), + + // Run Engine 2.0 + RUN_ENGINE_WORKER_COUNT: z.coerce.number().int().default(4), + RUN_ENGINE_TASKS_PER_WORKER: z.coerce.number().int().default(10), + RUN_ENGINE_WORKER_CONCURRENCY_LIMIT: z.coerce.number().int().default(10), + RUN_ENGINE_WORKER_POLL_INTERVAL: z.coerce.number().int().default(100), + RUN_ENGINE_WORKER_IMMEDIATE_POLL_INTERVAL: z.coerce.number().int().default(100), + RUN_ENGINE_TIMEOUT_PENDING_EXECUTING: z.coerce.number().int().default(60_000), + RUN_ENGINE_TIMEOUT_PENDING_CANCEL: z.coerce.number().int().default(60_000), + RUN_ENGINE_TIMEOUT_EXECUTING: z.coerce.number().int().default(300_000), // 5 minutes + RUN_ENGINE_TIMEOUT_EXECUTING_WITH_WAITPOINTS: z.coerce.number().int().default(300_000), // 5 minutes + RUN_ENGINE_TIMEOUT_SUSPENDED: z.coerce + .number() + .int() + .default(60_000 * 10), + RUN_ENGINE_DEBUG_WORKER_NOTIFICATIONS: BoolEnv.default(false), + RUN_ENGINE_PARENT_QUEUE_LIMIT: z.coerce.number().int().default(1000), + RUN_ENGINE_CONCURRENCY_LIMIT_BIAS: z.coerce.number().default(0.75), + RUN_ENGINE_AVAILABLE_CAPACITY_BIAS: z.coerce.number().default(0.3), + RUN_ENGINE_QUEUE_AGE_RANDOMIZATION_BIAS: z.coerce.number().default(0.25), + RUN_ENGINE_REUSE_SNAPSHOT_COUNT: z.coerce.number().int().default(0), + RUN_ENGINE_MAXIMUM_ENV_COUNT: z.coerce.number().int().optional(), + RUN_ENGINE_RUN_QUEUE_SHARD_COUNT: z.coerce.number().int().default(4), + RUN_ENGINE_WORKER_SHUTDOWN_TIMEOUT_MS: z.coerce.number().int().default(60_000), + RUN_ENGINE_RETRY_WARM_START_THRESHOLD_MS: z.coerce.number().int().default(30_000), + RUN_ENGINE_PROCESS_WORKER_QUEUE_DEBOUNCE_MS: z.coerce.number().int().default(200), + RUN_ENGINE_DEQUEUE_BLOCKING_TIMEOUT_SECONDS: z.coerce.number().int().default(10), + RUN_ENGINE_MASTER_QUEUE_CONSUMERS_INTERVAL_MS: z.coerce.number().int().default(1000), + RUN_ENGINE_MASTER_QUEUE_COOLOFF_PERIOD_MS: z.coerce.number().int().default(10_000), + RUN_ENGINE_MASTER_QUEUE_COOLOFF_COUNT_THRESHOLD: z.coerce.number().int().default(10), + RUN_ENGINE_MASTER_QUEUE_CONSUMER_DEQUEUE_COUNT: z.coerce.number().int().default(10), + RUN_ENGINE_CONCURRENCY_SWEEPER_SCAN_SCHEDULE: z.string().optional(), + RUN_ENGINE_CONCURRENCY_SWEEPER_PROCESS_MARKED_SCHEDULE: z.string().optional(), + RUN_ENGINE_CONCURRENCY_SWEEPER_SCAN_JITTER_IN_MS: 
z.coerce.number().int().optional(), + RUN_ENGINE_CONCURRENCY_SWEEPER_PROCESS_MARKED_JITTER_IN_MS: z.coerce.number().int().optional(), + + RUN_ENGINE_RUN_LOCK_DURATION: z.coerce.number().int().default(5000), + RUN_ENGINE_RUN_LOCK_AUTOMATIC_EXTENSION_THRESHOLD: z.coerce.number().int().default(1000), + RUN_ENGINE_RUN_LOCK_MAX_RETRIES: z.coerce.number().int().default(10), + RUN_ENGINE_RUN_LOCK_BASE_DELAY: z.coerce.number().int().default(100), + RUN_ENGINE_RUN_LOCK_MAX_DELAY: z.coerce.number().int().default(3000), + RUN_ENGINE_RUN_LOCK_BACKOFF_MULTIPLIER: z.coerce.number().default(1.8), + RUN_ENGINE_RUN_LOCK_JITTER_FACTOR: z.coerce.number().default(0.15), + RUN_ENGINE_RUN_LOCK_MAX_TOTAL_WAIT_TIME: z.coerce.number().int().default(15000), + + RUN_ENGINE_SUSPENDED_HEARTBEAT_RETRIES_MAX_COUNT: z.coerce.number().int().default(12), + RUN_ENGINE_SUSPENDED_HEARTBEAT_RETRIES_MAX_DELAY_MS: z.coerce + .number() + .int() + .default(60_000 * 60 * 6), + RUN_ENGINE_SUSPENDED_HEARTBEAT_RETRIES_INITIAL_DELAY_MS: z.coerce + .number() + .int() + .default(60_000), + RUN_ENGINE_SUSPENDED_HEARTBEAT_RETRIES_FACTOR: z.coerce.number().default(2), + + /** Maximum duration in milliseconds that a run can be debounced. Default: 1 hour (3,600,000ms) */ + RUN_ENGINE_MAXIMUM_DEBOUNCE_DURATION_MS: z.coerce + .number() + .int() + .default(60_000 * 60), // 1 hour + + RUN_ENGINE_WORKER_REDIS_HOST: z + .string() + .optional() + .transform((v) => v ?? process.env.REDIS_HOST), + RUN_ENGINE_WORKER_REDIS_READER_HOST: z + .string() + .optional() + .transform((v) => v ?? process.env.REDIS_READER_HOST), + RUN_ENGINE_WORKER_REDIS_READER_PORT: z.coerce + .number() + .optional() + .transform( + (v) => + v ?? (process.env.REDIS_READER_PORT ? parseInt(process.env.REDIS_READER_PORT) : undefined) + ), + RUN_ENGINE_WORKER_REDIS_PORT: z.coerce + .number() + .optional() + .transform( + (v) => v ?? (process.env.REDIS_PORT ? parseInt(process.env.REDIS_PORT) : undefined) + ), + RUN_ENGINE_WORKER_REDIS_USERNAME: z + .string() + .optional() + .transform((v) => v ?? process.env.REDIS_USERNAME), + RUN_ENGINE_WORKER_REDIS_PASSWORD: z + .string() + .optional() + .transform((v) => v ?? process.env.REDIS_PASSWORD), + RUN_ENGINE_WORKER_REDIS_TLS_DISABLED: z + .string() + .default(process.env.REDIS_TLS_DISABLED ?? "false"), + + RUN_ENGINE_RUN_QUEUE_REDIS_HOST: z + .string() + .optional() + .transform((v) => v ?? process.env.REDIS_HOST), + RUN_ENGINE_RUN_QUEUE_REDIS_READER_HOST: z + .string() + .optional() + .transform((v) => v ?? process.env.REDIS_READER_HOST), + RUN_ENGINE_RUN_QUEUE_REDIS_READER_PORT: z.coerce + .number() + .optional() + .transform( + (v) => + v ?? (process.env.REDIS_READER_PORT ? parseInt(process.env.REDIS_READER_PORT) : undefined) + ), + RUN_ENGINE_RUN_QUEUE_REDIS_PORT: z.coerce + .number() + .optional() + .transform( + (v) => v ?? (process.env.REDIS_PORT ? parseInt(process.env.REDIS_PORT) : undefined) + ), + RUN_ENGINE_RUN_QUEUE_REDIS_USERNAME: z + .string() + .optional() + .transform((v) => v ?? process.env.REDIS_USERNAME), + RUN_ENGINE_RUN_QUEUE_REDIS_PASSWORD: z + .string() + .optional() + .transform((v) => v ?? process.env.REDIS_PASSWORD), + RUN_ENGINE_RUN_QUEUE_REDIS_TLS_DISABLED: z + .string() + .default(process.env.REDIS_TLS_DISABLED ?? "false"), + + RUN_ENGINE_RUN_LOCK_REDIS_HOST: z + .string() + .optional() + .transform((v) => v ?? process.env.REDIS_HOST), + RUN_ENGINE_RUN_LOCK_REDIS_READER_HOST: z + .string() + .optional() + .transform((v) => v ?? 
process.env.REDIS_READER_HOST), + RUN_ENGINE_RUN_LOCK_REDIS_READER_PORT: z.coerce + .number() + .optional() + .transform( + (v) => + v ?? (process.env.REDIS_READER_PORT ? parseInt(process.env.REDIS_READER_PORT) : undefined) + ), + RUN_ENGINE_RUN_LOCK_REDIS_PORT: z.coerce + .number() + .optional() + .transform( + (v) => v ?? (process.env.REDIS_PORT ? parseInt(process.env.REDIS_PORT) : undefined) + ), + RUN_ENGINE_RUN_LOCK_REDIS_USERNAME: z + .string() + .optional() + .transform((v) => v ?? process.env.REDIS_USERNAME), + RUN_ENGINE_RUN_LOCK_REDIS_PASSWORD: z + .string() + .optional() + .transform((v) => v ?? process.env.REDIS_PASSWORD), + RUN_ENGINE_RUN_LOCK_REDIS_TLS_DISABLED: z + .string() + .default(process.env.REDIS_TLS_DISABLED ?? "false"), + + RUN_ENGINE_DEV_PRESENCE_REDIS_HOST: z + .string() + .optional() + .transform((v) => v ?? process.env.REDIS_HOST), + RUN_ENGINE_DEV_PRESENCE_REDIS_READER_HOST: z + .string() + .optional() + .transform((v) => v ?? process.env.REDIS_READER_HOST), + RUN_ENGINE_DEV_PRESENCE_REDIS_READER_PORT: z.coerce + .number() + .optional() + .transform( + (v) => + v ?? (process.env.REDIS_READER_PORT ? parseInt(process.env.REDIS_READER_PORT) : undefined) + ), + RUN_ENGINE_DEV_PRESENCE_REDIS_PORT: z.coerce + .number() + .optional() + .transform( + (v) => v ?? (process.env.REDIS_PORT ? parseInt(process.env.REDIS_PORT) : undefined) + ), + RUN_ENGINE_DEV_PRESENCE_REDIS_USERNAME: z + .string() + .optional() + .transform((v) => v ?? process.env.REDIS_USERNAME), + RUN_ENGINE_DEV_PRESENCE_REDIS_PASSWORD: z + .string() + .optional() + .transform((v) => v ?? process.env.REDIS_PASSWORD), + RUN_ENGINE_DEV_PRESENCE_REDIS_TLS_DISABLED: z + .string() + .default(process.env.REDIS_TLS_DISABLED ?? "false"), + + //API Rate limiting + /** + * @example "60s" + * @example "1m" + * @example "1h" + * @example "1d" + * @example "1000ms" + * @example "1000s" + */ + RUN_ENGINE_RATE_LIMIT_REFILL_INTERVAL: z.string().default("10s"), // refill 400 tokens every 10 seconds + RUN_ENGINE_RATE_LIMIT_MAX: z.coerce.number().int().default(1200), // allow bursts of up to 1200 requests + RUN_ENGINE_RATE_LIMIT_REFILL_RATE: z.coerce.number().int().default(400), // refill 400 tokens every 10 seconds + RUN_ENGINE_RATE_LIMIT_REQUEST_LOGS_ENABLED: z.string().default("0"), + RUN_ENGINE_RATE_LIMIT_REJECTION_LOGS_ENABLED: z.string().default("1"), + RUN_ENGINE_RATE_LIMIT_LIMITER_LOGS_ENABLED: z.string().default("0"), + + RUN_ENGINE_RELEASE_CONCURRENCY_ENABLED: z.string().default("0"), + RUN_ENGINE_RELEASE_CONCURRENCY_DISABLE_CONSUMERS: z.string().default("0"), + RUN_ENGINE_RELEASE_CONCURRENCY_MAX_TOKENS_RATIO: z.coerce.number().default(1), + RUN_ENGINE_RELEASE_CONCURRENCY_RELEASINGS_MAX_AGE: z.coerce + .number() + .int() + .default(60_000 * 30), + RUN_ENGINE_RELEASE_CONCURRENCY_RELEASINGS_POLL_INTERVAL: z.coerce + .number() + .int() + .default(60_000), + RUN_ENGINE_RELEASE_CONCURRENCY_MAX_RETRIES: z.coerce.number().int().default(3), + RUN_ENGINE_RELEASE_CONCURRENCY_CONSUMERS_COUNT: z.coerce.number().int().default(1), + RUN_ENGINE_RELEASE_CONCURRENCY_POLL_INTERVAL: z.coerce.number().int().default(500), + RUN_ENGINE_RELEASE_CONCURRENCY_BATCH_SIZE: z.coerce.number().int().default(10), + + RUN_ENGINE_WORKER_ENABLED: z.string().default("1"), + RUN_ENGINE_WORKER_LOG_LEVEL: z.enum(["log", "error", "warn", "info", "debug"]).default("info"), + RUN_ENGINE_RUN_QUEUE_LOG_LEVEL: z + .enum(["log", "error", "warn", "info", "debug"]) + .default("info"), + RUN_ENGINE_TREAT_PRODUCTION_EXECUTION_STALLS_AS_OOM: z.string().default("0"), + +
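  // NOTE (editorial sketch, not part of this diff): the entries above follow two recurring patterns.
  //
  // 1. Per-component Redis fallback – every `<COMPONENT>_REDIS_*` variable is optional and falls back
  //    to the matching global `REDIS_*` value via `.optional().transform(...)`, so a single global
  //    Redis connection works out of the box and can be overridden per component when needed.
  //    A minimal sketch of the same idea (the helper name is hypothetical, not from this codebase):
  //
  //      const fallbackString = (globalKey: "REDIS_HOST" | "REDIS_READER_HOST" | "REDIS_USERNAME" | "REDIS_PASSWORD") =>
  //        z
  //          .string()
  //          .optional()
  //          .transform((v) => v ?? process.env[globalKey]);
  //
  //      // e.g. RUN_ENGINE_RUN_LOCK_REDIS_HOST: fallbackString("REDIS_HOST"),
  //
  // 2. Token-bucket rate limits – `*_RATE_LIMIT_MAX` is the burst capacity, and
  //    `*_RATE_LIMIT_REFILL_RATE` tokens are added every `*_RATE_LIMIT_REFILL_INTERVAL`.
  //    With the run engine defaults above (1200 / 400 / "10s") this works out to roughly
  //    40 sustained requests per second, with bursts of up to 1200.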
/** How long should the presence ttl last */ + DEV_PRESENCE_SSE_TIMEOUT: z.coerce.number().int().default(30_000), + DEV_PRESENCE_TTL_MS: z.coerce.number().int().default(5_000), + DEV_PRESENCE_POLL_MS: z.coerce.number().int().default(1_000), + /** How many ms to wait until dequeuing again, if there was a run last time */ + DEV_DEQUEUE_INTERVAL_WITH_RUN: z.coerce.number().int().default(250), + /** How many ms to wait until dequeuing again, if there was no run last time */ + DEV_DEQUEUE_INTERVAL_WITHOUT_RUN: z.coerce.number().int().default(1_000), + /** The max number of runs per API call that we'll dequeue in DEV */ + DEV_DEQUEUE_MAX_RUNS_PER_PULL: z.coerce.number().int().default(10), + + /** The maximum concurrent local run processes executing at once in dev. This is a hard limit */ + DEV_MAX_CONCURRENT_RUNS: z.coerce.number().int().optional(), + + /** The CLI should connect to this for dev runs */ + DEV_ENGINE_URL: z.string().default(process.env.APP_ORIGIN ?? "http://localhost:3030"), + + LEGACY_RUN_ENGINE_WORKER_ENABLED: z.string().default(process.env.WORKER_ENABLED ?? "true"), + LEGACY_RUN_ENGINE_WORKER_CONCURRENCY_WORKERS: z.coerce.number().int().default(2), + LEGACY_RUN_ENGINE_WORKER_CONCURRENCY_TASKS_PER_WORKER: z.coerce.number().int().default(1), + LEGACY_RUN_ENGINE_WORKER_POLL_INTERVAL: z.coerce.number().int().default(1000), + LEGACY_RUN_ENGINE_WORKER_IMMEDIATE_POLL_INTERVAL: z.coerce.number().int().default(50), + LEGACY_RUN_ENGINE_WORKER_CONCURRENCY_LIMIT: z.coerce.number().int().default(50), + LEGACY_RUN_ENGINE_WORKER_SHUTDOWN_TIMEOUT_MS: z.coerce.number().int().default(60_000), + LEGACY_RUN_ENGINE_WORKER_LOG_LEVEL: z + .enum(["log", "error", "warn", "info", "debug"]) + .default("info"), + + LEGACY_RUN_ENGINE_WORKER_REDIS_HOST: z + .string() + .optional() + .transform((v) => v ?? process.env.REDIS_HOST), + LEGACY_RUN_ENGINE_WORKER_REDIS_READER_HOST: z + .string() + .optional() + .transform((v) => v ?? process.env.REDIS_READER_HOST), + LEGACY_RUN_ENGINE_WORKER_REDIS_READER_PORT: z.coerce + .number() + .optional() + .transform( + (v) => + v ?? (process.env.REDIS_READER_PORT ? parseInt(process.env.REDIS_READER_PORT) : undefined) + ), + LEGACY_RUN_ENGINE_WORKER_REDIS_PORT: z.coerce + .number() + .optional() + .transform( + (v) => v ?? (process.env.REDIS_PORT ? parseInt(process.env.REDIS_PORT) : undefined) + ), + LEGACY_RUN_ENGINE_WORKER_REDIS_USERNAME: z + .string() + .optional() + .transform((v) => v ?? process.env.REDIS_USERNAME), + LEGACY_RUN_ENGINE_WORKER_REDIS_PASSWORD: z + .string() + .optional() + .transform((v) => v ?? process.env.REDIS_PASSWORD), + LEGACY_RUN_ENGINE_WORKER_REDIS_TLS_DISABLED: z + .string() + .default(process.env.REDIS_TLS_DISABLED ?? "false"), + LEGACY_RUN_ENGINE_WORKER_REDIS_CLUSTER_MODE_ENABLED: z.string().default("0"), + + LEGACY_RUN_ENGINE_WAITING_FOR_DEPLOY_BATCH_SIZE: z.coerce.number().int().default(100), + LEGACY_RUN_ENGINE_WAITING_FOR_DEPLOY_BATCH_STAGGER_MS: z.coerce.number().int().default(1_000), + + COMMON_WORKER_ENABLED: z.string().default(process.env.WORKER_ENABLED ?? 
"true"), + COMMON_WORKER_CONCURRENCY_WORKERS: z.coerce.number().int().default(2), + COMMON_WORKER_CONCURRENCY_TASKS_PER_WORKER: z.coerce.number().int().default(10), + COMMON_WORKER_POLL_INTERVAL: z.coerce.number().int().default(1000), + COMMON_WORKER_IMMEDIATE_POLL_INTERVAL: z.coerce.number().int().default(50), + COMMON_WORKER_CONCURRENCY_LIMIT: z.coerce.number().int().default(50), + COMMON_WORKER_SHUTDOWN_TIMEOUT_MS: z.coerce.number().int().default(60_000), + COMMON_WORKER_LOG_LEVEL: z.enum(["log", "error", "warn", "info", "debug"]).default("info"), + + COMMON_WORKER_REDIS_HOST: z + .string() + .optional() + .transform((v) => v ?? process.env.REDIS_HOST), + COMMON_WORKER_REDIS_READER_HOST: z + .string() + .optional() + .transform((v) => v ?? process.env.REDIS_READER_HOST), + COMMON_WORKER_REDIS_READER_PORT: z.coerce + .number() + .optional() + .transform( + (v) => + v ?? (process.env.REDIS_READER_PORT ? parseInt(process.env.REDIS_READER_PORT) : undefined) + ), + COMMON_WORKER_REDIS_PORT: z.coerce + .number() + .optional() + .transform( + (v) => v ?? (process.env.REDIS_PORT ? parseInt(process.env.REDIS_PORT) : undefined) + ), + COMMON_WORKER_REDIS_USERNAME: z + .string() + .optional() + .transform((v) => v ?? process.env.REDIS_USERNAME), + COMMON_WORKER_REDIS_PASSWORD: z + .string() + .optional() + .transform((v) => v ?? process.env.REDIS_PASSWORD), + COMMON_WORKER_REDIS_TLS_DISABLED: z.string().default(process.env.REDIS_TLS_DISABLED ?? "false"), + COMMON_WORKER_REDIS_CLUSTER_MODE_ENABLED: z.string().default("0"), + + BATCH_TRIGGER_PROCESS_JOB_VISIBILITY_TIMEOUT_MS: z.coerce + .number() + .int() + .default(60_000 * 5), // 5 minutes + + BATCH_TRIGGER_CACHED_RUNS_CHECK_ENABLED: BoolEnv.default(false), + + BATCH_TRIGGER_WORKER_ENABLED: z.string().default(process.env.WORKER_ENABLED ?? "true"), + BATCH_TRIGGER_WORKER_CONCURRENCY_WORKERS: z.coerce.number().int().default(2), + BATCH_TRIGGER_WORKER_CONCURRENCY_TASKS_PER_WORKER: z.coerce.number().int().default(10), + BATCH_TRIGGER_WORKER_POLL_INTERVAL: z.coerce.number().int().default(1000), + BATCH_TRIGGER_WORKER_IMMEDIATE_POLL_INTERVAL: z.coerce.number().int().default(50), + BATCH_TRIGGER_WORKER_CONCURRENCY_LIMIT: z.coerce.number().int().default(20), + BATCH_TRIGGER_WORKER_SHUTDOWN_TIMEOUT_MS: z.coerce.number().int().default(60_000), + BATCH_TRIGGER_WORKER_LOG_LEVEL: z + .enum(["log", "error", "warn", "info", "debug"]) + .default("info"), + + BATCH_TRIGGER_WORKER_REDIS_HOST: z + .string() + .optional() + .transform((v) => v ?? process.env.REDIS_HOST), + BATCH_TRIGGER_WORKER_REDIS_READER_HOST: z + .string() + .optional() + .transform((v) => v ?? process.env.REDIS_READER_HOST), + BATCH_TRIGGER_WORKER_REDIS_READER_PORT: z.coerce + .number() + .optional() + .transform( + (v) => + v ?? (process.env.REDIS_READER_PORT ? parseInt(process.env.REDIS_READER_PORT) : undefined) + ), + BATCH_TRIGGER_WORKER_REDIS_PORT: z.coerce + .number() + .optional() + .transform( + (v) => v ?? (process.env.REDIS_PORT ? parseInt(process.env.REDIS_PORT) : undefined) + ), + BATCH_TRIGGER_WORKER_REDIS_USERNAME: z + .string() + .optional() + .transform((v) => v ?? process.env.REDIS_USERNAME), + BATCH_TRIGGER_WORKER_REDIS_PASSWORD: z + .string() + .optional() + .transform((v) => v ?? process.env.REDIS_PASSWORD), + BATCH_TRIGGER_WORKER_REDIS_TLS_DISABLED: z + .string() + .default(process.env.REDIS_TLS_DISABLED ?? 
"false"), + BATCH_TRIGGER_WORKER_REDIS_CLUSTER_MODE_ENABLED: z.string().default("0"), + + // BatchQueue DRR settings (Run Engine v2) + BATCH_QUEUE_DRR_QUANTUM: z.coerce.number().int().default(25), + BATCH_QUEUE_MAX_DEFICIT: z.coerce.number().int().default(100), + BATCH_QUEUE_CONSUMER_COUNT: z.coerce.number().int().default(3), + BATCH_QUEUE_CONSUMER_INTERVAL_MS: z.coerce.number().int().default(50), + // Global rate limit: max items processed per second across all consumers + // If not set, no global rate limiting is applied + BATCH_QUEUE_GLOBAL_RATE_LIMIT: z.coerce.number().int().positive().optional(), + + ADMIN_WORKER_ENABLED: z.string().default(process.env.WORKER_ENABLED ?? "true"), + ADMIN_WORKER_CONCURRENCY_WORKERS: z.coerce.number().int().default(2), + ADMIN_WORKER_CONCURRENCY_TASKS_PER_WORKER: z.coerce.number().int().default(10), + ADMIN_WORKER_POLL_INTERVAL: z.coerce.number().int().default(1000), + ADMIN_WORKER_IMMEDIATE_POLL_INTERVAL: z.coerce.number().int().default(50), + ADMIN_WORKER_CONCURRENCY_LIMIT: z.coerce.number().int().default(20), + ADMIN_WORKER_SHUTDOWN_TIMEOUT_MS: z.coerce.number().int().default(60_000), + ADMIN_WORKER_LOG_LEVEL: z.enum(["log", "error", "warn", "info", "debug"]).default("info"), + + ADMIN_WORKER_REDIS_HOST: z + .string() + .optional() + .transform((v) => v ?? process.env.REDIS_HOST), + ADMIN_WORKER_REDIS_READER_HOST: z + .string() + .optional() + .transform((v) => v ?? process.env.REDIS_READER_HOST), + ADMIN_WORKER_REDIS_READER_PORT: z.coerce + .number() + .optional() + .transform( + (v) => + v ?? (process.env.REDIS_READER_PORT ? parseInt(process.env.REDIS_READER_PORT) : undefined) + ), + ADMIN_WORKER_REDIS_PORT: z.coerce + .number() + .optional() + .transform( + (v) => v ?? (process.env.REDIS_PORT ? parseInt(process.env.REDIS_PORT) : undefined) + ), + ADMIN_WORKER_REDIS_USERNAME: z + .string() + .optional() + .transform((v) => v ?? process.env.REDIS_USERNAME), + ADMIN_WORKER_REDIS_PASSWORD: z + .string() + .optional() + .transform((v) => v ?? process.env.REDIS_PASSWORD), + ADMIN_WORKER_REDIS_TLS_DISABLED: z.string().default(process.env.REDIS_TLS_DISABLED ?? "false"), + ADMIN_WORKER_REDIS_CLUSTER_MODE_ENABLED: z.string().default("0"), + + ALERTS_WORKER_ENABLED: z.string().default(process.env.WORKER_ENABLED ?? "true"), + ALERTS_WORKER_CONCURRENCY_WORKERS: z.coerce.number().int().default(2), + ALERTS_WORKER_CONCURRENCY_TASKS_PER_WORKER: z.coerce.number().int().default(10), + ALERTS_WORKER_POLL_INTERVAL: z.coerce.number().int().default(1000), + ALERTS_WORKER_IMMEDIATE_POLL_INTERVAL: z.coerce.number().int().default(100), + ALERTS_WORKER_CONCURRENCY_LIMIT: z.coerce.number().int().default(50), + ALERTS_WORKER_SHUTDOWN_TIMEOUT_MS: z.coerce.number().int().default(60_000), + ALERTS_WORKER_LOG_LEVEL: z.enum(["log", "error", "warn", "info", "debug"]).default("info"), + + ALERTS_WORKER_REDIS_HOST: z + .string() + .optional() + .transform((v) => v ?? process.env.REDIS_HOST), + ALERTS_WORKER_REDIS_READER_HOST: z + .string() + .optional() + .transform((v) => v ?? process.env.REDIS_READER_HOST), + ALERTS_WORKER_REDIS_READER_PORT: z.coerce + .number() + .optional() + .transform( + (v) => + v ?? (process.env.REDIS_READER_PORT ? parseInt(process.env.REDIS_READER_PORT) : undefined) + ), + ALERTS_WORKER_REDIS_PORT: z.coerce + .number() + .optional() + .transform( + (v) => v ?? (process.env.REDIS_PORT ? parseInt(process.env.REDIS_PORT) : undefined) + ), + ALERTS_WORKER_REDIS_USERNAME: z + .string() + .optional() + .transform((v) => v ?? 
process.env.REDIS_USERNAME), + ALERTS_WORKER_REDIS_PASSWORD: z + .string() + .optional() + .transform((v) => v ?? process.env.REDIS_PASSWORD), + ALERTS_WORKER_REDIS_TLS_DISABLED: z.string().default(process.env.REDIS_TLS_DISABLED ?? "false"), + ALERTS_WORKER_REDIS_CLUSTER_MODE_ENABLED: z.string().default("0"), + + SCHEDULE_ENGINE_LOG_LEVEL: z.enum(["log", "error", "warn", "info", "debug"]).default("info"), + SCHEDULE_WORKER_ENABLED: z.string().default(process.env.WORKER_ENABLED ?? "true"), + SCHEDULE_WORKER_CONCURRENCY_WORKERS: z.coerce.number().int().default(2), + SCHEDULE_WORKER_CONCURRENCY_TASKS_PER_WORKER: z.coerce.number().int().default(10), + SCHEDULE_WORKER_POLL_INTERVAL: z.coerce.number().int().default(1000), + SCHEDULE_WORKER_IMMEDIATE_POLL_INTERVAL: z.coerce.number().int().default(50), + SCHEDULE_WORKER_CONCURRENCY_LIMIT: z.coerce.number().int().default(50), + SCHEDULE_WORKER_SHUTDOWN_TIMEOUT_MS: z.coerce.number().int().default(30_000), + SCHEDULE_WORKER_DISTRIBUTION_WINDOW_SECONDS: z.coerce.number().int().default(30), + + SCHEDULE_WORKER_REDIS_HOST: z + .string() + .optional() + .transform((v) => v ?? process.env.REDIS_HOST), + SCHEDULE_WORKER_REDIS_READER_HOST: z + .string() + .optional() + .transform((v) => v ?? process.env.REDIS_READER_HOST), + SCHEDULE_WORKER_REDIS_READER_PORT: z.coerce + .number() + .optional() + .transform( + (v) => + v ?? (process.env.REDIS_READER_PORT ? parseInt(process.env.REDIS_READER_PORT) : undefined) + ), + SCHEDULE_WORKER_REDIS_PORT: z.coerce + .number() + .optional() + .transform( + (v) => v ?? (process.env.REDIS_PORT ? parseInt(process.env.REDIS_PORT) : undefined) + ), + SCHEDULE_WORKER_REDIS_USERNAME: z + .string() + .optional() + .transform((v) => v ?? process.env.REDIS_USERNAME), + SCHEDULE_WORKER_REDIS_PASSWORD: z + .string() + .optional() + .transform((v) => v ?? process.env.REDIS_PASSWORD), + SCHEDULE_WORKER_REDIS_TLS_DISABLED: z + .string() + .default(process.env.REDIS_TLS_DISABLED ?? "false"), + SCHEDULE_WORKER_REDIS_CLUSTER_MODE_ENABLED: z.string().default("0"), + + TASK_EVENT_PARTITIONING_ENABLED: z.string().default("0"), + TASK_EVENT_PARTITIONED_WINDOW_IN_SECONDS: z.coerce.number().int().default(60), // 1 minute + + DEPLOYMENTS_AUTORELOAD_POLL_INTERVAL_MS: z.coerce.number().int().default(5_000), + BULK_ACTION_AUTORELOAD_POLL_INTERVAL_MS: z.coerce.number().int().default(1_000), + QUEUES_AUTORELOAD_POLL_INTERVAL_MS: z.coerce.number().int().default(5_000), + + SLACK_BOT_TOKEN: z.string().optional(), + SLACK_SIGNUP_REASON_CHANNEL_ID: z.string().optional(), + + // kapa.ai + KAPA_AI_WEBSITE_ID: z.string().optional(), + + // BetterStack + BETTERSTACK_API_KEY: z.string().optional(), + BETTERSTACK_STATUS_PAGE_ID: z.string().optional(), + + RUN_REPLICATION_REDIS_HOST: z + .string() + .optional() + .transform((v) => v ?? process.env.REDIS_HOST), + RUN_REPLICATION_REDIS_READER_HOST: z + .string() + .optional() + .transform((v) => v ?? process.env.REDIS_READER_HOST), + RUN_REPLICATION_REDIS_READER_PORT: z.coerce + .number() + .optional() + .transform( + (v) => + v ?? (process.env.REDIS_READER_PORT ? parseInt(process.env.REDIS_READER_PORT) : undefined) + ), + RUN_REPLICATION_REDIS_PORT: z.coerce + .number() + .optional() + .transform( + (v) => v ?? (process.env.REDIS_PORT ? parseInt(process.env.REDIS_PORT) : undefined) + ), + RUN_REPLICATION_REDIS_USERNAME: z + .string() + .optional() + .transform((v) => v ?? process.env.REDIS_USERNAME), + RUN_REPLICATION_REDIS_PASSWORD: z + .string() + .optional() + .transform((v) => v ?? 
process.env.REDIS_PASSWORD), + RUN_REPLICATION_REDIS_TLS_DISABLED: z + .string() + .default(process.env.REDIS_TLS_DISABLED ?? "false"), + + RUN_REPLICATION_CLICKHOUSE_URL: z.string().optional(), + RUN_REPLICATION_ENABLED: z.string().default("0"), + RUN_REPLICATION_SLOT_NAME: z.string().default("task_runs_to_clickhouse_v1"), + RUN_REPLICATION_PUBLICATION_NAME: z.string().default("task_runs_to_clickhouse_v1_publication"), + RUN_REPLICATION_MAX_FLUSH_CONCURRENCY: z.coerce.number().int().default(2), + RUN_REPLICATION_FLUSH_INTERVAL_MS: z.coerce.number().int().default(1000), + RUN_REPLICATION_FLUSH_BATCH_SIZE: z.coerce.number().int().default(100), + RUN_REPLICATION_LEADER_LOCK_TIMEOUT_MS: z.coerce.number().int().default(30_000), + RUN_REPLICATION_LEADER_LOCK_EXTEND_INTERVAL_MS: z.coerce.number().int().default(10_000), + RUN_REPLICATION_ACK_INTERVAL_SECONDS: z.coerce.number().int().default(10), + RUN_REPLICATION_LOG_LEVEL: z.enum(["log", "error", "warn", "info", "debug"]).default("info"), + RUN_REPLICATION_CLICKHOUSE_LOG_LEVEL: z + .enum(["log", "error", "warn", "info", "debug"]) + .default("info"), + RUN_REPLICATION_LEADER_LOCK_ADDITIONAL_TIME_MS: z.coerce.number().int().default(10_000), + RUN_REPLICATION_LEADER_LOCK_RETRY_INTERVAL_MS: z.coerce.number().int().default(500), + RUN_REPLICATION_WAIT_FOR_ASYNC_INSERT: z.string().default("0"), + RUN_REPLICATION_KEEP_ALIVE_ENABLED: z.string().default("0"), + RUN_REPLICATION_KEEP_ALIVE_IDLE_SOCKET_TTL_MS: z.coerce.number().int().optional(), + RUN_REPLICATION_MAX_OPEN_CONNECTIONS: z.coerce.number().int().default(10), + // Retry configuration for insert operations + RUN_REPLICATION_INSERT_MAX_RETRIES: z.coerce.number().int().default(3), + RUN_REPLICATION_INSERT_BASE_DELAY_MS: z.coerce.number().int().default(100), + RUN_REPLICATION_INSERT_MAX_DELAY_MS: z.coerce.number().int().default(2000), + RUN_REPLICATION_INSERT_STRATEGY: z.enum(["insert", "insert_async"]).default("insert"), + RUN_REPLICATION_DISABLE_PAYLOAD_INSERT: z.string().default("0"), + + // Clickhouse + CLICKHOUSE_URL: z.string(), + CLICKHOUSE_KEEP_ALIVE_ENABLED: z.string().default("1"), + CLICKHOUSE_KEEP_ALIVE_IDLE_SOCKET_TTL_MS: z.coerce.number().int().optional(), + CLICKHOUSE_MAX_OPEN_CONNECTIONS: z.coerce.number().int().default(10), + CLICKHOUSE_LOG_LEVEL: z.enum(["log", "error", "warn", "info", "debug"]).default("info"), + CLICKHOUSE_COMPRESSION_REQUEST: z.string().default("1"), + + EVENTS_CLICKHOUSE_URL: z + .string() + .optional() + .transform((v) => v ?? 
process.env.CLICKHOUSE_URL), + EVENTS_CLICKHOUSE_KEEP_ALIVE_ENABLED: z.string().default("1"), + EVENTS_CLICKHOUSE_KEEP_ALIVE_IDLE_SOCKET_TTL_MS: z.coerce.number().int().optional(), + EVENTS_CLICKHOUSE_MAX_OPEN_CONNECTIONS: z.coerce.number().int().default(10), + EVENTS_CLICKHOUSE_LOG_LEVEL: z.enum(["log", "error", "warn", "info", "debug"]).default("info"), + EVENTS_CLICKHOUSE_COMPRESSION_REQUEST: z.string().default("1"), + EVENTS_CLICKHOUSE_BATCH_SIZE: z.coerce.number().int().default(1000), + EVENTS_CLICKHOUSE_FLUSH_INTERVAL_MS: z.coerce.number().int().default(1000), + EVENTS_CLICKHOUSE_INSERT_STRATEGY: z.enum(["insert", "insert_async"]).default("insert"), + EVENTS_CLICKHOUSE_WAIT_FOR_ASYNC_INSERT: z.string().default("1"), + EVENTS_CLICKHOUSE_ASYNC_INSERT_MAX_DATA_SIZE: z.coerce.number().int().default(10485760), + EVENTS_CLICKHOUSE_ASYNC_INSERT_BUSY_TIMEOUT_MS: z.coerce.number().int().default(5000), + EVENTS_CLICKHOUSE_START_TIME_MAX_AGE_MS: z.coerce + .number() + .int() + .default(60_000 * 5), // 5 minutes + EVENT_REPOSITORY_CLICKHOUSE_ROLLOUT_PERCENT: z.coerce.number().optional(), + EVENT_REPOSITORY_DEFAULT_STORE: z + .enum(["postgres", "clickhouse", "clickhouse_v2"]) + .default("postgres"), + EVENT_REPOSITORY_DEBUG_LOGS_DISABLED: BoolEnv.default(false), + EVENTS_CLICKHOUSE_MAX_TRACE_SUMMARY_VIEW_COUNT: z.coerce.number().int().default(25_000), + EVENTS_CLICKHOUSE_MAX_TRACE_DETAILED_SUMMARY_VIEW_COUNT: z.coerce.number().int().default(5_000), + EVENTS_CLICKHOUSE_MAX_LIVE_RELOADING_SETTING: z.coerce.number().int().default(2000), + + // Bootstrap + TRIGGER_BOOTSTRAP_ENABLED: z.string().default("0"), + TRIGGER_BOOTSTRAP_WORKER_GROUP_NAME: z.string().optional(), + TRIGGER_BOOTSTRAP_WORKER_TOKEN_PATH: z.string().optional(), + + // Machine presets + MACHINE_PRESETS_OVERRIDE_PATH: z.string().optional(), + + // CLI package tag (e.g. "latest", "v4-beta", "4.0.0") - used for setup commands + TRIGGER_CLI_TAG: z.string().default("latest"), + + HEALTHCHECK_DATABASE_DISABLED: z.string().default("0"), + + REQUEST_IDEMPOTENCY_REDIS_HOST: z + .string() + .optional() + .transform((v) => v ?? process.env.REDIS_HOST), + REQUEST_IDEMPOTENCY_REDIS_READER_HOST: z + .string() + .optional() + .transform((v) => v ?? process.env.REDIS_READER_HOST), + REQUEST_IDEMPOTENCY_REDIS_READER_PORT: z.coerce + .number() + .optional() + .transform( + (v) => + v ?? (process.env.REDIS_READER_PORT ? parseInt(process.env.REDIS_READER_PORT) : undefined) + ), + REQUEST_IDEMPOTENCY_REDIS_PORT: z.coerce + .number() + .optional() + .transform( + (v) => v ?? (process.env.REDIS_PORT ? parseInt(process.env.REDIS_PORT) : undefined) + ), + REQUEST_IDEMPOTENCY_REDIS_USERNAME: z + .string() + .optional() + .transform((v) => v ?? process.env.REDIS_USERNAME), + REQUEST_IDEMPOTENCY_REDIS_PASSWORD: z + .string() + .optional() + .transform((v) => v ?? process.env.REDIS_PASSWORD), + REQUEST_IDEMPOTENCY_REDIS_TLS_DISABLED: z + .string() + .default(process.env.REDIS_TLS_DISABLED ?? 
"false"), + + REQUEST_IDEMPOTENCY_LOG_LEVEL: z + .enum(["log", "error", "warn", "info", "debug"]) + .default("info"), + + REQUEST_IDEMPOTENCY_TTL_IN_MS: z.coerce + .number() + .int() + .default(60_000 * 60 * 24), + + // Bulk action + BULK_ACTION_BATCH_SIZE: z.coerce.number().int().default(100), + BULK_ACTION_BATCH_DELAY_MS: z.coerce.number().int().default(200), + BULK_ACTION_SUBBATCH_CONCURRENCY: z.coerce.number().int().default(5), + + // AI Run Filter + AI_RUN_FILTER_MODEL: z.string().optional(), + + EVENT_LOOP_MONITOR_THRESHOLD_MS: z.coerce.number().int().default(100), + EVENT_LOOP_MONITOR_UTILIZATION_INTERVAL_MS: z.coerce.number().int().default(1000), + EVENT_LOOP_MONITOR_UTILIZATION_SAMPLE_RATE: z.coerce.number().default(0.05), + + VERY_SLOW_QUERY_THRESHOLD_MS: z.coerce.number().int().optional(), + + REALTIME_STREAMS_S2_BASIN: z.string().optional(), + REALTIME_STREAMS_S2_ACCESS_TOKEN: z.string().optional(), + REALTIME_STREAMS_S2_ACCESS_TOKEN_EXPIRATION_IN_MS: z.coerce + .number() + .int() + .default(60_000 * 60 * 24), // 1 day + REALTIME_STREAMS_S2_LOG_LEVEL: z + .enum(["log", "error", "warn", "info", "debug"]) + .default("info"), + REALTIME_STREAMS_S2_FLUSH_INTERVAL_MS: z.coerce.number().int().default(100), + REALTIME_STREAMS_S2_MAX_RETRIES: z.coerce.number().int().default(10), + REALTIME_STREAMS_S2_WAIT_SECONDS: z.coerce.number().int().default(60), + REALTIME_STREAMS_DEFAULT_VERSION: z.enum(["v1", "v2"]).default("v1"), + WAIT_UNTIL_TIMEOUT_MS: z.coerce.number().int().default(600_000), + }) + .and(GithubAppEnvSchema) + .and(S2EnvSchema); export type Environment = z.infer; export const env = EnvironmentSchema.parse(process.env); diff --git a/apps/webapp/app/eventLoopMonitor.server.ts b/apps/webapp/app/eventLoopMonitor.server.ts index db25a28137..b86ea3d31a 100644 --- a/apps/webapp/app/eventLoopMonitor.server.ts +++ b/apps/webapp/app/eventLoopMonitor.server.ts @@ -1,10 +1,15 @@ import { createHook } from "node:async_hooks"; import { singleton } from "./utils/singleton"; import { tracer } from "./v3/tracer.server"; +import { env } from "./env.server"; +import { context, Context } from "@opentelemetry/api"; +import { performance } from "node:perf_hooks"; +import { logger } from "./services/logger.server"; +import { signalsEmitter } from "./services/signals.server"; -const THRESHOLD_NS = 1e8; // 100ms +const THRESHOLD_NS = env.EVENT_LOOP_MONITOR_THRESHOLD_MS * 1e6; -const cache = new Map(); +const cache = new Map(); function init(asyncId: number, type: string, triggerAsyncId: number, resource: any) { cache.set(asyncId, { @@ -26,6 +31,7 @@ function before(asyncId: number) { cache.set(asyncId, { ...cached, start: process.hrtime(), + parentCtx: context.active(), }); } @@ -47,13 +53,17 @@ function after(asyncId: number) { if (diffNs > THRESHOLD_NS) { const time = diffNs / 1e6; // in ms - const newSpan = tracer.startSpan("event-loop-blocked", { - startTime: new Date(new Date().getTime() - time), - attributes: { - asyncType: cached.type, - label: "EventLoopMonitor", + const newSpan = tracer.startSpan( + "event-loop-blocked", + { + startTime: new Date(new Date().getTime() - time), + attributes: { + asyncType: cached.type, + label: "EventLoopMonitor", + }, }, - }); + cached.parentCtx + ); newSpan.end(); } @@ -62,16 +72,53 @@ function after(asyncId: number) { export const eventLoopMonitor = singleton("eventLoopMonitor", () => { const hook = createHook({ init, before, after, destroy }); + let stopEventLoopUtilizationMonitoring: () => void; + return { enable: () => { console.log("๐Ÿฅธ Initializing 
event loop monitor"); hook.enable(); + + stopEventLoopUtilizationMonitoring = startEventLoopUtilizationMonitoring(); }, disable: () => { console.log("๐Ÿฅธ Disabling event loop monitor"); hook.disable(); + + stopEventLoopUtilizationMonitoring?.(); }, }; }); + +function startEventLoopUtilizationMonitoring() { + let lastEventLoopUtilization = performance.eventLoopUtilization(); + + const interval = setInterval(() => { + const currentEventLoopUtilization = performance.eventLoopUtilization(); + + const diff = performance.eventLoopUtilization( + currentEventLoopUtilization, + lastEventLoopUtilization + ); + const utilization = Number.isFinite(diff.utilization) ? diff.utilization : 0; + + if (Math.random() < env.EVENT_LOOP_MONITOR_UTILIZATION_SAMPLE_RATE) { + logger.info("nodejs.event_loop.utilization", { utilization }); + } + + lastEventLoopUtilization = currentEventLoopUtilization; + }, env.EVENT_LOOP_MONITOR_UTILIZATION_INTERVAL_MS); + + signalsEmitter.on("SIGTERM", () => { + clearInterval(interval); + }); + signalsEmitter.on("SIGINT", () => { + clearInterval(interval); + }); + + return () => { + clearInterval(interval); + }; +} diff --git a/apps/webapp/app/hooks/useAutoRevalidate.ts b/apps/webapp/app/hooks/useAutoRevalidate.ts new file mode 100644 index 0000000000..4205b03bcc --- /dev/null +++ b/apps/webapp/app/hooks/useAutoRevalidate.ts @@ -0,0 +1,48 @@ +import { useRevalidator } from "@remix-run/react"; +import { useEffect } from "react"; + +type UseAutoRevalidateOptions = { + interval?: number; // in milliseconds + onFocus?: boolean; + disabled?: boolean; +}; + +export function useAutoRevalidate(options: UseAutoRevalidateOptions = {}) { + const { interval = 5000, onFocus = true, disabled = false } = options; + const revalidator = useRevalidator(); + + useEffect(() => { + if (!interval || interval <= 0 || disabled) return; + + const intervalId = setInterval(() => { + if (revalidator.state === "loading") { + return; + } + revalidator.revalidate(); + }, interval); + + return () => clearInterval(intervalId); + }, [interval, disabled]); + + useEffect(() => { + if (!onFocus || disabled) return; + + const handleFocus = () => { + if (document.visibilityState === "visible" && revalidator.state !== "loading") { + revalidator.revalidate(); + } + }; + + // Revalidate when the page becomes visible + document.addEventListener("visibilitychange", handleFocus); + // Revalidate when the window gains focus + window.addEventListener("focus", handleFocus); + + return () => { + document.removeEventListener("visibilitychange", handleFocus); + window.removeEventListener("focus", handleFocus); + }; + }, [onFocus, disabled]); + + return revalidator; +} diff --git a/apps/webapp/app/hooks/useDebounce.ts b/apps/webapp/app/hooks/useDebounce.ts index a8670caf7f..da63330f2a 100644 --- a/apps/webapp/app/hooks/useDebounce.ts +++ b/apps/webapp/app/hooks/useDebounce.ts @@ -1,4 +1,4 @@ -import { useRef } from "react"; +import { useEffect, useRef } from "react"; /** * A function that you call with a debounce delay, the function will only be called after the delay has passed @@ -19,3 +19,25 @@ export function useDebounce any>(fn: T, delay: num }, delay); }; } + +/** + * A function that takes in a value, function, and delay. + * It will run the function with the debounced value, only if the value has changed. 
+ * It should deal with the function being passed in not being a useCallback + */ +export function useDebounceEffect(value: T, fn: (value: T) => void, delay: number) { + const fnRef = useRef(fn); + + // Update the ref whenever the function changes + fnRef.current = fn; + + useEffect(() => { + const timeout = setTimeout(() => { + fnRef.current(value); + }, delay); + + return () => { + clearTimeout(timeout); + }; + }, [value, delay]); // Only depend on value and delay, not fn +} diff --git a/apps/webapp/app/hooks/useSearchParam.ts b/apps/webapp/app/hooks/useSearchParam.ts index c0f939abcc..2c7e456bbb 100644 --- a/apps/webapp/app/hooks/useSearchParam.ts +++ b/apps/webapp/app/hooks/useSearchParam.ts @@ -7,40 +7,18 @@ type Values = Record; export function useSearchParams() { const navigate = useNavigate(); const location = useOptimisticLocation(); - const search = new URLSearchParams(location.search); - - const set = useCallback( - (values: Values) => { - for (const [param, value] of Object.entries(values)) { - if (value === undefined) { - search.delete(param); - continue; - } - - if (typeof value === "string") { - search.set(param, value); - continue; - } - - search.delete(param); - for (const v of value) { - search.append(param, v); - } - } - }, - [location, search] - ); const replace = useCallback( (values: Values) => { - set(values); - navigate(`${location.pathname}?${search.toString()}`, { replace: true }); + const s = set(new URLSearchParams(location.search), values); + navigate(`${location.pathname}?${s.toString()}`, { replace: true }); }, - [location, search] + [location, navigate] ); const del = useCallback( (keys: string | string[]) => { + const search = new URLSearchParams(location.search); if (!Array.isArray(keys)) { keys = [keys]; } @@ -49,34 +27,60 @@ export function useSearchParams() { } navigate(`${location.pathname}?${search.toString()}`, { replace: true }); }, - [location, search] + [location, navigate] ); const value = useCallback( (param: string) => { - const val = search.get(param) ?? undefined; - if (val === undefined) { - return val; - } - - return decodeURIComponent(val); + const search = new URLSearchParams(location.search); + return search.get(param) ?? 
undefined; }, - [location, search] + [location] ); const values = useCallback( (param: string) => { - const all = search.getAll(param); - return all.map((v) => decodeURIComponent(v)); + const search = new URLSearchParams(location.search); + return search.getAll(param); }, - [location, search] + [location] + ); + + const has = useCallback( + (param: string) => { + const search = new URLSearchParams(location.search); + return search.has(param); + }, + [location] ); return { value, values, - set, replace, del, + has, }; } + +function set(searchParams: URLSearchParams, values: Values) { + const search = new URLSearchParams(searchParams); + for (const [param, value] of Object.entries(values)) { + if (value === undefined) { + search.delete(param); + continue; + } + + if (typeof value === "string") { + search.set(param, value); + continue; + } + + search.delete(param); + for (const v of value) { + search.append(param, v); + } + } + + return search; +} diff --git a/apps/webapp/app/hooks/useTriggerCliTag.ts b/apps/webapp/app/hooks/useTriggerCliTag.ts new file mode 100644 index 0000000000..190a28ef39 --- /dev/null +++ b/apps/webapp/app/hooks/useTriggerCliTag.ts @@ -0,0 +1,8 @@ +import { useTypedRouteLoaderData } from "remix-typedjson"; +import { type loader } from "~/root"; + +export function useTriggerCliTag() { + const routeMatch = useTypedRouteLoaderData("root"); + + return routeMatch!.triggerCliTag; +} diff --git a/apps/webapp/app/models/member.server.ts b/apps/webapp/app/models/member.server.ts index 86ae5d371d..04c1df1b41 100644 --- a/apps/webapp/app/models/member.server.ts +++ b/apps/webapp/app/models/member.server.ts @@ -1,5 +1,9 @@ -import { prisma } from "~/db.server"; +import { type Prisma, prisma } from "~/db.server"; import { createEnvironment } from "./organization.server"; +import { customAlphabet } from "nanoid"; + +const tokenValueLength = 40; +const tokenGenerator = customAlphabet("123456789abcdefghijkmnopqrstuvwxyz", tokenValueLength); export async function getTeamMembersAndInvites({ userId, @@ -95,14 +99,19 @@ export async function inviteMembers({ throw new Error("User does not have access to this organization"); } - const created = await prisma.orgMemberInvite.createMany({ - data: emails.map((email) => ({ - email, - organizationId: org.id, - inviterId: userId, - role: "MEMBER", - })), - skipDuplicates: true, + const invites = [...new Set(emails)].map( + (email) => + ({ + email, + token: tokenGenerator(), + organizationId: org.id, + inviterId: userId, + role: "MEMBER", + } satisfies Prisma.OrgMemberInviteCreateManyInput) + ); + + await prisma.orgMemberInvite.createMany({ + data: invites, }); return await prisma.orgMemberInvite.findMany({ @@ -147,12 +156,19 @@ export async function getUsersInvites({ email }: { email: string }) { }); } -export async function acceptInvite({ userId, inviteId }: { userId: string; inviteId: string }) { +export async function acceptInvite({ + user, + inviteId, +}: { + user: { id: string; email: string }; + inviteId: string; +}) { return await prisma.$transaction(async (tx) => { // 1. 
Delete the invite and get the invite details const invite = await tx.orgMemberInvite.delete({ where: { id: inviteId, + email: user.email, }, include: { organization: { @@ -167,7 +183,7 @@ export async function acceptInvite({ userId, inviteId }: { userId: string; invit const member = await tx.orgMember.create({ data: { organizationId: invite.organizationId, - userId, + userId: user.id, role: invite.role, }, }); @@ -187,7 +203,7 @@ export async function acceptInvite({ userId, inviteId }: { userId: string; invit // 4. Check for other invites const remainingInvites = await tx.orgMemberInvite.findMany({ where: { - email: invite.email, + email: user.email, }, }); @@ -195,28 +211,29 @@ export async function acceptInvite({ userId, inviteId }: { userId: string; invit }); } -export async function declineInvite({ userId, inviteId }: { userId: string; inviteId: string }) { +export async function declineInvite({ + user, + inviteId, +}: { + user: { id: string; email: string }; + inviteId: string; +}) { return await prisma.$transaction(async (tx) => { //1. delete invite const declinedInvite = await prisma.orgMemberInvite.delete({ where: { id: inviteId, + email: user.email, }, include: { organization: true, }, }); - //2. get email - const user = await prisma.user.findUnique({ - where: { id: userId }, - select: { email: true }, - }); - - //3. check for other invites + //2. check for other invites const remainingInvites = await prisma.orgMemberInvite.findMany({ where: { - email: user!.email, + email: user.email, }, }); @@ -224,10 +241,11 @@ export async function declineInvite({ userId, inviteId }: { userId: string; invi }); } -export async function resendInvite({ inviteId }: { inviteId: string }) { +export async function resendInvite({ inviteId, userId }: { inviteId: string; userId: string }) { return await prisma.orgMemberInvite.update({ where: { id: inviteId, + inviterId: userId, }, data: { updatedAt: new Date(), @@ -241,26 +259,27 @@ export async function resendInvite({ inviteId }: { inviteId: string }) { export async function revokeInvite({ userId, - slug, + orgSlug, inviteId, }: { userId: string; - slug: string; + orgSlug: string; inviteId: string; }) { - const org = await prisma.organization.findFirst({ - where: { slug, members: { some: { userId } } }, - }); - - if (!org) { - throw new Error("User does not have access to this organization"); - } - const invite = await prisma.orgMemberInvite.delete({ + const invite = await prisma.orgMemberInvite.findFirst({ where: { id: inviteId, - organizationId: org.id, + organization: { + slug: orgSlug, + members: { + some: { + userId, + }, + }, + }, }, select: { + id: true, email: true, organization: true, }, @@ -270,5 +289,11 @@ export async function revokeInvite({ throw new Error("Invite not found"); } + await prisma.orgMemberInvite.delete({ + where: { + id: invite.id, + }, + }); + return { email: invite.email, organization: invite.organization }; } diff --git a/apps/webapp/app/models/message.server.ts b/apps/webapp/app/models/message.server.ts index b488a43044..f19995f61c 100644 --- a/apps/webapp/app/models/message.server.ts +++ b/apps/webapp/app/models/message.server.ts @@ -1,7 +1,8 @@ -import { json, Session } from "@remix-run/node"; -import { createCookieSessionStorage } from "@remix-run/node"; -import { redirect } from "remix-typedjson"; +import { json, createCookieSessionStorage, type Session } from "@remix-run/node"; +import { redirect, typedjson } from "remix-typedjson"; +import { ButtonVariant } from "~/components/primitives/Buttons"; import { env } from 
"~/env.server"; +import { type FeedbackType } from "~/routes/resources.feedback"; export type ToastMessage = { message: string; @@ -9,9 +10,26 @@ export type ToastMessage = { options: Required; }; +export type ToastMessageAction = { + label: string; + variant?: ButtonVariant; + action: + | { + type: "link"; + path: string; + } + | { + type: "help"; + feedbackType: FeedbackType; + }; +}; + export type ToastMessageOptions = { + title?: string; /** Ephemeral means it disappears after a delay, defaults to true */ ephemeral?: boolean; + /** This display a button and make it not ephemeral, unless ephemeral is explicitlyset to false */ + action?: ToastMessageAction; }; const ONE_YEAR = 1000 * 60 * 60 * 24 * 365; @@ -36,6 +54,7 @@ export function setSuccessMessage( message, type: "success", options: { + ...options, ephemeral: options?.ephemeral ?? true, }, } as ToastMessage); @@ -46,6 +65,7 @@ export function setErrorMessage(session: Session, message: string, options?: Toa message, type: "error", options: { + ...options, ephemeral: options?.ephemeral ?? true, }, } as ToastMessage); @@ -121,6 +141,44 @@ export async function jsonWithErrorMessage( }); } +export async function typedJsonWithSuccessMessage( + data: T, + request: Request, + message: string, + options?: ToastMessageOptions +) { + const session = await getSession(request.headers.get("cookie")); + + setSuccessMessage(session, message, options); + + return typedjson(data, { + headers: { + "Set-Cookie": await commitSession(session, { + expires: new Date(Date.now() + ONE_YEAR), + }), + }, + }); +} + +export async function typedJsonWithErrorMessage( + data: T, + request: Request, + message: string, + options?: ToastMessageOptions +) { + const session = await getSession(request.headers.get("cookie")); + + setErrorMessage(session, message, options); + + return typedjson(data, { + headers: { + "Set-Cookie": await commitSession(session, { + expires: new Date(Date.now() + ONE_YEAR), + }), + }, + }); +} + export async function redirectWithSuccessMessage( path: string, request: Request, diff --git a/apps/webapp/app/models/organization.server.ts b/apps/webapp/app/models/organization.server.ts index 9309e66179..66b1d5c5b2 100644 --- a/apps/webapp/app/models/organization.server.ts +++ b/apps/webapp/app/models/organization.server.ts @@ -12,7 +12,7 @@ import { prisma, type PrismaClientOrTransaction } from "~/db.server"; import { env } from "~/env.server"; import { featuresForUrl } from "~/features.server"; import { createApiKeyForEnv, createPkApiKeyForEnv, envSlug } from "./api-key.server"; - +import { getDefaultEnvironmentConcurrencyLimit } from "~/services/platform.v3.server"; export type { Organization }; const nanoid = customAlphabet("1234567890abcdef", 4); @@ -66,7 +66,7 @@ export async function createOrganization( role: "ADMIN", }, }, - v3Enabled: !features.isManagedCloud, + v3Enabled: true, }, include: { members: true, @@ -96,6 +96,8 @@ export async function createEnvironment({ const pkApiKey = createPkApiKeyForEnv(type); const shortcode = createShortcode().join("-"); + const limit = await getDefaultEnvironmentConcurrencyLimit(organization.id, type); + return await prismaClient.runtimeEnvironment.create({ data: { slug, @@ -103,7 +105,7 @@ export async function createEnvironment({ pkApiKey, shortcode, autoEnableInternalSources: type !== "DEVELOPMENT", - maximumConcurrencyLimit: organization.maximumConcurrencyLimit / 3, + maximumConcurrencyLimit: limit, organization: { connect: { id: organization.id, diff --git 
a/apps/webapp/app/models/project.server.ts b/apps/webapp/app/models/project.server.ts index 10a2f0c02a..736df96ba1 100644 --- a/apps/webapp/app/models/project.server.ts +++ b/apps/webapp/app/models/project.server.ts @@ -16,12 +16,26 @@ type Options = { version: "v2" | "v3"; }; +export class ExceededProjectLimitError extends Error { + constructor(message: string) { + super(message); + this.name = "ExceededProjectLimitError"; + } +} + export async function createProject( { organizationSlug, name, userId, version }: Options, attemptCount = 0 ): Promise { //check the user has permissions to do this const organization = await prisma.organization.findFirst({ + select: { + id: true, + slug: true, + v3Enabled: true, + maximumConcurrencyLimit: true, + maximumProjectCount: true, + }, where: { slug: organizationSlug, members: { some: { userId } }, @@ -40,6 +54,19 @@ export async function createProject( } } + const projectCount = await prisma.project.count({ + where: { + organizationId: organization.id, + deletedAt: null, + }, + }); + + if (projectCount >= organization.maximumProjectCount) { + throw new ExceededProjectLimitError( + `This organization has reached the maximum number of projects (${organization.maximumProjectCount}).` + ); + } + //ensure the slug is globally unique const uniqueProjectSlug = `${slug(name)}-${nanoid(4)}`; const projectWithSameSlug = await prisma.project.findFirst({ diff --git a/apps/webapp/app/models/runtimeEnvironment.server.ts b/apps/webapp/app/models/runtimeEnvironment.server.ts index adde2db5ca..67119acd08 100644 --- a/apps/webapp/app/models/runtimeEnvironment.server.ts +++ b/apps/webapp/app/models/runtimeEnvironment.server.ts @@ -37,7 +37,7 @@ export async function findEnvironmentByApiKey( if (environment.type === "PREVIEW") { if (!branchName) { - logger.error("findEnvironmentByApiKey(): Preview env with no branch name provided", { + logger.warn("findEnvironmentByApiKey(): Preview env with no branch name provided", { environmentId: environment.id, }); return null; diff --git a/apps/webapp/app/models/schedules.server.ts b/apps/webapp/app/models/schedules.server.ts new file mode 100644 index 0000000000..58e4d9a870 --- /dev/null +++ b/apps/webapp/app/models/schedules.server.ts @@ -0,0 +1,37 @@ +import { Prisma } from "~/db.server"; + +export function scheduleUniqWhereClause( + projectId: string, + scheduleId: string +): Prisma.TaskScheduleWhereUniqueInput { + if (scheduleId.startsWith("sched_")) { + return { + friendlyId: scheduleId, + projectId, + }; + } + + return { + projectId_deduplicationKey: { + projectId, + deduplicationKey: scheduleId, + }, + }; +} + +export function scheduleWhereClause( + projectId: string, + scheduleId: string +): Prisma.TaskScheduleWhereInput { + if (scheduleId.startsWith("sched_")) { + return { + friendlyId: scheduleId, + projectId, + }; + } + + return { + projectId, + deduplicationKey: scheduleId, + }; +} diff --git a/apps/webapp/app/models/taskRun.server.ts b/apps/webapp/app/models/taskRun.server.ts index cfd13a424b..1035c1b7ad 100644 --- a/apps/webapp/app/models/taskRun.server.ts +++ b/apps/webapp/app/models/taskRun.server.ts @@ -129,6 +129,7 @@ export function batchTaskRunItemStatusForRunStatus( case TaskRunStatus.WAITING_FOR_DEPLOY: case TaskRunStatus.WAITING_TO_RESUME: case TaskRunStatus.RETRYING_AFTER_FAILURE: + case TaskRunStatus.DEQUEUED: case TaskRunStatus.EXECUTING: case TaskRunStatus.PAUSED: case TaskRunStatus.DELAYED: diff --git a/apps/webapp/app/models/user.server.ts b/apps/webapp/app/models/user.server.ts index 
8a381a8394..3c5fbe1688 100644 --- a/apps/webapp/app/models/user.server.ts +++ b/apps/webapp/app/models/user.server.ts @@ -1,5 +1,6 @@ import type { Prisma, User } from "@trigger.dev/database"; import type { GitHubProfile } from "remix-auth-github"; +import type { GoogleProfile } from "remix-auth-google"; import { prisma } from "~/db.server"; import { env } from "~/env.server"; import { @@ -8,6 +9,8 @@ import { } from "~/services/dashboardPreferences.server"; export type { User } from "@trigger.dev/database"; import { assertEmailAllowed } from "~/utils/email"; +import { logger } from "~/services/logger.server"; + type FindOrCreateMagicLink = { authenticationMethod: "MAGIC_LINK"; email: string; @@ -20,7 +23,14 @@ type FindOrCreateGithub = { authenticationExtraParams: Record; }; -type FindOrCreateUser = FindOrCreateMagicLink | FindOrCreateGithub; +type FindOrCreateGoogle = { + authenticationMethod: "GOOGLE"; + email: User["email"]; + authenticationProfile: GoogleProfile; + authenticationExtraParams: Record; +}; + +type FindOrCreateUser = FindOrCreateMagicLink | FindOrCreateGithub | FindOrCreateGoogle; type LoggedInUser = { user: User; @@ -35,6 +45,9 @@ export async function findOrCreateUser(input: FindOrCreateUser): Promise { + assertEmailAllowed(email); + + const name = authenticationProfile._json.name; + let avatarUrl: string | undefined = undefined; + if (authenticationProfile.photos[0]) { + avatarUrl = authenticationProfile.photos[0].value; + } + const displayName = authenticationProfile.displayName; + const authProfile = authenticationProfile + ? (authenticationProfile as unknown as Prisma.JsonObject) + : undefined; + const authExtraParams = authenticationExtraParams + ? (authenticationExtraParams as unknown as Prisma.JsonObject) + : undefined; + + const authIdentifier = `google:${authenticationProfile.id}`; + + const existingUser = await prisma.user.findUnique({ + where: { + authIdentifier, + }, + }); + + const existingEmailUser = await prisma.user.findUnique({ + where: { + email, + }, + }); + + if (existingEmailUser && !existingUser) { + // Link existing email account to Google auth, preserving original authenticationMethod + const user = await prisma.user.update({ + where: { + email, + }, + data: { + authenticationProfile: authProfile, + authenticationExtraParams: authExtraParams, + avatarUrl, + authIdentifier, + }, + }); + + return { + user, + isNewUser: false, + }; + } + + if (existingEmailUser && existingUser) { + // Check if email user and auth user are the same + if (existingEmailUser.id !== existingUser.id) { + // Different users: email is taken by one user, Google auth belongs to another + logger.error( + `Google auth conflict: Google ID ${authenticationProfile.id} belongs to user ${existingUser.id} but email ${email} is taken by user ${existingEmailUser.id}`, + { + email, + existingEmailUserId: existingEmailUser.id, + existingAuthUserId: existingUser.id, + authIdentifier, + } + ); + + return { + user: existingUser, + isNewUser: false, + }; + } + + // Same user: update all profile fields + const user = await prisma.user.update({ + where: { + id: existingUser.id, + }, + data: { + email, + displayName, + name, + avatarUrl, + authenticationProfile: authProfile, + authenticationExtraParams: authExtraParams, + }, + }); + + return { + user, + isNewUser: false, + }; + } + + // When the IDP user (Google) already exists, the "update" path will be taken and the email will be updated + // It's not possible that the email is already taken by a different user because that would have been 
handled + // by one of the if statements above. + const user = await prisma.user.upsert({ + where: { + authIdentifier, + }, + update: { + email, + displayName, + name, + avatarUrl, + authenticationProfile: authProfile, + authenticationExtraParams: authExtraParams, + }, + create: { + authenticationProfile: authProfile, + authenticationExtraParams: authExtraParams, + name, + avatarUrl, + displayName, + authIdentifier, + email, + authenticationMethod: "GOOGLE", + }, + }); + + return { + user, + isNewUser: !existingUser, + }; +} + export type UserWithDashboardPreferences = User & { dashboardPreferences: DashboardPreferences; }; diff --git a/apps/webapp/app/presenters/RunFilters.server.ts b/apps/webapp/app/presenters/RunFilters.server.ts new file mode 100644 index 0000000000..8d70b4d3bd --- /dev/null +++ b/apps/webapp/app/presenters/RunFilters.server.ts @@ -0,0 +1,57 @@ +import { + getRunFiltersFromSearchParams, + TaskRunListSearchFilters, +} from "~/components/runs/v3/RunFilters"; +import { getRootOnlyFilterPreference } from "~/services/preferences/uiPreferences.server"; +import { type ParsedRunFilters } from "~/services/runsRepository/runsRepository.server"; + +type FiltersFromRequest = ParsedRunFilters & Required>; + +export async function getRunFiltersFromRequest(request: Request): Promise { + const url = new URL(request.url); + let rootOnlyValue = false; + if (url.searchParams.has("rootOnly")) { + rootOnlyValue = url.searchParams.get("rootOnly") === "true"; + } else { + rootOnlyValue = await getRootOnlyFilterPreference(request); + } + + const s = getRunFiltersFromSearchParams(url.searchParams); + + const { + tasks, + versions, + statuses, + tags, + period, + bulkId, + from, + to, + cursor, + direction, + runId, + batchId, + scheduleId, + queues, + machines, + } = TaskRunListSearchFilters.parse(s); + + return { + tasks, + versions, + statuses, + tags, + period, + bulkId, + from, + to, + batchId, + runId, + scheduleId, + rootOnly: rootOnlyValue, + direction: direction, + cursor: cursor, + queues, + machines, + }; +} diff --git a/apps/webapp/app/presenters/v3/ApiRetrieveRunPresenter.server.ts b/apps/webapp/app/presenters/v3/ApiRetrieveRunPresenter.server.ts index 52c93bc6f0..671496586a 100644 --- a/apps/webapp/app/presenters/v3/ApiRetrieveRunPresenter.server.ts +++ b/apps/webapp/app/presenters/v3/ApiRetrieveRunPresenter.server.ts @@ -1,6 +1,5 @@ import { AttemptStatus, - RetrieveRunResponse, RunStatus, SerializedError, TaskRunError, @@ -8,14 +7,16 @@ import { conditionallyImportPacket, createJsonErrorObject, logger, - parsePacket, } from "@trigger.dev/core/v3"; +import { parsePacketAsJson } from "@trigger.dev/core/v3/utils/ioSerialization"; import { Prisma, TaskRunAttemptStatus, TaskRunStatus } from "@trigger.dev/database"; import assertNever from "assert-never"; +import { API_VERSIONS, CURRENT_API_VERSION, RunStatusUnspecifiedApiVersion } from "~/api/versions"; +import { $replica, prisma } from "~/db.server"; import { AuthenticatedEnvironment } from "~/services/apiAuth.server"; import { generatePresignedUrl } from "~/v3/r2.server"; -import { BasePresenter } from "./basePresenter.server"; -import { $replica, prisma } from "~/db.server"; +import { tracer } from "~/v3/tracer.server"; +import { startSpanWithEnv } from "~/v3/tracing.server"; // Build 'select' object const commonRunSelect = { @@ -63,7 +64,9 @@ type CommonRelatedRun = Prisma.Result< type FoundRun = NonNullable>>; -export class ApiRetrieveRunPresenter extends BasePresenter { +export class ApiRetrieveRunPresenter { + 
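// Status mapping is now API-version aware: callers on the current API version receive the new v2 statuses (DEQUEUED, WAITING, PENDING_VERSION), while older callers keep the legacy v1 values (see apiStatusFromRunStatusV1 and apiStatusFromRunStatusV2 below). +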
constructor(private readonly apiVersion: API_VERSIONS) {} + public static async findRun(friendlyId: string, env: AuthenticatedEnvironment) { return $replica.taskRun.findFirst({ where: { @@ -72,6 +75,7 @@ export class ApiRetrieveRunPresenter extends BasePresenter { }, select: { ...commonRunSelect, + traceId: true, payload: true, payloadType: true, output: true, @@ -98,11 +102,8 @@ export class ApiRetrieveRunPresenter extends BasePresenter { }); } - public async call( - taskRun: FoundRun, - env: AuthenticatedEnvironment - ): Promise { - return this.traceWithEnv("call", env, async (span) => { + public async call(taskRun: FoundRun, env: AuthenticatedEnvironment) { + return startSpanWithEnv(tracer, "ApiRetrieveRunPresenter.call", env, async () => { let $payload: any; let $payloadPresignedUrl: string | undefined; let $output: any; @@ -133,7 +134,7 @@ export class ApiRetrieveRunPresenter extends BasePresenter { }); } } else { - $payload = await parsePacket(payloadPacket); + $payload = await parsePacketAsJson(payloadPacket); } if (taskRun.status === "COMPLETED_SUCCESSFULLY") { @@ -162,12 +163,12 @@ export class ApiRetrieveRunPresenter extends BasePresenter { }); } } else { - $output = await parsePacket(outputPacket); + $output = await parsePacketAsJson(outputPacket); } } return { - ...(await createCommonRunStructure(taskRun)), + ...(await createCommonRunStructure(taskRun, this.apiVersion)), payload: $payload, payloadPresignedUrl: $payloadPresignedUrl, output: $output, @@ -180,13 +181,13 @@ export class ApiRetrieveRunPresenter extends BasePresenter { attempts: [], relatedRuns: { root: taskRun.rootTaskRun - ? await createCommonRunStructure(taskRun.rootTaskRun) + ? await createCommonRunStructure(taskRun.rootTaskRun, this.apiVersion) : undefined, parent: taskRun.parentTaskRun - ? await createCommonRunStructure(taskRun.parentTaskRun) + ? 
await createCommonRunStructure(taskRun.parentTaskRun, this.apiVersion) : undefined, children: await Promise.all( - taskRun.childRuns.map(async (r) => await createCommonRunStructure(r)) + taskRun.childRuns.map(async (r) => await createCommonRunStructure(r, this.apiVersion)) ), }, }; @@ -205,7 +206,7 @@ export class ApiRetrieveRunPresenter extends BasePresenter { } } - static isStatusFinished(status: RunStatus) { + static isStatusFinished(status: RunStatus | RunStatusUnspecifiedApiVersion) { return ( status === "COMPLETED" || status === "FAILED" || @@ -216,7 +217,21 @@ export class ApiRetrieveRunPresenter extends BasePresenter { ); } - static apiStatusFromRunStatus(status: TaskRunStatus): RunStatus { + static apiStatusFromRunStatus( + status: TaskRunStatus, + apiVersion: API_VERSIONS + ): RunStatus | RunStatusUnspecifiedApiVersion { + switch (apiVersion) { + case CURRENT_API_VERSION: { + return this.apiStatusFromRunStatusV2(status); + } + default: { + return this.apiStatusFromRunStatusV1(status); + } + } + } + + static apiStatusFromRunStatusV1(status: TaskRunStatus): RunStatusUnspecifiedApiVersion { switch (status) { case "DELAYED": { return "DELAYED"; @@ -237,6 +252,7 @@ export class ApiRetrieveRunPresenter extends BasePresenter { case "RETRYING_AFTER_FAILURE": { return "REATTEMPTING"; } + case "DEQUEUED": case "EXECUTING": { return "EXECUTING"; } @@ -270,19 +286,76 @@ export class ApiRetrieveRunPresenter extends BasePresenter { } } - static apiBooleanHelpersFromTaskRunStatus(status: TaskRunStatus) { + static apiStatusFromRunStatusV2(status: TaskRunStatus): RunStatus { + switch (status) { + case "DELAYED": { + return "DELAYED"; + } + case "PENDING_VERSION": { + return "PENDING_VERSION"; + } + case "WAITING_FOR_DEPLOY": { + return "PENDING_VERSION"; + } + case "PENDING": { + return "QUEUED"; + } + case "PAUSED": + case "WAITING_TO_RESUME": { + return "WAITING"; + } + case "DEQUEUED": { + return "DEQUEUED"; + } + case "RETRYING_AFTER_FAILURE": + case "EXECUTING": { + return "EXECUTING"; + } + case "CANCELED": { + return "CANCELED"; + } + case "COMPLETED_SUCCESSFULLY": { + return "COMPLETED"; + } + case "SYSTEM_FAILURE": { + return "SYSTEM_FAILURE"; + } + case "CRASHED": { + return "CRASHED"; + } + case "INTERRUPTED": + case "COMPLETED_WITH_ERRORS": { + return "FAILED"; + } + case "EXPIRED": { + return "EXPIRED"; + } + case "TIMED_OUT": { + return "TIMED_OUT"; + } + default: { + assertNever(status); + } + } + } + + static apiBooleanHelpersFromTaskRunStatus(status: TaskRunStatus, apiVersion: API_VERSIONS) { return ApiRetrieveRunPresenter.apiBooleanHelpersFromRunStatus( - ApiRetrieveRunPresenter.apiStatusFromRunStatus(status) + ApiRetrieveRunPresenter.apiStatusFromRunStatus(status, apiVersion) ); } - static apiBooleanHelpersFromRunStatus(status: RunStatus) { + static apiBooleanHelpersFromRunStatus(status: RunStatus | RunStatusUnspecifiedApiVersion) { const isQueued = status === "QUEUED" || status === "WAITING_FOR_DEPLOY" || status === "DELAYED" || status === "PENDING_VERSION"; - const isExecuting = status === "EXECUTING" || status === "REATTEMPTING" || status === "FROZEN"; + const isExecuting = + status === "EXECUTING" || + status === "REATTEMPTING" || + status === "FROZEN" || + status === "DEQUEUED"; const isCompleted = status === "COMPLETED" || status === "CANCELED" || @@ -293,6 +366,7 @@ export class ApiRetrieveRunPresenter extends BasePresenter { const isFailed = isCompleted && status !== "COMPLETED"; const isSuccess = isCompleted && status === "COMPLETED"; const isCancelled = status === 
"CANCELED"; + const isWaiting = status === "WAITING"; return { isQueued, @@ -301,6 +375,7 @@ export class ApiRetrieveRunPresenter extends BasePresenter { isFailed, isSuccess, isCancelled, + isWaiting, }; } @@ -358,8 +433,8 @@ async function resolveSchedule(run: CommonRelatedRun) { }; } -async function createCommonRunStructure(run: CommonRelatedRun) { - const metadata = await parsePacket({ +async function createCommonRunStructure(run: CommonRelatedRun, apiVersion: API_VERSIONS) { + const metadata = await parsePacketAsJson({ data: run.metadata ?? undefined, dataType: run.metadataType, }); @@ -369,7 +444,7 @@ async function createCommonRunStructure(run: CommonRelatedRun) { taskIdentifier: run.taskIdentifier, idempotencyKey: run.idempotencyKey ?? undefined, version: run.lockedToVersion?.version, - status: ApiRetrieveRunPresenter.apiStatusFromRunStatus(run.status), + status: ApiRetrieveRunPresenter.apiStatusFromRunStatus(run.status, apiVersion), createdAt: run.createdAt, startedAt: run.startedAt ?? undefined, updatedAt: run.updatedAt, @@ -385,7 +460,7 @@ async function createCommonRunStructure(run: CommonRelatedRun) { tags: run.tags .map((t: { name: string }) => t.name) .sort((a: string, b: string) => a.localeCompare(b)), - ...ApiRetrieveRunPresenter.apiBooleanHelpersFromTaskRunStatus(run.status), + ...ApiRetrieveRunPresenter.apiBooleanHelpersFromTaskRunStatus(run.status, apiVersion), triggerFunction: resolveTriggerFunction(run), batchId: run.batch?.friendlyId, metadata, diff --git a/apps/webapp/app/presenters/v3/ApiRunListPresenter.server.ts b/apps/webapp/app/presenters/v3/ApiRunListPresenter.server.ts index 26c992d45c..254ec18d1c 100644 --- a/apps/webapp/app/presenters/v3/ApiRunListPresenter.server.ts +++ b/apps/webapp/app/presenters/v3/ApiRunListPresenter.server.ts @@ -1,18 +1,21 @@ import { type ListRunResponse, type ListRunResponseItem, + MachinePresetName, parsePacket, RunStatus, } from "@trigger.dev/core/v3"; import { type Project, type RuntimeEnvironment, type TaskRunStatus } from "@trigger.dev/database"; import assertNever from "assert-never"; import { z } from "zod"; +import { API_VERSIONS, RunStatusUnspecifiedApiVersion } from "~/api/versions"; +import { clickhouseClient } from "~/services/clickhouseInstance.server"; import { logger } from "~/services/logger.server"; import { CoercedDate } from "~/utils/zod"; +import { ServiceValidationError } from "~/v3/services/baseService.server"; import { ApiRetrieveRunPresenter } from "./ApiRetrieveRunPresenter.server"; -import { type RunListOptions, RunListPresenter } from "./RunListPresenter.server"; +import { NextRunListPresenter, type RunListOptions } from "./NextRunListPresenter.server"; import { BasePresenter } from "./basePresenter.server"; -import { ServiceValidationError } from "~/v3/services/baseService.server"; export const ApiRunListSearchParams = z.object({ "page[size]": z.coerce.number().int().positive().min(1).max(100).optional(), @@ -27,7 +30,9 @@ export const ApiRunListSearchParams = z.object({ } const statuses = value.split(","); - const parsedStatuses = statuses.map((status) => RunStatus.safeParse(status)); + const parsedStatuses = statuses.map((status) => + RunStatus.or(RunStatusUnspecifiedApiVersion).safeParse(status) + ); if (parsedStatuses.some((result) => !result.success)) { const invalidStatuses: string[] = []; @@ -105,6 +110,39 @@ export const ApiRunListSearchParams = z.object({ "filter[createdAt][to]": CoercedDate, "filter[createdAt][period]": z.string().optional(), "filter[batch]": z.string().optional(), + 
"filter[queue]": z + .string() + .optional() + .transform((value) => { + return value ? value.split(",") : undefined; + }), + "filter[machine]": z + .string() + .optional() + .transform((value, ctx) => { + const values = value ? value.split(",") : undefined; + if (!values) { + return undefined; + } + + const parsedValues = values.map((v) => MachinePresetName.safeParse(v)); + const invalidValues: string[] = []; + parsedValues.forEach((result, index) => { + if (!result.success) { + invalidValues.push(values[index]); + } + }); + if (invalidValues.length > 0) { + ctx.addIssue({ + code: z.ZodIssueCode.custom, + message: `Invalid machine values: ${invalidValues.join(", ")}`, + }); + + return z.NEVER; + } + + return parsedValues.map((result) => result.data).filter(Boolean); + }), }); type ApiRunListSearchParams = z.infer; @@ -113,8 +151,9 @@ export class ApiRunListPresenter extends BasePresenter { public async call( project: Project, searchParams: ApiRunListSearchParams, + apiVersion: API_VERSIONS, environment?: RuntimeEnvironment - ): Promise { + ) { return this.trace("call", async (span) => { const options: RunListOptions = { projectId: project.id, @@ -136,13 +175,15 @@ export class ApiRunListPresenter extends BasePresenter { } let environmentId: string | undefined; + let organizationId: string | undefined; // filters if (environment) { environmentId = environment.id; + organizationId = environment.organizationId; } else { if (searchParams["filter[env]"]) { - const environments = await this._prisma.runtimeEnvironment.findMany({ + const environments = await this._replica.runtimeEnvironment.findMany({ where: { projectId: project.id, slug: { @@ -152,6 +193,7 @@ export class ApiRunListPresenter extends BasePresenter { }); environmentId = environments.at(0)?.id; + organizationId = environments.at(0)?.organizationId; } } @@ -159,6 +201,10 @@ export class ApiRunListPresenter extends BasePresenter { throw new ServiceValidationError("No environment found"); } + if (!organizationId) { + throw new ServiceValidationError("No organization found"); + } + if (searchParams["filter[status]"]) { options.statuses = searchParams["filter[status]"].flatMap((status) => ApiRunListPresenter.apiStatusToRunStatuses(status) @@ -205,15 +251,23 @@ export class ApiRunListPresenter extends BasePresenter { options.batchId = searchParams["filter[batch]"]; } - const presenter = new RunListPresenter(); + if (searchParams["filter[queue]"]) { + options.queues = searchParams["filter[queue]"]; + } + + if (searchParams["filter[machine]"]) { + options.machines = searchParams["filter[machine]"]; + } + + const presenter = new NextRunListPresenter(this._replica, clickhouseClient); logger.debug("Calling RunListPresenter", { options }); - const results = await presenter.call(environmentId, options); + const results = await presenter.call(organizationId, environmentId, options); logger.debug("RunListPresenter results", { runs: results.runs.length }); - const data: ListRunResponseItem[] = await Promise.all( + const data = await Promise.all( results.runs.map(async (run) => { const metadata = await parsePacket( { @@ -227,7 +281,7 @@ export class ApiRunListPresenter extends BasePresenter { return { id: run.friendlyId, - status: ApiRetrieveRunPresenter.apiStatusFromRunStatus(run.status), + status: ApiRetrieveRunPresenter.apiStatusFromRunStatus(run.status, apiVersion), taskIdentifier: run.taskIdentifier, idempotencyKey: run.idempotencyKey, version: run.version ?? 
undefined, @@ -251,7 +305,7 @@ export class ApiRunListPresenter extends BasePresenter { depth: run.depth, metadata, ...ApiRetrieveRunPresenter.apiBooleanHelpersFromRunStatus( - ApiRetrieveRunPresenter.apiStatusFromRunStatus(run.status) + ApiRetrieveRunPresenter.apiStatusFromRunStatus(run.status, apiVersion) ), }; }) @@ -267,7 +321,9 @@ export class ApiRunListPresenter extends BasePresenter { }); } - static apiStatusToRunStatuses(status: RunStatus): TaskRunStatus[] | TaskRunStatus { + static apiStatusToRunStatuses( + status: RunStatus | RunStatusUnspecifiedApiVersion + ): TaskRunStatus[] | TaskRunStatus { switch (status) { case "DELAYED": return "DELAYED"; @@ -313,6 +369,12 @@ export class ApiRunListPresenter extends BasePresenter { case "TIMED_OUT": { return "TIMED_OUT"; } + case "DEQUEUED": { + return "DEQUEUED"; + } + case "WAITING": { + return "WAITING_TO_RESUME"; + } default: { assertNever(status); } diff --git a/apps/webapp/app/presenters/v3/BatchListPresenter.server.ts b/apps/webapp/app/presenters/v3/BatchListPresenter.server.ts index c223b2de80..83de5f36d1 100644 --- a/apps/webapp/app/presenters/v3/BatchListPresenter.server.ts +++ b/apps/webapp/app/presenters/v3/BatchListPresenter.server.ts @@ -9,10 +9,10 @@ import { timeFilters } from "~/components/runs/v3/SharedFilters"; export type BatchListOptions = { userId?: string; projectId: string; + environmentId: string; //filters friendlyId?: string; statuses?: BatchTaskRunStatus[]; - environments?: string[]; period?: string; from?: number; to?: number; @@ -34,7 +34,7 @@ export class BatchListPresenter extends BasePresenter { projectId, friendlyId, statuses, - environments, + environmentId, period, from, to, @@ -81,16 +81,6 @@ export class BatchListPresenter extends BasePresenter { }, }); - let environmentIds = project.environments.map((e) => e.id); - if (environments && environments.length > 0) { - //if environments are passed in, we only include them if they're in the project - environmentIds = environments.filter((e) => project.environments.some((pe) => pe.id === e)); - } - - if (environmentIds.length === 0) { - throw new Error("No matching environments found for the project"); - } - const periodMs = time.period ? 
parse(time.period) : undefined; //get the batches @@ -120,8 +110,8 @@ export class BatchListPresenter extends BasePresenter { FROM ${sqlDatabaseSchema}."BatchTaskRun" b WHERE - -- environments - b."runtimeEnvironmentId" IN (${Prisma.join(environmentIds)}) + -- environment + b."runtimeEnvironmentId" = ${environmentId} -- cursor ${ cursor @@ -186,9 +176,7 @@ WHERE if (!hasAnyBatches) { const firstBatch = await this._replica.batchTaskRun.findFirst({ where: { - runtimeEnvironmentId: { - in: environmentIds, - }, + runtimeEnvironmentId: environmentId, }, }); @@ -207,7 +195,7 @@ WHERE throw new Error(`Environment not found for Batch ${batch.id}`); } - const hasFinished = batch.status !== "PENDING"; + const hasFinished = batch.status !== "PENDING" && batch.status !== "PROCESSING"; return { id: batch.id, @@ -233,7 +221,6 @@ WHERE filters: { friendlyId, statuses: statuses || [], - environments: environments || [], }, hasFilters, hasAnyBatches, diff --git a/apps/webapp/app/presenters/v3/BatchPresenter.server.ts b/apps/webapp/app/presenters/v3/BatchPresenter.server.ts new file mode 100644 index 0000000000..bf4298508b --- /dev/null +++ b/apps/webapp/app/presenters/v3/BatchPresenter.server.ts @@ -0,0 +1,122 @@ +import { type BatchTaskRunStatus } from "@trigger.dev/database"; +import { displayableEnvironment } from "~/models/runtimeEnvironment.server"; +import { engine } from "~/v3/runEngine.server"; +import { BasePresenter } from "./basePresenter.server"; + +type BatchPresenterOptions = { + environmentId: string; + batchId: string; + userId?: string; +}; + +export type BatchPresenterData = Awaited>; + +export class BatchPresenter extends BasePresenter { + public async call({ environmentId, batchId, userId }: BatchPresenterOptions) { + const batch = await this._replica.batchTaskRun.findFirst({ + select: { + id: true, + friendlyId: true, + status: true, + runCount: true, + batchVersion: true, + createdAt: true, + updatedAt: true, + completedAt: true, + processingStartedAt: true, + processingCompletedAt: true, + successfulRunCount: true, + failedRunCount: true, + idempotencyKey: true, + runtimeEnvironment: { + select: { + id: true, + type: true, + slug: true, + orgMember: { + select: { + user: { + select: { + id: true, + name: true, + displayName: true, + }, + }, + }, + }, + }, + }, + errors: { + select: { + id: true, + index: true, + taskIdentifier: true, + error: true, + errorCode: true, + createdAt: true, + }, + orderBy: { + index: "asc", + }, + }, + }, + where: { + runtimeEnvironmentId: environmentId, + friendlyId: batchId, + }, + }); + + if (!batch) { + throw new Error("Batch not found"); + } + + const hasFinished = batch.status !== "PENDING" && batch.status !== "PROCESSING"; + const isV2 = batch.batchVersion === "runengine:v2"; + + // For v2 batches in PROCESSING state, get live progress from Redis + // This provides real-time updates without waiting for the batch to complete + let liveSuccessCount = batch.successfulRunCount ?? 0; + let liveFailureCount = batch.failedRunCount ?? 
0; + + if (isV2 && batch.status === "PROCESSING") { + const liveProgress = await engine.getBatchQueueProgress(batch.id); + if (liveProgress) { + liveSuccessCount = liveProgress.successCount; + liveFailureCount = liveProgress.failureCount; + } + } + + return { + id: batch.id, + friendlyId: batch.friendlyId, + status: batch.status as BatchTaskRunStatus, + runCount: batch.runCount, + batchVersion: batch.batchVersion, + isV2, + createdAt: batch.createdAt.toISOString(), + updatedAt: batch.updatedAt.toISOString(), + completedAt: batch.completedAt?.toISOString(), + processingStartedAt: batch.processingStartedAt?.toISOString(), + processingCompletedAt: batch.processingCompletedAt?.toISOString(), + finishedAt: batch.completedAt + ? batch.completedAt.toISOString() + : hasFinished + ? batch.updatedAt.toISOString() + : undefined, + hasFinished, + successfulRunCount: liveSuccessCount, + failedRunCount: liveFailureCount, + idempotencyKey: batch.idempotencyKey, + environment: displayableEnvironment(batch.runtimeEnvironment, userId), + errors: batch.errors.map((error) => ({ + id: error.id, + index: error.index, + taskIdentifier: error.taskIdentifier, + error: error.error, + errorCode: error.errorCode, + createdAt: error.createdAt.toISOString(), + })), + }; + } +} + diff --git a/apps/webapp/app/presenters/v3/BranchesPresenter.server.ts b/apps/webapp/app/presenters/v3/BranchesPresenter.server.ts index 9951fe4b40..4aafc1844b 100644 --- a/apps/webapp/app/presenters/v3/BranchesPresenter.server.ts +++ b/apps/webapp/app/presenters/v3/BranchesPresenter.server.ts @@ -1,6 +1,6 @@ import { GitMeta } from "@trigger.dev/core/v3"; import { type z } from "zod"; -import { Prisma, type PrismaClient, prisma } from "~/db.server"; +import { type Prisma, type PrismaClient, prisma } from "~/db.server"; import { type Project } from "~/models/project.server"; import { type User } from "~/models/user.server"; import { type BranchesOptions } from "~/routes/_app.orgs.$organizationSlug.projects.$projectParam.env.$envParam.branches/route"; @@ -38,6 +38,13 @@ export type GitMetaLinks = { commitMessage: string; /** The commit author */ commitAuthor: string; + + /** The git provider, e.g., `github` */ + provider?: string; + + source?: "trigger_github_app" | "github_actions" | "local"; + ghUsername?: string; + ghUserAvatarUrl?: string; }; export class BranchesPresenter { @@ -239,5 +246,9 @@ export function processGitMetadata(data: Prisma.JsonValue): GitMetaLinks | null isDirty: parsed.data.dirty ?? false, commitMessage: parsed.data.commitMessage ?? "", commitAuthor: parsed.data.commitAuthorName ?? 
"", + provider: parsed.data.provider, + source: parsed.data.source, + ghUsername: parsed.data.ghUsername, + ghUserAvatarUrl: parsed.data.ghUserAvatarUrl, }; } diff --git a/apps/webapp/app/presenters/v3/BulkActionListPresenter.server.ts b/apps/webapp/app/presenters/v3/BulkActionListPresenter.server.ts new file mode 100644 index 0000000000..a4c6ef7b62 --- /dev/null +++ b/apps/webapp/app/presenters/v3/BulkActionListPresenter.server.ts @@ -0,0 +1,62 @@ +import { getUsername } from "~/utils/username"; +import { BasePresenter } from "./basePresenter.server"; + +type BulkActionListOptions = { + environmentId: string; + page?: number; +}; + +const DEFAULT_PAGE_SIZE = 25; + +export type BulkActionListItem = Awaited< + ReturnType +>["bulkActions"][number]; + +export class BulkActionListPresenter extends BasePresenter { + public async call({ environmentId, page }: BulkActionListOptions) { + const totalCount = await this._replica.bulkActionGroup.count({ + where: { + environmentId, + }, + }); + + const bulkActions = await this._replica.bulkActionGroup.findMany({ + select: { + friendlyId: true, + name: true, + status: true, + type: true, + createdAt: true, + completedAt: true, + totalCount: true, + user: { + select: { + name: true, + displayName: true, + avatarUrl: true, + }, + }, + }, + where: { + environmentId, + }, + orderBy: { + createdAt: "desc", + }, + skip: ((page ?? 1) - 1) * DEFAULT_PAGE_SIZE, + take: DEFAULT_PAGE_SIZE, + }); + + return { + currentPage: page ?? 1, + totalPages: Math.ceil(totalCount / DEFAULT_PAGE_SIZE), + totalCount: totalCount, + bulkActions: bulkActions.map((bulkAction) => ({ + ...bulkAction, + user: bulkAction.user + ? { name: getUsername(bulkAction.user), avatarUrl: bulkAction.user.avatarUrl } + : undefined, + })), + }; + } +} diff --git a/apps/webapp/app/presenters/v3/BulkActionPresenter.server.ts b/apps/webapp/app/presenters/v3/BulkActionPresenter.server.ts new file mode 100644 index 0000000000..f98d0819cb --- /dev/null +++ b/apps/webapp/app/presenters/v3/BulkActionPresenter.server.ts @@ -0,0 +1,73 @@ +import { getUsername } from "~/utils/username"; +import { BasePresenter } from "./basePresenter.server"; +import { type BulkActionMode } from "~/components/BulkActionFilterSummary"; +import { parseRunListInputOptions } from "~/services/runsRepository/runsRepository.server"; +import { TaskRunListSearchFilters } from "~/components/runs/v3/RunFilters"; + +type BulkActionOptions = { + environmentId: string; + bulkActionId: string; +}; + +export class BulkActionPresenter extends BasePresenter { + public async call({ environmentId, bulkActionId }: BulkActionOptions) { + const bulkAction = await this._replica.bulkActionGroup.findFirst({ + select: { + friendlyId: true, + name: true, + status: true, + type: true, + createdAt: true, + completedAt: true, + totalCount: true, + successCount: true, + failureCount: true, + user: { + select: { + name: true, + displayName: true, + avatarUrl: true, + }, + }, + params: true, + project: { + select: { + id: true, + organizationId: true, + }, + }, + }, + where: { + environmentId, + friendlyId: bulkActionId, + }, + }); + + if (!bulkAction) { + throw new Error("Bulk action not found"); + } + + //parse filters + const filtersParsed = TaskRunListSearchFilters.safeParse( + bulkAction.params && typeof bulkAction.params === "object" ? 
bulkAction.params : {} + ); + + let mode: BulkActionMode = "filter"; + if ( + filtersParsed.success && + Object.keys(filtersParsed.data).length === 1 && + filtersParsed.data.runId?.length + ) { + mode = "selected"; + } + + return { + ...bulkAction, + user: bulkAction.user + ? { name: getUsername(bulkAction.user), avatarUrl: bulkAction.user.avatarUrl } + : undefined, + filters: filtersParsed.data ?? {}, + mode, + }; + } +} diff --git a/apps/webapp/app/presenters/v3/CreateBulkActionPresenter.server.ts b/apps/webapp/app/presenters/v3/CreateBulkActionPresenter.server.ts new file mode 100644 index 0000000000..acf511f0f5 --- /dev/null +++ b/apps/webapp/app/presenters/v3/CreateBulkActionPresenter.server.ts @@ -0,0 +1,46 @@ +import { type PrismaClient } from "@trigger.dev/database"; +import { CreateBulkActionSearchParams } from "~/routes/resources.orgs.$organizationSlug.projects.$projectParam.env.$envParam.runs.bulkaction"; +import { clickhouseClient } from "~/services/clickhouseInstance.server"; +import { RunsRepository } from "~/services/runsRepository/runsRepository.server"; +import { getRunFiltersFromRequest } from "../RunFilters.server"; +import { BasePresenter } from "./basePresenter.server"; + +type CreateBulkActionOptions = { + organizationId: string; + projectId: string; + environmentId: string; + request: Request; +}; + +export class CreateBulkActionPresenter extends BasePresenter { + public async call({ + organizationId, + projectId, + environmentId, + request, + }: CreateBulkActionOptions) { + const filters = await getRunFiltersFromRequest(request); + const { mode, action } = CreateBulkActionSearchParams.parse( + Object.fromEntries(new URL(request.url).searchParams) + ); + + const runsRepository = new RunsRepository({ + clickhouse: clickhouseClient, + prisma: this._replica as PrismaClient, + }); + + const count = await runsRepository.countRuns({ + organizationId, + projectId, + environmentId, + ...filters, + }); + + return { + filters, + mode, + action, + count, + }; + } +} diff --git a/apps/webapp/app/presenters/v3/DeploymentListPresenter.server.ts b/apps/webapp/app/presenters/v3/DeploymentListPresenter.server.ts index c63bb3f5b0..0b920e2942 100644 --- a/apps/webapp/app/presenters/v3/DeploymentListPresenter.server.ts +++ b/apps/webapp/app/presenters/v3/DeploymentListPresenter.server.ts @@ -1,5 +1,5 @@ import { - Prisma, + type Prisma, type WorkerDeploymentStatus, type WorkerInstanceGroupType, } from "@trigger.dev/database"; @@ -9,6 +9,7 @@ import { type Project } from "~/models/project.server"; import { findEnvironmentBySlug } from "~/models/runtimeEnvironment.server"; import { type User } from "~/models/user.server"; import { processGitMetadata } from "./BranchesPresenter.server"; +import { BranchTrackingConfigSchema, getTrackedBranchForEnvironment } from "~/v3/github"; const pageSize = 20; @@ -56,6 +57,18 @@ export class DeploymentListPresenter { }, }, }, + connectedGithubRepository: { + select: { + branchTracking: true, + previewDeploymentsEnabled: true, + repository: { + select: { + htmlUrl: true, + fullName: true, + }, + }, + }, + }, }, where: { slug: projectSlug, @@ -97,6 +110,8 @@ export class DeploymentListPresenter { id: string; shortCode: string; version: string; + runtime: string | null; + runtimeVersion: string | null; status: WorkerDeploymentStatus; environmentId: string; builtAt: Date | null; @@ -114,6 +129,8 @@ export class DeploymentListPresenter { wd."id", wd."shortCode", wd."version", + wd."runtime", + wd."runtimeVersion", (SELECT COUNT(*) FROM 
${sqlDatabaseSchema}."BackgroundWorkerTask" WHERE "BackgroundWorkerTask"."workerId" = wd."workerId") AS "tasksCount", wd."environmentId", wd."status", @@ -127,7 +144,7 @@ export class DeploymentListPresenter { wd."git" FROM ${sqlDatabaseSchema}."WorkerDeployment" as wd -INNER JOIN +LEFT JOIN ${sqlDatabaseSchema}."User" as u ON wd."triggeredById" = u."id" WHERE wd."projectId" = ${project.id} @@ -136,9 +153,28 @@ ORDER BY string_to_array(wd."version", '.')::int[] DESC LIMIT ${pageSize} OFFSET ${pageSize * (page - 1)};`; + const { connectedGithubRepository } = project; + + const branchTrackingOrError = + connectedGithubRepository && + BranchTrackingConfigSchema.safeParse(connectedGithubRepository.branchTracking); + const environmentGitHubBranch = + branchTrackingOrError && branchTrackingOrError.success + ? getTrackedBranchForEnvironment( + branchTrackingOrError.data, + connectedGithubRepository.previewDeploymentsEnabled, + { + type: environment.type, + branchName: environment.branchName ?? undefined, + } + ) + : undefined; + return { currentPage: page, totalPages: Math.ceil(totalCount / pageSize), + connectedGithubRepository: project.connectedGithubRepository ?? undefined, + environmentGitHubBranch, deployments: deployments.map((deployment, index) => { const label = labeledDeployments.find( (labeledDeployment) => labeledDeployment.deploymentId === deployment.id @@ -148,6 +184,8 @@ LIMIT ${pageSize} OFFSET ${pageSize * (page - 1)};`; id: deployment.id, shortCode: deployment.shortCode, version: deployment.version, + runtime: deployment.runtime, + runtimeVersion: deployment.runtimeVersion, status: deployment.status, builtAt: deployment.builtAt, deployedAt: deployment.deployedAt, diff --git a/apps/webapp/app/presenters/v3/DeploymentPresenter.server.ts b/apps/webapp/app/presenters/v3/DeploymentPresenter.server.ts index e62a68f844..ea59c65722 100644 --- a/apps/webapp/app/presenters/v3/DeploymentPresenter.server.ts +++ b/apps/webapp/app/presenters/v3/DeploymentPresenter.server.ts @@ -1,9 +1,10 @@ import { + BuildServerMetadata, DeploymentErrorData, ExternalBuildData, prepareDeploymentError, } from "@trigger.dev/core/v3"; -import { RuntimeEnvironment, type WorkerDeployment } from "@trigger.dev/database"; +import { type RuntimeEnvironment, type WorkerDeployment } from "@trigger.dev/database"; import { type PrismaClient, prisma } from "~/db.server"; import { type Organization } from "~/models/organization.server"; import { type Project } from "~/models/project.server"; @@ -11,6 +12,24 @@ import { findEnvironmentBySlug } from "~/models/runtimeEnvironment.server"; import { type User } from "~/models/user.server"; import { getUsername } from "~/utils/username"; import { processGitMetadata } from "./BranchesPresenter.server"; +import { S2 } from "@s2-dev/streamstore"; +import { env } from "~/env.server"; +import { createRedisClient } from "~/redis.server"; +import { tryCatch } from "@trigger.dev/core"; +import { logger } from "~/services/logger.server"; + +const S2_TOKEN_KEY_PREFIX = "s2-token:project:"; + +const s2TokenRedis = createRedisClient("s2-token-cache", { + host: env.CACHE_REDIS_HOST, + port: env.CACHE_REDIS_PORT, + username: env.CACHE_REDIS_USERNAME, + password: env.CACHE_REDIS_PASSWORD, + tlsDisabled: env.CACHE_REDIS_TLS_DISABLED === "true", + clusterMode: env.CACHE_REDIS_CLUSTER_MODE_ENABLED === "1", +}); + +const s2 = env.S2_ENABLED === "1" ? 
new S2({ accessToken: env.S2_ACCESS_TOKEN }) : undefined; export type ErrorData = { name: string; @@ -43,6 +62,7 @@ export class DeploymentPresenter { select: { id: true, organizationId: true, + externalRef: true, }, where: { slug: projectSlug, @@ -72,6 +92,8 @@ export class DeploymentPresenter { id: true, shortCode: true, version: true, + runtime: true, + runtimeVersion: true, errorData: true, imageReference: true, imagePlatform: true, @@ -100,6 +122,10 @@ export class DeploymentPresenter { builtAt: true, deployedAt: true, createdAt: true, + startedAt: true, + installedAt: true, + canceledAt: true, + canceledReason: true, git: true, promotions: { select: { @@ -129,22 +155,52 @@ export class DeploymentPresenter { avatarUrl: true, }, }, + buildServerMetadata: true, }, }); + const gitMetadata = processGitMetadata(deployment.git); const externalBuildData = deployment.externalBuildData ? ExternalBuildData.safeParse(deployment.externalBuildData) : undefined; + const buildServerMetadata = deployment.buildServerMetadata + ? BuildServerMetadata.safeParse(deployment.buildServerMetadata) + : undefined; + + let eventStream = undefined; + if ( + env.S2_ENABLED === "1" && + (buildServerMetadata || gitMetadata?.source === "trigger_github_app") + ) { + const [error, accessToken] = await tryCatch(this.getS2AccessToken(project.externalRef)); + + if (error) { + logger.error("Failed getting S2 access token", { error }); + } else { + eventStream = { + s2: { + basin: env.S2_DEPLOYMENT_LOGS_BASIN_NAME, + stream: `projects/${project.externalRef}/deployments/${deployment.shortCode}`, + accessToken, + }, + }; + } + } return { + eventStream, deployment: { id: deployment.id, shortCode: deployment.shortCode, version: deployment.version, status: deployment.status, createdAt: deployment.createdAt, + startedAt: deployment.startedAt, + installedAt: deployment.installedAt, builtAt: deployment.builtAt, deployedAt: deployment.deployedAt, + canceledAt: deployment.canceledAt, + canceledReason: deployment.canceledReason, tasks: deployment.worker?.tasks, label: deployment.promotions?.[0]?.label, environment: { @@ -157,6 +213,8 @@ export class DeploymentPresenter { deployedBy: deployment.triggeredBy, sdkVersion: deployment.worker?.sdkVersion, cliVersion: deployment.worker?.cliVersion, + runtime: deployment.runtime, + runtimeVersion: deployment.runtimeVersion, imageReference: deployment.imageReference, imagePlatform: deployment.imagePlatform, externalBuildData: @@ -166,11 +224,46 @@ export class DeploymentPresenter { errorData: DeploymentPresenter.prepareErrorData(deployment.errorData), isBuilt: !!deployment.builtAt, type: deployment.type, - git: processGitMetadata(deployment.git), + git: gitMetadata, }, }; } + private async getS2AccessToken(projectRef: string): Promise { + if (env.S2_ENABLED !== "1" || !s2) { + throw new Error("Failed getting S2 access token: S2 is not enabled"); + } + + const redisKey = `${S2_TOKEN_KEY_PREFIX}${projectRef}`; + const cachedToken = await s2TokenRedis.get(redisKey); + + if (cachedToken) { + return cachedToken; + } + + const { access_token: accessToken } = await s2.accessTokens.issue({ + id: `${projectRef}-${new Date().getTime()}`, + expires_at: new Date(Date.now() + 60 * 60 * 1000).toISOString(), // 1 hour + scope: { + ops: ["read"], + basins: { + exact: env.S2_DEPLOYMENT_LOGS_BASIN_NAME, + }, + streams: { + prefix: `projects/${projectRef}/deployments/`, + }, + }, + }); + + await s2TokenRedis.setex( + redisKey, + 59 * 60, // slightly shorter than the token validity period + accessToken + ); + + 
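// The cached entry expires after 59 minutes, so Redis always drops it before the 1-hour S2 token it stores becomes invalid. +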
return accessToken; + } + public static prepareErrorData(errorData: WorkerDeployment["errorData"]): ErrorData | undefined { if (!errorData) { return; diff --git a/apps/webapp/app/presenters/v3/EnvironmentQueuePresenter.server.ts b/apps/webapp/app/presenters/v3/EnvironmentQueuePresenter.server.ts index 7469a2c0b1..f408511a83 100644 --- a/apps/webapp/app/presenters/v3/EnvironmentQueuePresenter.server.ts +++ b/apps/webapp/app/presenters/v3/EnvironmentQueuePresenter.server.ts @@ -7,6 +7,8 @@ export type Environment = { running: number; queued: number; concurrencyLimit: number; + burstFactor: number; + runsEnabled: boolean; }; export class EnvironmentQueuePresenter extends BasePresenter { @@ -22,10 +24,25 @@ export class EnvironmentQueuePresenter extends BasePresenter { const running = (engineV1Executing ?? 0) + (engineV2Executing ?? 0); const queued = (engineV1Queued ?? 0) + (engineV2Queued ?? 0); + const organization = await this._replica.organization.findFirst({ + where: { + id: environment.organizationId, + }, + select: { + runsEnabled: true, + }, + }); + + if (!organization) { + throw new Error("Organization not found"); + } + return { running, queued, concurrencyLimit: environment.maximumConcurrencyLimit, + burstFactor: environment.concurrencyLimitBurstFactor.toNumber(), + runsEnabled: environment.type === "DEVELOPMENT" || organization.runsEnabled, }; } } diff --git a/apps/webapp/app/presenters/v3/GitHubSettingsPresenter.server.ts b/apps/webapp/app/presenters/v3/GitHubSettingsPresenter.server.ts new file mode 100644 index 0000000000..c3f715deff --- /dev/null +++ b/apps/webapp/app/presenters/v3/GitHubSettingsPresenter.server.ts @@ -0,0 +1,137 @@ +import { type PrismaClient } from "@trigger.dev/database"; +import { err, fromPromise, ok, ResultAsync } from "neverthrow"; +import { env } from "~/env.server"; +import { BranchTrackingConfigSchema } from "~/v3/github"; +import { BasePresenter } from "./basePresenter.server"; + +type GitHubSettingsOptions = { + projectId: string; + organizationId: string; +}; + +export class GitHubSettingsPresenter extends BasePresenter { + public call({ projectId, organizationId }: GitHubSettingsOptions) { + const githubAppEnabled = env.GITHUB_APP_ENABLED === "1"; + + if (!githubAppEnabled) { + return ok({ + enabled: false, + connectedRepository: undefined, + installations: undefined, + isPreviewEnvironmentEnabled: undefined, + }); + } + + const findConnectedGithubRepository = () => + fromPromise( + (this._replica as PrismaClient).connectedGithubRepository.findFirst({ + where: { + projectId, + repository: { + installation: { + deletedAt: null, + suspendedAt: null, + }, + }, + }, + select: { + branchTracking: true, + previewDeploymentsEnabled: true, + createdAt: true, + repository: { + select: { + id: true, + name: true, + fullName: true, + htmlUrl: true, + private: true, + }, + }, + }, + }), + (error) => ({ + type: "other" as const, + cause: error, + }) + ).map((connectedGithubRepository) => { + if (!connectedGithubRepository) { + return undefined; + } + + const branchTrackingOrFailure = BranchTrackingConfigSchema.safeParse( + connectedGithubRepository.branchTracking + ); + const branchTracking = branchTrackingOrFailure.success + ? 
branchTrackingOrFailure.data + : undefined; + + return { + ...connectedGithubRepository, + branchTracking, + }; + }); + + const listGithubAppInstallations = () => + fromPromise( + (this._replica as PrismaClient).githubAppInstallation.findMany({ + where: { + organizationId, + deletedAt: null, + suspendedAt: null, + }, + select: { + id: true, + accountHandle: true, + targetType: true, + appInstallationId: true, + repositories: { + select: { + id: true, + name: true, + fullName: true, + htmlUrl: true, + private: true, + }, + take: 200, + }, + }, + take: 20, + orderBy: { + createdAt: "desc", + }, + }), + (error) => ({ + type: "other" as const, + cause: error, + }) + ); + + const isPreviewEnvironmentEnabled = () => + fromPromise( + (this._replica as PrismaClient).runtimeEnvironment.findFirst({ + select: { + id: true, + }, + where: { + projectId: projectId, + slug: "preview", + }, + }), + (error) => ({ + type: "other" as const, + cause: error, + }) + ).map((previewEnvironment) => previewEnvironment !== null); + + return ResultAsync.combine([ + isPreviewEnvironmentEnabled(), + findConnectedGithubRepository(), + listGithubAppInstallations(), + ]).map(([isPreviewEnvironmentEnabled, connectedGithubRepository, githubAppInstallations]) => ({ + enabled: true, + connectedRepository: connectedGithubRepository, + installations: githubAppInstallations, + isPreviewEnvironmentEnabled, + })); + } +} diff --git a/apps/webapp/app/presenters/v3/ManageConcurrencyPresenter.server.ts b/apps/webapp/app/presenters/v3/ManageConcurrencyPresenter.server.ts new file mode 100644 index 0000000000..2b9af8dce5 --- /dev/null +++ b/apps/webapp/app/presenters/v3/ManageConcurrencyPresenter.server.ts @@ -0,0 +1,141 @@ +import { type RuntimeEnvironmentType } from "@trigger.dev/database"; +import { + getCurrentPlan, + getDefaultEnvironmentLimitFromPlan, + getPlans, +} from "~/services/platform.v3.server"; +import { BasePresenter } from "./basePresenter.server"; +import { sortEnvironments } from "~/utils/environmentSort"; + +export type ConcurrencyResult = { + canAddConcurrency: boolean; + environments: EnvironmentWithConcurrency[]; + extraConcurrency: number; + extraAllocatedConcurrency: number; + extraUnallocatedConcurrency: number; + maxQuota: number; + concurrencyPricing: { + stepSize: number; + centsPerStep: number; + }; +}; + +export type EnvironmentWithConcurrency = { + id: string; + type: RuntimeEnvironmentType; + isBranchableEnvironment: boolean; + branchName: string | null; + parentEnvironmentId: string | null; + maximumConcurrencyLimit: number; + planConcurrencyLimit: number; +}; + +export class ManageConcurrencyPresenter extends BasePresenter { + public async call({ + userId, + projectId, + organizationId, + }: { + userId: string; + projectId: string; + organizationId: string; + }): Promise { + // Get plan + const currentPlan = await getCurrentPlan(organizationId); + if (!currentPlan) { + throw new Error("No plan found"); + } + + const canAddConcurrency = + currentPlan.v3Subscription.plan?.limits.concurrentRuns.canExceed === true; + + const environments = await this._replica.runtimeEnvironment.findMany({ + select: { + id: true, + projectId: true, + type: true, + branchName: true, + parentEnvironmentId: true, + isBranchableEnvironment: true, + maximumConcurrencyLimit: true, + orgMember: { + select: { + userId: true, + }, + }, + project: { + select: { + deletedAt: true, + }, + }, + }, + where: { + organizationId, + archivedAt: null, + }, + }); + + const extraConcurrency = 
currentPlan?.v3Subscription.addOns?.concurrentRuns?.purchased ?? 0; + + // Go through all environments and add up extra concurrency above their allowed allocation + let extraAllocatedConcurrency = 0; + const projectEnvironments: EnvironmentWithConcurrency[] = []; + for (const environment of environments) { + // Don't count parent environments + if (environment.isBranchableEnvironment) continue; + + // Don't count deleted projects + if (environment.project.deletedAt) continue; + + const limit = currentPlan + ? getDefaultEnvironmentLimitFromPlan(environment.type, currentPlan) + : 0; + if (!limit) continue; + + // If it's not DEV and they've increased, track that + // You can't spend money to increase DEV concurrency + if (environment.type !== "DEVELOPMENT" && environment.maximumConcurrencyLimit > limit) { + extraAllocatedConcurrency += environment.maximumConcurrencyLimit - limit; + } + + // We only want to show this project's environments + if (environment.projectId === projectId) { + if (environment.type === "DEVELOPMENT" && environment.orgMember?.userId !== userId) { + continue; + } + + projectEnvironments.push({ + id: environment.id, + type: environment.type, + isBranchableEnvironment: environment.isBranchableEnvironment, + branchName: environment.branchName, + parentEnvironmentId: environment.parentEnvironmentId, + maximumConcurrencyLimit: environment.maximumConcurrencyLimit, + planConcurrencyLimit: limit, + }); + } + } + + const extraAllocated = Math.min(extraConcurrency, extraAllocatedConcurrency); + + const plans = await getPlans(); + if (!plans) { + throw new Error("Couldn't retrieve add on pricing"); + } + + return { + canAddConcurrency, + extraConcurrency, + extraAllocatedConcurrency: extraAllocated, + extraUnallocatedConcurrency: extraConcurrency - extraAllocated, + maxQuota: currentPlan.v3Subscription.addOns?.concurrentRuns?.quota ?? 
0, + environments: sortEnvironments(projectEnvironments, [ + "PRODUCTION", + "STAGING", + "PREVIEW", + "DEVELOPMENT", + ]), + concurrencyPricing: plans.addOnPricing.concurrency, + }; + } +} diff --git a/apps/webapp/app/presenters/v3/NextRunListPresenter.server.ts b/apps/webapp/app/presenters/v3/NextRunListPresenter.server.ts index ec5703b60b..2375ea161a 100644 --- a/apps/webapp/app/presenters/v3/NextRunListPresenter.server.ts +++ b/apps/webapp/app/presenters/v3/NextRunListPresenter.server.ts @@ -1,13 +1,18 @@ -import { ClickHouse } from "@internal/clickhouse"; -import { PrismaClient, PrismaClientOrTransaction, type TaskRunStatus } from "@trigger.dev/database"; +import { type ClickHouse } from "@internal/clickhouse"; +import { MachinePresetName } from "@trigger.dev/core/v3"; +import { + type PrismaClient, + type PrismaClientOrTransaction, + type TaskRunStatus, +} from "@trigger.dev/database"; import { type Direction } from "~/components/ListPagination"; import { timeFilters } from "~/components/runs/v3/SharedFilters"; import { findDisplayableEnvironment } from "~/models/runtimeEnvironment.server"; import { getAllTaskIdentifiers } from "~/models/task.server"; -import { RunsRepository } from "~/services/runsRepository.server"; +import { RunsRepository } from "~/services/runsRepository/runsRepository.server"; +import { machinePresetFromRun } from "~/v3/machinePresets.server"; import { ServiceValidationError } from "~/v3/services/baseService.server"; import { isCancellableRunStatus, isFinalRunStatus, isPendingRunStatus } from "~/v3/taskStatus"; -import parseDuration from "parse-duration"; export type RunListOptions = { userId?: string; @@ -25,7 +30,9 @@ export type RunListOptions = { isTest?: boolean; rootOnly?: boolean; batchId?: string; - runIds?: string[]; + runId?: string[]; + queues?: string[]; + machines?: MachinePresetName[]; //pagination direction?: Direction; cursor?: string; @@ -60,7 +67,9 @@ export class NextRunListPresenter { isTest, rootOnly, batchId, - runIds, + runId, + queues, + machines, from, to, direction = "forward", @@ -75,8 +84,6 @@ export class NextRunListPresenter { to, }); - const periodMs = time.period ? 
parseDuration(time.period) : undefined; - const hasStatusFilters = statuses && statuses.length > 0; const hasFilters = @@ -87,7 +94,9 @@ export class NextRunListPresenter { (scheduleId !== undefined && scheduleId !== "") || (tags !== undefined && tags.length > 0) || batchId !== undefined || - (runIds !== undefined && runIds.length > 0) || + (runId !== undefined && runId.length > 0) || + (queues !== undefined && queues.length > 0) || + (machines !== undefined && machines.length > 0) || typeof isTest === "boolean" || rootOnly === true || !time.isDefault; @@ -96,15 +105,16 @@ export class NextRunListPresenter { const possibleTasksAsync = getAllTaskIdentifiers(this.replica, environmentId); //get possible bulk actions - // TODO: we should replace this with the new bulk stuff and make it environment scoped const bulkActionsAsync = this.replica.bulkActionGroup.findMany({ select: { friendlyId: true, type: true, createdAt: true, + name: true, }, where: { projectId: projectId, + environmentId, }, orderBy: { createdAt: "desc", @@ -118,71 +128,29 @@ export class NextRunListPresenter { findDisplayableEnvironment(environmentId, userId), ]); - if (!displayableEnvironment) { - throw new ServiceValidationError("No environment found"); - } - - //we can restrict to specific runs using bulkId, or batchId - let restrictToRunIds: undefined | string[] = undefined; - - //bulk id - if (bulkId) { - const bulkAction = await this.replica.bulkActionGroup.findFirst({ + // If the bulk action isn't in the most recent ones, add it separately + if (bulkId && !bulkActions.some((bulkAction) => bulkAction.friendlyId === bulkId)) { + const selectedBulkAction = await this.replica.bulkActionGroup.findFirst({ select: { - items: { - select: { - destinationRunId: true, - }, - }, + friendlyId: true, + type: true, + createdAt: true, + name: true, }, where: { friendlyId: bulkId, + projectId, + environmentId, }, }); - if (bulkAction) { - const runIds = bulkAction.items.map((item) => item.destinationRunId).filter(Boolean); - restrictToRunIds = runIds; - } - } - - //batch id is a friendly id - if (batchId) { - const batch = await this.replica.batchTaskRun.findFirst({ - select: { - id: true, - }, - where: { - friendlyId: batchId, - runtimeEnvironmentId: environmentId, - }, - }); - - if (batch) { - batchId = batch.id; + if (selectedBulkAction) { + bulkActions.push(selectedBulkAction); } } - //scheduleId can be a friendlyId - if (scheduleId && scheduleId.startsWith("sched_")) { - const schedule = await this.replica.taskSchedule.findFirst({ - select: { - id: true, - }, - where: { - friendlyId: scheduleId, - projectId: projectId, - }, - }); - - if (schedule) { - scheduleId = schedule?.id; - } - } - - //show all runs if we are filtering by batchId or runId - if (batchId || runIds?.length || scheduleId || tasks?.length) { - rootOnly = false; + if (!displayableEnvironment) { + throw new ServiceValidationError("No environment found"); } const runsRepository = new RunsRepository({ @@ -190,6 +158,11 @@ export class NextRunListPresenter { prisma: this.replica as PrismaClient, }); + function clampToNow(date: Date): Date { + const now = new Date(); + return date > now ? now : date; + } + const { runs, pagination } = await runsRepository.listRuns({ organizationId, environmentId, @@ -199,14 +172,16 @@ export class NextRunListPresenter { statuses, tags, scheduleId, - period: periodMs ?? undefined, - from, - to, + period, + from: time.from ? time.from.getTime() : undefined, + to: time.to ? 
clampToNow(time.to).getTime() : undefined, isTest, rootOnly, batchId, - runFriendlyIds: runIds, - runIds: restrictToRunIds, + runId, + bulkId, + queues, + machines, page: { size: pageSize, cursor, @@ -266,6 +241,11 @@ export class NextRunListPresenter { rootTaskRunId: run.rootTaskRunId, metadata: run.metadata, metadataType: run.metadataType, + machinePreset: run.machinePreset ? machinePresetFromRun(run)?.name : undefined, + queue: { + name: run.queue.replace("task/", ""), + type: run.queue.startsWith("task/") ? "task" : "custom", + }, }; }), pagination: { @@ -281,6 +261,7 @@ export class NextRunListPresenter { id: bulkAction.friendlyId, type: bulkAction.type, createdAt: bulkAction.createdAt, + name: bulkAction.name || bulkAction.friendlyId, })), filters: { tasks: tasks || [], diff --git a/apps/webapp/app/presenters/v3/QueueListPresenter.server.ts b/apps/webapp/app/presenters/v3/QueueListPresenter.server.ts index 93531934bd..0fe9e3f365 100644 --- a/apps/webapp/app/presenters/v3/QueueListPresenter.server.ts +++ b/apps/webapp/app/presenters/v3/QueueListPresenter.server.ts @@ -1,3 +1,4 @@ +import { TaskQueueType } from "@trigger.dev/database"; import { type AuthenticatedEnvironment } from "~/services/apiAuth.server"; import { determineEngineVersion } from "~/v3/engineVersion.server"; import { engine } from "~/v3/runEngine.server"; @@ -6,6 +7,12 @@ import { toQueueItem } from "./QueueRetrievePresenter.server"; const DEFAULT_ITEMS_PER_PAGE = 25; const MAX_ITEMS_PER_PAGE = 100; + +const typeToDBQueueType: Record<"task" | "custom", TaskQueueType> = { + task: TaskQueueType.VIRTUAL, + custom: TaskQueueType.NAMED, +}; + export class QueueListPresenter extends BasePresenter { private readonly perPage: number; @@ -18,13 +25,15 @@ export class QueueListPresenter extends BasePresenter { environment, query, page, + type, }: { environment: AuthenticatedEnvironment; query?: string; page: number; perPage?: number; + type?: "task" | "custom"; }) { - const hasFilters = query !== undefined && query.length > 0; + const hasFilters = (query !== undefined && query.length > 0) || type !== undefined; // Get total count for pagination const totalQueues = await this._replica.taskQueue.count({ @@ -37,6 +46,7 @@ export class QueueListPresenter extends BasePresenter { mode: "insensitive", } : undefined, + type: type ? typeToDBQueueType[type] : undefined, }, }); @@ -70,7 +80,7 @@ export class QueueListPresenter extends BasePresenter { return { success: true as const, - queues: await this.getQueuesWithPagination(environment, query, page), + queues: await this.getQueuesWithPagination(environment, query, page, type), pagination: { currentPage: page, totalPages: Math.ceil(totalQueues / this.perPage), @@ -84,7 +94,8 @@ export class QueueListPresenter extends BasePresenter { private async getQueuesWithPagination( environment: AuthenticatedEnvironment, query: string | undefined, - page: number + page: number, + type: "task" | "custom" | undefined ) { const queues = await this._replica.taskQueue.findMany({ where: { @@ -96,15 +107,18 @@ export class QueueListPresenter extends BasePresenter { mode: "insensitive", } : undefined, + type: type ? 
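Run queues carry a `task/` prefix when they are the per-task default queue; the mapping above strips that prefix for display and derives the queue type from it. A self-contained sketch of the same derivation:

```ts
type QueueType = "task" | "custom";

// Internal queue names use a "task/" prefix for per-task default queues;
// named (custom) queues have no prefix.
function presentQueue(internalName: string): { name: string; type: QueueType } {
  return internalName.startsWith("task/")
    ? { name: internalName.slice("task/".length), type: "task" }
    : { name: internalName, type: "custom" };
}

presentQueue("task/send-email"); // { name: "send-email", type: "task" }
presentQueue("high-priority");   // { name: "high-priority", type: "custom" }
```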
typeToDBQueueType[type] : undefined, }, select: { friendlyId: true, name: true, orderableName: true, concurrencyLimit: true, + concurrencyLimitBase: true, + concurrencyLimitOverriddenAt: true, + concurrencyLimitOverriddenBy: true, type: true, paused: true, - releaseConcurrencyOnWaitpoint: true, }, orderBy: { orderableName: "asc", @@ -124,6 +138,17 @@ export class QueueListPresenter extends BasePresenter { ), ]); + // Manually "join" the overridden users because there is no way to implement the relationship + // in prisma without adding a foreign key constraint + const overriddenByIds = queues.map((q) => q.concurrencyLimitOverriddenBy).filter(Boolean); + const overriddenByUsers = await this._replica.user.findMany({ + where: { + id: { in: overriddenByIds }, + }, + }); + + const overriddenByMap = new Map(overriddenByUsers.map((u) => [u.id, u])); + // Transform queues to include running and queued counts return queues.map((queue) => toQueueItem({ @@ -133,8 +158,12 @@ export class QueueListPresenter extends BasePresenter { running: results[1][queue.name] ?? 0, queued: results[0][queue.name] ?? 0, concurrencyLimit: queue.concurrencyLimit ?? null, + concurrencyLimitBase: queue.concurrencyLimitBase ?? null, + concurrencyLimitOverriddenAt: queue.concurrencyLimitOverriddenAt ?? null, + concurrencyLimitOverriddenBy: queue.concurrencyLimitOverriddenBy + ? overriddenByMap.get(queue.concurrencyLimitOverriddenBy) ?? null + : null, paused: queue.paused, - releaseConcurrencyOnWaitpoint: queue.releaseConcurrencyOnWaitpoint, }) ); } diff --git a/apps/webapp/app/presenters/v3/QueueRetrievePresenter.server.ts b/apps/webapp/app/presenters/v3/QueueRetrievePresenter.server.ts index 12e8e86291..bd885ea738 100644 --- a/apps/webapp/app/presenters/v3/QueueRetrievePresenter.server.ts +++ b/apps/webapp/app/presenters/v3/QueueRetrievePresenter.server.ts @@ -1,12 +1,18 @@ import { type AuthenticatedEnvironment } from "~/services/apiAuth.server"; import { engine } from "~/v3/runEngine.server"; import { BasePresenter } from "./basePresenter.server"; -import { type TaskQueueType } from "@trigger.dev/database"; +import { TaskQueue, User, type TaskQueueType } from "@trigger.dev/database"; import { assertExhaustive } from "@trigger.dev/core"; import { determineEngineVersion } from "~/v3/engineVersion.server"; -import { type QueueItem, type RetrieveQueueParam } from "@trigger.dev/core/v3"; +import { type Prettify, type QueueItem, type RetrieveQueueParam } from "@trigger.dev/core/v3"; import { PrismaClientOrTransaction } from "@trigger.dev/database"; +export type FoundQueue = Prettify< + Omit & { + concurrencyLimitOverriddenBy?: User | null; + } +>; + /** * Shared queue lookup logic used by both QueueRetrievePresenter and PauseQueueService */ @@ -16,22 +22,50 @@ export async function getQueue( queue: RetrieveQueueParam ) { if (typeof queue === "string") { - return prismaClient.taskQueue.findFirst({ + return joinQueueWithUser( + prismaClient, + await prismaClient.taskQueue.findFirst({ + where: { + friendlyId: queue, + runtimeEnvironmentId: environment.id, + }, + }) + ); + } + + const queueName = + queue.type === "task" ? 
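Because `concurrencyLimitOverriddenBy` is a bare user id rather than a Prisma relation, the presenter above batches the user lookup and joins it in memory via a `Map`. A generic sketch of that pattern, with `fetchUsersByIds` standing in for `user.findMany({ where: { id: { in: ids } } })`:

```ts
interface UserSummary {
  id: string;
  name: string | null;
}

// Collect the distinct ids, fetch them in a single query, index them by id,
// then attach the matching user (or null) to each queue.
async function joinOverriddenBy<T extends { concurrencyLimitOverriddenBy: string | null }>(
  queues: T[],
  fetchUsersByIds: (ids: string[]) => Promise<UserSummary[]>
): Promise<Array<T & { overriddenByUser: UserSummary | null }>> {
  const ids = [
    ...new Set(
      queues
        .map((q) => q.concurrencyLimitOverriddenBy)
        .filter((id): id is string => Boolean(id))
    ),
  ];
  const users = ids.length > 0 ? await fetchUsersByIds(ids) : [];
  const byId = new Map(users.map((u) => [u.id, u]));

  return queues.map((queue) => ({
    ...queue,
    overriddenByUser: queue.concurrencyLimitOverriddenBy
      ? byId.get(queue.concurrencyLimitOverriddenBy) ?? null
      : null,
  }));
}
```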
`task/${queue.name.replace(/^task\//, "")}` : queue.name; + return joinQueueWithUser( + prismaClient, + await prismaClient.taskQueue.findFirst({ where: { - friendlyId: queue, + name: queueName, runtimeEnvironmentId: environment.id, }, - }); + }) + ); +} + +async function joinQueueWithUser( + prismaClient: PrismaClientOrTransaction, + queue?: TaskQueue | null +): Promise { + if (!queue) return undefined; + if (!queue.concurrencyLimitOverriddenBy) { + return { + ...queue, + concurrencyLimitOverriddenBy: undefined, + }; } - const queueName = - queue.type === "task" ? `task/${queue.name.replace(/^task\//, "")}` : queue.name; - return prismaClient.taskQueue.findFirst({ - where: { - name: queueName, - runtimeEnvironmentId: environment.id, - }, + const user = await prismaClient.user.findFirst({ + where: { id: queue.concurrencyLimitOverriddenBy }, }); + + return { + ...queue, + concurrencyLimitOverriddenBy: user, + }; } export class QueueRetrievePresenter extends BasePresenter { @@ -75,14 +109,16 @@ export class QueueRetrievePresenter extends BasePresenter { running: results[1]?.[queue.name] ?? 0, queued: results[0]?.[queue.name] ?? 0, concurrencyLimit: queue.concurrencyLimit ?? null, + concurrencyLimitBase: queue.concurrencyLimitBase ?? null, + concurrencyLimitOverriddenAt: queue.concurrencyLimitOverriddenAt ?? null, + concurrencyLimitOverriddenBy: queue.concurrencyLimitOverriddenBy ?? null, paused: queue.paused, - releaseConcurrencyOnWaitpoint: queue.releaseConcurrencyOnWaitpoint, }), }; } } -function queueTypeFromType(type: TaskQueueType) { +export function queueTypeFromType(type: TaskQueueType) { switch (type) { case "NAMED": return "custom" as const; @@ -105,9 +141,11 @@ export function toQueueItem(data: { running: number; queued: number; concurrencyLimit: number | null; + concurrencyLimitBase: number | null; + concurrencyLimitOverriddenAt: Date | null; + concurrencyLimitOverriddenBy: User | null; paused: boolean; - releaseConcurrencyOnWaitpoint: boolean; -}): QueueItem { +}): QueueItem & { releaseConcurrencyOnWaitpoint: boolean } { return { id: data.friendlyId, //remove the task/ prefix if it exists @@ -115,8 +153,22 @@ export function toQueueItem(data: { type: queueTypeFromType(data.type), running: data.running, queued: data.queued, - concurrencyLimit: data.concurrencyLimit, paused: data.paused, - releaseConcurrencyOnWaitpoint: data.releaseConcurrencyOnWaitpoint, + concurrencyLimit: data.concurrencyLimit, + concurrency: { + current: data.concurrencyLimit, + base: data.concurrencyLimitBase, + override: data.concurrencyLimitOverriddenAt ? data.concurrencyLimit : null, + overriddenBy: toQueueConcurrencyOverriddenBy(data.concurrencyLimitOverriddenBy), + overriddenAt: data.concurrencyLimitOverriddenAt, + }, + // TODO: This needs to be removed but keeping this here for now to avoid breaking existing clients + releaseConcurrencyOnWaitpoint: true, }; } + +function toQueueConcurrencyOverriddenBy(user: User | null) { + if (!user) return null; + + return user.displayName ?? user.name ?? 
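The presenters translate between the public queue type (`"task" | "custom"`) and the database enum (`VIRTUAL` / `NAMED`) in both directions, via `typeToDBQueueType` and `queueTypeFromType`. A compact sketch of the two lookup tables, assuming the enum values are plain strings:

```ts
type ApiQueueType = "task" | "custom";
type DbQueueType = "VIRTUAL" | "NAMED";

// API -> database enum (used when filtering queues by type).
const apiToDb: Record<ApiQueueType, DbQueueType> = {
  task: "VIRTUAL",
  custom: "NAMED",
};

// Database enum -> API type (used when presenting queues).
const dbToApi: Record<DbQueueType, ApiQueueType> = {
  VIRTUAL: "task",
  NAMED: "custom",
};

// Round-tripping preserves the value in both directions.
const roundTrip: ApiQueueType = dbToApi[apiToDb["custom"]]; // "custom"
```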
null; +} diff --git a/apps/webapp/app/presenters/v3/RegionsPresenter.server.ts b/apps/webapp/app/presenters/v3/RegionsPresenter.server.ts new file mode 100644 index 0000000000..c304597bb1 --- /dev/null +++ b/apps/webapp/app/presenters/v3/RegionsPresenter.server.ts @@ -0,0 +1,159 @@ +import { type Project } from "~/models/project.server"; +import { type User } from "~/models/user.server"; +import { FEATURE_FLAG, makeFlags } from "~/v3/featureFlags.server"; +import { BasePresenter } from "./basePresenter.server"; +import { getCurrentPlan } from "~/services/platform.v3.server"; + +export type Region = { + id: string; + name: string; + description?: string; + cloudProvider?: string; + location?: string; + staticIPs?: string | null; + isDefault: boolean; + isHidden: boolean; +}; + +export class RegionsPresenter extends BasePresenter { + public async call({ + userId, + projectSlug, + isAdmin = false, + }: { + userId: User["id"]; + projectSlug: Project["slug"]; + isAdmin?: boolean; + }) { + const project = await this._replica.project.findFirst({ + select: { + id: true, + organizationId: true, + defaultWorkerGroupId: true, + allowedWorkerQueues: true, + }, + where: { + slug: projectSlug, + organization: { + members: { + some: { + userId, + }, + }, + }, + }, + }); + + if (!project) { + throw new Error("Project not found"); + } + + const getFlag = makeFlags(this._replica); + const defaultWorkerInstanceGroupId = await getFlag({ + key: FEATURE_FLAG.defaultWorkerInstanceGroupId, + }); + + if (!defaultWorkerInstanceGroupId) { + throw new Error("Default worker instance group not found"); + } + + const visibleRegions = await this._replica.workerInstanceGroup.findMany({ + select: { + id: true, + name: true, + description: true, + cloudProvider: true, + location: true, + staticIPs: true, + hidden: true, + }, + where: isAdmin + ? undefined + : // Hide hidden unless they're allowed to use them + project.allowedWorkerQueues.length > 0 + ? { + masterQueue: { in: project.allowedWorkerQueues }, + } + : { + hidden: false, + }, + orderBy: { + name: "asc", + }, + }); + + const regions: Region[] = visibleRegions.map((region) => ({ + id: region.id, + name: region.name, + description: region.description ?? undefined, + cloudProvider: region.cloudProvider ?? undefined, + location: region.location ?? undefined, + staticIPs: region.staticIPs ?? undefined, + isDefault: region.id === defaultWorkerInstanceGroupId, + isHidden: region.hidden, + })); + + if (project.defaultWorkerGroupId) { + const defaultWorkerGroup = await this._replica.workerInstanceGroup.findFirst({ + select: { + id: true, + name: true, + description: true, + cloudProvider: true, + location: true, + staticIPs: true, + hidden: true, + }, + where: { id: project.defaultWorkerGroupId }, + }); + + if (defaultWorkerGroup) { + // Unset the default region + const defaultRegion = regions.find((region) => region.isDefault); + if (defaultRegion) { + defaultRegion.isDefault = false; + } + + regions.push({ + id: defaultWorkerGroup.id, + name: defaultWorkerGroup.name, + description: defaultWorkerGroup.description ?? undefined, + cloudProvider: defaultWorkerGroup.cloudProvider ?? undefined, + location: defaultWorkerGroup.location ?? undefined, + staticIPs: defaultWorkerGroup.staticIPs ?? 
undefined, + isDefault: true, + isHidden: defaultWorkerGroup.hidden, + }); + } + } + + // Default first + const sorted = regions.sort((a, b) => { + if (a.isDefault) return -1; + if (b.isDefault) return 1; + return a.name.localeCompare(b.name); + }); + + // Remove later duplicates + let unique = sorted.filter((region, index, self) => { + const firstIndex = self.findIndex((t) => t.id === region.id); + return index === firstIndex; + }); + + // Don't show static IPs for free users + // Even if they had the IPs they wouldn't work, but this makes it less confusing + const currentPlan = await getCurrentPlan(project.organizationId); + const isPaying = currentPlan?.v3Subscription.isPaying === true; + if (!isPaying) { + unique = unique.map((region) => ({ + ...region, + staticIPs: region.staticIPs ? null : undefined, + })); + } + + return { + regions: unique.sort((a, b) => a.name.localeCompare(b.name)), + isPaying, + }; + } +} diff --git a/apps/webapp/app/presenters/v3/RunListPresenter.server.ts b/apps/webapp/app/presenters/v3/RunListPresenter.server.ts deleted file mode 100644 index 9244428646..0000000000 --- a/apps/webapp/app/presenters/v3/RunListPresenter.server.ts +++ /dev/null @@ -1,401 +0,0 @@ -import { Prisma, type TaskRunStatus } from "@trigger.dev/database"; -import parse from "parse-duration"; -import { type Direction } from "~/components/ListPagination"; -import { timeFilters } from "~/components/runs/v3/SharedFilters"; -import { sqlDatabaseSchema } from "~/db.server"; -import { findDisplayableEnvironment } from "~/models/runtimeEnvironment.server"; -import { getAllTaskIdentifiers } from "~/models/task.server"; -import { isCancellableRunStatus, isFinalRunStatus, isPendingRunStatus } from "~/v3/taskStatus"; -import { BasePresenter } from "./basePresenter.server"; -import { ServiceValidationError } from "~/v3/services/baseService.server"; - -export type RunListOptions = { - userId?: string; - projectId: string; - //filters - tasks?: string[]; - versions?: string[]; - statuses?: TaskRunStatus[]; - tags?: string[]; - scheduleId?: string; - period?: string; - bulkId?: string; - from?: number; - to?: number; - isTest?: boolean; - rootOnly?: boolean; - batchId?: string; - runIds?: string[]; - //pagination - direction?: Direction; - cursor?: string; - pageSize?: number; -}; - -const DEFAULT_PAGE_SIZE = 25; - -export type RunList = Awaited>; -export type RunListItem = RunList["runs"][0]; -export type RunListAppliedFilters = RunList["filters"]; - -export class RunListPresenter extends BasePresenter { - public async call( - environmentId: string, - { - userId, - projectId, - tasks, - versions, - statuses, - tags, - scheduleId, - period, - bulkId, - isTest, - rootOnly, - batchId, - runIds, - from, - to, - direction = "forward", - cursor, - pageSize = DEFAULT_PAGE_SIZE, - }: RunListOptions - ) { - //get the time values from the raw values (including a default period) - const time = timeFilters({ - period, - from, - to, - }); - - const hasStatusFilters = statuses && statuses.length > 0; - - const hasFilters = - (tasks !== undefined && tasks.length > 0) || - (versions !== undefined && versions.length > 0) || - hasStatusFilters || - (bulkId !== undefined && bulkId !== "") || - (scheduleId !== undefined && scheduleId !== "") || - (tags !== undefined && tags.length > 0) || - batchId !== undefined || - (runIds !== undefined && runIds.length > 0) || - typeof isTest === "boolean" || - rootOnly === true || - !time.isDefault; - - //get all possible tasks - const possibleTasksAsync = 
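The RegionsPresenter above first sorts the default region to the front, then deduplicates by keeping the first occurrence of each id (so the default entry wins over a duplicate), and finally hides static IPs from non-paying organizations before sorting alphabetically. A condensed sketch of that pipeline under the same assumptions:

```ts
interface RegionLike {
  id: string;
  name: string;
  isDefault: boolean;
  staticIPs?: string | null;
}

function presentRegions(regions: RegionLike[], isPaying: boolean): RegionLike[] {
  // Default first, so the default entry survives deduplication below.
  const sorted = [...regions].sort((a, b) => {
    if (a.isDefault) return -1;
    if (b.isDefault) return 1;
    return a.name.localeCompare(b.name);
  });

  // Keep only the first occurrence of each region id.
  const unique = sorted.filter(
    (region, index, self) => self.findIndex((r) => r.id === region.id) === index
  );

  // Free plans never see static IPs; null signals "present but hidden".
  const visible = isPaying
    ? unique
    : unique.map((region) => ({
        ...region,
        staticIPs: region.staticIPs ? null : undefined,
      }));

  // Final output is alphabetical by name.
  return visible.sort((a, b) => a.name.localeCompare(b.name));
}
```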
getAllTaskIdentifiers(this._replica, environmentId); - - //get possible bulk actions - // TODO: we should replace this with the new bulk stuff and make it environment scoped - const bulkActionsAsync = this._replica.bulkActionGroup.findMany({ - select: { - friendlyId: true, - type: true, - createdAt: true, - }, - where: { - projectId: projectId, - }, - orderBy: { - createdAt: "desc", - }, - take: 20, - }); - - const [possibleTasks, bulkActions, displayableEnvironment] = await Promise.all([ - possibleTasksAsync, - bulkActionsAsync, - findDisplayableEnvironment(environmentId, userId), - ]); - - if (!displayableEnvironment) { - throw new ServiceValidationError("No environment found"); - } - - //we can restrict to specific runs using bulkId, or batchId - let restrictToRunIds: undefined | string[] = undefined; - - //bulk id - if (bulkId) { - const bulkAction = await this._replica.bulkActionGroup.findFirst({ - select: { - items: { - select: { - destinationRunId: true, - }, - }, - }, - where: { - friendlyId: bulkId, - }, - }); - - if (bulkAction) { - const runIds = bulkAction.items.map((item) => item.destinationRunId).filter(Boolean); - restrictToRunIds = runIds; - } - } - - //batch id is a friendly id - if (batchId) { - const batch = await this._replica.batchTaskRun.findFirst({ - select: { - id: true, - }, - where: { - friendlyId: batchId, - runtimeEnvironmentId: environmentId, - }, - }); - - if (batch) { - batchId = batch.id; - } - } - - //scheduleId can be a friendlyId - if (scheduleId && scheduleId.startsWith("sched_")) { - const schedule = await this._replica.taskSchedule.findFirst({ - select: { - id: true, - }, - where: { - friendlyId: scheduleId, - projectId: projectId, - }, - }); - - if (schedule) { - scheduleId = schedule?.id; - } - } - - //show all runs if we are filtering by batchId or runId - if (batchId || runIds?.length || scheduleId || tasks?.length) { - rootOnly = false; - } - - const periodMs = time.period ? 
parse(time.period) : undefined; - - //get the runs - const runs = await this._replica.$queryRaw< - { - id: string; - number: BigInt; - runFriendlyId: string; - taskIdentifier: string; - version: string | null; - status: TaskRunStatus; - createdAt: Date; - startedAt: Date | null; - lockedAt: Date | null; - delayUntil: Date | null; - updatedAt: Date; - completedAt: Date | null; - isTest: boolean; - spanId: string; - idempotencyKey: string | null; - ttl: string | null; - expiredAt: Date | null; - costInCents: number; - baseCostInCents: number; - usageDurationMs: BigInt; - tags: null | string[]; - depth: number; - rootTaskRunId: string | null; - batchId: string | null; - metadata: string | null; - metadataType: string; - }[] - >` - SELECT - tr.id, - tr.number, - tr."friendlyId" AS "runFriendlyId", - tr."taskIdentifier" AS "taskIdentifier", - tr."taskVersion" AS version, - tr.status AS status, - tr."createdAt" AS "createdAt", - tr."startedAt" AS "startedAt", - tr."delayUntil" AS "delayUntil", - tr."lockedAt" AS "lockedAt", - tr."updatedAt" AS "updatedAt", - tr."completedAt" AS "completedAt", - tr."isTest" AS "isTest", - tr."spanId" AS "spanId", - tr."idempotencyKey" AS "idempotencyKey", - tr."ttl" AS "ttl", - tr."expiredAt" AS "expiredAt", - tr."baseCostInCents" AS "baseCostInCents", - tr."costInCents" AS "costInCents", - tr."usageDurationMs" AS "usageDurationMs", - tr."depth" AS "depth", - tr."rootTaskRunId" AS "rootTaskRunId", - tr."runTags" AS "tags", - tr."metadata" AS "metadata", - tr."metadataType" AS "metadataType" -FROM - ${sqlDatabaseSchema}."TaskRun" tr -WHERE - -- project - tr."runtimeEnvironmentId" = ${environmentId} - -- cursor - ${ - cursor - ? direction === "forward" - ? Prisma.sql`AND tr.id < ${cursor}` - : Prisma.sql`AND tr.id > ${cursor}` - : Prisma.empty - } - -- filters - ${runIds ? Prisma.sql`AND tr."friendlyId" IN (${Prisma.join(runIds)})` : Prisma.empty} - ${batchId ? Prisma.sql`AND tr."batchId" = ${batchId}` : Prisma.empty} - ${ - restrictToRunIds - ? restrictToRunIds.length === 0 - ? Prisma.sql`AND tr.id = ''` - : Prisma.sql`AND tr.id IN (${Prisma.join(restrictToRunIds)})` - : Prisma.empty - } - ${ - tasks && tasks.length > 0 - ? Prisma.sql`AND tr."taskIdentifier" IN (${Prisma.join(tasks)})` - : Prisma.empty - } - ${ - statuses && statuses.length > 0 - ? Prisma.sql`AND tr.status = ANY(ARRAY[${Prisma.join(statuses)}]::"TaskRunStatus"[])` - : Prisma.empty - } - ${scheduleId ? Prisma.sql`AND tr."scheduleId" = ${scheduleId}` : Prisma.empty} - ${typeof isTest === "boolean" ? Prisma.sql`AND tr."isTest" = ${isTest}` : Prisma.empty} - ${ - periodMs - ? Prisma.sql`AND tr."createdAt" >= NOW() - INTERVAL '1 millisecond' * ${periodMs}` - : Prisma.empty - } - ${ - time.from - ? Prisma.sql`AND tr."createdAt" >= ${time.from.toISOString()}::timestamp` - : Prisma.empty - } - ${ - time.to ? Prisma.sql`AND tr."createdAt" <= ${time.to.toISOString()}::timestamp` : Prisma.empty - } - ${ - tags && tags.length > 0 - ? Prisma.sql`AND tr."runTags" && ARRAY[${Prisma.join(tags)}]::text[]` - : Prisma.empty - } - ${rootOnly === true ? Prisma.sql`AND tr."rootTaskRunId" IS NULL` : Prisma.empty} - ORDER BY - ${direction === "forward" ? Prisma.sql`tr.id DESC` : Prisma.sql`tr.id ASC`} - LIMIT ${pageSize + 1}`; - - const hasMore = runs.length > pageSize; - - //get cursors for next and previous pages - let next: string | undefined; - let previous: string | undefined; - switch (direction) { - case "forward": - previous = cursor ? 
runs.at(0)?.id : undefined; - if (hasMore) { - next = runs[pageSize - 1]?.id; - } - break; - case "backward": - runs.reverse(); - if (hasMore) { - previous = runs[1]?.id; - next = runs[pageSize]?.id; - } else { - next = runs[pageSize - 1]?.id; - } - break; - } - - const runsToReturn = - direction === "backward" && hasMore ? runs.slice(1, pageSize + 1) : runs.slice(0, pageSize); - - let hasAnyRuns = runsToReturn.length > 0; - if (!hasAnyRuns) { - const firstRun = await this._replica.taskRun.findFirst({ - where: { - runtimeEnvironmentId: environmentId, - }, - }); - - if (firstRun) { - hasAnyRuns = true; - } - } - - return { - runs: runsToReturn.map((run) => { - const hasFinished = isFinalRunStatus(run.status); - - const startedAt = run.startedAt ?? run.lockedAt; - - return { - id: run.id, - friendlyId: run.runFriendlyId, - number: Number(run.number), - createdAt: run.createdAt.toISOString(), - updatedAt: run.updatedAt.toISOString(), - startedAt: startedAt ? startedAt.toISOString() : undefined, - delayUntil: run.delayUntil ? run.delayUntil.toISOString() : undefined, - hasFinished, - finishedAt: hasFinished - ? run.completedAt?.toISOString() ?? run.updatedAt.toISOString() - : undefined, - isTest: run.isTest, - status: run.status, - version: run.version, - taskIdentifier: run.taskIdentifier, - spanId: run.spanId, - isReplayable: true, - isCancellable: isCancellableRunStatus(run.status), - isPending: isPendingRunStatus(run.status), - environment: displayableEnvironment, - idempotencyKey: run.idempotencyKey ? run.idempotencyKey : undefined, - ttl: run.ttl ? run.ttl : undefined, - expiredAt: run.expiredAt ? run.expiredAt.toISOString() : undefined, - costInCents: run.costInCents, - baseCostInCents: run.baseCostInCents, - usageDurationMs: Number(run.usageDurationMs), - tags: run.tags ? 
run.tags.sort((a, b) => a.localeCompare(b)) : [], - depth: run.depth, - rootTaskRunId: run.rootTaskRunId, - metadata: run.metadata, - metadataType: run.metadataType, - }; - }), - pagination: { - next, - previous, - }, - possibleTasks: possibleTasks - .map((task) => ({ slug: task.slug, triggerSource: task.triggerSource })) - .sort((a, b) => { - return a.slug.localeCompare(b.slug); - }), - bulkActions: bulkActions.map((bulkAction) => ({ - id: bulkAction.friendlyId, - type: bulkAction.type, - createdAt: bulkAction.createdAt, - })), - filters: { - tasks: tasks || [], - versions: versions || [], - statuses: statuses || [], - from: time.from, - to: time.to, - }, - hasFilters, - hasAnyRuns, - }; - } -} diff --git a/apps/webapp/app/presenters/v3/RunPresenter.server.ts b/apps/webapp/app/presenters/v3/RunPresenter.server.ts index d3ea209924..437d6b6458 100644 --- a/apps/webapp/app/presenters/v3/RunPresenter.server.ts +++ b/apps/webapp/app/presenters/v3/RunPresenter.server.ts @@ -1,11 +1,13 @@ import { millisecondsToNanoseconds } from "@trigger.dev/core/v3"; import { createTreeFromFlatItems, flattenTree } from "~/components/primitives/TreeView/TreeView"; -import { prisma, PrismaClient } from "~/db.server"; +import { prisma, type PrismaClient } from "~/db.server"; import { createTimelineSpanEventsFromSpanEvents } from "~/utils/timelineSpanEvents"; import { getUsername } from "~/utils/username"; -import { eventRepository } from "~/v3/eventRepository.server"; +import { resolveEventRepositoryForStore } from "~/v3/eventRepository/index.server"; +import { SpanSummary } from "~/v3/eventRepository/eventRepository.types"; import { getTaskEventStoreTableForRun } from "~/v3/taskEventStore.server"; import { isFinalRunStatus } from "~/v3/taskStatus"; +import { env } from "~/env.server"; type Result = Awaited>; export type Run = Result["run"]; @@ -28,7 +30,6 @@ export class RunPresenter { public async call({ userId, projectSlug, - organizationSlug, environmentSlug, runFriendlyId, showDeletedLogs, @@ -36,7 +37,6 @@ export class RunPresenter { }: { userId: string; projectSlug: string; - organizationSlug: string; environmentSlug: string; runFriendlyId: string; showDeletedLogs: boolean; @@ -47,9 +47,11 @@ export class RunPresenter { id: true, createdAt: true, taskEventStore: true, + taskIdentifier: true, number: true, traceId: true, spanId: true, + parentSpanId: true, friendlyId: true, status: true, startedAt: true, @@ -58,7 +60,13 @@ export class RunPresenter { rootTaskRun: { select: { friendlyId: true, - taskIdentifier: true, + spanId: true, + createdAt: true, + }, + }, + parentTaskRun: { + select: { + friendlyId: true, spanId: true, createdAt: true, }, @@ -87,6 +95,13 @@ export class RunPresenter { friendlyId: runFriendlyId, project: { slug: projectSlug, + organization: { + members: { + some: { + userId, + }, + }, + }, }, }, }); @@ -111,6 +126,7 @@ export class RunPresenter { completedAt: run.completedAt, logsDeletedAt: showDeletedLogs ? 
null : run.logsDeletedAt, rootTaskRun: run.rootTaskRun, + parentTaskRun: run.parentTaskRun, environment: { id: run.runtimeEnvironment.id, organizationId: run.runtimeEnvironment.organizationId, @@ -125,21 +141,56 @@ export class RunPresenter { return { run: runData, trace: undefined, + maximumLiveReloadingSetting: env.MAXIMUM_LIVE_RELOADING_EVENTS, }; } + const eventRepository = resolveEventRepositoryForStore(run.taskEventStore); + // get the events - const traceSummary = await eventRepository.getTraceSummary( + let traceSummary = await eventRepository.getTraceSummary( getTaskEventStoreTableForRun(run), + run.runtimeEnvironment.id, run.traceId, run.rootTaskRun?.createdAt ?? run.createdAt, run.completedAt ?? undefined, { includeDebugLogs: showDebug } ); + if (!traceSummary) { - return { - run: runData, - trace: undefined, + const spanSummary: SpanSummary = { + id: run.spanId, + parentId: run.parentSpanId ?? undefined, + runId: run.friendlyId, + data: { + message: run.taskIdentifier, + style: { icon: "task", variant: "primary" }, + events: [], + startTime: run.createdAt, + duration: 0, + isError: + run.status === "COMPLETED_WITH_ERRORS" || + run.status === "CRASHED" || + run.status === "EXPIRED" || + run.status === "SYSTEM_FAILURE" || + run.status === "TIMED_OUT", + isPartial: + run.status === "DELAYED" || + run.status === "PENDING" || + run.status === "PAUSED" || + run.status === "RETRYING_AFTER_FAILURE" || + run.status === "DEQUEUED" || + run.status === "EXECUTING" || + run.status === "WAITING_TO_RESUME", + isCancelled: run.status === "CANCELED", + isDebug: false, + level: "TRACE", + }, + }; + + traceSummary = { + rootSpan: spanSummary, + spans: [spanSummary], }; } @@ -202,15 +253,15 @@ export class RunPresenter { trace: { rootSpanStatus, events: events, - parentRunFriendlyId: - tree?.id === traceSummary.rootSpan.id ? undefined : traceSummary.rootSpan.runId, duration: totalDuration, rootStartedAt: tree?.data.startTime, startedAt: run.startedAt, queuedDuration: run.startedAt ? 
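When the event store has no trace yet, the RunPresenter above synthesizes a single root span from the run row, deriving the error/partial/cancelled flags from the run status. A small sketch of that status classification, using only the status strings listed in the diff:

```ts
// Statuses that mark the synthetic root span as errored.
const ERROR_STATUSES = [
  "COMPLETED_WITH_ERRORS",
  "CRASHED",
  "EXPIRED",
  "SYSTEM_FAILURE",
  "TIMED_OUT",
];

// Statuses where the run is still in flight, so the span is partial.
const PARTIAL_STATUSES = [
  "DELAYED",
  "PENDING",
  "PAUSED",
  "RETRYING_AFTER_FAILURE",
  "DEQUEUED",
  "EXECUTING",
  "WAITING_TO_RESUME",
];

function classifyRunStatus(status: string) {
  return {
    isError: ERROR_STATUSES.includes(status),
    isPartial: PARTIAL_STATUSES.includes(status),
    isCancelled: status === "CANCELED",
  };
}
```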
millisecondsToNanoseconds(run.startedAt.getTime() - run.createdAt.getTime()) : undefined, + overridesBySpanId: traceSummary.overridesBySpanId, }, + maximumLiveReloadingSetting: eventRepository.maximumLiveReloadingSetting, }; } } diff --git a/apps/webapp/app/presenters/v3/RunStreamPresenter.server.ts b/apps/webapp/app/presenters/v3/RunStreamPresenter.server.ts index 6624048e8c..9197b2c9d1 100644 --- a/apps/webapp/app/presenters/v3/RunStreamPresenter.server.ts +++ b/apps/webapp/app/presenters/v3/RunStreamPresenter.server.ts @@ -3,7 +3,7 @@ import { eventStream } from "remix-utils/sse/server"; import { PrismaClient, prisma } from "~/db.server"; import { logger } from "~/services/logger.server"; import { throttle } from "~/utils/throttle"; -import { eventRepository } from "~/v3/eventRepository.server"; +import { tracePubSub } from "~/v3/services/tracePubSub.server"; const pingInterval = 1000; @@ -41,7 +41,7 @@ export class RunStreamPresenter { let pinger: NodeJS.Timeout | undefined = undefined; - const { unsubscribe, eventEmitter } = await eventRepository.subscribeToTrace(run.traceId); + const { unsubscribe, eventEmitter } = await tracePubSub.subscribeToTrace(run.traceId); return eventStream(request.signal, (send, close) => { const safeSend = (args: { event?: string; data: string }) => { diff --git a/apps/webapp/app/presenters/v3/RunTagListPresenter.server.ts b/apps/webapp/app/presenters/v3/RunTagListPresenter.server.ts index f159d3928e..e9de368ece 100644 --- a/apps/webapp/app/presenters/v3/RunTagListPresenter.server.ts +++ b/apps/webapp/app/presenters/v3/RunTagListPresenter.server.ts @@ -1,8 +1,16 @@ +import { RunsRepository } from "~/services/runsRepository/runsRepository.server"; import { BasePresenter } from "./basePresenter.server"; +import { clickhouseClient } from "~/services/clickhouseInstance.server"; +import { type PrismaClient } from "@trigger.dev/database"; +import { timeFilters } from "~/components/runs/v3/SharedFilters"; export type TagListOptions = { - userId?: string; + organizationId: string; + environmentId: string; projectId: string; + period?: string; + from?: Date; + to?: Date; //filters name?: string; //pagination @@ -17,40 +25,39 @@ export type TagListItem = TagList["tags"][number]; export class RunTagListPresenter extends BasePresenter { public async call({ - userId, + organizationId, + environmentId, projectId, name, + period, + from, + to, page = 1, pageSize = DEFAULT_PAGE_SIZE, }: TagListOptions) { const hasFilters = Boolean(name?.trim()); - const tags = await this._replica.taskRunTag.findMany({ - where: { - projectId, - name: name - ? { - startsWith: name, - mode: "insensitive", - } - : undefined, - }, - orderBy: { - id: "desc", - }, - take: pageSize + 1, - skip: (page - 1) * pageSize, + const runsRepository = new RunsRepository({ + clickhouse: clickhouseClient, + prisma: this._replica as PrismaClient, + }); + + const tags = await runsRepository.listTags({ + organizationId, + projectId, + environmentId, + query: name, + period, + from: from ? from.getTime() : undefined, + to: to ? 
to.getTime() : undefined, + offset: (page - 1) * pageSize, + limit: pageSize + 1, }); return { - tags: tags - .map((tag) => ({ - id: tag.friendlyId, - name: tag.name, - })) - .slice(0, pageSize), + tags: tags.tags, currentPage: page, - hasMore: tags.length > pageSize, + hasMore: tags.tags.length > pageSize, hasFilters, }; } diff --git a/apps/webapp/app/presenters/v3/SpanPresenter.server.ts b/apps/webapp/app/presenters/v3/SpanPresenter.server.ts index 262184dcb9..4c0e3405cf 100644 --- a/apps/webapp/app/presenters/v3/SpanPresenter.server.ts +++ b/apps/webapp/app/presenters/v3/SpanPresenter.server.ts @@ -1,29 +1,42 @@ import { - type MachinePresetName, + type MachinePreset, prettyPrintPacket, SemanticInternalAttributes, + type TaskRunContext, TaskRunError, + TriggerTraceContext, + type V3TaskRunContext, } from "@trigger.dev/core/v3"; -import { getMaxDuration } from "@trigger.dev/core/v3/isomorphic"; +import { AttemptId, getMaxDuration, parseTraceparent } from "@trigger.dev/core/v3/isomorphic"; import { RUNNING_STATUSES } from "~/components/runs/v3/TaskRunStatus"; import { logger } from "~/services/logger.server"; -import { eventRepository, rehydrateAttribute } from "~/v3/eventRepository.server"; -import { machinePresetFromName } from "~/v3/machinePresets.server"; +import { rehydrateAttribute } from "~/v3/eventRepository/eventRepository.server"; +import { machinePresetFromRun } from "~/v3/machinePresets.server"; import { getTaskEventStoreTableForRun, type TaskEventStoreTable } from "~/v3/taskEventStore.server"; import { isFailedRunStatus, isFinalRunStatus } from "~/v3/taskStatus"; import { BasePresenter } from "./basePresenter.server"; import { WaitpointPresenter } from "./WaitpointPresenter.server"; +import { engine } from "~/v3/runEngine.server"; +import { resolveEventRepositoryForStore } from "~/v3/eventRepository/index.server"; +import { IEventRepository, SpanDetail } from "~/v3/eventRepository/eventRepository.types"; +import { safeJsonParse } from "~/utils/json"; type Result = Awaited>; export type Span = NonNullable["span"]>; export type SpanRun = NonNullable["run"]>; +type FindRunResult = NonNullable< + Awaited["findRun"]>> +>; +type GetSpanResult = SpanDetail; export class SpanPresenter extends BasePresenter { public async call({ + userId, projectSlug, spanId, runFriendlyId, }: { + userId: string; projectSlug: string; spanId: string; runFriendlyId: string; @@ -31,6 +44,13 @@ export class SpanPresenter extends BasePresenter { const project = await this._replica.project.findFirst({ where: { slug: projectSlug, + organization: { + members: { + some: { + userId, + }, + }, + }, }, }); @@ -49,6 +69,7 @@ export class SpanPresenter extends BasePresenter { }, where: { friendlyId: runFriendlyId, + projectId: project.id, }, }); @@ -58,14 +79,18 @@ export class SpanPresenter extends BasePresenter { const { traceId } = parentRun; + const eventRepository = resolveEventRepositoryForStore(parentRun.taskEventStore); + const eventStore = getTaskEventStoreTableForRun(parentRun); - const run = await this.#getRun({ + const run = await this.getRun({ eventStore, traceId, + eventRepository, spanId, createdAt: parentRun.createdAt, completedAt: parentRun.completedAt, + environmentId: parentRun.runtimeEnvironmentId, }); if (run) { return { @@ -74,15 +99,15 @@ export class SpanPresenter extends BasePresenter { }; } - //get the run const span = await this.#getSpan({ eventStore, - traceId, spanId, + traceId, environmentId: parentRun.runtimeEnvironmentId, projectId: parentRun.projectId, createdAt: 
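The RunTagListPresenter above converts page/pageSize into an offset/limit pair and asks for one extra row, so a next page can be detected without a separate count query. A minimal sketch of that pagination pattern (`fetchRows` is a stand-in for the repository call):

```ts
interface Page<T> {
  items: T[];
  currentPage: number;
  hasMore: boolean;
}

// Fetch `pageSize + 1` rows: if the extra row comes back, there is a next page.
async function paginate<T>(
  fetchRows: (opts: { offset: number; limit: number }) => Promise<T[]>,
  page: number,
  pageSize: number
): Promise<Page<T>> {
  const rows = await fetchRows({
    offset: (page - 1) * pageSize,
    limit: pageSize + 1,
  });

  return {
    items: rows.slice(0, pageSize),
    currentPage: page,
    hasMore: rows.length > pageSize,
  };
}
```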
parentRun.createdAt, completedAt: parentRun.completedAt, + eventRepository, }); if (!span) { @@ -95,136 +120,33 @@ export class SpanPresenter extends BasePresenter { }; } - async #getRun({ + async getRun({ eventStore, + environmentId, traceId, + eventRepository, spanId, createdAt, completedAt, }: { eventStore: TaskEventStoreTable; + environmentId: string; traceId: string; + eventRepository: IEventRepository; spanId: string; createdAt: Date; completedAt: Date | null; }) { - const span = await eventRepository.getSpan( + const originalRunId = await eventRepository.getSpanOriginalRunId( eventStore, + environmentId, spanId, traceId, createdAt, completedAt ?? undefined ); - if (!span) { - return; - } - - const run = await this._replica.taskRun.findFirst({ - select: { - id: true, - spanId: true, - traceId: true, - //metadata - number: true, - taskIdentifier: true, - friendlyId: true, - isTest: true, - maxDurationInSeconds: true, - taskEventStore: true, - tags: { - select: { - name: true, - }, - }, - machinePreset: true, - lockedToVersion: { - select: { - version: true, - sdkVersion: true, - }, - }, - engine: true, - workerQueue: true, - error: true, - output: true, - outputType: true, - //status + duration - status: true, - statusReason: true, - startedAt: true, - executedAt: true, - createdAt: true, - updatedAt: true, - queuedAt: true, - completedAt: true, - logsDeletedAt: true, - //idempotency - idempotencyKey: true, - idempotencyKeyExpiresAt: true, - //delayed - delayUntil: true, - //ttl - ttl: true, - expiredAt: true, - //queue - queue: true, - concurrencyKey: true, - //schedule - scheduleId: true, - //usage - baseCostInCents: true, - costInCents: true, - usageDurationMs: true, - //env - runtimeEnvironment: { - select: { id: true, slug: true, type: true }, - }, - payload: true, - payloadType: true, - metadata: true, - metadataType: true, - maxAttempts: true, - project: { - include: { - organization: true, - }, - }, - lockedBy: { - select: { - filePath: true, - }, - }, - //relationships - rootTaskRun: { - select: { - taskIdentifier: true, - friendlyId: true, - spanId: true, - createdAt: true, - }, - }, - parentTaskRun: { - select: { - taskIdentifier: true, - friendlyId: true, - spanId: true, - }, - }, - batch: { - select: { - friendlyId: true, - }, - }, - }, - where: span.originalRun - ? { - friendlyId: span.originalRun, - } - : { - spanId, - }, - }); + const run = await this.findRun({ originalRunId, spanId, environmentId }); if (!run) { return; @@ -266,48 +188,27 @@ export class SpanPresenter extends BasePresenter { }) : undefined; - const context = { - task: { - id: run.taskIdentifier, - filePath: run.lockedBy?.filePath, - }, - run: { - id: run.friendlyId, - createdAt: run.createdAt, - tags: run.tags.map((tag) => tag.name), - isTest: run.isTest, - idempotencyKey: run.idempotencyKey ?? undefined, - startedAt: run.startedAt ?? run.createdAt, - durationMs: run.usageDurationMs, - costInCents: run.costInCents, - baseCostInCents: run.baseCostInCents, - maxAttempts: run.maxAttempts ?? undefined, - version: run.lockedToVersion?.version, - maxDuration: run.maxDurationInSeconds ?? 
undefined, - }, - queue: { - name: run.queue, - }, - environment: { - id: run.runtimeEnvironment.id, - slug: run.runtimeEnvironment.slug, - type: run.runtimeEnvironment.type, - }, - organization: { - id: run.project.organization.id, - slug: run.project.organization.slug, - name: run.project.organization.title, - }, - project: { - id: run.project.id, - ref: run.project.externalRef, - slug: run.project.slug, - name: run.project.name, - }, - machine: run.machinePreset - ? machinePresetFromName(run.machinePreset as MachinePresetName) - : undefined, - }; + const machine = run.machinePreset ? machinePresetFromRun(run) : undefined; + + const context = await this.#getTaskRunContext({ run, machine: machine ?? undefined }); + + const externalTraceId = this.#getExternalTraceId(run.traceContext); + + let region: { name: string; location: string | null } | null = null; + + if (run.runtimeEnvironment.type !== "DEVELOPMENT" && run.engine !== "V1") { + const workerGroup = await this._replica.workerInstanceGroup.findFirst({ + select: { + name: true, + location: true, + }, + where: { + masterQueue: run.workerQueue, + }, + }); + + region = workerGroup ?? null; + } return { id: run.id, @@ -326,17 +227,21 @@ export class SpanPresenter extends BasePresenter { taskIdentifier: run.taskIdentifier, version: run.lockedToVersion?.version, sdkVersion: run.lockedToVersion?.sdkVersion, + runtime: run.lockedToVersion?.runtime, + runtimeVersion: run.lockedToVersion?.runtimeVersion, isTest: run.isTest, + replayedFromTaskRunFriendlyId: run.replayedFromTaskRunFriendlyId, environmentId: run.runtimeEnvironment.id, idempotencyKey: run.idempotencyKey, idempotencyKeyExpiresAt: run.idempotencyKeyExpiresAt, + debounce: run.debounce as { key: string; delay: string; createdAt: Date } | null, schedule: await this.resolveSchedule(run.scheduleId ?? undefined), queue: { name: run.queue, isCustomQueue: !run.queue.startsWith("task/"), concurrencyKey: run.concurrencyKey, }, - tags: run.tags.map((tag) => tag.name), + tags: run.runTags, baseCostInCents: run.baseCostInCents, costInCents: run.costInCents, totalCostInCents: run.costInCents + run.baseCostInCents, @@ -363,9 +268,14 @@ export class SpanPresenter extends BasePresenter { maxDurationInSeconds: getMaxDuration(run.maxDurationInSeconds), batch: run.batch ? 
{ friendlyId: run.batch.friendlyId } : undefined, engine: run.engine, + region, workerQueue: run.workerQueue, + traceId: run.traceId, spanId: run.spanId, - isCached: !!span.originalRun, + isCached: !!originalRunId, + machinePreset: machine?.name, + taskEventStore: run.taskEventStore, + externalTraceId, }; } @@ -398,8 +308,143 @@ export class SpanPresenter extends BasePresenter { }; } + async findRun({ + originalRunId, + spanId, + environmentId, + }: { + originalRunId?: string; + spanId: string; + environmentId: string; + }) { + const run = await this._replica.taskRun.findFirst({ + select: { + id: true, + spanId: true, + traceId: true, + traceContext: true, + //metadata + number: true, + taskIdentifier: true, + friendlyId: true, + isTest: true, + maxDurationInSeconds: true, + taskEventStore: true, + runTags: true, + machinePreset: true, + lockedToVersion: { + select: { + version: true, + sdkVersion: true, + runtime: true, + runtimeVersion: true, + }, + }, + engine: true, + workerQueue: true, + error: true, + output: true, + outputType: true, + //status + duration + status: true, + statusReason: true, + startedAt: true, + executedAt: true, + createdAt: true, + updatedAt: true, + queuedAt: true, + completedAt: true, + logsDeletedAt: true, + //idempotency + idempotencyKey: true, + idempotencyKeyExpiresAt: true, + //debounce + debounce: true, + //delayed + delayUntil: true, + //ttl + ttl: true, + expiredAt: true, + //queue + queue: true, + concurrencyKey: true, + //schedule + scheduleId: true, + //usage + baseCostInCents: true, + costInCents: true, + usageDurationMs: true, + //env + runtimeEnvironment: { + select: { id: true, slug: true, type: true }, + }, + payload: true, + payloadType: true, + metadata: true, + metadataType: true, + maxAttempts: true, + project: { + include: { + organization: true, + }, + }, + lockedBy: { + select: { + filePath: true, + }, + }, + //relationships + rootTaskRun: { + select: { + taskIdentifier: true, + friendlyId: true, + spanId: true, + createdAt: true, + }, + }, + parentTaskRun: { + select: { + taskIdentifier: true, + friendlyId: true, + spanId: true, + }, + }, + batch: { + select: { + friendlyId: true, + }, + }, + replayedFromTaskRunFriendlyId: true, + attempts: { + take: 1, + orderBy: { + createdAt: "desc", + }, + select: { + number: true, + status: true, + createdAt: true, + friendlyId: true, + }, + }, + }, + where: originalRunId + ? { + friendlyId: originalRunId, + runtimeEnvironmentId: environmentId, + } + : { + spanId, + runtimeEnvironmentId: environmentId, + }, + }); + + return run; + } + async #getSpan({ eventStore, + eventRepository, traceId, spanId, environmentId, @@ -407,6 +452,7 @@ export class SpanPresenter extends BasePresenter { createdAt, completedAt, }: { + eventRepository: IEventRepository; traceId: string; spanId: string; environmentId: string; @@ -417,12 +463,14 @@ export class SpanPresenter extends BasePresenter { }) { const span = await eventRepository.getSpan( eventStore, + environmentId, spanId, traceId, createdAt, completedAt ?? 
undefined, { includeDebugLogs: true } ); + if (!span) { return; } @@ -434,11 +482,7 @@ export class SpanPresenter extends BasePresenter { spanId: true, createdAt: true, number: true, - lockedToVersion: { - select: { - version: true, - }, - }, + taskVersion: true, }, where: { parentSpanId: spanId, @@ -446,11 +490,32 @@ export class SpanPresenter extends BasePresenter { }); const data = { - ...span, + spanId: span.spanId, + parentId: span.parentId, + message: span.message, + isError: span.isError, + isPartial: span.isPartial, + isCancelled: span.isCancelled, + level: span.level, + startTime: span.startTime, + duration: span.duration, events: span.events, - properties: span.properties ? JSON.stringify(span.properties, null, 2) : undefined, + style: span.style, + properties: + span.properties && + typeof span.properties === "object" && + Object.keys(span.properties).length > 0 + ? JSON.stringify(span.properties, null, 2) + : undefined, + resourceProperties: + span.resourceProperties && + typeof span.resourceProperties === "object" && + Object.keys(span.resourceProperties).length > 0 + ? JSON.stringify(span.resourceProperties, null, 2) + : undefined, + entity: span.entity, + metadata: span.metadata, triggeredRuns, - showActionBar: span.show?.actions === true, }; switch (span.entity.type) { @@ -502,8 +567,144 @@ export class SpanPresenter extends BasePresenter { }, }; } + case "realtime-stream": { + if (!span.entity.id) { + logger.error(`SpanPresenter: No realtime stream id`, { + spanId, + realtimeStreamId: span.entity.id, + }); + return { ...data, entity: null }; + } + + const [runId, streamKey] = span.entity.id.split(":"); + + if (!runId || !streamKey) { + logger.error(`SpanPresenter: Invalid realtime stream id`, { + spanId, + realtimeStreamId: span.entity.id, + }); + return { ...data, entity: null }; + } + + const metadata = span.entity.metadata + ? (safeJsonParse(span.entity.metadata) as Record | undefined) + : undefined; + + return { + ...data, + entity: { + type: "realtime-stream" as const, + object: { + runId, + streamKey, + metadata, + }, + }, + }; + } default: return { ...data, entity: null }; } } + + async #getTaskRunContext({ run, machine }: { run: FindRunResult; machine?: MachinePreset }) { + if (run.engine === "V1") { + return this.#getV3TaskRunContext({ run, machine }); + } else { + return this.#getV4TaskRunContext({ run }); + } + } + + async #getV3TaskRunContext({ + run, + machine, + }: { + run: FindRunResult; + machine?: MachinePreset; + }): Promise { + const attempt = run.attempts[0]; + + const context = { + attempt: attempt + ? { + id: attempt.friendlyId, + number: attempt.number, + status: attempt.status, + startedAt: attempt.createdAt, + } + : { + id: AttemptId.generate().friendlyId, + number: 1, + status: "PENDING" as const, + startedAt: run.updatedAt, + }, + task: { + id: run.taskIdentifier, + filePath: run.lockedBy?.filePath ?? "", + }, + run: { + id: run.friendlyId, + createdAt: run.createdAt, + tags: run.runTags, + isTest: run.isTest, + idempotencyKey: run.idempotencyKey ?? undefined, + startedAt: run.startedAt ?? run.createdAt, + durationMs: run.usageDurationMs, + costInCents: run.costInCents, + baseCostInCents: run.baseCostInCents, + maxAttempts: run.maxAttempts ?? undefined, + version: run.lockedToVersion?.version, + maxDuration: run.maxDurationInSeconds ?? 
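The realtime-stream span entity above encodes its id as `"<runId>:<streamKey>"`; the presenter splits it and logs an error if either part is missing. A tiny standalone sketch of that parsing, with assumed example values:

```ts
interface RealtimeStreamRef {
  runId: string;
  streamKey: string;
}

// Entity ids for realtime streams are "<runId>:<streamKey>".
// Returns undefined (instead of throwing) when the id is malformed.
function parseRealtimeStreamId(id: string): RealtimeStreamRef | undefined {
  const [runId, streamKey] = id.split(":");
  if (!runId || !streamKey) return undefined;
  return { runId, streamKey };
}

parseRealtimeStreamId("run_abc123:progress"); // { runId: "run_abc123", streamKey: "progress" }
parseRealtimeStreamId("missing-separator");   // undefined
```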
undefined, + }, + queue: { + name: run.queue, + id: run.queue, + }, + environment: { + id: run.runtimeEnvironment.id, + slug: run.runtimeEnvironment.slug, + type: run.runtimeEnvironment.type, + }, + organization: { + id: run.project.organization.id, + slug: run.project.organization.slug, + name: run.project.organization.title, + }, + project: { + id: run.project.id, + ref: run.project.externalRef, + slug: run.project.slug, + name: run.project.name, + }, + machine, + } satisfies V3TaskRunContext; + + return context; + } + + async #getV4TaskRunContext({ run }: { run: FindRunResult }): Promise { + return engine.resolveTaskRunContext(run.id); + } + + #getExternalTraceId(traceContext: unknown) { + if (!traceContext) { + return; + } + + const parsedTraceContext = TriggerTraceContext.safeParse(traceContext); + + if (!parsedTraceContext.success) { + return; + } + + const externalTraceparent = parsedTraceContext.data.external?.traceparent; + + if (!externalTraceparent) { + return; + } + + const parsedTraceparent = parseTraceparent(externalTraceparent); + + return parsedTraceparent?.traceId; + } } diff --git a/apps/webapp/app/presenters/v3/TaskListPresenter.server.ts b/apps/webapp/app/presenters/v3/TaskListPresenter.server.ts index 59313a41c1..f1635f2337 100644 --- a/apps/webapp/app/presenters/v3/TaskListPresenter.server.ts +++ b/apps/webapp/app/presenters/v3/TaskListPresenter.server.ts @@ -11,7 +11,6 @@ import { type CurrentRunningStats, type DailyTaskActivity, type EnvironmentMetricsRepository, - PostgrestEnvironmentMetricsRepository, } from "~/services/environmentMetricsRepository.server"; import { singleton } from "~/utils/singleton"; import { findCurrentWorkerFromEnvironment } from "~/v3/models/workerDeployment.server"; @@ -110,13 +109,9 @@ export class TaskListPresenter { export const taskListPresenter = singleton("taskListPresenter", setupTaskListPresenter); function setupTaskListPresenter() { - const environmentMetricsRepository = clickhouseClient - ? 
new ClickHouseEnvironmentMetricsRepository({ - clickhouse: clickhouseClient, - }) - : new PostgrestEnvironmentMetricsRepository({ - prisma: $replica, - }); + const environmentMetricsRepository = new ClickHouseEnvironmentMetricsRepository({ + clickhouse: clickhouseClient, + }); return new TaskListPresenter(environmentMetricsRepository, $replica); } diff --git a/apps/webapp/app/presenters/v3/TestTaskPresenter.server.ts b/apps/webapp/app/presenters/v3/TestTaskPresenter.server.ts index 6109cd1b17..2817b7c8b8 100644 --- a/apps/webapp/app/presenters/v3/TestTaskPresenter.server.ts +++ b/apps/webapp/app/presenters/v3/TestTaskPresenter.server.ts @@ -1,11 +1,21 @@ +import { ClickHouse } from "@internal/clickhouse"; import { ScheduledTaskPayload, parsePacket, prettyPrintPacket } from "@trigger.dev/core/v3"; -import { type RuntimeEnvironmentType, type TaskRunStatus } from "@trigger.dev/database"; -import { type PrismaClient, prisma, sqlDatabaseSchema } from "~/db.server"; -import { getTimezones } from "~/utils/timezones.server"; import { - type BackgroundWorkerTaskSlim, - findCurrentWorkerDeployment, -} from "~/v3/models/workerDeployment.server"; + type RuntimeEnvironmentType, + type TaskRunStatus, + type TaskRunTemplate, + PrismaClientOrTransaction, +} from "@trigger.dev/database"; +import parse from "parse-duration"; +import { type PrismaClient } from "~/db.server"; +import { RunsRepository } from "~/services/runsRepository/runsRepository.server"; +import { getTimezones } from "~/utils/timezones.server"; +import { findCurrentWorkerDeployment } from "~/v3/models/workerDeployment.server"; +import { queueTypeFromType } from "./QueueRetrievePresenter.server"; + +export type RunTemplate = TaskRunTemplate & { + scheduledTaskPayload?: ScheduledRun["payload"]; +}; type TestTaskOptions = { userId: string; @@ -13,6 +23,8 @@ type TestTaskOptions = { environment: { id: string; type: RuntimeEnvironmentType; + projectId: string; + organizationId: string; }; taskIdentifier: string; }; @@ -24,31 +36,53 @@ type Task = { friendlyId: string; }; -export type TestTask = +type Queue = { + id: string; + name: string; + type: "custom" | "task"; + paused: boolean; +}; + +export type TestTaskResult = | { + foundTask: true; triggerSource: "STANDARD"; + queue?: Queue; task: Task; runs: StandardRun[]; + latestVersions: string[]; + disableVersionSelection: boolean; + allowArbitraryQueues: boolean; + taskRunTemplates: TaskRunTemplate[]; } | { + foundTask: true; triggerSource: "SCHEDULED"; + queue?: Queue; task: Task; possibleTimezones: string[]; runs: ScheduledRun[]; - }; - -export type TestTaskResult = - | { - foundTask: true; - task: TestTask; + latestVersions: string[]; + disableVersionSelection: boolean; + allowArbitraryQueues: boolean; + taskRunTemplates: TaskRunTemplate[]; } | { foundTask: false; }; +export type StandardTaskResult = Extract< + TestTaskResult, + { foundTask: true; triggerSource: "STANDARD" } +>; +export type ScheduledTaskResult = Extract< + TestTaskResult, + { foundTask: true; triggerSource: "SCHEDULED" } +>; + type RawRun = { id: string; - number: BigInt; + queue: string; friendlyId: string; createdAt: Date; status: TaskRunStatus; @@ -57,28 +91,35 @@ type RawRun = { runtimeEnvironmentId: string; seedMetadata?: string; seedMetadataType?: string; + concurrencyKey?: string; + maxAttempts?: number; + maxDurationInSeconds?: number; + machinePreset?: string; + ttl?: string; + idempotencyKey?: string; + runTags: string[]; }; -export type StandardRun = Omit & { - number: number; +export type StandardRun = Omit & { 
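`taskListPresenter` above is created through a `singleton(key, factory)` helper so the presenter (and its ClickHouse repository) is constructed once per process rather than per request. The helper itself is not part of this diff; one common implementation caches on `globalThis`, sketched here purely as an assumption:

```ts
// Cache factory results on globalThis so repeated imports and hot reloads
// reuse the same instance. A sketch, not the repo's actual helper.
const store = globalThis as unknown as { __singletons?: Map<string, unknown> };

function singleton<T>(key: string, factory: () => T): T {
  store.__singletons ??= new Map();
  if (!store.__singletons.has(key)) {
    store.__singletons.set(key, factory());
  }
  return store.__singletons.get(key) as T;
}

// Usage mirrors the call above:
const startupTime = singleton("startupTime", () => new Date());
```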
+ metadata?: string; + ttlSeconds?: number; }; -export type ScheduledRun = Omit & { - number: number; +export type ScheduledRun = Omit & { payload: { timestamp: Date; lastTimestamp?: Date; externalId?: string; timezone: string; }; + ttlSeconds?: number; }; export class TestTaskPresenter { - #prismaClient: PrismaClient; - - constructor(prismaClient: PrismaClient = prisma) { - this.#prismaClient = prismaClient; - } + constructor( + private readonly replica: PrismaClientOrTransaction, + private readonly clickhouse: ClickHouse + ) {} public async call({ userId, @@ -86,23 +127,20 @@ export class TestTaskPresenter { environment, taskIdentifier, }: TestTaskOptions): Promise { - let task: BackgroundWorkerTaskSlim | null = null; - if (environment.type !== "DEVELOPMENT") { - const deployment = await findCurrentWorkerDeployment({ environmentId: environment.id }); - if (deployment) { - task = deployment.worker?.tasks.find((t) => t.slug === taskIdentifier) ?? null; - } - } else { - task = await this.#prismaClient.backgroundWorkerTask.findFirst({ - where: { - slug: taskIdentifier, - runtimeEnvironmentId: environment.id, - }, - orderBy: { - createdAt: "desc", - }, - }); - } + const task = + environment.type !== "DEVELOPMENT" + ? ( + await findCurrentWorkerDeployment({ environmentId: environment.id }) + )?.worker?.tasks.find((t) => t.slug === taskIdentifier) + : await this.replica.backgroundWorkerTask.findFirst({ + where: { + slug: taskIdentifier, + runtimeEnvironmentId: environment.id, + }, + orderBy: { + createdAt: "desc", + }, + }); if (!task) { return { @@ -110,41 +148,101 @@ export class TestTaskPresenter { }; } - const latestRuns = await this.#prismaClient.$queryRaw` - WITH taskruns AS ( - SELECT - tr.* - FROM - ${sqlDatabaseSchema}."TaskRun" as tr - JOIN - ${sqlDatabaseSchema}."BackgroundWorkerTask" as bwt - ON - tr."taskIdentifier" = bwt.slug - WHERE - bwt."friendlyId" = ${task.friendlyId} AND - tr."runtimeEnvironmentId" = ${environment.id} - ORDER BY - tr."createdAt" DESC - LIMIT 10 - ) - SELECT - taskr.id, - taskr.number, - taskr."friendlyId", - taskr."taskIdentifier", - taskr."createdAt", - taskr.status, - taskr.payload, - taskr."payloadType", - taskr."seedMetadata", - taskr."seedMetadataType", - taskr."runtimeEnvironmentId" - FROM - taskruns AS taskr - WHERE - taskr."payloadType" = 'application/json' OR taskr."payloadType" = 'application/super+json' - ORDER BY - taskr."createdAt" DESC;`; + const taskQueue = task.queueId + ? 
await this.replica.taskQueue.findFirst({ + where: { + runtimeEnvironmentId: environment.id, + id: task.queueId, + }, + select: { + friendlyId: true, + name: true, + type: true, + paused: true, + }, + }) + : undefined; + + const backgroundWorkers = await this.replica.backgroundWorker.findMany({ + where: { + runtimeEnvironmentId: environment.id, + }, + select: { + version: true, + engine: true, + }, + orderBy: { + createdAt: "desc", + }, + take: 20, // last 20 versions should suffice + }); + + const taskRunTemplates = await this.replica.taskRunTemplate.findMany({ + where: { + projectId, + taskSlug: task.slug, + triggerSource: task.triggerSource, + }, + orderBy: { + createdAt: "desc", + }, + take: 50, + }); + + const latestVersions = backgroundWorkers.map((v) => v.version); + + const disableVersionSelection = environment.type === "DEVELOPMENT"; + const allowArbitraryQueues = backgroundWorkers[0]?.engine === "V1"; + + // Get the latest runs, for the payloads + const runsRepository = new RunsRepository({ + clickhouse: this.clickhouse, + prisma: this.replica as PrismaClient, + }); + + const runIds = await runsRepository.listRunIds({ + organizationId: environment.organizationId, + environmentId: environment.id, + projectId: environment.projectId, + tasks: [task.slug], + period: "30d", + page: { + size: 10, + }, + }); + + const latestRuns = await this.replica.taskRun.findMany({ + select: { + id: true, + queue: true, + friendlyId: true, + taskIdentifier: true, + createdAt: true, + status: true, + payload: true, + payloadType: true, + seedMetadata: true, + seedMetadataType: true, + runtimeEnvironmentId: true, + concurrencyKey: true, + maxAttempts: true, + maxDurationInSeconds: true, + machinePreset: true, + ttl: true, + runTags: true, + }, + where: { + id: { + in: runIds, + }, + payloadType: { + in: ["application/json", "application/super+json"], + }, + }, + orderBy: { + createdAt: "desc", + }, + }); const taskWithEnvironment = { id: task.id, @@ -157,61 +255,114 @@ export class TestTaskPresenter { case "STANDARD": return { foundTask: true, - task: { - triggerSource: "STANDARD", - task: taskWithEnvironment, - runs: await Promise.all( - latestRuns.map(async (r) => { - const number = Number(r.number); - - return { + triggerSource: "STANDARD", + queue: taskQueue + ? { + id: taskQueue.friendlyId, + name: taskQueue.name.replace(/^task\//, ""), + type: queueTypeFromType(taskQueue.type), + paused: taskQueue.paused, + } + : undefined, + task: taskWithEnvironment, + runs: await Promise.all( + latestRuns.map( + async (r) => + ({ ...r, - number, + seedMetadata: r.seedMetadata ?? undefined, + seedMetadataType: r.seedMetadataType ?? undefined, + concurrencyKey: r.concurrencyKey ?? undefined, + maxAttempts: r.maxAttempts ?? undefined, + maxDurationInSeconds: r.maxDurationInSeconds ?? undefined, + machinePreset: r.machinePreset ?? undefined, payload: await prettyPrintPacket(r.payload, r.payloadType), metadata: r.seedMetadata ? await prettyPrintPacket(r.seedMetadata, r.seedMetadataType) : undefined, - }; - }) - ), - }, + ttlSeconds: r.ttl ? parse(r.ttl, "s") ?? undefined : undefined, + } satisfies StandardRun) + ) + ), + latestVersions, + disableVersionSelection, + allowArbitraryQueues, + taskRunTemplates: await Promise.all( + taskRunTemplates.map(async (t) => ({ + ...t, + payload: await prettyPrintPacket(t.payload, t.payloadType), + metadata: t.metadata ? 
await prettyPrintPacket(t.metadata, t.metadataType) : null, + })) + ), }; - case "SCHEDULED": + case "SCHEDULED": { const possibleTimezones = getTimezones(); return { foundTask: true, - task: { - triggerSource: "SCHEDULED", - task: taskWithEnvironment, - possibleTimezones, - runs: ( - await Promise.all( - latestRuns.map(async (r) => { - const number = Number(r.number); - - const payload = await getScheduleTaskRunPayload(r); - - if (payload.success) { - return { - ...r, - number, - payload: payload.data, - }; - } - }) - ) - ).filter(Boolean), - }, + triggerSource: "SCHEDULED", + queue: taskQueue + ? { + id: taskQueue.friendlyId, + name: taskQueue.name.replace(/^task\//, ""), + type: queueTypeFromType(taskQueue.type), + paused: taskQueue.paused, + } + : undefined, + task: taskWithEnvironment, + possibleTimezones, + runs: ( + await Promise.all( + latestRuns.map(async (r) => { + const payload = await getScheduleTaskRunPayload(r.payload, r.payloadType); + + if (payload.success) { + return { + ...r, + seedMetadata: r.seedMetadata ?? undefined, + seedMetadataType: r.seedMetadataType ?? undefined, + concurrencyKey: r.concurrencyKey ?? undefined, + maxAttempts: r.maxAttempts ?? undefined, + maxDurationInSeconds: r.maxDurationInSeconds ?? undefined, + machinePreset: r.machinePreset ?? undefined, + payload: payload.data, + ttlSeconds: r.ttl ? parse(r.ttl, "s") ?? undefined : undefined, + } satisfies ScheduledRun; + } + }) + ) + ).filter(Boolean), + latestVersions, + disableVersionSelection, + allowArbitraryQueues, + taskRunTemplates: await Promise.all( + taskRunTemplates.map(async (t) => { + const scheduledTaskPayload = t.payload + ? await getScheduleTaskRunPayload(t.payload, t.payloadType) + : undefined; + + return { + ...t, + scheduledTaskPayload: + scheduledTaskPayload && scheduledTaskPayload.success + ? 
scheduledTaskPayload.data + : undefined, + }; + }) + ), }; + } + default: { + return task.triggerSource satisfies never; + } } } } -async function getScheduleTaskRunPayload(run: RawRun) { - const payload = await parsePacket({ data: run.payload, dataType: run.payloadType }); - if (!payload.timezone) { - payload.timezone = "UTC"; +async function getScheduleTaskRunPayload(payload: string, payloadType: string) { + const packet = await parsePacket({ data: payload, dataType: payloadType }); + if (!packet.timezone) { + packet.timezone = "UTC"; } - const parsed = ScheduledTaskPayload.safeParse(payload); + const parsed = ScheduledTaskPayload.safeParse(packet); return parsed; } diff --git a/apps/webapp/app/presenters/v3/UsagePresenter.server.ts b/apps/webapp/app/presenters/v3/UsagePresenter.server.ts index d599c78481..2fac95617a 100644 --- a/apps/webapp/app/presenters/v3/UsagePresenter.server.ts +++ b/apps/webapp/app/presenters/v3/UsagePresenter.server.ts @@ -124,60 +124,24 @@ async function getTaskUsageByOrganization( endOfMonth: Date, replica: PrismaClientOrTransaction ) { - if (clickhouseClient) { - const [queryError, tasks] = await clickhouseClient.taskRuns.getTaskUsageByOrganization({ - startTime: startOfMonth.getTime(), - endTime: endOfMonth.getTime(), - organizationId, - }); - - if (queryError) { - throw queryError; - } - - return tasks - .map((task) => ({ - taskIdentifier: task.task_identifier, - runCount: Number(task.run_count), - averageDuration: Number(task.average_duration), - averageCost: Number(task.average_cost) + env.CENTS_PER_RUN / 100, - totalDuration: Number(task.total_duration), - totalCost: Number(task.total_cost) + Number(task.total_base_cost), - })) - .sort((a, b) => b.totalCost - a.totalCost); - } else { - return replica.$queryRaw` - SELECT - tr."taskIdentifier", - COUNT(*) AS "runCount", - AVG(tr."usageDurationMs") AS "averageDuration", - SUM(tr."usageDurationMs") AS "totalDuration", - AVG(tr."costInCents") / 100.0 AS "averageCost", - SUM(tr."costInCents") / 100.0 AS "totalCost", - SUM(tr."baseCostInCents") / 100.0 AS "totalBaseCost" - FROM - ${sqlDatabaseSchema}."TaskRun" tr - JOIN ${sqlDatabaseSchema}."Project" pr ON pr.id = tr."projectId" - JOIN ${sqlDatabaseSchema}."Organization" org ON org.id = pr."organizationId" - JOIN ${sqlDatabaseSchema}."RuntimeEnvironment" env ON env."id" = tr."runtimeEnvironmentId" - WHERE - env.type <> 'DEVELOPMENT' - AND tr."createdAt" > ${startOfMonth} - AND tr."createdAt" < ${endOfMonth} - AND org.id = ${organizationId} - GROUP BY - tr."taskIdentifier"; - `.then((data) => { - return data - .map((item) => ({ - taskIdentifier: item.taskIdentifier, - runCount: Number(item.runCount), - averageDuration: Number(item.averageDuration), - averageCost: Number(item.averageCost) + env.CENTS_PER_RUN / 100, - totalDuration: Number(item.totalDuration), - totalCost: Number(item.totalCost) + Number(item.totalBaseCost), - })) - .sort((a, b) => b.totalCost - a.totalCost); - }); + const [queryError, tasks] = await clickhouseClient.taskRuns.getTaskUsageByOrganization({ + startTime: startOfMonth.getTime(), + endTime: endOfMonth.getTime(), + organizationId, + }); + + if (queryError) { + throw queryError; } + + return tasks + .map((task) => ({ + taskIdentifier: task.task_identifier, + runCount: Number(task.run_count), + averageDuration: Number(task.average_duration), + averageCost: Number(task.average_cost) + env.CENTS_PER_RUN / 100, + totalDuration: Number(task.total_duration), + totalCost: Number(task.total_cost) + Number(task.total_base_cost), + })) + .sort((a, 
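The rewritten presenter switch above ends its `default` branch with `return task.triggerSource satisfies never;` as a compile-time exhaustiveness guard (the GitHub callback route later in this diff uses the same trick). A minimal standalone sketch of that pattern, with illustrative names that are not from this PR:

```ts
// Illustrative sketch of the `satisfies never` exhaustiveness guard used above.
// If a new member is added to the union and not handled, this stops compiling.
type TriggerSource = "STANDARD" | "SCHEDULED";

function describeTriggerSource(source: TriggerSource): string {
  switch (source) {
    case "STANDARD":
      return "standard task";
    case "SCHEDULED":
      return "scheduled task";
    default:
      // `source` is narrowed to `never` here, so this only type-checks while every case is covered.
      return source satisfies never;
  }
}
```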
b) => b.totalCost - a.totalCost); } diff --git a/apps/webapp/app/presenters/v3/VersionListPresenter.server.ts b/apps/webapp/app/presenters/v3/VersionListPresenter.server.ts new file mode 100644 index 0000000000..f541d884f8 --- /dev/null +++ b/apps/webapp/app/presenters/v3/VersionListPresenter.server.ts @@ -0,0 +1,74 @@ +import { CURRENT_DEPLOYMENT_LABEL } from "@trigger.dev/core/v3/isomorphic"; +import { type RuntimeEnvironment } from "@trigger.dev/database"; +import { BasePresenter } from "./basePresenter.server"; + +const DEFAULT_ITEMS_PER_PAGE = 25; +const MAX_ITEMS_PER_PAGE = 100; + +export class VersionListPresenter extends BasePresenter { + private readonly perPage: number; + + constructor(perPage: number = DEFAULT_ITEMS_PER_PAGE) { + super(); + this.perPage = Math.min(perPage, MAX_ITEMS_PER_PAGE); + } + + public async call({ + environment, + query, + }: { + environment: Pick; + query?: string; + }) { + const hasFilters = query !== undefined && query.length > 0; + + const versions = await this._replica.backgroundWorker.findMany({ + select: { + version: true, + }, + where: { + runtimeEnvironmentId: environment.id, + version: query + ? { + contains: query, + } + : undefined, + }, + orderBy: { + createdAt: "desc", + }, + take: this.perPage, + }); + + let currentVersion: string | undefined; + + if (environment.type !== "DEVELOPMENT") { + const currentWorker = await this._replica.workerDeploymentPromotion.findFirst({ + select: { + deployment: { + select: { + version: true, + }, + }, + }, + where: { + environmentId: environment.id, + label: CURRENT_DEPLOYMENT_LABEL, + }, + }); + + if (currentWorker) { + currentVersion = currentWorker.deployment.version; + } + } + + return { + success: true as const, + versions: versions.map((version) => ({ + version: version.version, + isCurrent: version.version === currentVersion, + })), + hasFilters, + }; + } +} diff --git a/apps/webapp/app/presenters/v3/ViewSchedulePresenter.server.ts b/apps/webapp/app/presenters/v3/ViewSchedulePresenter.server.ts index 08006490a9..f0e955fd04 100644 --- a/apps/webapp/app/presenters/v3/ViewSchedulePresenter.server.ts +++ b/apps/webapp/app/presenters/v3/ViewSchedulePresenter.server.ts @@ -1,8 +1,10 @@ import { ScheduleObject } from "@trigger.dev/core/v3"; import { PrismaClient, prisma } from "~/db.server"; import { displayableEnvironment } from "~/models/runtimeEnvironment.server"; +import { clickhouseClient } from "~/services/clickhouseInstance.server"; import { nextScheduledTimestamps } from "~/v3/utils/calculateNextSchedule.server"; -import { RunListPresenter } from "./RunListPresenter.server"; +import { NextRunListPresenter } from "./NextRunListPresenter.server"; +import { scheduleWhereClause } from "~/models/schedules.server"; type ViewScheduleOptions = { userId?: string; @@ -34,6 +36,7 @@ export class ViewSchedulePresenter { project: { select: { id: true, + organizationId: true, }, }, instances: { @@ -61,10 +64,7 @@ export class ViewSchedulePresenter { }, active: true, }, - where: { - friendlyId, - projectId, - }, + where: scheduleWhereClause(projectId, friendlyId), }); if (!schedule) { @@ -75,12 +75,12 @@ export class ViewSchedulePresenter { ? 
nextScheduledTimestamps(schedule.generatorExpression, schedule.timezone, new Date(), 5) : []; - const runPresenter = new RunListPresenter(this.#prismaClient); - - const { runs } = await runPresenter.call(environmentId, { + const runPresenter = new NextRunListPresenter(this.#prismaClient, clickhouseClient); + const { runs } = await runPresenter.call(schedule.project.organizationId, environmentId, { projectId: schedule.project.id, scheduleId: schedule.id, pageSize: 5, + period: "31d", }); return { diff --git a/apps/webapp/app/presenters/v3/WaitpointPresenter.server.ts b/apps/webapp/app/presenters/v3/WaitpointPresenter.server.ts index d61a68a00e..9abcdf3221 100644 --- a/apps/webapp/app/presenters/v3/WaitpointPresenter.server.ts +++ b/apps/webapp/app/presenters/v3/WaitpointPresenter.server.ts @@ -1,8 +1,9 @@ import { isWaitpointOutputTimeout, prettyPrintPacket } from "@trigger.dev/core/v3"; +import { clickhouseClient } from "~/services/clickhouseInstance.server"; import { generateHttpCallbackUrl } from "~/services/httpCallback.server"; import { logger } from "~/services/logger.server"; import { BasePresenter } from "./basePresenter.server"; -import { type RunListItem, RunListPresenter } from "./RunListPresenter.server"; +import { NextRunListPresenter, type NextRunListItem } from "./NextRunListPresenter.server"; import { waitpointStatusToApiStatus } from "./WaitpointListPresenter.server"; export type WaitpointDetail = NonNullable>>; @@ -47,6 +48,7 @@ export class WaitpointPresenter extends BasePresenter { environment: { select: { apiKey: true, + organizationId: true, }, }, }, @@ -74,15 +76,21 @@ export class WaitpointPresenter extends BasePresenter { } const connectedRunIds = waitpoint.connectedRuns.map((run) => run.friendlyId); - const connectedRuns: RunListItem[] = []; + const connectedRuns: NextRunListItem[] = []; if (connectedRunIds.length > 0) { - const runPresenter = new RunListPresenter(); - const { runs } = await runPresenter.call(environmentId, { - projectId: projectId, - runIds: connectedRunIds, - pageSize: 5, - }); + const runPresenter = new NextRunListPresenter(this._prisma, clickhouseClient); + const { runs } = await runPresenter.call( + waitpoint.environment.organizationId, + environmentId, + { + projectId: projectId, + runId: connectedRunIds, + pageSize: 5, + period: "31d", + } + ); + connectedRuns.push(...runs); } diff --git a/apps/webapp/app/root.tsx b/apps/webapp/app/root.tsx index c6f0929eb8..fb5fef9c84 100644 --- a/apps/webapp/app/root.tsx +++ b/apps/webapp/app/root.tsx @@ -20,10 +20,17 @@ export const links: LinksFunction = () => { return [{ rel: "stylesheet", href: tailwindStylesheetUrl }]; }; +export const headers = () => ({ + "Referrer-Policy": "strict-origin-when-cross-origin", + "X-Content-Type-Options": "nosniff", + "Permissions-Policy": + "geolocation=(), microphone=(), camera=(), accelerometer=(), gyroscope=(), magnetometer=(), payment=(), usb=()", +}); + export const meta: MetaFunction = ({ data }) => { const typedData = data as UseDataFunctionReturn; return [ - { title: `Trigger.dev${appEnvTitleTag(typedData.appEnv)}` }, + { title: typedData?.appEnv ? 
`Trigger.dev${appEnvTitleTag(typedData.appEnv)}` : "Trigger.dev" }, { name: "viewport", content: "width=1024, initial-scale=1", @@ -56,6 +63,7 @@ export const loader = async ({ request }: LoaderFunctionArgs) => { features, appEnv: env.APP_ENV, appOrigin: env.APP_ORIGIN, + triggerCliTag: env.TRIGGER_CLI_TAG, kapa, }, { headers: { "Set-Cookie": await commitSession(session) } } @@ -83,11 +91,13 @@ export function ErrorBoundary() { - - - - - + + + + + + + diff --git a/apps/webapp/app/routes/_app.github.callback/route.tsx b/apps/webapp/app/routes/_app.github.callback/route.tsx new file mode 100644 index 0000000000..cd67f27f54 --- /dev/null +++ b/apps/webapp/app/routes/_app.github.callback/route.tsx @@ -0,0 +1,121 @@ +import { type LoaderFunctionArgs } from "@remix-run/node"; +import { z } from "zod"; +import { validateGitHubAppInstallSession } from "~/services/gitHubSession.server"; +import { linkGitHubAppInstallation, updateGitHubAppInstallation } from "~/services/gitHub.server"; +import { logger } from "~/services/logger.server"; +import { redirectWithErrorMessage, redirectWithSuccessMessage } from "~/models/message.server"; +import { tryCatch } from "@trigger.dev/core"; +import { $replica } from "~/db.server"; +import { requireUser } from "~/services/session.server"; +import { sanitizeRedirectPath } from "~/utils"; + +const QuerySchema = z.discriminatedUnion("setup_action", [ + z.object({ + setup_action: z.literal("install"), + installation_id: z.coerce.number(), + state: z.string(), + }), + z.object({ + setup_action: z.literal("update"), + installation_id: z.coerce.number(), + state: z.string(), + }), + z.object({ + setup_action: z.literal("request"), + state: z.string(), + }), +]); + +export async function loader({ request }: LoaderFunctionArgs) { + const url = new URL(request.url); + const queryParams = Object.fromEntries(url.searchParams); + const cookieHeader = request.headers.get("Cookie"); + + const result = QuerySchema.safeParse(queryParams); + + if (!result.success) { + logger.warn("GitHub App callback with invalid params", { + queryParams, + }); + return redirectWithErrorMessage("/", request, "Failed to install GitHub app"); + } + + const callbackData = result.data; + + const sessionResult = await validateGitHubAppInstallSession(cookieHeader, callbackData.state); + + if (!sessionResult.valid) { + logger.error("GitHub App callback with invalid session", { + callbackData, + error: sessionResult.error, + }); + + return redirectWithErrorMessage("/", request, "Failed to install GitHub app"); + } + + const { organizationId, redirectTo: unsafeRedirectTo } = sessionResult; + const redirectTo = sanitizeRedirectPath(unsafeRedirectTo); + + const user = await requireUser(request); + const org = await $replica.organization.findFirst({ + where: { id: organizationId, members: { some: { userId: user.id } }, deletedAt: null }, + orderBy: { createdAt: "desc" }, + select: { + id: true, + }, + }); + + if (!org) { + // the secure cookie approach should already protect against this + // just an additional check + logger.error("GitHub app installation attempt on unauthenticated org", { + userId: user.id, + organizationId, + }); + return redirectWithErrorMessage(redirectTo, request, "Failed to install GitHub app"); + } + + switch (callbackData.setup_action) { + case "install": { + const [error] = await tryCatch( + linkGitHubAppInstallation(callbackData.installation_id, organizationId) + ); + + if (error) { + logger.error("Failed to link GitHub App installation", { + error, + }); + return 
redirectWithErrorMessage(redirectTo, request, "Failed to install GitHub app"); + } + + return redirectWithSuccessMessage(redirectTo, request, "GitHub App installed successfully"); + } + + case "update": { + const [error] = await tryCatch(updateGitHubAppInstallation(callbackData.installation_id)); + + if (error) { + logger.error("Failed to update GitHub App installation", { + error, + }); + return redirectWithErrorMessage(redirectTo, request, "Failed to update GitHub App"); + } + + return redirectWithSuccessMessage(redirectTo, request, "GitHub App updated successfully"); + } + + case "request": { + // This happens when a non-admin user requests installation + // The installation_id won't be available until an admin approves + logger.info("GitHub App installation requested, awaiting approval", { + callbackData, + }); + + return redirectWithSuccessMessage(redirectTo, request, "GitHub App installation requested"); + } + + default: + callbackData satisfies never; + return redirectWithErrorMessage(redirectTo, request, "Failed to install GitHub app"); + } +} diff --git a/apps/webapp/app/routes/_app.github.install/route.tsx b/apps/webapp/app/routes/_app.github.install/route.tsx new file mode 100644 index 0000000000..42d68e5bec --- /dev/null +++ b/apps/webapp/app/routes/_app.github.install/route.tsx @@ -0,0 +1,52 @@ +import type { LoaderFunctionArgs } from "@remix-run/server-runtime"; +import { redirect } from "remix-typedjson"; +import { z } from "zod"; +import { $replica } from "~/db.server"; +import { createGitHubAppInstallSession } from "~/services/gitHubSession.server"; +import { requireUser } from "~/services/session.server"; +import { newOrganizationPath } from "~/utils/pathBuilder"; +import { logger } from "~/services/logger.server"; +import { sanitizeRedirectPath } from "~/utils"; + +const QuerySchema = z.object({ + org_slug: z.string(), + redirect_to: z.string().refine((value) => value === sanitizeRedirectPath(value), { + message: "Invalid redirect path", + }), +}); + +export const loader = async ({ request }: LoaderFunctionArgs) => { + const searchParams = new URL(request.url).searchParams; + const parsed = QuerySchema.safeParse(Object.fromEntries(searchParams)); + + if (!parsed.success) { + logger.warn("GitHub App installation redirect with invalid params", { + searchParams, + error: parsed.error, + }); + throw redirect("/"); + } + + const { org_slug, redirect_to } = parsed.data; + const user = await requireUser(request); + + const org = await $replica.organization.findFirst({ + where: { slug: org_slug, members: { some: { userId: user.id } }, deletedAt: null }, + orderBy: { createdAt: "desc" }, + select: { + id: true, + }, + }); + + if (!org) { + throw redirect(newOrganizationPath()); + } + + const { url, cookieHeader } = await createGitHubAppInstallSession(org.id, redirect_to); + + return redirect(url, { + headers: { + "Set-Cookie": cookieHeader, + }, + }); +}; diff --git a/apps/webapp/app/routes/_app.orgs.$organizationSlug.invite/route.tsx b/apps/webapp/app/routes/_app.orgs.$organizationSlug.invite/route.tsx index f3bf509b0f..8f82153052 100644 --- a/apps/webapp/app/routes/_app.orgs.$organizationSlug.invite/route.tsx +++ b/apps/webapp/app/routes/_app.orgs.$organizationSlug.invite/route.tsx @@ -143,7 +143,7 @@ export default function Page() { const emailFields = useFieldList(form.ref, emails); return ( - +
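Both GitHub App routes above pass the user-supplied `redirect_to`/`redirectTo` value through `sanitizeRedirectPath` before redirecting. The actual implementation is not part of this diff; the following is only a hypothetical sketch of the kind of open-redirect guard its usage implies:

```ts
// Hypothetical sketch only — not the actual sanitizeRedirectPath from this codebase.
// The idea implied by its usage: only ever redirect to a same-origin relative path.
function sanitizeRedirectPathSketch(path: string | null | undefined): string {
  // Reject absolute URLs ("https://evil.example") and protocol-relative URLs ("//evil.example").
  if (!path || !path.startsWith("/") || path.startsWith("//")) {
    return "/";
  }
  return path;
}
```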
} @@ -203,7 +203,7 @@ export default function Page() { } cancelButton={ - + Cancel } diff --git a/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.$projectParam.env.$envParam._index/route.tsx b/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.$projectParam.env.$envParam._index/route.tsx index 6152f8579c..5d6a947a42 100644 --- a/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.$projectParam.env.$envParam._index/route.tsx +++ b/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.$projectParam.env.$envParam._index/route.tsx @@ -3,6 +3,7 @@ import { BookOpenIcon, ChevronDownIcon, ChevronUpIcon, + ExclamationTriangleIcon, LightBulbIcon, MagnifyingGlassIcon, UserPlusIcon, @@ -13,7 +14,7 @@ import { Link, useRevalidator, useSubmit } from "@remix-run/react"; import { type ActionFunctionArgs, type LoaderFunctionArgs } from "@remix-run/server-runtime"; import { DiscordIcon } from "@trigger.dev/companyicons"; import { formatDurationMilliseconds } from "@trigger.dev/core/v3"; -import { type TaskRunStatus } from "@trigger.dev/database"; +import type { TaskRunStatus } from "@trigger.dev/database"; import { Fragment, Suspense, useEffect, useState } from "react"; import { Bar, BarChart, ResponsiveContainer, Tooltip, type TooltipProps } from "recharts"; import { TypedAwait, typeddefer, useTypedLoaderData } from "remix-typedjson"; @@ -76,7 +77,6 @@ import { type TaskActivity, type TaskListItem, taskListPresenter, - TaskListPresenter, } from "~/presenters/v3/TaskListPresenter.server"; import { getUsefulLinksPreference, @@ -299,7 +299,10 @@ export default function Page() { } > - + } + > {(data) => { const taskData = data[task.slug]; return taskData?.running ?? "0"; @@ -309,7 +312,10 @@ export default function Page() { }> - + } + > {(data) => { const taskData = data[task.slug]; return taskData?.queued ?? "0"; @@ -319,7 +325,10 @@ export default function Page() { }> - + } + > {(data) => { const taskData = data[task.slug]; return ( @@ -339,7 +348,10 @@ export default function Page() { }> - + } + > {(data) => { const taskData = data[task.slug]; return taskData @@ -359,12 +371,13 @@ export default function Page() { icon={RunsIcon} to={path} title="View runs" - leadingIconClassName="text-teal-500" + leadingIconClassName="text-runs" /> } @@ -398,7 +411,7 @@ export default function Page() { ) : ( - + )} @@ -503,6 +516,7 @@ function TaskActivityGraph({ activity }: { activity: TaskActivity }) { barSize={10} isAnimationActive={false} /> + @@ -533,7 +547,7 @@ function TaskActivityGraph({ activity }: { activity: TaskActivity }) { - + @@ -827,3 +841,12 @@ function LinkWithIcon({ ); } + +function FailedToLoadStats() { + return ( + } + content="We were unable to load the task stats, please try again later." 
+ /> + ); +} diff --git a/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.$projectParam.env.$envParam.alerts/route.tsx b/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.$projectParam.env.$envParam.alerts/route.tsx index a4debb8329..1bedd30d0f 100644 --- a/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.$projectParam.env.$envParam.alerts/route.tsx +++ b/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.$projectParam.env.$envParam.alerts/route.tsx @@ -13,7 +13,7 @@ import { import { Form, type MetaFunction, Outlet, useActionData, useNavigation } from "@remix-run/react"; import { type ActionFunctionArgs, type LoaderFunctionArgs, json } from "@remix-run/server-runtime"; import { SlackIcon } from "@trigger.dev/companyicons"; -import { type ProjectAlertChannelType, type ProjectAlertType } from "@trigger.dev/database"; +import type { ProjectAlertChannelType, ProjectAlertType } from "@trigger.dev/database"; import assertNever from "assert-never"; import { typedjson, useTypedLoaderData } from "remix-typedjson"; import { z } from "zod"; diff --git a/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.$projectParam.env.$envParam.batches.$batchParam/route.tsx b/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.$projectParam.env.$envParam.batches.$batchParam/route.tsx new file mode 100644 index 0000000000..91403f4597 --- /dev/null +++ b/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.$projectParam.env.$envParam.batches.$batchParam/route.tsx @@ -0,0 +1,307 @@ +import { ArrowRightIcon, ExclamationTriangleIcon } from "@heroicons/react/20/solid"; +import { type LoaderFunctionArgs } from "@remix-run/server-runtime"; +import { tryCatch } from "@trigger.dev/core"; +import { motion } from "framer-motion"; +import { typedjson, useTypedLoaderData } from "remix-typedjson"; +import { z } from "zod"; +import { ExitIcon } from "~/assets/icons/ExitIcon"; +import { RunsIcon } from "~/assets/icons/RunsIcon"; +import { LinkButton } from "~/components/primitives/Buttons"; +import { CopyableText } from "~/components/primitives/CopyableText"; +import { DateTime } from "~/components/primitives/DateTime"; +import { Header2, Header3 } from "~/components/primitives/Headers"; +import { Paragraph } from "~/components/primitives/Paragraph"; +import * as Property from "~/components/primitives/PropertyTable"; +import { + BatchStatusCombo, + descriptionForBatchStatus, +} from "~/components/runs/v3/BatchStatus"; +import { useAutoRevalidate } from "~/hooks/useAutoRevalidate"; +import { useEnvironment } from "~/hooks/useEnvironment"; +import { useOrganization } from "~/hooks/useOrganizations"; +import { useProject } from "~/hooks/useProject"; +import { findProjectBySlug } from "~/models/project.server"; +import { findEnvironmentBySlug } from "~/models/runtimeEnvironment.server"; +import { BatchPresenter, type BatchPresenterData } from "~/presenters/v3/BatchPresenter.server"; +import { requireUserId } from "~/services/session.server"; +import { cn } from "~/utils/cn"; +import { formatNumber } from "~/utils/numberFormatter"; +import { EnvironmentParamSchema, v3BatchesPath, v3BatchRunsPath } from "~/utils/pathBuilder"; + +const BatchParamSchema = EnvironmentParamSchema.extend({ + batchParam: z.string(), +}); + +export const loader = async ({ request, params }: LoaderFunctionArgs) => { + const userId = await requireUserId(request); + + const { organizationSlug, projectParam, envParam, batchParam } = + BatchParamSchema.parse(params); + + const project = await 
findProjectBySlug(organizationSlug, projectParam, userId); + if (!project) { + throw new Response("Not Found", { status: 404 }); + } + + const environment = await findEnvironmentBySlug(project.id, envParam, userId); + if (!environment) { + throw new Response("Not Found", { status: 404 }); + } + + try { + const presenter = new BatchPresenter(); + const [error, data] = await tryCatch( + presenter.call({ + environmentId: environment.id, + batchId: batchParam, + userId, + }) + ); + + if (error) { + throw new Error(error.message); + } + + return typedjson({ batch: data }); + } catch (error) { + console.error(error); + throw new Response(undefined, { + status: 400, + statusText: "Something went wrong, if this problem persists please contact support.", + }); + } +}; + +export default function Page() { + const { batch } = useTypedLoaderData(); + const organization = useOrganization(); + const project = useProject(); + const environment = useEnvironment(); + + // Auto-reload when batch is still in progress + useAutoRevalidate({ + interval: 1000, + onFocus: true, + disabled: batch.hasFinished, + }); + + const showProgressMeter = batch.isV2 && (batch.status === "PROCESSING" || batch.status === "PARTIAL_FAILED"); + + return ( +
+ {/* Header */} +
+ {batch.friendlyId} + +
+ + {/* Status bar */} +
+ + + {descriptionForBatchStatus(batch.status)} + +
+ + {/* Scrollable content */} +
+
+ {/* Progress meter for v2 batches */} + {showProgressMeter && ( +
+ +
+ )} + + {/* Properties */} +
+ + + ID + + + + + + Status + + + + + + Version + + {batch.isV2 ? "v2 (Run Engine)" : "v1 (Legacy)"} + + + + Total runs + {formatNumber(batch.runCount)} + + {batch.isV2 && ( + <> + + Successfully created + + {formatNumber(batch.successfulRunCount)} + + + {batch.failedRunCount > 0 && ( + + Failed to create + + {formatNumber(batch.failedRunCount)} + + + )} + + )} + {batch.idempotencyKey && ( + + Idempotency key + + + + + )} + + Created + + + + + {batch.processingStartedAt && ( + + Processing started + + + + + )} + {batch.processingCompletedAt && ( + + Processing completed + + + + + )} + + Finished + + {batch.finishedAt ? : "โ€“"} + + + +
+ + {/* Errors section */} + {batch.errors.length > 0 && ( +
+ + + Run creation errors ({batch.errors.length}) + +
+ {batch.errors.map((error) => ( +
+
+
+ + Item #{error.index} + + {error.taskIdentifier} +
+ {error.errorCode && ( + + {error.errorCode} + + )} +
+ + {error.error} + +
+ ))} +
+
+ )} +
+
+ + {/* Footer */} +
+ + View runs + +
+
+ ); +} + +type BatchProgressMeterProps = { + successCount: number; + failureCount: number; + totalCount: number; +}; + +function BatchProgressMeter({ successCount, failureCount, totalCount }: BatchProgressMeterProps) { + const processedCount = successCount + failureCount; + const successPercentage = totalCount === 0 ? 0 : (successCount / totalCount) * 100; + const failurePercentage = totalCount === 0 ? 0 : (failureCount / totalCount) * 100; + + return ( +
+
+ Run creation progress + + {formatNumber(processedCount)}/{formatNumber(totalCount)} + +
+
+ + +
+
+
+
+ {formatNumber(successCount)} created +
+ {failureCount > 0 && ( +
+
+ {formatNumber(failureCount)} failed +
+ )} +
+
+ ); +} + diff --git a/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.$projectParam.env.$envParam.batches/route.tsx b/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.$projectParam.env.$envParam.batches/route.tsx index 50c7b9a33d..a66e85c0f8 100644 --- a/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.$projectParam.env.$envParam.batches/route.tsx +++ b/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.$projectParam.env.$envParam.batches/route.tsx @@ -1,10 +1,6 @@ -import { - ArrowPathRoundedSquareIcon, - ArrowRightIcon, - ExclamationCircleIcon, -} from "@heroicons/react/20/solid"; +import { ArrowRightIcon, ExclamationCircleIcon } from "@heroicons/react/20/solid"; import { BookOpenIcon } from "@heroicons/react/24/solid"; -import { type MetaFunction, useLocation, useNavigation } from "@remix-run/react"; +import { type MetaFunction, Outlet, useNavigation, useParams, useLocation } from "@remix-run/react"; import { type LoaderFunctionArgs } from "@remix-run/server-runtime"; import { formatDuration } from "@trigger.dev/core/v3/utils/durations"; import { typedjson, useTypedLoaderData } from "remix-typedjson"; @@ -12,12 +8,15 @@ import { BatchesNone } from "~/components/BlankStatePanels"; import { ListPagination } from "~/components/ListPagination"; import { AdminDebugTooltip } from "~/components/admin/debugTooltip"; import { MainCenteredContainer, PageBody, PageContainer } from "~/components/layout/AppLayout"; -import { Button, LinkButton } from "~/components/primitives/Buttons"; +import { LinkButton } from "~/components/primitives/Buttons"; import { DateTime } from "~/components/primitives/DateTime"; -import { Dialog, DialogTrigger } from "~/components/primitives/Dialog"; import { NavBar, PageAccessories, PageTitle } from "~/components/primitives/PageHeader"; import { Paragraph } from "~/components/primitives/Paragraph"; -import { PopoverMenuItem } from "~/components/primitives/Popover"; +import { + ResizableHandle, + ResizablePanel, + ResizablePanelGroup, +} from "~/components/primitives/Resizable"; import { Spinner } from "~/components/primitives/Spinner"; import { Table, @@ -36,7 +35,6 @@ import { BatchStatusCombo, descriptionForBatchStatus, } from "~/components/runs/v3/BatchStatus"; -import { CheckBatchCompletionDialog } from "~/components/runs/v3/CheckBatchCompletionDialog"; import { LiveTimer } from "~/components/runs/v3/LiveTimer"; import { useEnvironment } from "~/hooks/useEnvironment"; import { useOrganization } from "~/hooks/useOrganizations"; @@ -44,13 +42,14 @@ import { useProject } from "~/hooks/useProject"; import { redirectWithErrorMessage } from "~/models/message.server"; import { findProjectBySlug } from "~/models/project.server"; import { findEnvironmentBySlug } from "~/models/runtimeEnvironment.server"; -import { - type BatchList, - type BatchListItem, - BatchListPresenter, -} from "~/presenters/v3/BatchListPresenter.server"; +import { type BatchList, BatchListPresenter } from "~/presenters/v3/BatchListPresenter.server"; import { requireUserId } from "~/services/session.server"; -import { docsPath, EnvironmentParamSchema, v3BatchRunsPath } from "~/utils/pathBuilder"; +import { + docsPath, + EnvironmentParamSchema, + v3BatchPath, + v3BatchRunsPath, +} from "~/utils/pathBuilder"; export const meta: MetaFunction = () => { return [ @@ -78,7 +77,6 @@ export const loader = async ({ request, params }: LoaderFunctionArgs) => { const s = { cursor: url.searchParams.get("cursor") ?? undefined, direction: url.searchParams.get("direction") ?? 
undefined, - environments: [environment.id], statuses: url.searchParams.getAll("statuses"), period: url.searchParams.get("period") ?? undefined, from: url.searchParams.get("from") ?? undefined, @@ -93,6 +91,7 @@ export const loader = async ({ request, params }: LoaderFunctionArgs) => { projectId: project.id, ...filters, friendlyId: filters.id, + environmentId: environment.id, }); return typedjson(list); @@ -101,6 +100,8 @@ export const loader = async ({ request, params }: LoaderFunctionArgs) => { export default function Page() { const { batches, hasFilters, hasAnyBatches, filters, pagination } = useTypedLoaderData(); + const { batchParam } = useParams(); + const isShowingInspector = batchParam !== undefined; return ( @@ -123,22 +124,34 @@ export default function Page() { ) : ( -
-
- -
- -
-
+ + +
+
+ +
+ +
+
- -
+ +
+ + {isShowingInspector && ( + <> + + + + + + )} + )}
@@ -147,10 +160,14 @@ export default function Page() { function BatchesTable({ batches, hasFilters, filters }: BatchList) { const navigation = useNavigation(); - const isLoading = navigation.state !== "idle"; + const location = useLocation(); + const isLoading = + navigation.state !== "idle" && navigation.location?.pathname === location.pathname; + const organization = useOrganization(); const project = useProject(); const environment = useEnvironment(); + const { batchParam } = useParams(); return ( @@ -195,15 +212,19 @@ function BatchesTable({ batches, hasFilters, filters }: BatchList) { ) : ( - batches.map((batch, index) => { - const path = v3BatchRunsPath(organization, project, environment, batch); + batches.map((batch) => { + const basePath = v3BatchPath(organization, project, environment, batch); + const inspectorPath = `${basePath}${location.search}`; + const runsPath = v3BatchRunsPath(organization, project, environment, batch); + const isSelected = batchParam === batch.friendlyId; + return ( - - + + {batch.friendlyId} - + {batch.batchVersion === "v1" ? ( )} - {batch.runCount} - + {batch.runCount} + {batch.finishedAt ? ( formatDuration(new Date(batch.createdAt), new Date(batch.finishedAt), { style: "short", @@ -233,13 +258,13 @@ function BatchesTable({ batches, hasFilters, filters }: BatchList) { )} - + - + {batch.finishedAt ? : "โ€“"} - + ); }) @@ -257,48 +282,14 @@ function BatchesTable({ batches, hasFilters, filters }: BatchList) { ); } -function BatchActionsCell({ batch, path }: { batch: BatchListItem; path: string }) { - const location = useLocation(); - - if (batch.hasFinished || batch.environment.type === "DEVELOPMENT") { - return {""}; - } - +function BatchActionsCell({ runsPath }: { runsPath: string }) { return ( - - {!batch.hasFinished && ( - - - - - - - )} - + hiddenButtons={ + + View runs + } /> ); diff --git a/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.$projectParam.env.$envParam.branches/route.tsx b/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.$projectParam.env.$envParam.branches/route.tsx index 7726e6a9b1..f356ee6c81 100644 --- a/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.$projectParam.env.$envParam.branches/route.tsx +++ b/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.$projectParam.env.$envParam.branches/route.tsx @@ -130,7 +130,10 @@ export async function action({ request }: ActionFunctionArgs) { } const upsertBranchService = new UpsertBranchService(); - const result = await upsertBranchService.call(userId, submission.value); + const result = await upsertBranchService.call( + { type: "userMembership", userId }, + submission.value + ); if (result.success) { if (result.alreadyExisted) { diff --git a/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.$projectParam.env.$envParam.bulk-actions.$bulkActionParam/route.tsx b/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.$projectParam.env.$envParam.bulk-actions.$bulkActionParam/route.tsx new file mode 100644 index 0000000000..0bd53caac3 --- /dev/null +++ b/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.$projectParam.env.$envParam.bulk-actions.$bulkActionParam/route.tsx @@ -0,0 +1,333 @@ +import { ArrowPathIcon } from "@heroicons/react/20/solid"; +import { Form } from "@remix-run/react"; +import { type ActionFunctionArgs, type LoaderFunctionArgs } from "@remix-run/server-runtime"; +import { tryCatch } from "@trigger.dev/core"; +import type { BulkActionType } from "@trigger.dev/database"; +import { motion } from "framer-motion"; 
+import { typedjson, useTypedLoaderData } from "remix-typedjson"; +import { z } from "zod"; +import { ExitIcon } from "~/assets/icons/ExitIcon"; +import { RunsIcon } from "~/assets/icons/RunsIcon"; +import { BulkActionFilterSummary } from "~/components/BulkActionFilterSummary"; +import { Button, LinkButton } from "~/components/primitives/Buttons"; +import { CopyableText } from "~/components/primitives/CopyableText"; +import { DateTime } from "~/components/primitives/DateTime"; +import { Header2 } from "~/components/primitives/Headers"; +import { Paragraph } from "~/components/primitives/Paragraph"; +import * as Property from "~/components/primitives/PropertyTable"; +import { BulkActionStatusCombo, BulkActionTypeCombo } from "~/components/runs/v3/BulkAction"; +import { UserAvatar } from "~/components/UserProfilePhoto"; +import { env } from "~/env.server"; +import { useAutoRevalidate } from "~/hooks/useAutoRevalidate"; +import { useEnvironment } from "~/hooks/useEnvironment"; +import { useOrganization } from "~/hooks/useOrganizations"; +import { useProject } from "~/hooks/useProject"; +import { redirectWithErrorMessage, redirectWithSuccessMessage } from "~/models/message.server"; +import { findProjectBySlug } from "~/models/project.server"; +import { findEnvironmentBySlug } from "~/models/runtimeEnvironment.server"; +import { BulkActionPresenter } from "~/presenters/v3/BulkActionPresenter.server"; +import { logger } from "~/services/logger.server"; +import { requireUserId } from "~/services/session.server"; +import { cn } from "~/utils/cn"; +import { formatNumber } from "~/utils/numberFormatter"; +import { + EnvironmentParamSchema, + v3BulkActionPath, + v3BulkActionsPath, + v3CreateBulkActionPath, + v3RunsPath, +} from "~/utils/pathBuilder"; +import { BulkActionService } from "~/v3/services/bulk/BulkActionV2.server"; + +const BulkActionParamSchema = EnvironmentParamSchema.extend({ + bulkActionParam: z.string(), +}); + +export const loader = async ({ request, params }: LoaderFunctionArgs) => { + const userId = await requireUserId(request); + + const { organizationSlug, projectParam, envParam, bulkActionParam } = + BulkActionParamSchema.parse(params); + + const project = await findProjectBySlug(organizationSlug, projectParam, userId); + if (!project) { + throw new Response("Not Found", { status: 404 }); + } + + const environment = await findEnvironmentBySlug(project.id, envParam, userId); + if (!environment) { + throw new Response("Not Found", { status: 404 }); + } + + try { + const presenter = new BulkActionPresenter(); + const [error, data] = await tryCatch( + presenter.call({ + environmentId: environment.id, + bulkActionId: bulkActionParam, + }) + ); + + if (error) { + throw new Error(error.message); + } + + const autoReloadPollIntervalMs = env.BULK_ACTION_AUTORELOAD_POLL_INTERVAL_MS; + + return typedjson({ bulkAction: data, autoReloadPollIntervalMs }); + } catch (error) { + console.error(error); + throw new Response(undefined, { + status: 400, + statusText: "Something went wrong, if this problem persists please contact support.", + }); + } +}; + +export const action = async ({ request, params }: ActionFunctionArgs) => { + const userId = await requireUserId(request); + const { organizationSlug, projectParam, envParam, bulkActionParam } = + BulkActionParamSchema.parse(params); + + const project = await findProjectBySlug(organizationSlug, projectParam, userId); + if (!project) { + throw new Response("Not Found", { status: 404 }); + } + + const environment = await 
findEnvironmentBySlug(project.id, envParam, userId); + if (!environment) { + throw new Response("Not Found", { status: 404 }); + } + + const service = new BulkActionService(); + const [error, result] = await tryCatch(service.abort(bulkActionParam, environment.id)); + + if (error) { + logger.error("Failed to abort bulk action", { + error, + }); + + return redirectWithErrorMessage( + v3BulkActionPath( + { slug: organizationSlug }, + { slug: projectParam }, + { slug: envParam }, + { friendlyId: bulkActionParam } + ), + request, + `Failed to abort bulk action: ${error.message}` + ); + } + + return redirectWithSuccessMessage( + v3BulkActionPath( + { slug: organizationSlug }, + { slug: projectParam }, + { slug: envParam }, + { friendlyId: bulkActionParam } + ), + request, + "Bulk action aborted" + ); +}; + +export default function Page() { + const { bulkAction, autoReloadPollIntervalMs } = useTypedLoaderData(); + const organization = useOrganization(); + const project = useProject(); + const environment = useEnvironment(); + + useAutoRevalidate({ + interval: autoReloadPollIntervalMs, + onFocus: true, + disabled: bulkAction.status !== "PENDING", + }); + + return ( +
+
+ + {bulkAction.name || bulkAction.friendlyId} + + +
+
+ + {bulkAction.status === "PENDING" ? ( +
+ + + ) : null} +
+
+
+
+ +
+
+ + + ID + + + + + + Bulk action + + + + + + User + + {bulkAction.user ? ( +
+ + {bulkAction.user.name} +
+ ) : ( + "โ€“" + )} +
+
+ + Created + + + + + + Completed + + {bulkAction.completedAt ? : "โ€“"} + + + + Summary + + + + +
+
+
+
+
+ + Replay runs + + + + View runs + +
+
+ ); +} + +type MeterProps = { + type: BulkActionType; + successCount: number; + failureCount: number; + totalCount: number; +}; + +function Meter({ type, successCount, failureCount, totalCount }: MeterProps) { + const successPercentage = totalCount === 0 ? 0 : (successCount / totalCount) * 100; + const failurePercentage = totalCount === 0 ? 0 : (failureCount / totalCount) * 100; + + return ( +
+
+ Runs + + {formatNumber(successCount + failureCount)}/{formatNumber(totalCount)} + +
+
+ + +
+
+
+
+ + {formatNumber(successCount)} {typeText(type)} successfully + +
+
+
+ + {formatNumber(failureCount)} {typeText(type)} failed{" "} + {type === "CANCEL" ? " (already finished)" : ""} + +
+
+
+ ); +} + +function typeText(type: BulkActionType) { + switch (type) { + case "CANCEL": + return "canceled"; + case "REPLAY": + return "replayed"; + } +} diff --git a/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.$projectParam.env.$envParam.bulk-actions/route.tsx b/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.$projectParam.env.$envParam.bulk-actions/route.tsx new file mode 100644 index 0000000000..f44ce5904d --- /dev/null +++ b/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.$projectParam.env.$envParam.bulk-actions/route.tsx @@ -0,0 +1,297 @@ +import { BookOpenIcon, PlusIcon } from "@heroicons/react/20/solid"; +import { Outlet, useParams, type MetaFunction } from "@remix-run/react"; +import { type LoaderFunctionArgs } from "@remix-run/server-runtime"; +import { tryCatch } from "@trigger.dev/core"; +import { typedjson, useTypedLoaderData } from "remix-typedjson"; +import { z } from "zod"; +import { AdminDebugTooltip } from "~/components/admin/debugTooltip"; +import { BulkActionsNone } from "~/components/BlankStatePanels"; +import { MainCenteredContainer, PageBody, PageContainer } from "~/components/layout/AppLayout"; +import { LinkButton } from "~/components/primitives/Buttons"; +import { DateTime } from "~/components/primitives/DateTime"; +import { NavBar, PageAccessories, PageTitle } from "~/components/primitives/PageHeader"; +import { PaginationControls } from "~/components/primitives/Pagination"; +import { Paragraph } from "~/components/primitives/Paragraph"; +import { + ResizableHandle, + ResizablePanel, + ResizablePanelGroup, +} from "~/components/primitives/Resizable"; +import { + Table, + TableBlankRow, + TableBody, + TableCell, + TableHeader, + TableHeaderCell, + TableRow, +} from "~/components/primitives/Table"; +import { TruncatedCopyableValue } from "~/components/primitives/TruncatedCopyableValue"; +import { BulkActionStatusCombo, BulkActionTypeCombo } from "~/components/runs/v3/BulkAction"; +import { UserAvatar } from "~/components/UserProfilePhoto"; +import { useEnvironment } from "~/hooks/useEnvironment"; +import { useOrganization } from "~/hooks/useOrganizations"; +import { useProject } from "~/hooks/useProject"; +import { findProjectBySlug } from "~/models/project.server"; +import { findEnvironmentBySlug } from "~/models/runtimeEnvironment.server"; +import { + type BulkActionListItem, + BulkActionListPresenter, +} from "~/presenters/v3/BulkActionListPresenter.server"; +import { requireUserId } from "~/services/session.server"; +import { cn } from "~/utils/cn"; +import { + docsPath, + EnvironmentParamSchema, + v3BulkActionPath, + v3CreateBulkActionPath, +} from "~/utils/pathBuilder"; + +export const meta: MetaFunction = () => { + return [ + { + title: `Bulk actions | Trigger.dev`, + }, + ]; +}; + +const SearchParamsSchema = z.object({ + page: z.coerce.number().optional(), +}); + +export const loader = async ({ request, params }: LoaderFunctionArgs) => { + const userId = await requireUserId(request); + + const { organizationSlug, projectParam, envParam } = EnvironmentParamSchema.parse(params); + + const project = await findProjectBySlug(organizationSlug, projectParam, userId); + if (!project) { + throw new Response("Not Found", { status: 404 }); + } + + const environment = await findEnvironmentBySlug(project.id, envParam, userId); + if (!environment) { + throw new Response("Not Found", { status: 404 }); + } + + try { + const url = new URL(request.url); + const { page } = SearchParamsSchema.parse(Object.fromEntries(url.searchParams)); + 
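These new routes repeatedly use `const [error, data] = await tryCatch(...)` from `@trigger.dev/core` instead of nesting try/catch blocks. A minimal sketch of a helper with the tuple shape implied by that usage (the real implementation in the package may differ in details such as error typing):

```ts
// Minimal sketch of a tryCatch-style helper matching the [error, result] usage in these routes.
async function tryCatchSketch<T>(
  promise: Promise<T>
): Promise<[Error, undefined] | [undefined, T]> {
  try {
    return [undefined, await promise];
  } catch (thrown) {
    return [thrown instanceof Error ? thrown : new Error(String(thrown)), undefined];
  }
}

// Usage mirrors the loaders above: branch on the first tuple element instead of wrapping in try/catch.
// const [error, data] = await tryCatchSketch(presenter.call({ environmentId, page }));
```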
+ const presenter = new BulkActionListPresenter(); + const [error, data] = await tryCatch( + presenter.call({ + environmentId: environment.id, + page, + }) + ); + + if (error) { + throw new Error(error.message); + } + + return typedjson(data); + } catch (error) { + console.error(error); + throw new Response(undefined, { + status: 400, + statusText: "Something went wrong, if this problem persists please contact support.", + }); + } +}; + +export default function Page() { + const { bulkActions, currentPage, totalPages, totalCount } = useTypedLoaderData(); + const organization = useOrganization(); + const project = useProject(); + const environment = useEnvironment(); + const { bulkActionParam } = useParams(); + const isShowingInspector = bulkActionParam !== undefined; + + return ( + + + + + + + Bulk actions docs + + + New bulk action + + + + + {bulkActions.length === 0 ? ( + + + + ) : ( + + +
1 ? "grid-rows-[auto_1fr_auto]" : "grid-rows-[1fr]" + )} + > + {totalPages > 1 && ( +
+ +
+ )} + + + {totalPages > 1 && ( +
1 && "justify-end border-t border-grid-dimmed px-2 py-3" + )} + > + +
+ )} +
+
+ {isShowingInspector && ( + <> + + + + + + )} +
+ )} +
+
+ ); +} + +function BulkActionsTable({ + bulkActions, + totalPages, +}: { + bulkActions: BulkActionListItem[]; + totalPages: number; +}) { + const organization = useOrganization(); + const project = useProject(); + const environment = useEnvironment(); + const { bulkActionParam } = useParams(); + + return ( +
+ + + ID + Name + +
+
+ +
+ + The bulk action is currently in progress. It can take some time if there are + lots of runs. + 
+
+
+ +
+ + The bulk action has completed successfully. + +
+
+
+ +
+ + The bulk action was aborted. + +
+ + } + > + Status +
+ Bulk action + Runs + User + Created + Completed +
+
+ + {bulkActions.length === 0 ? ( + There are no matching bulk actions + ) : ( + bulkActions.map((bulkAction) => { + const path = v3BulkActionPath(organization, project, environment, bulkAction); + const isSelected = bulkActionParam === bulkAction.friendlyId; + + return ( + + + + + {bulkAction.name || "โ€“"} + + + + + + + {bulkAction.totalCount} + + {bulkAction.user ? ( +
+ + {bulkAction.user.name} +
+ ) : ( + "โ€“" + )} +
+ + + + + {bulkAction.completedAt ? : "โ€“"} + +
+ ); + }) + )} +
+
+ ); +} diff --git a/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.$projectParam.env.$envParam.concurrency/route.tsx b/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.$projectParam.env.$envParam.concurrency/route.tsx new file mode 100644 index 0000000000..5406815314 --- /dev/null +++ b/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.$projectParam.env.$envParam.concurrency/route.tsx @@ -0,0 +1,808 @@ +import { conform, useFieldList, useForm } from "@conform-to/react"; +import { parse } from "@conform-to/zod"; +import { + EnvelopeIcon, + ExclamationTriangleIcon, + InformationCircleIcon, + PlusIcon, +} from "@heroicons/react/20/solid"; +import { DialogClose } from "@radix-ui/react-dialog"; +import { + Form, + useActionData, + useNavigate, + useNavigation, + useSearchParams, + type MetaFunction, +} from "@remix-run/react"; +import { json, type ActionFunctionArgs, type LoaderFunctionArgs } from "@remix-run/server-runtime"; +import { tryCatch } from "@trigger.dev/core"; +import { useEffect, useState } from "react"; +import { typedjson, useTypedLoaderData } from "remix-typedjson"; +import { z } from "zod"; +import { AdminDebugTooltip } from "~/components/admin/debugTooltip"; +import { EnvironmentCombo } from "~/components/environments/EnvironmentLabel"; +import { + MainHorizontallyCenteredContainer, + PageBody, + PageContainer, +} from "~/components/layout/AppLayout"; +import { Button, LinkButton } from "~/components/primitives/Buttons"; +import { Dialog, DialogContent, DialogHeader, DialogTrigger } from "~/components/primitives/Dialog"; +import { Fieldset } from "~/components/primitives/Fieldset"; +import { FormButtons } from "~/components/primitives/FormButtons"; +import { FormError } from "~/components/primitives/FormError"; +import { Header2, Header3 } from "~/components/primitives/Headers"; +import { Input } from "~/components/primitives/Input"; +import { InputGroup } from "~/components/primitives/InputGroup"; +import { InputNumberStepper } from "~/components/primitives/InputNumberStepper"; +import { Label } from "~/components/primitives/Label"; +import { NavBar, PageAccessories, PageTitle } from "~/components/primitives/PageHeader"; +import { Paragraph } from "~/components/primitives/Paragraph"; +import * as Property from "~/components/primitives/PropertyTable"; +import { + Table, + TableBody, + TableCell, + TableHeader, + TableHeaderCell, + TableRow, +} from "~/components/primitives/Table"; +import { InfoIconTooltip } from "~/components/primitives/Tooltip"; +import { useFeatures } from "~/hooks/useFeatures"; +import { useOrganization } from "~/hooks/useOrganizations"; +import { redirectWithErrorMessage, redirectWithSuccessMessage } from "~/models/message.server"; +import { findProjectBySlug } from "~/models/project.server"; +import { + ManageConcurrencyPresenter, + type ConcurrencyResult, + type EnvironmentWithConcurrency, +} from "~/presenters/v3/ManageConcurrencyPresenter.server"; +import { getPlans } from "~/services/platform.v3.server"; +import { requireUserId } from "~/services/session.server"; +import { formatCurrency, formatNumber } from "~/utils/numberFormatter"; +import { concurrencyPath, EnvironmentParamSchema, v3BillingPath } from "~/utils/pathBuilder"; +import { SetConcurrencyAddOnService } from "~/v3/services/setConcurrencyAddOn.server"; +import { useCurrentPlan } from "../_app.orgs.$organizationSlug/route"; +import { SpinnerWhite } from "~/components/primitives/Spinner"; +import { cn } from "~/utils/cn"; +import { logger } from 
"~/services/logger.server"; +import { AllocateConcurrencyService } from "~/v3/services/allocateConcurrency.server"; + +export const meta: MetaFunction = () => { + return [ + { + title: `Manage concurrency | Trigger.dev`, + }, + ]; +}; + +export const loader = async ({ request, params }: LoaderFunctionArgs) => { + const userId = await requireUserId(request); + const { organizationSlug, projectParam, envParam } = EnvironmentParamSchema.parse(params); + + const project = await findProjectBySlug(organizationSlug, projectParam, userId); + if (!project) { + throw new Response(undefined, { + status: 404, + statusText: "Project not found", + }); + } + + const presenter = new ManageConcurrencyPresenter(); + const [error, result] = await tryCatch( + presenter.call({ + userId: userId, + projectId: project.id, + organizationId: project.organizationId, + }) + ); + + if (error) { + throw new Response(undefined, { + status: 400, + statusText: error.message, + }); + } + + const plans = await tryCatch(getPlans()); + if (!plans) { + throw new Response(null, { status: 404, statusText: "Plans not found" }); + } + + return typedjson(result); +}; + +const FormSchema = z.discriminatedUnion("action", [ + z.object({ + action: z.enum(["purchase"]), + amount: z.coerce.number().min(0, "Amount must be 0 or more"), + }), + z.object({ + action: z.enum(["quota-increase"]), + amount: z.coerce.number().min(1, "Amount must be greater than 0"), + }), + z.object({ + action: z.enum(["allocate"]), + // It will only update environments that are passed in + environments: z.array( + z.object({ + id: z.string(), + amount: z.coerce.number().min(0, "Amount must be 0 or more"), + }) + ), + }), +]); + +export const action = async ({ request, params }: ActionFunctionArgs) => { + const userId = await requireUserId(request); + const { organizationSlug, projectParam, envParam } = EnvironmentParamSchema.parse(params); + + const project = await findProjectBySlug(organizationSlug, projectParam, userId); + const redirectPath = concurrencyPath( + { slug: organizationSlug }, + { slug: projectParam }, + { slug: envParam } + ); + + if (!project) { + throw redirectWithErrorMessage(redirectPath, request, "Project not found"); + } + + const formData = await request.formData(); + const submission = parse(formData, { schema: FormSchema }); + + if (!submission.value || submission.intent !== "submit") { + return json(submission); + } + + if (submission.value.action === "allocate") { + const allocate = new AllocateConcurrencyService(); + const [error, result] = await tryCatch( + allocate.call({ + userId, + projectId: project.id, + organizationId: project.organizationId, + environments: submission.value.environments, + }) + ); + + if (error) { + submission.error.environments = [error instanceof Error ? error.message : "Unknown error"]; + return json(submission); + } + + if (!result.success) { + submission.error.environments = [result.error]; + return json(submission); + } + + return redirectWithSuccessMessage( + `${redirectPath}?success=true`, + request, + "Concurrency allocated successfully" + ); + } + + const service = new SetConcurrencyAddOnService(); + const [error, result] = await tryCatch( + service.call({ + userId, + projectId: project.id, + organizationId: project.organizationId, + action: submission.value.action, + amount: submission.value.amount, + }) + ); + + if (error) { + submission.error.amount = [error instanceof Error ? 
error.message : "Unknown error"]; + return json(submission); + } + + if (!result.success) { + submission.error.amount = [result.error]; + return json(submission); + } + + return redirectWithSuccessMessage( + `${redirectPath}?success=true`, + request, + submission.value.action === "purchase" + ? "Concurrency updated successfully" + : "Requested extra concurrency, we'll get back to you soon." + ); +}; + +export default function Page() { + const { + canAddConcurrency, + extraConcurrency, + extraAllocatedConcurrency, + extraUnallocatedConcurrency, + environments, + concurrencyPricing, + maxQuota, + } = useTypedLoaderData(); + + return ( + + + + + + + {environments.map((environment) => ( + + + {environment.type}{" "} + {environment.branchName ? ` (${environment.branchName})` : ""} + + {environment.id} + + ))} + + + + + + + {canAddConcurrency ? ( + + ) : ( + + )} + + + + ); +} + +function initialAllocation(environments: ConcurrencyResult["environments"]) { + return new Map( + environments + .filter((e) => e.type !== "DEVELOPMENT") + .map((e) => [e.id, Math.max(0, e.maximumConcurrencyLimit - e.planConcurrencyLimit)]) + ); +} + +function allocationTotal(environments: ConcurrencyResult["environments"]) { + const allocation = initialAllocation(environments); + return Array.from(allocation.values()).reduce((e, acc) => e + acc, 0); +} + +function Upgradable({ + extraConcurrency, + extraAllocatedConcurrency, + extraUnallocatedConcurrency, + environments, + concurrencyPricing, + maxQuota, +}: ConcurrencyResult) { + const lastSubmission = useActionData(); + const [form, { environments: formEnvironments }] = useForm({ + id: "purchase-concurrency", + // TODO: type this + lastSubmission: lastSubmission as any, + onValidate({ formData }) { + return parse(formData, { schema: FormSchema }); + }, + shouldRevalidate: "onSubmit", + }); + + const navigation = useNavigation(); + const isLoading = navigation.state !== "idle" && navigation.formMethod === "POST"; + + const [allocation, setAllocation] = useState(initialAllocation(environments)); + + const allocatedInProject = Array.from(allocation.values()).reduce((e, acc) => e + acc, 0); + const initialAllocationInProject = allocationTotal(environments); + const changeInAllocation = allocatedInProject - initialAllocationInProject; + const unallocated = extraUnallocatedConcurrency - changeInAllocation; + const allocationModified = changeInAllocation !== 0; + + return ( +
+
+ Manage your concurrency +
+ + Concurrency limits determine how many runs you can execute at the same time. You can add + extra concurrency to your organization, which you can allocate to environments in your + projects. + 
+
+
+ Extra concurrency + +
+ + + + Extra concurrency purchased + + {extraConcurrency} + + + + Allocated concurrency + + {allocationModified ? ( + <> + + {extraAllocatedConcurrency} + {" "} + {extraAllocatedConcurrency + changeInAllocation} + + ) : ( + extraAllocatedConcurrency + )} + + + + Unallocated concurrency + 0 + ? "text-success" + : unallocated < 0 + ? "text-error" + : "text-text-bright" + )} + > + {allocationModified ? ( + <> + + {extraUnallocatedConcurrency} + {" "} + {extraUnallocatedConcurrency - changeInAllocation} + + ) : ( + extraUnallocatedConcurrency + )} + + + + +
+ {allocationModified ? ( + unallocated < 0 ? ( +
+ + + You're trying to allocate more concurrency than your total purchased + amount. + +
+ ) : ( +
+
+ + + Save your changes or{" "} + + . + +
+ +
+ ) + ) : ( + <> + )} +
+
+
+
+
+ {formEnvironments.error} +
+
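The unallocated figure in the summary above is derived entirely on the client from the per-environment inputs. A minimal sketch of that bookkeeping, assuming the environment fields shown in ConcurrencyResult (the helper names here are illustrative, not the exact presenter types):

type EnvRow = {
  id: string;
  type: string;
  maximumConcurrencyLimit: number; // limit currently applied to the environment
  planConcurrencyLimit: number; // limit included with the plan
};

// Extra concurrency already allocated to each non-development environment.
function initialAllocation(environments: EnvRow[]): Map<string, number> {
  return new Map(
    environments
      .filter((e) => e.type !== "DEVELOPMENT")
      .map((e) => [e.id, Math.max(0, e.maximumConcurrencyLimit - e.planConcurrencyLimit)])
  );
}

// How "Unallocated concurrency" is recomputed as the user edits the inputs:
// the change in total allocation is subtracted from the unallocated pool.
function unallocatedAfterEdit(
  environments: EnvRow[],
  edited: Map<string, number>,
  extraUnallocatedConcurrency: number
): number {
  const sum = (m: Map<string, number>) =>
    Array.from(m.values()).reduce((total, n) => total + n, 0);
  const changeInAllocation = sum(edited) - sum(initialAllocation(environments));
  return extraUnallocatedConcurrency - changeInAllocation;
}

A negative result is what swaps the save controls for the over-allocation warning shown above.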
+ +
+ Concurrency allocation +
+ + + + Environment + + + Included{" "} + + + + Extra concurrency + Total + + + + {environments.map((environment, index) => ( + + + + + {environment.planConcurrencyLimit} + +
+ {environment.type === "DEVELOPMENT" ? ( + Math.max( + 0, + environment.maximumConcurrencyLimit - environment.planConcurrencyLimit + ) + ) : ( + <> + + { + const value = e.target.value === "" ? 0 : Number(e.target.value); + setAllocation(new Map(allocation).set(environment.id, value)); + }} + min={0} + /> + + )} +
+
+ + {environment.planConcurrencyLimit + (allocation.get(environment.id) ?? 0)} + +
+ ))} +
+
+
+
+
+ ); +} + +function NotUpgradable({ environments }: { environments: EnvironmentWithConcurrency[] }) { + const { isManagedCloud } = useFeatures(); + const plan = useCurrentPlan(); + const organization = useOrganization(); + + return ( +
+
+ Your concurrency +
+ {isManagedCloud ? ( + <> + + Concurrency limits determine how many runs you can execute at the same time. You can + upgrade your plan to get more concurrency. You are currently on the{" "} + {plan?.v3Subscription?.plan?.title ?? "Free"} plan. + + + Upgrade for more concurrency + + + ) : null} +
+ + + + Environment + Concurrency limit + + + + {environments.map((environment) => ( + + + + + {environment.maximumConcurrencyLimit} + + ))} + +
+
+
+ ); +} + +function PurchaseConcurrencyModal({ + concurrencyPricing, + extraConcurrency, + extraUnallocatedConcurrency, + maxQuota, + disabled, +}: { + concurrencyPricing: { + stepSize: number; + centsPerStep: number; + }; + extraConcurrency: number; + extraUnallocatedConcurrency: number; + maxQuota: number; + disabled: boolean; +}) { + const lastSubmission = useActionData(); + const [form, { amount }] = useForm({ + id: "purchase-concurrency", + // TODO: type this + lastSubmission: lastSubmission as any, + onValidate({ formData }) { + return parse(formData, { schema: FormSchema }); + }, + shouldRevalidate: "onSubmit", + }); + + const [amountValue, setAmountValue] = useState(extraConcurrency); + const navigation = useNavigation(); + const isLoading = navigation.state !== "idle" && navigation.formMethod === "POST"; + + // Close the panel, when we've succeeded + // This is required because a redirect to the same path doesn't clear state + const [searchParams, setSearchParams] = useSearchParams(); + const [open, setOpen] = useState(false); + useEffect(() => { + const success = searchParams.get("success"); + if (success) { + setOpen(false); + setSearchParams((s) => { + s.delete("success"); + return s; + }); + } + }, [searchParams.get("success")]); + + const state = updateState({ + value: amountValue, + existingValue: extraConcurrency, + quota: maxQuota, + extraUnallocatedConcurrency, + }); + const changeClassName = + state === "decrease" ? "text-error" : state === "increase" ? "text-success" : undefined; + + const title = extraConcurrency === 0 ? "Purchase extra concurrency" : "Add/remove concurrency"; + + return ( + + + + + + {title} +
+
+ + You can purchase bundles of {concurrencyPricing.stepSize} concurrency for{" "} + {formatCurrency(concurrencyPricing.centsPerStep / 100, false)}/month. You can also + remove extra concurrency once you have unallocated it from your environments. + 
+ + + setAmountValue(Number(e.target.value))} + disabled={isLoading} + /> + {amount.error} + {form.error} + +
+ {state === "need_to_increase_unallocated" ? ( +
+ + You need to unallocate{" "} + {formatNumber(extraConcurrency - amountValue - extraUnallocatedConcurrency)} more + concurrency from your environments before you can remove{" "} + {formatNumber(extraConcurrency - amountValue)} concurrency from your account. + 
+ ) : state === "above_quota" ? ( +
+ + You can currently have up to {maxQuota} extra concurrency. Send a request + below to lift this limit. We'll get back to you soon. + 
+ ) : ( +
+
+ Summary + Total +
+
+ + {formatNumber(extraConcurrency)}{" "} + current total + + + {formatCurrency( + (extraConcurrency * concurrencyPricing.centsPerStep) / + concurrencyPricing.stepSize / + 100, + true + )} + +
+
+ + ({extraConcurrency / concurrencyPricing.stepSize} bundles) + + /mth +
+
+ + {state === "increase" ? "+" : null} + {formatNumber(amountValue - extraConcurrency)} + + + {state === "increase" ? "+" : null} + {formatCurrency( + ((amountValue - extraConcurrency) * concurrencyPricing.centsPerStep) / + concurrencyPricing.stepSize / + 100, + true + )} + +
+
+ + ({(amountValue - extraConcurrency) / concurrencyPricing.stepSize} bundles @{" "} + {formatCurrency(concurrencyPricing.centsPerStep / 100, true)}/mth) + + /mth +
+
+ + {formatNumber(amountValue)} new total + + + {formatCurrency( + (amountValue * concurrencyPricing.centsPerStep) / + concurrencyPricing.stepSize / + 100, + true + )} + +
+
+ + ({amountValue / concurrencyPricing.stepSize} bundles) + + /mth +
+
+ )} +
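The line items above all use the same per-bundle arithmetic: amounts are multiples of concurrencyPricing.stepSize, and centsPerStep is the monthly price of one bundle in cents. A small sketch of the calculation, with hypothetical pricing values in the example (the helper name is illustrative, not from the codebase):

// Monthly price in dollars for a given amount of extra concurrency,
// assuming `amount` is a whole number of bundles (a multiple of stepSize).
function monthlyPriceDollars(
  amount: number,
  pricing: { stepSize: number; centsPerStep: number }
): number {
  const bundles = amount / pricing.stepSize;
  return (bundles * pricing.centsPerStep) / 100;
}

// Example with hypothetical pricing: bundles of 25 concurrency at 1000 cents each.
// Going from 50 to 100 extra concurrency adds 2 bundles, i.e. $20/month:
// monthlyPriceDollars(100 - 50, { stepSize: 25, centsPerStep: 1000 }) === 20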
+ + + + + ) : state === "decrease" || state === "need_to_increase_unallocated" ? ( + <> + + + + ) : ( + <> + + + + ) + } + cancelButton={ + + + + } + /> + +
+
+ ); +} + +function updateState({ + value, + existingValue, + quota, + extraUnallocatedConcurrency, +}: { + value: number; + existingValue: number; + quota: number; + extraUnallocatedConcurrency: number; +}): "no_change" | "increase" | "decrease" | "above_quota" | "need_to_increase_unallocated" { + if (value === existingValue) return "no_change"; + if (value < existingValue) { + const difference = existingValue - value; + if (difference > extraUnallocatedConcurrency) { + return "need_to_increase_unallocated"; + } + return "decrease"; + } + if (value > quota) return "above_quota"; + return "increase"; +} diff --git a/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.$projectParam.env.$envParam.deployments.$deploymentParam/route.tsx b/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.$projectParam.env.$envParam.deployments.$deploymentParam/route.tsx index 24e651cf73..aebc934ba3 100644 --- a/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.$projectParam.env.$envParam.deployments.$deploymentParam/route.tsx +++ b/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.$projectParam.env.$envParam.deployments.$deploymentParam/route.tsx @@ -1,9 +1,12 @@ import { Link, useLocation } from "@remix-run/react"; import { type LoaderFunctionArgs } from "@remix-run/server-runtime"; import { typedjson, useTypedLoaderData } from "remix-typedjson"; +import { useEffect, useState, useRef, useCallback } from "react"; +import { S2, S2Error } from "@s2-dev/streamstore"; +import { Clipboard, ClipboardCheck, ChevronDown, ChevronUp } from "lucide-react"; import { ExitIcon } from "~/assets/icons/ExitIcon"; import { GitMetadata } from "~/components/GitMetadata"; -import { UserAvatar } from "~/components/UserProfilePhoto"; +import { RuntimeIcon } from "~/components/RuntimeIcon"; import { AdminDebugTooltip } from "~/components/admin/debugTooltip"; import { EnvironmentCombo } from "~/components/environments/EnvironmentLabel"; import { Badge } from "~/components/primitives/Badge"; @@ -22,15 +25,22 @@ import { } from "~/components/primitives/Table"; import { DeploymentError } from "~/components/runs/v3/DeploymentError"; import { DeploymentStatus } from "~/components/runs/v3/DeploymentStatus"; +import { + Tooltip, + TooltipContent, + TooltipProvider, + TooltipTrigger, +} from "~/components/primitives/Tooltip"; import { useEnvironment } from "~/hooks/useEnvironment"; import { useOrganization } from "~/hooks/useOrganizations"; import { useProject } from "~/hooks/useProject"; -import { useUser } from "~/hooks/useUser"; import { DeploymentPresenter } from "~/presenters/v3/DeploymentPresenter.server"; import { requireUserId } from "~/services/session.server"; import { cn } from "~/utils/cn"; import { v3DeploymentParams, v3DeploymentsPath, v3RunsPath } from "~/utils/pathBuilder"; import { capitalizeWord } from "~/utils/string"; +import { UserTag } from "../_app.orgs.$organizationSlug.projects.$projectParam.env.$envParam.deployments/route"; +import { DeploymentEventFromString } from "@trigger.dev/core/v3/schemas"; export const loader = async ({ request, params }: LoaderFunctionArgs) => { const userId = await requireUserId(request); @@ -39,7 +49,7 @@ export const loader = async ({ request, params }: LoaderFunctionArgs) => { try { const presenter = new DeploymentPresenter(); - const { deployment } = await presenter.call({ + const { deployment, eventStream } = await presenter.call({ userId, organizationSlug, projectSlug: projectParam, @@ -47,7 +57,7 @@ export const loader = async ({ request, params 
}: LoaderFunctionArgs) => { deploymentShortCode: deploymentParam, }); - return typedjson({ deployment }); + return typedjson({ deployment, eventStream }); } catch (error) { console.error(error); throw new Response(undefined, { @@ -57,15 +67,122 @@ export const loader = async ({ request, params }: LoaderFunctionArgs) => { } }; +type LogEntry = { + message: string; + timestamp: Date; + level: "info" | "error" | "warn" | "debug"; +}; + export default function Page() { - const { deployment } = useTypedLoaderData(); + const { deployment, eventStream } = useTypedLoaderData(); const organization = useOrganization(); const project = useProject(); const environment = useEnvironment(); const location = useLocation(); - const user = useUser(); const page = new URLSearchParams(location.search).get("page"); + const logsDisabled = eventStream === undefined; + const [logs, setLogs] = useState([]); + const [isStreaming, setIsStreaming] = useState(true); + const [streamError, setStreamError] = useState(null); + const isPending = deployment.status === "PENDING"; + + useEffect(() => { + if (logsDisabled) return; + + const abortController = new AbortController(); + + setLogs([]); + setStreamError(null); + setIsStreaming(true); + + const streamLogs = async () => { + try { + const s2 = new S2({ accessToken: eventStream.s2.accessToken }); + const basin = s2.basin(eventStream.s2.basin); + const stream = basin.stream(eventStream.s2.stream); + + const readSession = await stream.readSession( + { + seq_num: 0, + wait: 60, + as: "bytes", + }, + { signal: abortController.signal } + ); + + const decoder = new TextDecoder(); + + for await (const record of readSession) { + const decoded = decoder.decode(record.body); + const result = DeploymentEventFromString.safeParse(decoded); + + if (!result.success) { + // fallback to the previous format in s2 logs for compatibility + try { + const headers: Record = {}; + + if (record.headers) { + for (const [nameBytes, valueBytes] of record.headers) { + headers[decoder.decode(nameBytes)] = decoder.decode(valueBytes); + } + } + const level = (headers["level"]?.toLowerCase() as LogEntry["level"]) ?? "info"; + + setLogs((prevLogs) => [ + ...prevLogs, + { + timestamp: new Date(record.timestamp), + message: decoded, + level, + }, + ]); + } catch (err) { + console.error("Failed to parse log record:", err); + } + + continue; + } + + const event = result.data; + if (event.type !== "log") { + continue; + } + + setLogs((prevLogs) => [ + ...prevLogs, + { + timestamp: new Date(record.timestamp), + message: event.data.message, + level: event.data.level, + }, + ]); + } + } catch (error) { + if (abortController.signal.aborted) return; + + const isNotFoundError = + error instanceof S2Error && + error.code && + ["permission_denied", "stream_not_found"].includes(error.code); + if (isNotFoundError) return; + + console.error("Failed to stream logs:", error); + setStreamError("Failed to stream logs"); + } finally { + if (!abortController.signal.aborted) { + setIsStreaming(false); + } + } + }; + + streamLogs(); + + return () => { + abortController.abort(); + }; + }, [eventStream?.s2?.basin, eventStream?.s2?.stream, eventStream?.s2?.accessToken, isPending]); + return (
@@ -130,7 +247,11 @@ export default function Page() { Deploy {deployment.shortCode} - {deployment.label && {deployment.label}} + {deployment.label && ( + + {deployment.label} + + )} @@ -153,6 +274,35 @@ export default function Page() { /> + {!logsDisabled && ( + + Logs + + + )} + {deployment.canceledAt && ( + + Canceled at + + <> + UTC + + + + )} + {deployment.canceledReason && ( + + Cancelation reason + {deployment.canceledReason} + + )} Tasks {deployment.tasks ? deployment.tasks.length : "โ€“"} @@ -169,6 +319,16 @@ export default function Page() { {deployment.cliVersion ? deployment.cliVersion : "โ€“"} + + Runtime + + + + Worker type {capitalizeWord(deployment.type)} @@ -176,7 +336,25 @@ export default function Page() { Started at - UTC + {deployment.startedAt ? ( + <> + UTC + + ) : ( + "โ€“" + )} + + + + Installed at + + {deployment.installedAt ? ( + <> + UTC + + ) : ( + "โ€“" + )} @@ -215,17 +393,16 @@ export default function Page() { Deployed by - {deployment.deployedBy ? ( -
- - - {deployment.deployedBy.name ?? deployment.deployedBy.displayName} - -
+ {deployment.git?.source === "trigger_github_app" ? ( + + ) : deployment.deployedBy ? ( + ) : ( "โ€“" )} @@ -272,3 +449,189 @@ export default function Page() {
); } + +function LogsDisplay({ + logs, + isStreaming, + streamError, + initialCollapsed = false, +}: { + logs: LogEntry[]; + isStreaming: boolean; + streamError: string | null; + initialCollapsed?: boolean; +}) { + const [copied, setCopied] = useState(false); + const [mouseOver, setMouseOver] = useState(false); + const [collapsed, setCollapsed] = useState(initialCollapsed); + const logsContainerRef = useRef(null); + + useEffect(() => { + setCollapsed(initialCollapsed); + }, [initialCollapsed]); + + // auto-scroll log container to bottom when new logs arrive + useEffect(() => { + if (logsContainerRef.current) { + logsContainerRef.current.scrollTop = logsContainerRef.current.scrollHeight; + } + }, [logs]); + + const onCopyLogs = useCallback( + (event: React.MouseEvent) => { + event.preventDefault(); + event.stopPropagation(); + const logsText = logs.map((log) => log.message).join("\n"); + navigator.clipboard.writeText(logsText); + setCopied(true); + setTimeout(() => { + setCopied(false); + }, 1500); + }, + [logs] + ); + + const errorCount = logs.filter((log) => log.level === "error").length; + const warningCount = logs.filter((log) => log.level === "warn").length; + + return ( +
+
+
+
+
0 ? "bg-error/80" : "bg-charcoal-600" + )} + /> + + {`${errorCount} ${errorCount === 1 ? "error" : "errors"}`} + +
+
+
0 ? "bg-warning/80" : "bg-charcoal-600" + )} + /> + + {`${warningCount} ${warningCount === 1 ? "warning" : "warnings"}`} + +
+
+ {logs.length > 0 && ( +
+ + + setMouseOver(true)} + onMouseLeave={() => setMouseOver(false)} + className={cn( + "transition-colors duration-100 focus-custom hover:cursor-pointer", + copied ? "text-success" : "text-text-dimmed hover:text-text-bright" + )} + > +
+ {copied ? ( + + ) : ( + + )} +
+
+ + {copied ? "Copied" : "Copy"} + +
+
+ + + + setCollapsed(!collapsed)} + className={cn( + "transition-colors duration-100 focus-custom hover:cursor-pointer", + "text-text-dimmed hover:text-text-bright" + )} + > + {collapsed ? ( + + ) : ( + + )} + + + {collapsed ? "Expand" : "Collapse"} + + + +
+ )} +
+ +
+
+
+ {logs.length === 0 && ( +
+ {streamError ? ( + Failed fetching logs + ) : ( + + {isStreaming ? "Waiting for logs..." : "No logs yet"} + + )} +
+ )} + {logs.map((log, index) => { + return ( +
+ + + + + {log.message} + +
+ ); + })} +
+
+ {collapsed && ( +
+ )} +
+
+ ); +} diff --git a/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.$projectParam.env.$envParam.deployments/route.tsx b/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.$projectParam.env.$envParam.deployments/route.tsx index 701e6843f9..9b2b78f98b 100644 --- a/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.$projectParam.env.$envParam.deployments/route.tsx +++ b/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.$projectParam.env.$envParam.deployments/route.tsx @@ -1,16 +1,42 @@ -import { ArrowPathIcon, ArrowUturnLeftIcon, BookOpenIcon } from "@heroicons/react/20/solid"; -import { type MetaFunction, Outlet, useLocation, useParams, useNavigate } from "@remix-run/react"; +import { + ArrowPathIcon, + ArrowUturnLeftIcon, + BookOpenIcon, + NoSymbolIcon, +} from "@heroicons/react/20/solid"; +import { + Form, + type MetaFunction, + Outlet, + useLocation, + useNavigate, + useNavigation, + useParams, +} from "@remix-run/react"; import { type LoaderFunctionArgs } from "@remix-run/server-runtime"; +import { CogIcon, GitBranchIcon } from "lucide-react"; +import { useEffect } from "react"; import { typedjson, useTypedLoaderData } from "remix-typedjson"; import { z } from "zod"; import { PromoteIcon } from "~/assets/icons/PromoteIcon"; import { DeploymentsNone, DeploymentsNoneDev } from "~/components/BlankStatePanels"; +import { OctoKitty } from "~/components/GitHubLoginButton"; +import { GitMetadata } from "~/components/GitMetadata"; +import { RuntimeIcon } from "~/components/RuntimeIcon"; import { UserAvatar } from "~/components/UserProfilePhoto"; import { MainCenteredContainer, PageBody, PageContainer } from "~/components/layout/AppLayout"; import { Badge } from "~/components/primitives/Badge"; import { Button, LinkButton } from "~/components/primitives/Buttons"; import { DateTime } from "~/components/primitives/DateTime"; -import { Dialog, DialogTrigger } from "~/components/primitives/Dialog"; +import { SpinnerWhite } from "~/components/primitives/Spinner"; +import { + Dialog, + DialogDescription, + DialogContent, + DialogHeader, + DialogTrigger, + DialogFooter, +} from "~/components/primitives/Dialog"; import { NavBar, PageAccessories, PageTitle } from "~/components/primitives/PageHeader"; import { PaginationControls } from "~/components/primitives/Pagination"; import { Paragraph } from "~/components/primitives/Paragraph"; @@ -34,11 +60,6 @@ import { deploymentStatusDescription, deploymentStatuses, } from "~/components/runs/v3/DeploymentStatus"; -import { RetryDeploymentIndexingDialog } from "~/components/runs/v3/RetryDeploymentIndexingDialog"; -import { - PromoteDeploymentDialog, - RollbackDeploymentDialog, -} from "~/components/runs/v3/RollbackDeploymentDialog"; import { useEnvironment } from "~/hooks/useEnvironment"; import { useOrganization } from "~/hooks/useOrganizations"; import { useProject } from "~/hooks/useProject"; @@ -48,12 +69,18 @@ import { } from "~/presenters/v3/DeploymentListPresenter.server"; import { requireUserId } from "~/services/session.server"; import { titleCase } from "~/utils"; -import { EnvironmentParamSchema, docsPath, v3DeploymentPath } from "~/utils/pathBuilder"; +import { cn } from "~/utils/cn"; +import { + EnvironmentParamSchema, + docsPath, + v3DeploymentPath, + v3ProjectSettingsPath, +} from "~/utils/pathBuilder"; import { createSearchParams } from "~/utils/searchParams"; -import { deploymentIndexingIsRetryable } from "~/v3/deploymentStatus"; import { compareDeploymentVersions } from "~/v3/utils/deploymentVersions"; 
-import { useEffect } from "react"; -import { GitMetadata } from "~/components/GitMetadata"; +import { useAutoRevalidate } from "~/hooks/useAutoRevalidate"; +import { env } from "~/env.server"; +import { DialogClose } from "@radix-ui/react-dialog"; export const meta: MetaFunction = () => { return [ @@ -109,7 +136,9 @@ export const loader = async ({ request, params }: LoaderFunctionArgs) => { ? result.deployments.find((d) => d.version === version) : undefined; - return typedjson({ ...result, selectedDeployment }); + const autoReloadPollIntervalMs = env.DEPLOYMENTS_AUTORELOAD_POLL_INTERVAL_MS; + + return typedjson({ ...result, selectedDeployment, autoReloadPollIntervalMs }); } catch (error) { console.error(error); throw new Response(undefined, { @@ -123,14 +152,23 @@ export default function Page() { const organization = useOrganization(); const project = useProject(); const environment = useEnvironment(); - const { deployments, currentPage, totalPages, selectedDeployment } = - useTypedLoaderData(); + const { + deployments, + currentPage, + totalPages, + selectedDeployment, + connectedGithubRepository, + environmentGitHubBranch, + autoReloadPollIntervalMs, + } = useTypedLoaderData(); const hasDeployments = totalPages > 0; const { deploymentParam } = useParams(); const location = useLocation(); const navigate = useNavigate(); + useAutoRevalidate({ interval: autoReloadPollIntervalMs, onFocus: true }); + // If we have a selected deployment from the version param, show it useEffect(() => { if (selectedDeployment && !deploymentParam) { @@ -161,8 +199,8 @@ export default function Page() { {hasDeployments ? ( -
- +
+
Deploy @@ -191,6 +229,7 @@ export default function Page() { > Status + Runtime Tasks Deployed at Deployed by @@ -228,6 +267,12 @@ export default function Page() { isBuilt={deployment.isBuilt} /> + + + {deployment.tasksCount !== null ? deployment.tasksCount : "โ€“"} @@ -239,21 +284,20 @@ export default function Page() { )} - {deployment.deployedBy ? ( -
- - - {deployment.deployedBy.name ?? - deployment.deployedBy.displayName} - -
+ {deployment.git?.source === "trigger_github_app" ? ( + + ) : deployment.deployedBy ? ( + ) : ( "โ€“" )} @@ -273,7 +317,7 @@ export default function Page() { ); }) ) : ( - + No deploys match your filters @@ -281,18 +325,45 @@ export default function Page() { )}
- {totalPages > 1 && ( -
- -
- )} +
+ {connectedGithubRepository && environmentGitHubBranch && ( +
+ + Automatically triggered by pushes to{" "} +
+ + {environmentGitHubBranch} +
{" "} + in + + {connectedGithubRepository.repository.fullName} + + +
+ )} + +
) : environment.type === "DEVELOPMENT" ? ( - + ) : ( - + )} @@ -301,7 +372,7 @@ export default function Page() { {deploymentParam && ( <> - + @@ -312,6 +383,15 @@ export default function Page() { ); } +export function UserTag({ name, avatarUrl }: { name: string; avatarUrl?: string }) { + return ( +
+ + {name} +
+ ); +} + function DeploymentActionsCell({ deployment, path, @@ -327,14 +407,16 @@ function DeploymentActionsCell({ const project = useProject(); const canBeMadeCurrent = !deployment.isCurrent && deployment.isDeployed; - const canRetryIndexing = deployment.isLatest && deploymentIndexingIsRetryable(deployment); const canBeRolledBack = canBeMadeCurrent && currentDeployment?.version && compareDeploymentVersions(deployment.version, currentDeployment.version) === -1; const canBePromoted = canBeMadeCurrent && !canBeRolledBack; - if (!canBeMadeCurrent && !canRetryIndexing) { + const finalStatuses = ["CANCELED", "DEPLOYED", "FAILED", "TIMED_OUT"]; + const canBeCanceled = !finalStatuses.includes(deployment.status); + + if (!canBeRolledBack && !canBePromoted && !canBeCanceled) { return ( {""} @@ -358,7 +440,7 @@ function DeploymentActionsCell({ fullWidth textAlignLeft > - Rollbackโ€ฆ + Rollback - Promoteโ€ฆ + Promote )} - {canRetryIndexing && ( + {canBeCanceled && ( - ); } + +type RollbackDeploymentDialogProps = { + projectId: string; + deploymentShortCode: string; + redirectPath: string; +}; + +function RollbackDeploymentDialog({ + projectId, + deploymentShortCode, + redirectPath, +}: RollbackDeploymentDialogProps) { + const navigation = useNavigation(); + + const formAction = `/resources/${projectId}/deployments/${deploymentShortCode}/rollback`; + const isLoading = navigation.formAction === formAction; + + return ( + + Rollback to this deployment? + + This deployment will become the default for all future runs. Tasks triggered but not + included in this deploy will remain queued until you roll back to or create a new deployment + with these tasks included. + + + + + +
+ +
+
+
+ ); +} + +function PromoteDeploymentDialog({ + projectId, + deploymentShortCode, + redirectPath, +}: RollbackDeploymentDialogProps) { + const navigation = useNavigation(); + + const formAction = `/resources/${projectId}/deployments/${deploymentShortCode}/promote`; + const isLoading = navigation.formAction === formAction; + + return ( + + Promote this deployment? + + This deployment will become the default for all future runs not explicitly tied to a + specific deployment. + + + + + +
+ +
+
+
+ ); +} + +function CancelDeploymentDialog({ + projectId, + deploymentShortCode, + redirectPath, +}: RollbackDeploymentDialogProps) { + const navigation = useNavigation(); + + const formAction = `/resources/${projectId}/deployments/${deploymentShortCode}/cancel`; + const isLoading = navigation.formAction === formAction; + + return ( + + Cancel this deployment? + Canceling a deployment cannot be undone. Are you sure? + + + + +
+ +
+
+
+ ); +} diff --git a/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.$projectParam.env.$envParam.next.runs._index/route.tsx b/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.$projectParam.env.$envParam.next.runs._index/route.tsx deleted file mode 100644 index e73b1c883e..0000000000 --- a/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.$projectParam.env.$envParam.next.runs._index/route.tsx +++ /dev/null @@ -1,511 +0,0 @@ -import { ArrowPathIcon, StopCircleIcon } from "@heroicons/react/20/solid"; -import { BeakerIcon, BookOpenIcon } from "@heroicons/react/24/solid"; -import { Form, type MetaFunction, useNavigation } from "@remix-run/react"; -import { type LoaderFunctionArgs } from "@remix-run/server-runtime"; -import { IconCircleX } from "@tabler/icons-react"; -import { AnimatePresence, motion } from "framer-motion"; -import { ListChecks, ListX } from "lucide-react"; -import { Suspense, useState } from "react"; -import { TypedAwait, typeddefer, useTypedLoaderData } from "remix-typedjson"; -import { TaskIcon } from "~/assets/icons/TaskIcon"; -import { DevDisconnectedBanner, useDevPresence } from "~/components/DevPresence"; -import { StepContentContainer } from "~/components/StepContentContainer"; -import { MainCenteredContainer, PageBody } from "~/components/layout/AppLayout"; -import { Button, LinkButton } from "~/components/primitives/Buttons"; -import { - Dialog, - DialogContent, - DialogDescription, - DialogFooter, - DialogHeader, - DialogTrigger, -} from "~/components/primitives/Dialog"; -import { Header1, Header2 } from "~/components/primitives/Headers"; -import { InfoPanel } from "~/components/primitives/InfoPanel"; -import { NavBar, PageAccessories, PageTitle } from "~/components/primitives/PageHeader"; -import { Paragraph } from "~/components/primitives/Paragraph"; -import { - SelectedItemsProvider, - useSelectedItems, -} from "~/components/primitives/SelectedItemsProvider"; -import { Spinner, SpinnerWhite } from "~/components/primitives/Spinner"; -import { StepNumber } from "~/components/primitives/StepNumber"; -import { TextLink } from "~/components/primitives/TextLink"; -import { RunsFilters, TaskRunListSearchFilters } from "~/components/runs/v3/RunFilters"; -import { TaskRunsTable } from "~/components/runs/v3/TaskRunsTable"; -import { BULK_ACTION_RUN_LIMIT } from "~/consts"; -import { $replica } from "~/db.server"; -import { useEnvironment } from "~/hooks/useEnvironment"; -import { useOrganization } from "~/hooks/useOrganizations"; -import { useProject } from "~/hooks/useProject"; -import { findProjectBySlug } from "~/models/project.server"; -import { findEnvironmentBySlug } from "~/models/runtimeEnvironment.server"; -import { NextRunListPresenter } from "~/presenters/v3/NextRunListPresenter.server"; -import { clickhouseClient } from "~/services/clickhouseInstance.server"; -import { - getRootOnlyFilterPreference, - setRootOnlyFilterPreference, - uiPreferencesStorage, -} from "~/services/preferences/uiPreferences.server"; -import { requireUserId } from "~/services/session.server"; -import { cn } from "~/utils/cn"; -import { - docsPath, - EnvironmentParamSchema, - v3ProjectPath, - v3RunsNextPath, - v3TestPath, -} from "~/utils/pathBuilder"; -import { ListPagination } from "../../components/ListPagination"; - -export const meta: MetaFunction = () => { - return [ - { - title: `Runs | Trigger.dev`, - }, - ]; -}; - -export const loader = async ({ request, params }: LoaderFunctionArgs) => { - const userId = await requireUserId(request); - const { 
projectParam, organizationSlug, envParam } = EnvironmentParamSchema.parse(params); - - const url = new URL(request.url); - - let rootOnlyValue = false; - if (url.searchParams.has("rootOnly")) { - rootOnlyValue = url.searchParams.get("rootOnly") === "true"; - } else { - rootOnlyValue = await getRootOnlyFilterPreference(request); - } - - const project = await findProjectBySlug(organizationSlug, projectParam, userId); - if (!project) { - throw new Error("Project not found"); - } - - const environment = await findEnvironmentBySlug(project.id, envParam, userId); - if (!environment) { - throw new Error("Environment not found"); - } - - const s = { - cursor: url.searchParams.get("cursor") ?? undefined, - direction: url.searchParams.get("direction") ?? undefined, - statuses: url.searchParams.getAll("statuses"), - environments: [environment.id], - tasks: url.searchParams.getAll("tasks"), - period: url.searchParams.get("period") ?? undefined, - bulkId: url.searchParams.get("bulkId") ?? undefined, - tags: url.searchParams.getAll("tags").map((t) => decodeURIComponent(t)), - from: url.searchParams.get("from") ?? undefined, - to: url.searchParams.get("to") ?? undefined, - rootOnly: rootOnlyValue, - runId: url.searchParams.get("runId") ?? undefined, - batchId: url.searchParams.get("batchId") ?? undefined, - scheduleId: url.searchParams.get("scheduleId") ?? undefined, - }; - const { - tasks, - versions, - statuses, - environments, - tags, - period, - bulkId, - from, - to, - cursor, - direction, - rootOnly, - runId, - batchId, - scheduleId, - } = TaskRunListSearchFilters.parse(s); - - if (!clickhouseClient) { - throw new Error("Clickhouse is not supported yet"); - } - - const presenter = new NextRunListPresenter($replica, clickhouseClient); - const list = presenter.call(project.organizationId, environment.id, { - userId, - projectId: project.id, - tasks, - versions, - statuses, - tags, - period, - bulkId, - from, - to, - batchId, - runIds: runId ? [runId] : undefined, - scheduleId, - rootOnly, - direction: direction, - cursor: cursor, - }); - - const session = await setRootOnlyFilterPreference(rootOnlyValue, request); - const cookieValue = await uiPreferencesStorage.commitSession(session); - - return typeddefer( - { - data: list, - rootOnlyDefault: rootOnlyValue, - }, - { - headers: { - "Set-Cookie": cookieValue, - }, - } - ); -}; - -export default function Page() { - const { data, rootOnlyDefault } = useTypedLoaderData(); - const navigation = useNavigation(); - const isLoading = navigation.state !== "idle"; - const { isConnected } = useDevPresence(); - const project = useProject(); - const environment = useEnvironment(); - - return ( - <> - - - {environment.type === "DEVELOPMENT" && project.engine === "V2" && ( - - )} - - - Runs docs - - - - - - {({ selectedItems }) => ( -
- -
- - Loading runs -
-
- } - > - - {(list) => ( - <> - {list.runs.length === 0 && !list.hasAnyRuns ? ( - list.possibleTasks.length === 0 ? ( - - ) : ( - - ) - ) : ( -
-
- -
- -
-
- - -
- )} - - )} -
- - -
- )} - - - - ); -} - -function BulkActionBar() { - const { selectedItems, deselectAll } = useSelectedItems(); - const [barState, setBarState] = useState<"none" | "replay" | "cancel">("none"); - - const hasSelectedMaximum = selectedItems.size >= BULK_ACTION_RUN_LIMIT; - - return ( - - {selectedItems.size > 0 && ( - -
- - Bulk actions: - {hasSelectedMaximum ? ( - - Maximum of {selectedItems.size} runs selected - - ) : ( - {selectedItems.size} runs selected - )} -
-
- { - if (o) { - setBarState("cancel"); - } else { - setBarState("none"); - } - }} - /> - { - if (o) { - setBarState("replay"); - } else { - setBarState("none"); - } - }} - /> - -
-
- )} -
- ); -} - -function CancelRuns({ onOpen }: { onOpen: (open: boolean) => void }) { - const { selectedItems } = useSelectedItems(); - - const organization = useOrganization(); - const project = useProject(); - const environment = useEnvironment(); - const failedRedirect = v3RunsNextPath(organization, project, environment); - - const formAction = `/resources/taskruns/bulk/cancel`; - - const navigation = useNavigation(); - const isLoading = navigation.formAction === formAction; - - return ( - onOpen(o)}> - - - - - Cancel {selectedItems.size} runs? - - Canceling these runs will stop them from running. Only runs that are not already finished - will be canceled, the others will remain in their existing state. - - -
- - - - - {[...selectedItems].map((runId) => ( - - ))} - -
-
-
-
- ); -} - -function ReplayRuns({ onOpen }: { onOpen: (open: boolean) => void }) { - const { selectedItems } = useSelectedItems(); - - const organization = useOrganization(); - const project = useProject(); - const environment = useEnvironment(); - const failedRedirect = v3RunsNextPath(organization, project, environment); - - const formAction = `/resources/taskruns/bulk/replay`; - - const navigation = useNavigation(); - const isLoading = navigation.formAction === formAction; - - return ( - onOpen(o)}> - - - - - Replay runs? - - Replaying these runs will create a new run for each with the same payload and environment - as the original. It will use the latest version of the code for each task. - - -
- - - - - {[...selectedItems].map((runId) => ( - - ))} - -
-
-
-
- ); -} - -function CreateFirstTaskInstructions() { - const organization = useOrganization(); - const project = useProject(); - return ( - - - Create a task - - } - > - - Before running a task, you must first create one. Follow the instructions on the{" "} - Tasks page to create a - task, then return here to run it. - - - - ); -} - -function RunTaskInstructions() { - const organization = useOrganization(); - const project = useProject(); - const environment = useEnvironment(); - return ( - - How to run your tasks - - - - Perform a test run with a payload directly from the dashboard. - - - Test - -
-
- OR -
-
-
- - - - - Performing a real run depends on the type of trigger your task is using. - - - How to trigger a task - - -
- ); -} diff --git a/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.$projectParam.env.$envParam.next.runs/route.tsx b/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.$projectParam.env.$envParam.next.runs/route.tsx deleted file mode 100644 index f6723ddeba..0000000000 --- a/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.$projectParam.env.$envParam.next.runs/route.tsx +++ /dev/null @@ -1,10 +0,0 @@ -import { Outlet } from "@remix-run/react"; -import { PageContainer } from "~/components/layout/AppLayout"; - -export default function Page() { - return ( - - - - ); -} diff --git a/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.$projectParam.env.$envParam.queues/route.tsx b/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.$projectParam.env.$envParam.queues/route.tsx index 8ab4b24ba4..0c069f31e6 100644 --- a/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.$projectParam.env.$envParam.queues/route.tsx +++ b/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.$projectParam.env.$envParam.queues/route.tsx @@ -1,4 +1,5 @@ import { + AdjustmentsHorizontalIcon, ArrowUpCircleIcon, BookOpenIcon, ChatBubbleLeftEllipsisIcon, @@ -8,19 +9,14 @@ import { RectangleStackIcon, } from "@heroicons/react/20/solid"; import { DialogClose } from "@radix-ui/react-dialog"; -import { - Form, - useNavigate, - useNavigation, - useRevalidator, - useSearchParams, - type MetaFunction, -} from "@remix-run/react"; +import { Form, useNavigation, useSearchParams, type MetaFunction } from "@remix-run/react"; import { type ActionFunctionArgs, type LoaderFunctionArgs } from "@remix-run/server-runtime"; -import { type RuntimeEnvironmentType } from "@trigger.dev/database"; +import type { RuntimeEnvironmentType } from "@trigger.dev/database"; +import type { QueueItem } from "@trigger.dev/core/v3/schemas"; import { useEffect, useState } from "react"; import { typedjson, useTypedLoaderData } from "remix-typedjson"; import { z } from "zod"; +import { RunsIcon } from "~/assets/icons/RunsIcon"; import { TaskIconSmall } from "~/assets/icons/TaskIcon"; import upgradeForQueuesPath from "~/assets/images/queues-dashboard.png"; import { AdminDebugTooltip } from "~/components/admin/debugTooltip"; @@ -30,13 +26,16 @@ import { Feedback } from "~/components/Feedback"; import { PageBody, PageContainer } from "~/components/layout/AppLayout"; import { BigNumber } from "~/components/metrics/BigNumber"; import { Badge } from "~/components/primitives/Badge"; -import { Button, LinkButton } from "~/components/primitives/Buttons"; +import { Button, LinkButton, type ButtonVariant } from "~/components/primitives/Buttons"; import { Callout } from "~/components/primitives/Callout"; import { Dialog, DialogContent, DialogHeader, DialogTrigger } from "~/components/primitives/Dialog"; import { FormButtons } from "~/components/primitives/FormButtons"; +import { Header3 } from "~/components/primitives/Headers"; +import { Input } from "~/components/primitives/Input"; import { NavBar, PageAccessories, PageTitle } from "~/components/primitives/PageHeader"; import { PaginationControls } from "~/components/primitives/Pagination"; import { Paragraph } from "~/components/primitives/Paragraph"; +import { PopoverMenuItem } from "~/components/primitives/Popover"; import { Spinner } from "~/components/primitives/Spinner"; import { Table, @@ -48,30 +47,39 @@ import { TableRow, } from "~/components/primitives/Table"; import { + InfoIconTooltip, SimpleTooltip, Tooltip, TooltipContent, TooltipProvider, 
TooltipTrigger, } from "~/components/primitives/Tooltip"; +import { env } from "~/env.server"; +import { useAutoRevalidate } from "~/hooks/useAutoRevalidate"; import { useEnvironment } from "~/hooks/useEnvironment"; -import { useEventSource } from "~/hooks/useEventSource"; import { useOrganization } from "~/hooks/useOrganizations"; import { useProject } from "~/hooks/useProject"; +import { useThrottle } from "~/hooks/useThrottle"; import { redirectWithErrorMessage, redirectWithSuccessMessage } from "~/models/message.server"; import { findProjectBySlug } from "~/models/project.server"; import { findEnvironmentBySlug } from "~/models/runtimeEnvironment.server"; +import { getUserById } from "~/models/user.server"; import { EnvironmentQueuePresenter } from "~/presenters/v3/EnvironmentQueuePresenter.server"; import { QueueListPresenter } from "~/presenters/v3/QueueListPresenter.server"; import { requireUserId } from "~/services/session.server"; import { cn } from "~/utils/cn"; -import { docsPath, EnvironmentParamSchema, v3BillingPath } from "~/utils/pathBuilder"; +import { + concurrencyPath, + docsPath, + EnvironmentParamSchema, + v3BillingPath, + v3RunsPath, +} from "~/utils/pathBuilder"; +import { concurrencySystem } from "~/v3/services/concurrencySystemInstance.server"; import { PauseEnvironmentService } from "~/v3/services/pauseEnvironment.server"; import { PauseQueueService } from "~/v3/services/pauseQueue.server"; import { useCurrentPlan } from "../_app.orgs.$organizationSlug/route"; -import { Header3 } from "~/components/primitives/Headers"; -import { Input } from "~/components/primitives/Input"; -import { useThrottle } from "~/hooks/useThrottle"; +import { ConcurrencyIcon } from "~/assets/icons/ConcurrencyIcon"; const SearchParamsSchema = z.object({ query: z.string().optional(), @@ -119,9 +127,12 @@ export const loader = async ({ request, params }: LoaderFunctionArgs) => { const environmentQueuePresenter = new EnvironmentQueuePresenter(); + const autoReloadPollIntervalMs = env.QUEUES_AUTORELOAD_POLL_INTERVAL_MS; + return typedjson({ ...queues, environment: await environmentQueuePresenter.call(environment), + autoReloadPollIntervalMs, }); } catch (error) { console.error(error); @@ -209,34 +220,108 @@ export const action = async ({ request, params }: ActionFunctionArgs) => { `Queue ${action === "queue-pause" ? 
"paused" : "resumed"}` ); } + case "queue-override": { + const friendlyId = formData.get("friendlyId"); + const concurrencyLimit = formData.get("concurrencyLimit"); + + if (!friendlyId) { + return redirectWithErrorMessage(redirectPath, request, "Queue ID is required"); + } + + if (!concurrencyLimit) { + return redirectWithErrorMessage(redirectPath, request, "Concurrency limit is required"); + } + + const limitNumber = parseInt(concurrencyLimit.toString(), 10); + if (isNaN(limitNumber) || limitNumber < 0) { + return redirectWithErrorMessage( + redirectPath, + request, + "Concurrency limit must be a valid number" + ); + } + + const user = await getUserById(userId); + if (!user) { + return redirectWithErrorMessage(redirectPath, request, "User not found"); + } + + const result = await concurrencySystem.queues.overrideQueueConcurrencyLimit( + environment, + friendlyId.toString(), + limitNumber, + user + ); + + if (!result.isOk()) { + return redirectWithErrorMessage( + redirectPath, + request, + "Failed to override queue concurrency limit" + ); + } + + return redirectWithSuccessMessage( + redirectPath, + request, + "Queue concurrency limit overridden" + ); + } + case "queue-remove-override": { + const friendlyId = formData.get("friendlyId"); + + if (!friendlyId) { + return redirectWithErrorMessage(redirectPath, request, "Queue ID is required"); + } + + const result = await concurrencySystem.queues.resetConcurrencyLimit( + environment, + friendlyId.toString() + ); + + if (!result.isOk()) { + return redirectWithErrorMessage( + redirectPath, + request, + "Failed to reset queue concurrency limit" + ); + } + + return redirectWithSuccessMessage(redirectPath, request, "Queue concurrency limit reset"); + } default: return redirectWithErrorMessage(redirectPath, request, "Something went wrong"); } }; export default function Page() { - const { environment, queues, success, pagination, code, totalQueues, hasFilters } = - useTypedLoaderData(); + const { + environment, + queues, + success, + pagination, + code, + totalQueues, + hasFilters, + autoReloadPollIntervalMs, + } = useTypedLoaderData(); const organization = useOrganization(); const project = useProject(); const env = useEnvironment(); const plan = useCurrentPlan(); - // Reload the page periodically - const streamedEvents = useEventSource( - `/resources/orgs/${organization.slug}/projects/${project.slug}/env/${env.slug}/queues/stream`, - { - event: "update", - } - ); + useAutoRevalidate({ interval: autoReloadPollIntervalMs, onFocus: true }); - const revalidation = useRevalidator(); - useEffect(() => { - if (streamedEvents) { - revalidation.revalidate(); - } - }, [streamedEvents]); + const limitStatus = + environment.running === environment.concurrencyLimit * environment.burstFactor + ? "limit" + : environment.running > environment.concurrencyLimit + ? "burst" + : "within"; + + const limitClassName = + limitStatus === "burst" ? "text-warning" : limitStatus === "limit" ? "text-error" : undefined; return ( @@ -261,7 +346,23 @@ export default function Page() { value={environment.queued} suffix={env.paused && environment.queued > 0 ? "paused" : undefined} animate - accessory={} + accessory={ +
+ {environment.runsEnabled ? : null} + +
+ } valueClassName={env.paused ? "text-warning" : undefined} compactThreshold={1000000} /> @@ -269,13 +370,30 @@ export default function Page() { title="Running" value={environment.running} animate - valueClassName={ - environment.running === environment.concurrencyLimit ? "text-warning" : undefined - } + valueClassName={limitClassName} suffix={ - environment.running === environment.concurrencyLimit - ? "At concurrency limit" - : undefined + limitStatus === "burst" ? ( + + Including {environment.running - environment.concurrencyLimit} burst runs{" "} + + + ) : limitStatus === "limit" ? ( + "At concurrency limit" + ) : undefined + } + accessory={ + } compactThreshold={1000000} /> @@ -283,24 +401,26 @@ export default function Page() { title="Concurrency limit" value={environment.concurrencyLimit} animate - valueClassName={ - environment.running === environment.concurrencyLimit ? "text-warning" : undefined + valueClassName={limitClassName} + suffix={ + environment.burstFactor > 1 ? ( + + Burst limit {environment.burstFactor * environment.concurrencyLimit}{" "} + + + ) : undefined } accessory={ plan ? ( plan?.v3Subscription?.plan?.limits.concurrentRuns.canExceed ? ( - - Increase limitโ€ฆ - - } - defaultValue="concurrency" - /> + + Increase limit + ) : ( 1 && "grid-rows-[auto_1fr_auto]" )} > - +
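The warning and error colors on the Running and Concurrency limit cards above come from a single limitStatus value that takes the environment's burst factor into account. A minimal sketch of that classification (helper name and example numbers are illustrative):

type LimitStatus = "within" | "burst" | "limit";

// Mirrors the checks used for the metric cards: at the burst ceiling -> "limit",
// above the base limit but below the ceiling -> "burst", otherwise -> "within".
function limitStatusFor(env: {
  running: number;
  concurrencyLimit: number;
  burstFactor: number;
}): LimitStatus {
  const burstCeiling = env.concurrencyLimit * env.burstFactor;
  if (env.running === burstCeiling) return "limit";
  if (env.running > env.concurrencyLimit) return "burst";
  return "within";
}

// Example: limit 10, burst factor 2 -> ceiling 20.
// running 8 -> "within", running 15 -> "burst", running 20 -> "limit".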
+ + +
Name Queued - Running/limit + Running + Limit +
+ Override + + This queue's concurrency limit has been manually overridden from the + dashboard or API. + +
} > Limited by
- - - When a task executing on this queue is paused and waiting for a - waitpoint to complete, the queue will release the concurrency being used - by the run so other runs can be started. - - - Read docs - - - } - > - Release on waitpoint - Pause/resume @@ -395,7 +508,10 @@ export default function Page() { {queues.length > 0 ? ( queues.map((queue) => { const limit = queue.concurrencyLimit ?? environment.concurrencyLimit; - const isAtLimit = queue.running === limit; + const isAtLimit = queue.running >= limit; + const queueFilterableName = `${queue.type === "task" ? "task/" : ""}${ + queue.name + }`; return ( @@ -428,6 +544,18 @@ export default function Page() { {queue.name} + {queue.concurrency?.overriddenAt ? ( + + Concurrency limit overridden + + } + content="This queue's concurrency limit has been manually overridden from the dashboard or API." + className="max-w-xs" + disableHoverableContent + /> + ) : null} {queue.paused ? ( Paused @@ -442,41 +570,50 @@ export default function Page() { {queue.queued} 0 && "text-text-bright", isAtLimit && "text-warning" )} > - {queue.running}/ - - {limit} - + {queue.running} - {queue.concurrencyLimit ? "User" : "Environment"} + {limit} - {queue.releaseConcurrencyOnWaitpoint ? "Yes" : "No"} + {queue.concurrency?.overriddenAt ? ( + Override + ) : queue.concurrencyLimit ? ( + "User" + ) : ( + "Environment" + )} } + popoverContent={ + <> + {queue.paused ? ( + + ) : ( + + )} + + + + + + + } /> ); }) ) : ( - +
{hasFilters @@ -639,40 +832,59 @@ function EnvironmentPauseResumeButton({ function QueuePauseResumeButton({ queue, + variant = "tertiary/small", + fullWidth = false, + showTooltip = true, }: { /** The "id" here is a friendlyId */ queue: { id: string; name: string; paused: boolean }; + variant?: ButtonVariant; + fullWidth?: boolean; + showTooltip?: boolean; }) { - const navigation = useNavigation(); const [isOpen, setIsOpen] = useState(false); + const trigger = showTooltip ? ( +
+ + + +
+ + + +
+
+ + {queue.paused + ? `Resume processing runs in queue "${queue.name}"` + : `Pause processing runs in queue "${queue.name}"`} + +
+
+
+ ) : ( + + + + ); + return ( -
- - - -
- - - -
-
- - {queue.paused - ? `Resume processing runs in queue "${queue.name}"` - : `Pause processing runs in queue "${queue.name}"`} - -
-
-
+ {trigger} {queue.paused ? "Resume queue?" : "Pause queue?"}
@@ -714,6 +926,124 @@ function QueuePauseResumeButton({ ); } +function QueueOverrideConcurrencyButton({ + queue, + environmentConcurrencyLimit, +}: { + queue: QueueItem; + environmentConcurrencyLimit: number; +}) { + const navigation = useNavigation(); + const [isOpen, setIsOpen] = useState(false); + const [concurrencyLimit, setConcurrencyLimit] = useState( + queue.concurrencyLimit?.toString() ?? environmentConcurrencyLimit.toString() + ); + + const isOverridden = !!queue.concurrency?.overriddenAt; + const currentLimit = queue.concurrencyLimit ?? environmentConcurrencyLimit; + + useEffect(() => { + if (navigation.state === "loading" || navigation.state === "idle") { + setIsOpen(false); + } + }, [navigation.state]); + + const isLoading = Boolean( + navigation.formData?.get("action") === "queue-override" || + navigation.formData?.get("action") === "queue-remove-override" + ); + + return ( + + + + + + + {isOverridden ? "Edit concurrency override" : "Override concurrency limit"} + +
+ {isOverridden ? ( + + This queue's concurrency limit is currently overridden to {currentLimit}. + {typeof queue.concurrency?.base === "number" && + ` The original limit set in code was ${queue.concurrency.base}.`}{" "} + You can update the override or remove it to restore the{" "} + {typeof queue.concurrency?.base === "number" + ? "limit set in code" + : "environment concurrency limit"} + . + + ) : ( + + Override this queue's concurrency limit. The current limit is {currentLimit}, which is + set {queue.concurrencyLimit !== null ? "in code" : "by the environment"}. + + )} +
setIsOpen(false)} className="space-y-3"> + +
+ + setConcurrencyLimit(e.target.value)} + placeholder={currentLimit.toString()} + autoFocus + /> +
+ + } + shortcut={{ modifiers: ["mod"], key: "enter" }} + > + {isOverridden ? "Update override" : "Override limit"} + + } + cancelButton={ +
+ {isOverridden && ( + + )} + + + +
+ } + /> + +
+
+
+ ); +} + function EngineVersionUpgradeCallout() { return (
@@ -779,7 +1109,7 @@ export function QueueFilters() { const search = searchParams.get("query") ?? ""; return ( -
+
); } + +function BurstFactorTooltip({ + environment, +}: { + environment: { burstFactor: number; concurrencyLimit: number }; +}) { + return ( + + ); +} diff --git a/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.$projectParam.env.$envParam.regions/route.tsx b/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.$projectParam.env.$envParam.regions/route.tsx new file mode 100644 index 0000000000..3484e1378b --- /dev/null +++ b/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.$projectParam.env.$envParam.regions/route.tsx @@ -0,0 +1,464 @@ +import { + ArrowRightIcon, + ArrowUpCircleIcon, + BookOpenIcon, + ChatBubbleLeftEllipsisIcon, + InformationCircleIcon, + MapPinIcon, +} from "@heroicons/react/20/solid"; +import { Form } from "@remix-run/react"; +import { type ActionFunctionArgs, type LoaderFunctionArgs } from "@remix-run/server-runtime"; +import { tryCatch } from "@trigger.dev/core"; +import { useState } from "react"; +import { typedjson, useTypedLoaderData } from "remix-typedjson"; +import { z } from "zod"; +import { CloudProviderIcon } from "~/assets/icons/CloudProviderIcon"; +import { FlagIcon } from "~/assets/icons/RegionIcons"; +import { cloudProviderTitle } from "~/components/CloudProvider"; +import { Feedback } from "~/components/Feedback"; +import { V4Title } from "~/components/V4Badge"; +import { AdminDebugTooltip } from "~/components/admin/debugTooltip"; +import { MainCenteredContainer, PageBody, PageContainer } from "~/components/layout/AppLayout"; +import { Badge } from "~/components/primitives/Badge"; +import { Button, LinkButton } from "~/components/primitives/Buttons"; +import { ClipboardField } from "~/components/primitives/ClipboardField"; +import { CopyableText } from "~/components/primitives/CopyableText"; +import { + Dialog, + DialogContent, + DialogDescription, + DialogFooter, + DialogHeader, + DialogTitle, + DialogTrigger, +} from "~/components/primitives/Dialog"; +import { InfoPanel } from "~/components/primitives/InfoPanel"; +import { NavBar, PageAccessories, PageTitle } from "~/components/primitives/PageHeader"; +import { Paragraph } from "~/components/primitives/Paragraph"; +import * as Property from "~/components/primitives/PropertyTable"; +import { + Table, + TableBlankRow, + TableBody, + TableCell, + TableCellMenu, + TableHeader, + TableHeaderCell, + TableRow, +} from "~/components/primitives/Table"; +import { TextLink } from "~/components/primitives/TextLink"; +import { useFeatures } from "~/hooks/useFeatures"; +import { useOrganization } from "~/hooks/useOrganizations"; +import { useHasAdminAccess } from "~/hooks/useUser"; +import { redirectWithErrorMessage, redirectWithSuccessMessage } from "~/models/message.server"; +import { findProjectBySlug } from "~/models/project.server"; +import { type Region, RegionsPresenter } from "~/presenters/v3/RegionsPresenter.server"; +import { requireUser } from "~/services/session.server"; +import { + docsPath, + EnvironmentParamSchema, + ProjectParamSchema, + regionsPath, + v3BillingPath, +} from "~/utils/pathBuilder"; +import { SetDefaultRegionService } from "~/v3/services/setDefaultRegion.server"; + +export const loader = async ({ request, params }: LoaderFunctionArgs) => { + const user = await requireUser(request); + const { projectParam } = ProjectParamSchema.parse(params); + + const presenter = new RegionsPresenter(); + const [error, result] = await tryCatch( + presenter.call({ + userId: user.id, + projectSlug: projectParam, + isAdmin: user.admin || user.isImpersonating, + }) + ); + + if 
(error) { + throw new Response(undefined, { + status: 400, + statusText: error.message, + }); + } + + return typedjson(result); +}; + +const FormSchema = z.object({ + regionId: z.string(), +}); + +export const action = async ({ request, params }: ActionFunctionArgs) => { + const user = await requireUser(request); + const { organizationSlug, projectParam, envParam } = EnvironmentParamSchema.parse(params); + + const project = await findProjectBySlug(organizationSlug, projectParam, user.id); + + const redirectPath = regionsPath( + { slug: organizationSlug }, + { slug: projectParam }, + { slug: envParam } + ); + + if (!project) { + throw redirectWithErrorMessage(redirectPath, request, "Project not found"); + } + + const formData = await request.formData(); + const parsedFormData = FormSchema.safeParse(Object.fromEntries(formData)); + + if (!parsedFormData.success) { + throw redirectWithErrorMessage(redirectPath, request, "No region specified"); + } + + const service = new SetDefaultRegionService(); + const [error, result] = await tryCatch( + service.call({ + projectId: project.id, + regionId: parsedFormData.data.regionId, + isAdmin: user.admin || user.isImpersonating, + }) + ); + + if (error) { + return redirectWithErrorMessage(redirectPath, request, error.message); + } + + return redirectWithSuccessMessage(redirectPath, request, `Set ${result.name} as default`); +}; + +export default function Page() { + const { regions, isPaying } = useTypedLoaderData(); + const organization = useOrganization(); + const isAdmin = useHasAdminAccess(); + const { isManagedCloud } = useFeatures(); + + return ( + + + Regions} /> + + + + {regions.map((region) => ( + + {region.name} + {region.id} + + ))} + + + + + +
+ {regions.length === 0 ? ( + +
+ No regions found for this project. +
+
+ ) : ( + <> +
+
+ + + Region + Cloud Provider + Location + Static IPs + {isAdmin && Admin} + + + When you trigger a run it will execute in your default region, unless + you override the region when triggering. + + + Read docs + + + } + > + Default region + + + + + {regions.length === 0 ? ( + + There are no regions for this project + + ) : ( + regions.map((region) => { + return ( + + + + + + {region.cloudProvider ? ( + + + {cloudProviderTitle(region.cloudProvider)} + + ) : ( + "โ€“" + )} + + + + {region.location ? ( + + ) : null} + {region.description ?? "โ€“"} + + + + {region.staticIPs === null ? ( + + Unlock static IPs + + ) : region.staticIPs !== undefined ? ( + + ) : ( + "Not available" + )} + + {isAdmin && ( + {region.isHidden ? "Hidden" : "Visible"} + )} + {region.isDefault ? ( + + + Default + + + ) : ( + + } + /> + )} + + ); + }) + )} + + + + Suggest a new region + + + Suggest a regionโ€ฆ + + } + defaultValue="region" + /> + } + /> + + +
+ {isManagedCloud && ( + + + Trigger.dev is fully GDPR compliant. Learn more in our{" "} + security portal or{" "} + + get in touch + + } + defaultValue="help" + /> + . + + + )} +
+ + )} +
+ + + ); +} + +function SetDefaultDialog({ + regions, + newDefaultRegion, +}: { + regions: Region[]; + newDefaultRegion: Region; +}) { + const [isOpen, setIsOpen] = useState(false); + const currentDefaultRegion = regions.find((r) => r.isDefault); + + return ( + + + + + + + Set as default region + + + + Are you sure you want to set {newDefaultRegion.name} as your new default region? + + +
+
+
+ Current default +
+
+ {currentDefaultRegion?.name ?? "–"} +
+
+ + {currentDefaultRegion?.cloudProvider ? ( + <> + + {cloudProviderTitle(currentDefaultRegion.cloudProvider)} + + ) : ( + "–" + )} + +
+
+ + {currentDefaultRegion?.location ? ( + ) : null} + {currentDefaultRegion?.description ?? "–"} + +
+
+ + {/* Middle column with arrow */} +
+
+ +
+
+ + {/* Right column */} +
+
+ New default +
+
+ {newDefaultRegion.name} +
+
+ + {newDefaultRegion.cloudProvider ? ( + <> + + {cloudProviderTitle(newDefaultRegion.cloudProvider)} + + ) : ( + "–" + )} + +
+
+ + {newDefaultRegion.location ? ( + ) : null} + {newDefaultRegion.description ?? "–"} + +
+
+
+ + + Runs triggered from now on will execute in "{newDefaultRegion.name}", unless you{" "} + override when triggering. + +
+ + +
+ +
+
+
+
+ ); +} diff --git a/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.$projectParam.env.$envParam.runs.$runParam/route.tsx b/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.$projectParam.env.$envParam.runs.$runParam/route.tsx index 9e9145be9a..8ad68c00a9 100644 --- a/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.$projectParam.env.$envParam.runs.$runParam/route.tsx +++ b/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.$projectParam.env.$envParam.runs.$runParam/route.tsx @@ -2,6 +2,7 @@ import { ArrowUturnLeftIcon, BoltSlashIcon, BookOpenIcon, + ChevronUpIcon, ChevronDownIcon, ChevronRightIcon, InformationCircleIcon, @@ -11,7 +12,7 @@ import { MagnifyingGlassPlusIcon, StopCircleIcon, } from "@heroicons/react/20/solid"; -import { useLoaderData, useParams, useRevalidator } from "@remix-run/react"; +import { useLoaderData, useRevalidator } from "@remix-run/react"; import { type LoaderFunctionArgs, type SerializeFrom, json } from "@remix-run/server-runtime"; import { type Virtualizer } from "@tanstack/react-virtual"; import { @@ -20,12 +21,13 @@ import { nanosecondsToMilliseconds, tryCatch, } from "@trigger.dev/core/v3"; -import { type RuntimeEnvironmentType } from "@trigger.dev/database"; +import type { RuntimeEnvironmentType } from "@trigger.dev/database"; import { motion } from "framer-motion"; import { useCallback, useEffect, useRef, useState } from "react"; import { useHotkeys } from "react-hotkeys-hook"; import { redirect } from "remix-typedjson"; -import { ShowParentIcon, ShowParentIconSelected } from "~/assets/icons/ShowParentIcon"; +import { MoveToTopIcon } from "~/assets/icons/MoveToTopIcon"; +import { MoveUpIcon } from "~/assets/icons/MoveUpIcon"; import tileBgPath from "~/assets/images/error-banner-tile@2x.png"; import { DevDisconnectedBanner, useCrossEngineIsConnected } from "~/components/DevPresence"; import { WarmStartIconWithTooltip } from "~/components/WarmStarts"; @@ -67,7 +69,6 @@ import { eventBorderClassName, } from "~/components/runs/v3/SpanTitle"; import { TaskRunStatusIcon, runStatusClassNameColor } from "~/components/runs/v3/TaskRunStatus"; -import { env } from "~/env.server"; import { useDebounce } from "~/hooks/useDebounce"; import { useEnvironment } from "~/hooks/useEnvironment"; import { useEventSource } from "~/hooks/useEventSource"; @@ -96,6 +97,15 @@ import { import { useCurrentPlan } from "../_app.orgs.$organizationSlug/route"; import { SpanView } from "../resources.orgs.$organizationSlug.projects.$projectParam.env.$envParam.runs.$runParam.spans.$spanParam/route"; import { useSearchParams } from "~/hooks/useSearchParam"; +import { CopyableText } from "~/components/primitives/CopyableText"; +import type { SpanOverride } from "~/v3/eventRepository/eventRepository.types"; +import { getRunFiltersFromSearchParams } from "~/components/runs/v3/RunFilters"; +import { NextRunListPresenter } from "~/presenters/v3/NextRunListPresenter.server"; +import { $replica } from "~/db.server"; +import { clickhouseClient } from "~/services/clickhouseInstance.server"; +import { findProjectBySlug } from "~/models/project.server"; +import { findEnvironmentBySlug } from "~/models/runtimeEnvironment.server"; +import { logger } from "~/services/logger.server"; const resizableSettings = { parent: { @@ -129,6 +139,103 @@ const resizableSettings = { type TraceEvent = NonNullable["trace"]>["events"][0]; +type RunsListNavigation = { + runs: Array<{ friendlyId: string; spanId: string }>; + pagination: { next?: string; previous?: string }; + 
prevPageLastRun?: { friendlyId: string; spanId: string; cursor: string }; + nextPageFirstRun?: { friendlyId: string; spanId: string; cursor: string }; +}; + +async function getRunsListFromTableState({ + tableStateParam, + organizationSlug, + projectParam, + envParam, + runParam, + userId, +}: { + tableStateParam: string | null; + organizationSlug: string; + projectParam: string; + envParam: string; + runParam: string; + userId: string; +}): Promise { + if (!tableStateParam) { + return null; + } + + try { + const tableStateSearchParams = new URLSearchParams(decodeURIComponent(tableStateParam)); + const filters = getRunFiltersFromSearchParams(tableStateSearchParams); + + const project = await findProjectBySlug(organizationSlug, projectParam, userId); + const environment = await findEnvironmentBySlug(project?.id ?? "", envParam, userId); + + if (!project || !environment) { + return null; + } + + const runsListPresenter = new NextRunListPresenter($replica, clickhouseClient); + const currentPageResult = await runsListPresenter.call(project.organizationId, environment.id, { + userId, + projectId: project.id, + ...filters, + pageSize: 25, // Load enough runs to provide navigation context + }); + + const runsList: RunsListNavigation = { + runs: currentPageResult.runs, + pagination: currentPageResult.pagination, + }; + + const currentRunIndex = currentPageResult.runs.findIndex((r) => r.friendlyId === runParam); + + if (currentRunIndex === 0 && currentPageResult.pagination.previous) { + const prevPageResult = await runsListPresenter.call(project.organizationId, environment.id, { + userId, + projectId: project.id, + ...filters, + cursor: currentPageResult.pagination.previous, + direction: "backward", + pageSize: 1, // We only need the last run from the previous page + }); + + if (prevPageResult.runs.length > 0) { + runsList.prevPageLastRun = { + friendlyId: prevPageResult.runs[0].friendlyId, + spanId: prevPageResult.runs[0].spanId, + cursor: currentPageResult.pagination.previous, + }; + } + } + + if (currentRunIndex === currentPageResult.runs.length - 1 && currentPageResult.pagination.next) { + const nextPageResult = await runsListPresenter.call(project.organizationId, environment.id, { + userId, + projectId: project.id, + ...filters, + cursor: currentPageResult.pagination.next, + direction: "forward", + pageSize: 1, // We only need the first run from the next page + }); + + if (nextPageResult.runs.length > 0) { + runsList.nextPageFirstRun = { + friendlyId: nextPageResult.runs[0].friendlyId, + spanId: nextPageResult.runs[0].spanId, + cursor: currentPageResult.pagination.next, + }; + } + } + + return runsList; + } catch (error) { + logger.error("Error loading runs list from tableState:", { error }); + return null; + } +} + export const loader = async ({ request, params }: LoaderFunctionArgs) => { const userId = await requireUserId(request); const impersonationId = await getImpersonationId(request); @@ -141,7 +248,6 @@ export const loader = async ({ request, params }: LoaderFunctionArgs) => { const [error, result] = await tryCatch( presenter.call({ userId, - organizationSlug, showDeletedLogs: !!impersonationId, projectSlug: projectParam, runFriendlyId: runParam, @@ -168,21 +274,31 @@ export const loader = async ({ request, params }: LoaderFunctionArgs) => { const parent = await getResizableSnapshot(request, resizableSettings.parent.autosaveId); const tree = await getResizableSnapshot(request, resizableSettings.tree.autosaveId); + const runsList = await getRunsListFromTableState({ + tableStateParam: 
url.searchParams.get("tableState"), + organizationSlug, + projectParam, + envParam, + runParam, + userId, + }); + return json({ run: result.run, trace: result.trace, - maximumLiveReloadingSetting: env.MAXIMUM_LIVE_RELOADING_EVENTS, + maximumLiveReloadingSetting: result.maximumLiveReloadingSetting, resizable: { parent, tree, }, + runsList, }); }; type LoaderData = SerializeFrom; export default function Page() { - const { run, trace, resizable, maximumLiveReloadingSetting } = useLoaderData(); + const { run, trace, maximumLiveReloadingSetting, runsList } = useLoaderData(); const organization = useOrganization(); const project = useProject(); const environment = useEnvironment(); @@ -190,16 +306,30 @@ export default function Page() { logCount: trace?.events.length ?? 0, isCompleted: run.completedAt !== null, }); + const { value } = useSearchParams(); + const tableState = decodeURIComponent(value("tableState") ?? ""); + const tableStateSearchParams = new URLSearchParams(tableState); + const filters = getRunFiltersFromSearchParams(tableStateSearchParams); + const tabParam = value("tab") ?? undefined; + const spanParam = value("span") ?? undefined; + + const [previousRunPath, nextRunPath] = useAdjacentRunPaths({organization, project, environment, tableState, run, runsList, tabParam, useSpan: !!spanParam}); return ( <> + + {tableState && (
+ + +
)} + } /> {environment.type === "DEVELOPMENT" && } @@ -275,14 +405,10 @@ export default function Page() { run={run} trace={trace} maximumLiveReloadingSetting={maximumLiveReloadingSetting} - resizable={resizable} /> ) : ( )} @@ -290,7 +416,7 @@ export default function Page() { ); } -function TraceView({ run, trace, maximumLiveReloadingSetting, resizable }: LoaderData) { +function TraceView({ run, trace, maximumLiveReloadingSetting }: Pick) { const organization = useOrganization(); const project = useProject(); const environment = useEnvironment(); @@ -301,7 +427,7 @@ function TraceView({ run, trace, maximumLiveReloadingSetting, resizable }: Loade return <>; } - const { events, parentRunFriendlyId, duration, rootSpanStatus, rootStartedAt, queuedDuration } = + const { events, duration, rootSpanStatus, rootStartedAt, queuedDuration, overridesBySpanId } = trace; const shouldLiveReload = events.length <= maximumLiveReloadingSetting; @@ -324,6 +450,8 @@ function TraceView({ run, trace, maximumLiveReloadingSetting, resizable }: Loade // WARNING Don't put the revalidator in the useEffect deps array or bad things will happen }, [streamedEvents]); // eslint-disable-line react-hooks/exhaustive-deps + const spanOverrides = selectedSpanId ? overridesBySpanId?.[selectedSpanId] : undefined; + return (
{ //instantly close the panel if no span is selected if (!selectedSpan) { @@ -357,6 +484,7 @@ function TraceView({ run, trace, maximumLiveReloadingSetting, resizable }: Loade shouldLiveReload={shouldLiveReload} maximumLiveReloadingSetting={maximumLiveReloadingSetting} rootRun={run.rootTaskRun} + parentRun={run.parentTaskRun} isCompleted={run.completedAt !== null} /> @@ -372,6 +500,7 @@ function TraceView({ run, trace, maximumLiveReloadingSetting, resizable }: Loade replaceSearchParam("span")} /> @@ -381,7 +510,7 @@ function TraceView({ run, trace, maximumLiveReloadingSetting, resizable }: Loade ); } -function NoLogsView({ run, resizable }: LoaderData) { +function NoLogsView({ run }: Pick) { const plan = useCurrentPlan(); const organization = useOrganization(); @@ -475,7 +604,6 @@ function NoLogsView({ run, resizable }: LoaderData) { type TasksTreeViewProps = { events: TraceEvent[]; selectedId?: string; - parentRunFriendlyId?: string; onSelectedIdChanged: (selectedId: string | undefined) => void; totalDuration: number; rootSpanStatus: "executing" | "completed" | "failed"; @@ -486,7 +614,10 @@ type TasksTreeViewProps = { maximumLiveReloadingSetting: number; rootRun: { friendlyId: string; - taskIdentifier: string; + spanId: string; + } | null; + parentRun: { + friendlyId: string; spanId: string; } | null; isCompleted: boolean; @@ -495,7 +626,6 @@ type TasksTreeViewProps = { function TasksTreeView({ events, selectedId, - parentRunFriendlyId, onSelectedIdChanged, totalDuration, rootSpanStatus, @@ -505,6 +635,7 @@ function TasksTreeView({ shouldLiveReload, maximumLiveReloadingSetting, rootRun, + parentRun, isCompleted, }: TasksTreeViewProps) { const isAdmin = useHasAdminAccess(); @@ -595,20 +726,30 @@ function TasksTreeView({ id={resizableSettings.tree.tree.id} default={resizableSettings.tree.tree.default} min={resizableSettings.tree.tree.min} - className="pl-3" >
-
- {rootRun ? ( - + {rootRun || parentRun ? ( + - ) : parentRunFriendlyId ? ( - ) : ( - + This is the root task )} @@ -627,6 +768,7 @@ function TasksTreeView({ nodes={nodes} getNodeProps={getNodeProps} getTreeProps={getTreeProps} + parentClassName="pl-3" renderNode={({ node, state, index }) => ( <>
)} @@ -1007,7 +1150,8 @@ function TimelineView({ "-ml-[0.1562rem] size-[0.3125rem] rounded-full border bg-background-bright", eventBorderClassName(node.data) )} - layoutId={`${node.id}-${event.name}`} + layoutId={disableSpansAnimations ? undefined : `${node.id}-${event.name}`} + animate={disableSpansAnimations ? false : undefined} /> )} @@ -1026,7 +1170,8 @@ function TimelineView({ > ) : null} @@ -1049,6 +1194,7 @@ function TimelineView({ } node={node} fadeLeft={isTopSpan && queuedDuration !== undefined} + disableAnimations={disableSpansAnimations} /> ) : ( @@ -1063,7 +1209,8 @@ function TimelineView({ "-ml-0.5 size-3 rounded-full border-2 border-background-bright", eventBackgroundClassName(node.data) )} - layoutId={node.id} + layoutId={disableSpansAnimations ? undefined : node.id} + animate={disableSpansAnimations ? false : undefined} /> )} @@ -1138,60 +1285,108 @@ function TaskLine({ isError, isSelected }: { isError: boolean; isSelected: boole return
; } -function ShowParentLink({ - runFriendlyId, - spanId, - isRoot, +function ShowParentOrRootLinks({ + relationships, }: { - runFriendlyId: string; - spanId?: string; - isRoot: boolean; + relationships: { + root?: { + friendlyId: string; + spanId: string; + isParent?: boolean; + }; + parent?: { + friendlyId: string; + spanId: string; + }; + }; }) { - const [mouseOver, setMouseOver] = useState(false); const organization = useOrganization(); const project = useProject(); const environment = useEnvironment(); - const { spanParam } = useParams(); - const span = spanId ? spanId : spanParam; + // Case 1: Root is also the parent + if (relationships.root?.isParent === true) { + return ( + + Jump to root and parent run + +
+ } + className="text-xs" + > + Root/parent + + ); + } + // Case 2: Root and Parent are different runs return ( - setMouseOver(true)} - onMouseLeave={() => setMouseOver(false)} - fullWidth - textAlignLeft - shortcut={{ key: "p" }} - className="flex-1" - > - {mouseOver ? ( - - ) : ( - +
+ {relationships.root && ( + + Jump to root run + +
+ } + className="text-xs" + > + Root +
)} - - {isRoot ? "Show root run" : "Show parent run"} - - + {relationships.parent && ( + + Jump to parent run + +
+ } + className="text-xs" + > + Parent + + )} +
); } @@ -1247,8 +1442,9 @@ function SpanWithDuration({ showDuration, node, fadeLeft, + disableAnimations, ...props -}: Timeline.SpanProps & { node: TraceEvent; showDuration: boolean; fadeLeft: boolean }) { +}: Timeline.SpanProps & { node: TraceEvent; showDuration: boolean; fadeLeft: boolean; disableAnimations?: boolean }) { return ( {node.data.isPartial && (
{formatDurationMilliseconds(props.durationMs, { style: "short", @@ -1357,16 +1556,16 @@ function KeyboardShortcuts({ expandAllBelowDepth, collapseAllBelowDepth, toggleExpandLevel, - setShowDurations, }: { expandAllBelowDepth: (depth: number) => void; collapseAllBelowDepth: (depth: number) => void; toggleExpandLevel: (depth: number) => void; - setShowDurations: (show: (show: boolean) => boolean) => void; + setShowDurations?: (show: (show: boolean) => boolean) => void; }) { return ( <> + expandAllBelowDepth(0)} @@ -1383,6 +1582,16 @@ function KeyboardShortcuts({ ); } +function AdjacentRunsShortcuts() { + return (
+ + + + Adjacent runs + +
); +} + function ArrowKeyShortcuts() { return (
@@ -1429,7 +1638,7 @@ function NumberShortcuts({ toggleLevel }: { toggleLevel: (depth: number) => void return (
0 - โ€“ + โ€“ 9 Toggle level @@ -1461,3 +1670,127 @@ function SearchField({ onChange }: { onChange: (value: string) => void }) { /> ); } + +function useAdjacentRunPaths({ + organization, + project, + environment, + tableState, + run, + runsList, + tabParam, + useSpan +}: { + organization: { slug: string }; + project: { slug: string }; + environment: { slug: string }; + tableState: string; + run: { friendlyId: string, spanId: string }; + runsList: RunsListNavigation | null; + tabParam?: string; + useSpan?: boolean; +}): [string | null, string | null] { + if (!runsList || runsList.runs.length === 0) { + return [null, null]; + } + + const currentIndex = runsList.runs.findIndex((r) => r.friendlyId === run.friendlyId); + + if (currentIndex === -1) { + return [null, null]; + } + + // Determine previous run: use prevPageLastRun if at first position, otherwise use previous run in list + let previousRun: { friendlyId: string; spanId: string } | null = null; + const previousRunTableState = new URLSearchParams(tableState); + if (currentIndex > 0) { + previousRun = runsList.runs[currentIndex - 1]; + } else if (runsList.prevPageLastRun) { + previousRun = runsList.prevPageLastRun; + // Update tableState with the new cursor for the previous page + previousRunTableState.set("cursor", runsList.prevPageLastRun.cursor); + previousRunTableState.set("direction", "backward"); + } + + // Determine next run: use nextPageFirstRun if at last position, otherwise use next run in list + let nextRun: { friendlyId: string; spanId: string } | null = null; + const nextRunTableState = new URLSearchParams(tableState); + if (currentIndex < runsList.runs.length - 1) { + nextRun = runsList.runs[currentIndex + 1]; + } else if (runsList.nextPageFirstRun) { + nextRun = runsList.nextPageFirstRun; + // Update tableState with the new cursor for the next page + nextRunTableState.set("cursor", runsList.nextPageFirstRun.cursor); + nextRunTableState.set("direction", "forward"); + } + + const previousURLSearchParams = new URLSearchParams(); + previousURLSearchParams.set("tableState", previousRunTableState.toString()); + if (previousRun && useSpan) { + previousURLSearchParams.set("span", previousRun.spanId); + } + if (tabParam && useSpan) { + previousURLSearchParams.set("tab", tabParam); + } + const previousRunPath = previousRun + ? v3RunPath(organization, project, environment, previousRun, previousURLSearchParams) + : null; + + const nextURLSearchParams = new URLSearchParams(); + nextURLSearchParams.set("tableState", nextRunTableState.toString()); + if (nextRun && useSpan) { + nextURLSearchParams.set("span", nextRun.spanId); + } + if (tabParam && useSpan) { + nextURLSearchParams.set("tab", tabParam); + } + const nextRunPath = nextRun + ? v3RunPath(organization, project, environment, nextRun, nextURLSearchParams) + : null; + + return [previousRunPath, nextRunPath]; +} + + +function PreviousRunButton({ to }: { to: string | null }) { + return ( +
+ !to && e.preventDefault()} + shortcut={{ key: "[" }} + tooltip="Previous Run" + disabled={!to} + replace + /> +
+ ); +} + +function NextRunButton({ to }: { to: string | null }) { + return ( +
+ !to && e.preventDefault()} + shortcut={{ key: "]" }} + tooltip="Next Run" + disabled={!to} + replace + /> +
+ ); +} + diff --git a/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.$projectParam.env.$envParam.runs._index/route.tsx b/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.$projectParam.env.$envParam.runs._index/route.tsx index 294b1a2ca8..9f8cf278be 100644 --- a/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.$projectParam.env.$envParam.runs._index/route.tsx +++ b/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.$projectParam.env.$envParam.runs._index/route.tsx @@ -1,47 +1,49 @@ -import { ArrowPathIcon, StopCircleIcon } from "@heroicons/react/20/solid"; import { BeakerIcon, BookOpenIcon } from "@heroicons/react/24/solid"; -import { Form, type MetaFunction, useNavigation } from "@remix-run/react"; +import { type MetaFunction, useNavigation } from "@remix-run/react"; import { type LoaderFunctionArgs } from "@remix-run/server-runtime"; -import { IconCircleX } from "@tabler/icons-react"; -import { AnimatePresence, motion } from "framer-motion"; -import { ListChecks, ListX } from "lucide-react"; -import { Suspense, useState } from "react"; -import { TypedAwait, typeddefer, useTypedLoaderData } from "remix-typedjson"; +import { Suspense } from "react"; +import { + TypedAwait, + typeddefer, + type UseDataFunctionReturn, + useTypedLoaderData, +} from "remix-typedjson"; +import { ListCheckedIcon } from "~/assets/icons/ListCheckedIcon"; import { TaskIcon } from "~/assets/icons/TaskIcon"; import { DevDisconnectedBanner, useDevPresence } from "~/components/DevPresence"; import { StepContentContainer } from "~/components/StepContentContainer"; import { MainCenteredContainer, PageBody } from "~/components/layout/AppLayout"; -import { Button, LinkButton } from "~/components/primitives/Buttons"; -import { - Dialog, - DialogContent, - DialogDescription, - DialogFooter, - DialogHeader, - DialogTrigger, -} from "~/components/primitives/Dialog"; -import { Header1, Header2 } from "~/components/primitives/Headers"; +import { Badge } from "~/components/primitives/Badge"; +import { LinkButton } from "~/components/primitives/Buttons"; +import { Header1 } from "~/components/primitives/Headers"; import { InfoPanel } from "~/components/primitives/InfoPanel"; import { NavBar, PageAccessories, PageTitle } from "~/components/primitives/PageHeader"; import { Paragraph } from "~/components/primitives/Paragraph"; import { - SelectedItemsProvider, - useSelectedItems, -} from "~/components/primitives/SelectedItemsProvider"; -import { Spinner, SpinnerWhite } from "~/components/primitives/Spinner"; + ResizableHandle, + ResizablePanel, + ResizablePanelGroup, +} from "~/components/primitives/Resizable"; +import { SelectedItemsProvider } from "~/components/primitives/SelectedItemsProvider"; +import { ShortcutKey } from "~/components/primitives/ShortcutKey"; +import { Spinner } from "~/components/primitives/Spinner"; import { StepNumber } from "~/components/primitives/StepNumber"; import { TextLink } from "~/components/primitives/TextLink"; -import { RunsFilters, TaskRunListSearchFilters } from "~/components/runs/v3/RunFilters"; +import { RunsFilters, type TaskRunListSearchFilters } from "~/components/runs/v3/RunFilters"; import { TaskRunsTable } from "~/components/runs/v3/TaskRunsTable"; import { BULK_ACTION_RUN_LIMIT } from "~/consts"; +import { $replica } from "~/db.server"; import { useEnvironment } from "~/hooks/useEnvironment"; import { useOrganization } from "~/hooks/useOrganizations"; import { useProject } from "~/hooks/useProject"; +import { useSearchParams } from 
"~/hooks/useSearchParam"; +import { useShortcutKeys } from "~/hooks/useShortcutKeys"; import { findProjectBySlug } from "~/models/project.server"; import { findEnvironmentBySlug } from "~/models/runtimeEnvironment.server"; -import { RunListPresenter } from "~/presenters/v3/RunListPresenter.server"; +import { getRunFiltersFromRequest } from "~/presenters/RunFilters.server"; +import { NextRunListPresenter } from "~/presenters/v3/NextRunListPresenter.server"; +import { clickhouseClient } from "~/services/clickhouseInstance.server"; import { - getRootOnlyFilterPreference, setRootOnlyFilterPreference, uiPreferencesStorage, } from "~/services/preferences/uiPreferences.server"; @@ -50,11 +52,14 @@ import { cn } from "~/utils/cn"; import { docsPath, EnvironmentParamSchema, + v3CreateBulkActionPath, v3ProjectPath, - v3RunsPath, v3TestPath, + v3TestTaskPath, } from "~/utils/pathBuilder"; import { ListPagination } from "../../components/ListPagination"; +import { CreateBulkActionInspector } from "../resources.orgs.$organizationSlug.projects.$projectParam.env.$envParam.runs.bulkaction"; +import { Callout } from "~/components/primitives/Callout"; export const meta: MetaFunction = () => { return [ @@ -68,15 +73,6 @@ export const loader = async ({ request, params }: LoaderFunctionArgs) => { const userId = await requireUserId(request); const { projectParam, organizationSlug, envParam } = EnvironmentParamSchema.parse(params); - const url = new URL(request.url); - - let rootOnlyValue = false; - if (url.searchParams.has("rootOnly")) { - rootOnlyValue = url.searchParams.get("rootOnly") === "true"; - } else { - rootOnlyValue = await getRootOnlyFilterPreference(request); - } - const project = await findProjectBySlug(organizationSlug, projectParam, userId); if (!project) { throw new Error("Project not found"); @@ -87,67 +83,23 @@ export const loader = async ({ request, params }: LoaderFunctionArgs) => { throw new Error("Environment not found"); } - const s = { - cursor: url.searchParams.get("cursor") ?? undefined, - direction: url.searchParams.get("direction") ?? undefined, - statuses: url.searchParams.getAll("statuses"), - environments: [environment.id], - tasks: url.searchParams.getAll("tasks"), - period: url.searchParams.get("period") ?? undefined, - bulkId: url.searchParams.get("bulkId") ?? undefined, - tags: url.searchParams.getAll("tags").map((t) => decodeURIComponent(t)), - from: url.searchParams.get("from") ?? undefined, - to: url.searchParams.get("to") ?? undefined, - rootOnly: rootOnlyValue, - runId: url.searchParams.get("runId") ?? undefined, - batchId: url.searchParams.get("batchId") ?? undefined, - scheduleId: url.searchParams.get("scheduleId") ?? undefined, - }; - const { - tasks, - versions, - statuses, - environments, - tags, - period, - bulkId, - from, - to, - cursor, - direction, - rootOnly, - runId, - batchId, - scheduleId, - } = TaskRunListSearchFilters.parse(s); + const filters = await getRunFiltersFromRequest(request); - const presenter = new RunListPresenter(); - const list = presenter.call(environment.id, { + const presenter = new NextRunListPresenter($replica, clickhouseClient); + const list = presenter.call(project.organizationId, environment.id, { userId, projectId: project.id, - tasks, - versions, - statuses, - tags, - period, - bulkId, - from, - to, - batchId, - runIds: runId ? 
[runId] : undefined, - scheduleId, - rootOnly, - direction: direction, - cursor: cursor, + ...filters, }); - const session = await setRootOnlyFilterPreference(rootOnlyValue, request); + const session = await setRootOnlyFilterPreference(filters.rootOnly, request); const cookieValue = await uiPreferencesStorage.commitSession(session); return typeddefer( { data: list, - rootOnlyDefault: rootOnlyValue, + rootOnlyDefault: filters.rootOnly, + filters, }, { headers: { @@ -158,9 +110,7 @@ export const loader = async ({ request, params }: LoaderFunctionArgs) => { }; export default function Page() { - const { data, rootOnlyDefault } = useTypedLoaderData(); - const navigation = useNavigation(); - const isLoading = navigation.state !== "idle"; + const { data, rootOnlyDefault, filters } = useTypedLoaderData(); const { isConnected } = useDevPresence(); const project = useProject(); const environment = useEnvironment(); @@ -188,65 +138,42 @@ export default function Page() { maxSelectedItemCount={BULK_ACTION_RUN_LIMIT} > {({ selectedItems }) => ( -
- + +
+
Loading runs
+
+ } + > + + + Unable to load your task runs. Please refresh the page or try again in a + moment. + +
} > - - {(list) => ( - <> - {list.runs.length === 0 && !list.hasAnyRuns ? ( - list.possibleTasks.length === 0 ? ( - - ) : ( - - ) - ) : ( -
-
- -
- -
-
- - -
- )} - - )} -
- - -
+ {(list) => { + return ( + + ); + }} + + )} @@ -254,181 +181,143 @@ export default function Page() { ); } -function BulkActionBar() { - const { selectedItems, deselectAll } = useSelectedItems(); - const [barState, setBarState] = useState<"none" | "replay" | "cancel">("none"); - - const hasSelectedMaximum = selectedItems.size >= BULK_ACTION_RUN_LIMIT; - - return ( - - {selectedItems.size > 0 && ( - -
- - Bulk actions: - {hasSelectedMaximum ? ( - - Maximum of {selectedItems.size} runs selected - - ) : ( - {selectedItems.size} runs selected - )} -
-
- { - if (o) { - setBarState("cancel"); - } else { - setBarState("none"); - } - }} - /> - { - if (o) { - setBarState("replay"); - } else { - setBarState("none"); - } - }} - /> - -
-
- )} -
- ); -} - -function CancelRuns({ onOpen }: { onOpen: (open: boolean) => void }) { - const { selectedItems } = useSelectedItems(); - - const organization = useOrganization(); - const project = useProject(); - const environment = useEnvironment(); - const failedRedirect = v3RunsPath(organization, project, environment); - - const formAction = `/resources/taskruns/bulk/cancel`; - +function RunsList({ + list, + selectedItems, + rootOnlyDefault, + filters, +}: { + list: Awaited["data"]>; + selectedItems: Set; + rootOnlyDefault: boolean; + filters: TaskRunListSearchFilters; +}) { const navigation = useNavigation(); - const isLoading = navigation.formAction === formAction; - - return ( - onOpen(o)}> - - - - - Cancel {selectedItems.size} runs? - - Canceling these runs will stop them from running. Only runs that are not already finished - will be canceled, the others will remain in their existing state. - - -
- - - - - {[...selectedItems].map((runId) => ( - - ))} - -
-
-
-
- ); -} - -function ReplayRuns({ onOpen }: { onOpen: (open: boolean) => void }) { - const { selectedItems } = useSelectedItems(); - + const isLoading = navigation.state !== "idle"; const organization = useOrganization(); const project = useProject(); const environment = useEnvironment(); - const failedRedirect = v3RunsPath(organization, project, environment); + const { has, replace } = useSearchParams(); - const formAction = `/resources/taskruns/bulk/replay`; - - const navigation = useNavigation(); - const isLoading = navigation.formAction === formAction; + // Shortcut keys for bulk actions + useShortcutKeys({ + shortcut: { key: "r" }, + action: (e) => { + replace({ + bulkInspector: "true", + action: "replay", + mode: selectedItems.size > 0 ? "selected" : undefined, + }); + }, + }); + useShortcutKeys({ + shortcut: { key: "c" }, + action: (e) => { + replace({ + bulkInspector: "true", + action: "cancel", + mode: selectedItems.size > 0 ? "selected" : undefined, + }); + }, + }); + const isShowingBulkActionInspector = has("bulkInspector") && list.hasAnyRuns; return ( - onOpen(o)}> - - - - - Replay runs? - - Replaying these runs will create a new run for each with the same payload and environment - as the original. It will use the latest version of the code for each task. - - -
- - - - - {[...selectedItems].map((runId) => ( - - ))} - -
-
-
-
+ <> + {list.runs.length === 0 && !list.hasAnyRuns ? ( + list.possibleTasks.length === 0 ? ( + + ) : ( + t.slug === list.filters.tasks[0]) + : undefined + } + /> + ) + ) : ( +
+
+ +
+ {!isShowingBulkActionInspector && ( + 0 ? "selected" : undefined + )} + LeadingIcon={ListCheckedIcon} + className={selectedItems.size > 0 ? "pr-1" : undefined} + tooltip={ +
+
+ Replay + +
+
+ Cancel + +
+
+ } + > + + Bulk action + {selectedItems.size > 0 && ( + {selectedItems.size} + )} + +
+ )} + +
+
+ + +
+ )} + +
+ + {isShowingBulkActionInspector && ( + <> + + + 0} + /> + + + )} + ); } @@ -458,7 +347,7 @@ function CreateFirstTaskInstructions() { ); } -function RunTaskInstructions() { +function RunTaskInstructions({ task }: { task?: { slug: string } }) { const organization = useOrganization(); const project = useProject(); const environment = useEnvironment(); @@ -471,7 +360,11 @@ function RunTaskInstructions() { Perform a test run with a payload directly from the dashboard.
diff --git a/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.$projectParam.env.$envParam.settings/route.tsx b/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.$projectParam.env.$envParam.settings/route.tsx index db6f641f5d..66ea64cb36 100644 --- a/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.$projectParam.env.$envParam.settings/route.tsx +++ b/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.$projectParam.env.$envParam.settings/route.tsx @@ -2,7 +2,8 @@ import { conform, useForm } from "@conform-to/react"; import { parse } from "@conform-to/zod"; import { ExclamationTriangleIcon, FolderIcon, TrashIcon } from "@heroicons/react/20/solid"; import { Form, type MetaFunction, useActionData, useNavigation } from "@remix-run/react"; -import { type ActionFunction, json } from "@remix-run/server-runtime"; +import { type ActionFunction, type LoaderFunctionArgs, json } from "@remix-run/server-runtime"; +import { typedjson, useTypedLoaderData } from "remix-typedjson"; import { z } from "zod"; import { AdminDebugTooltip } from "~/components/admin/debugTooltip"; import { InlineCode } from "~/components/code/InlineCode"; @@ -12,6 +13,7 @@ import { PageContainer, } from "~/components/layout/AppLayout"; import { Button } from "~/components/primitives/Buttons"; +import { CheckboxWithLabel } from "~/components/primitives/Checkbox"; import { ClipboardField } from "~/components/primitives/ClipboardField"; import { Fieldset } from "~/components/primitives/Fieldset"; import { FormButtons } from "~/components/primitives/FormButtons"; @@ -25,13 +27,23 @@ import { NavBar, PageAccessories, PageTitle } from "~/components/primitives/Page import { Paragraph } from "~/components/primitives/Paragraph"; import * as Property from "~/components/primitives/PropertyTable"; import { SpinnerWhite } from "~/components/primitives/Spinner"; -import { prisma } from "~/db.server"; +import { useOrganization } from "~/hooks/useOrganizations"; import { useProject } from "~/hooks/useProject"; -import { redirectWithErrorMessage, redirectWithSuccessMessage } from "~/models/message.server"; -import { DeleteProjectService } from "~/services/deleteProject.server"; +import { + redirectBackWithErrorMessage, + redirectBackWithSuccessMessage, + redirectWithErrorMessage, + redirectWithSuccessMessage, +} from "~/models/message.server"; +import { ProjectSettingsService } from "~/services/projectSettings.server"; import { logger } from "~/services/logger.server"; import { requireUserId } from "~/services/session.server"; -import { organizationPath, v3ProjectPath } from "~/utils/pathBuilder"; +import { organizationPath, v3ProjectPath, EnvironmentParamSchema, v3BillingPath } from "~/utils/pathBuilder"; +import React, { useEffect, useState } from "react"; +import { useEnvironment } from "~/hooks/useEnvironment"; +import { ProjectSettingsPresenter } from "~/services/projectSettingsPresenter.server"; +import { type BuildSettings } from "~/v3/buildSettings"; +import { GitHubSettingsPanel } from "../resources.orgs.$organizationSlug.projects.$projectParam.env.$envParam.github"; export const meta: MetaFunction = () => { return [ @@ -41,6 +53,86 @@ export const meta: MetaFunction = () => { ]; }; +export const loader = async ({ request, params }: LoaderFunctionArgs) => { + const userId = await requireUserId(request); + const { projectParam, organizationSlug } = EnvironmentParamSchema.parse(params); + + const projectSettingsPresenter = new ProjectSettingsPresenter(); + const resultOrFail = await 
projectSettingsPresenter.getProjectSettings( + organizationSlug, + projectParam, + userId + ); + + if (resultOrFail.isErr()) { + switch (resultOrFail.error.type) { + case "project_not_found": { + throw new Response(undefined, { + status: 404, + statusText: "Project not found", + }); + } + case "other": + default: { + resultOrFail.error.type satisfies "other"; + + logger.error("Failed loading project settings", { + error: resultOrFail.error, + }); + throw new Response(undefined, { + status: 400, + statusText: "Something went wrong, please try again!", + }); + } + } + } + + const { gitHubApp, buildSettings } = resultOrFail.value; + + return typedjson({ + githubAppEnabled: gitHubApp.enabled, + buildSettings, + }); +}; + +const UpdateBuildSettingsFormSchema = z.object({ + action: z.literal("update-build-settings"), + triggerConfigFilePath: z + .string() + .trim() + .optional() + .transform((val) => (val ? val.replace(/^\/+/, "") : val)) + .refine((val) => !val || val.length <= 255, { + message: "Config file path must not exceed 255 characters", + }), + installCommand: z + .string() + .trim() + .optional() + .refine((val) => !val || !val.includes("\n"), { + message: "Install command must be a single line", + }) + .refine((val) => !val || val.length <= 500, { + message: "Install command must not exceed 500 characters", + }), + preBuildCommand: z + .string() + .trim() + .optional() + .refine((val) => !val || !val.includes("\n"), { + message: "Pre-build command must be a single line", + }) + .refine((val) => !val || val.length <= 500, { + message: "Pre-build command must not exceed 500 characters", + }), + useNativeBuildServer: z + .string() + .optional() + .transform((val) => val === "on"), +}); + +type UpdateBuildSettingsFormSchema = z.infer; + export function createSchema( constraints: { getSlugMatch?: (slug: string) => { isMatch: boolean; projectSlug: string }; @@ -72,6 +164,7 @@ export function createSchema( } }), }), + UpdateBuildSettingsFormSchema, ]); } @@ -95,63 +188,117 @@ export const action: ActionFunction = async ({ request, params }) => { return json(submission); } - try { - switch (submission.value.action) { - case "rename": { - await prisma.project.update({ - where: { - slug: projectParam, - organization: { - members: { - some: { - userId, - }, - }, - }, - }, - data: { - name: submission.value.projectName, - }, - }); + const projectSettingsService = new ProjectSettingsService(); + const membershipResultOrFail = await projectSettingsService.verifyProjectMembership( + organizationSlug, + projectParam, + userId + ); + + if (membershipResultOrFail.isErr()) { + return json({ errors: { body: membershipResultOrFail.error.type } }, { status: 404 }); + } + + const { projectId } = membershipResultOrFail.value; - return redirectWithSuccessMessage( - v3ProjectPath({ slug: organizationSlug }, { slug: projectParam }), - request, - `Project renamed to ${submission.value.projectName}` - ); + switch (submission.value.action) { + case "rename": { + const resultOrFail = await projectSettingsService.renameProject( + projectId, + submission.value.projectName + ); + + if (resultOrFail.isErr()) { + switch (resultOrFail.error.type) { + case "other": + default: { + resultOrFail.error.type satisfies "other"; + + logger.error("Failed to rename project", { + error: resultOrFail.error, + }); + return json({ errors: { body: "Failed to rename project" } }, { status: 400 }); + } + } } - case "delete": { - const deleteProjectService = new DeleteProjectService(); - try { - await deleteProjectService.call({ 
projectSlug: projectParam, userId }); - - return redirectWithSuccessMessage( - organizationPath({ slug: organizationSlug }), - request, - "Project deleted" - ); - } catch (error: unknown) { - logger.error("Project could not be deleted", { - error: error instanceof Error ? error.message : JSON.stringify(error), - }); - return redirectWithErrorMessage( - v3ProjectPath({ slug: organizationSlug }, { slug: projectParam }), - request, - `Project ${projectParam} could not be deleted` - ); + + return redirectWithSuccessMessage( + v3ProjectPath({ slug: organizationSlug }, { slug: projectParam }), + request, + `Project renamed to ${submission.value.projectName}` + ); + } + case "delete": { + const resultOrFail = await projectSettingsService.deleteProject(projectParam, userId); + + if (resultOrFail.isErr()) { + switch (resultOrFail.error.type) { + case "other": + default: { + resultOrFail.error.type satisfies "other"; + + logger.error("Failed to delete project", { + error: resultOrFail.error, + }); + return redirectWithErrorMessage( + v3ProjectPath({ slug: organizationSlug }, { slug: projectParam }), + request, + `Project ${projectParam} could not be deleted` + ); + } + } + } + + return redirectWithSuccessMessage( + organizationPath({ slug: organizationSlug }), + request, + "Project deleted" + ); + } + case "update-build-settings": { + const { installCommand, preBuildCommand, triggerConfigFilePath, useNativeBuildServer } = + submission.value; + + const resultOrFail = await projectSettingsService.updateBuildSettings(projectId, { + installCommand: installCommand || undefined, + preBuildCommand: preBuildCommand || undefined, + triggerConfigFilePath: triggerConfigFilePath || undefined, + useNativeBuildServer: useNativeBuildServer, + }); + + if (resultOrFail.isErr()) { + switch (resultOrFail.error.type) { + case "other": + default: { + resultOrFail.error.type satisfies "other"; + + logger.error("Failed to update build settings", { + error: resultOrFail.error, + }); + return redirectBackWithErrorMessage(request, "Failed to update build settings"); + } } } + + return redirectBackWithSuccessMessage(request, "Build settings updated successfully"); + } + default: { + submission.value satisfies never; + return redirectBackWithErrorMessage(request, "Failed to process request"); } - } catch (error: any) { - return json({ errors: { body: error.message } }, { status: 400 }); } }; export default function Page() { + const { githubAppEnabled, buildSettings } = useTypedLoaderData(); const project = useProject(); + const organization = useOrganization(); + const environment = useEnvironment(); const lastSubmission = useActionData(); const navigation = useNavigation(); + const [hasRenameFormChanges, setHasRenameFormChanges] = useState(false); + const [renameForm, { projectName }] = useForm({ id: "rename-project", // TODO: type this @@ -187,10 +334,12 @@ export default function Page() { navigation.formData?.get("action") === "delete" && (navigation.state === "submitting" || navigation.state === "loading"); + const [deleteInputValue, setDeleteInputValue] = useState(""); + return ( - + @@ -212,91 +361,118 @@ export default function Page() { - -
- Project settings -
+
-
- - - - - This goes in your{" "} - trigger.config file. - - -
- -
- -
- - - - {projectName.error} - - - Rename project - - } - className="border-t-0" - /> -
-
-
- Danger zone -
- -
- - - - {projectSlug.error} - {deleteForm.error} + General +
+
+ + + - This change is irreversible, so please be certain. Type in the Project slug - {project.slug} and then press - Delete. + This goes in your{" "} + trigger.config file. - - Delete project - - } - />
- +
+
+ + + { + setHasRenameFormChanges(e.target.value !== project.name); + }} + /> + {projectName.error} + + + Save + + } + /> +
+
+
+
+ + {githubAppEnabled && ( + +
+ Git settings +
+ +
+
+ +
+ Build settings +
+ +
+
+
+ )} + +
+ Danger zone +
+
+
+ + + setDeleteInputValue(e.target.value)} + /> + {projectSlug.error} + {deleteForm.error} + + This change is irreversible, so please be certain. Type in the Project slug + {project.slug} and then press + Delete. + + + + Delete + + } + /> +
+
+
@@ -304,3 +480,145 @@ export default function Page() {
); } + +function BuildSettingsForm({ buildSettings }: { buildSettings: BuildSettings }) { + const lastSubmission = useActionData() as any; + const navigation = useNavigation(); + + const [hasBuildSettingsChanges, setHasBuildSettingsChanges] = useState(false); + const [buildSettingsValues, setBuildSettingsValues] = useState({ + preBuildCommand: buildSettings?.preBuildCommand || "", + installCommand: buildSettings?.installCommand || "", + triggerConfigFilePath: buildSettings?.triggerConfigFilePath || "", + useNativeBuildServer: buildSettings?.useNativeBuildServer || false, + }); + + useEffect(() => { + const hasChanges = + buildSettingsValues.preBuildCommand !== (buildSettings?.preBuildCommand || "") || + buildSettingsValues.installCommand !== (buildSettings?.installCommand || "") || + buildSettingsValues.triggerConfigFilePath !== (buildSettings?.triggerConfigFilePath || "") || + buildSettingsValues.useNativeBuildServer !== (buildSettings?.useNativeBuildServer || false); + setHasBuildSettingsChanges(hasChanges); + }, [buildSettingsValues, buildSettings]); + + const [buildSettingsForm, fields] = useForm({ + id: "update-build-settings", + lastSubmission: lastSubmission, + shouldRevalidate: "onSubmit", + onValidate({ formData }) { + return parse(formData, { + schema: UpdateBuildSettingsFormSchema, + }); + }, + }); + + const isBuildSettingsLoading = + navigation.formData?.get("action") === "update-build-settings" && + (navigation.state === "submitting" || navigation.state === "loading"); + + return ( +
+
+ + + { + setBuildSettingsValues((prev) => ({ + ...prev, + triggerConfigFilePath: e.target.value, + })); + }} + /> + + Path to your Trigger configuration file, relative to the root directory of your repo. + + + {fields.triggerConfigFilePath.error} + + + + + + { + setBuildSettingsValues((prev) => ({ + ...prev, + installCommand: e.target.value, + })); + }} + /> + + Command to install your project dependencies. This will be run from the root directory + of your repo. Auto-detected by default. + + {fields.installCommand.error} + + + + { + setBuildSettingsValues((prev) => ({ + ...prev, + preBuildCommand: e.target.value, + })); + }} + /> + + Any command that needs to run before we build and deploy your project. This will be run + from the root directory of your repo. + + {fields.preBuildCommand.error} + +
+ + { + setBuildSettingsValues((prev) => ({ + ...prev, + useNativeBuildServer: isChecked, + })); + }} + /> + + Native build server builds do not rely on external build providers and will become the + default in the future. Version 4.2.0 or newer is required. + + + {fields.useNativeBuildServer.error} + + +
+ {buildSettingsForm.error} + + Save + + } + /> +
+
+ ); +} diff --git a/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.$projectParam.env.$envParam.test.tasks.$taskParam/route.tsx b/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.$projectParam.env.$envParam.test.tasks.$taskParam/route.tsx index c9d59a126b..5d6fdb80ff 100644 --- a/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.$projectParam.env.$envParam.test.tasks.$taskParam/route.tsx +++ b/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.$projectParam.env.$envParam.test.tasks.$taskParam/route.tsx @@ -1,38 +1,43 @@ import { conform, useForm } from "@conform-to/react"; import { parse } from "@conform-to/zod"; -import { BeakerIcon } from "@heroicons/react/20/solid"; -import { Form, useActionData, useSubmit } from "@remix-run/react"; +import { + BeakerIcon, + StarIcon, + RectangleStackIcon, + TrashIcon, + CheckCircleIcon, +} from "@heroicons/react/20/solid"; +import { AnimatePresence, motion } from "framer-motion"; import { type ActionFunction, type LoaderFunctionArgs, json } from "@remix-run/server-runtime"; -import { type TaskRunStatus } from "@trigger.dev/database"; -import { useCallback, useEffect, useRef, useState } from "react"; +import { useCallback, useEffect, useRef, useState, useMemo } from "react"; import { typedjson, useTypedLoaderData } from "remix-typedjson"; +import { TaskIcon } from "~/assets/icons/TaskIcon"; import { JSONEditor } from "~/components/code/JSONEditor"; -import { EnvironmentCombo, EnvironmentLabel } from "~/components/environments/EnvironmentLabel"; +import { EnvironmentCombo } from "~/components/environments/EnvironmentLabel"; +import { Badge } from "~/components/primitives/Badge"; import { Button } from "~/components/primitives/Buttons"; -import { Callout } from "~/components/primitives/Callout"; import { DateField } from "~/components/primitives/DateField"; -import { DateTime } from "~/components/primitives/DateTime"; import { Fieldset } from "~/components/primitives/Fieldset"; import { FormError } from "~/components/primitives/FormError"; -import { Header2 } from "~/components/primitives/Headers"; import { Hint } from "~/components/primitives/Hint"; import { Input } from "~/components/primitives/Input"; import { InputGroup } from "~/components/primitives/InputGroup"; import { Label } from "~/components/primitives/Label"; +import { DurationPicker } from "~/components/primitives/DurationPicker"; import { Paragraph } from "~/components/primitives/Paragraph"; -import { RadioButtonCircle } from "~/components/primitives/RadioButton"; +import { Popover, PopoverContent, PopoverTrigger } from "~/components/primitives/Popover"; import { ResizableHandle, ResizablePanel, ResizablePanelGroup, } from "~/components/primitives/Resizable"; -import { Select } from "~/components/primitives/Select"; +import { Select, SelectItem } from "~/components/primitives/Select"; import { TabButton, TabContainer } from "~/components/primitives/Tabs"; import { TextLink } from "~/components/primitives/TextLink"; -import { TaskRunStatusCombo } from "~/components/runs/v3/TaskRunStatus"; import { TimezoneList } from "~/components/scheduled/timezones"; import { useEnvironment } from "~/hooks/useEnvironment"; import { useSearchParams } from "~/hooks/useSearchParam"; +import { useParams, Form, useActionData, useFetcher, useSubmit } from "@remix-run/react"; import { redirectBackWithErrorMessage, redirectWithErrorMessage, @@ -43,7 +48,9 @@ import { findEnvironmentBySlug } from "~/models/runtimeEnvironment.server"; import { type ScheduledRun, type 
StandardRun, - type TestTask, + type StandardTaskResult, + type ScheduledTaskResult, + type RunTemplate, TestTaskPresenter, } from "~/presenters/v3/TestTaskPresenter.server"; import { logger } from "~/services/logger.server"; @@ -53,6 +60,23 @@ import { docsPath, v3RunSpanPath, v3TaskParamsSchema, v3TestPath } from "~/utils import { TestTaskService } from "~/v3/services/testTask.server"; import { OutOfEntitlementError } from "~/v3/services/triggerTask.server"; import { TestTaskData } from "~/v3/testTask"; +import { RunTagInput } from "~/components/runs/v3/RunTagInput"; +import { type loader as queuesLoader } from "~/routes/resources.orgs.$organizationSlug.projects.$projectParam.env.$envParam.queues"; +import { DateTime } from "~/components/primitives/DateTime"; +import { TaskRunStatusCombo } from "~/components/runs/v3/TaskRunStatus"; +import { ClockRotateLeftIcon } from "~/assets/icons/ClockRotateLeftIcon"; +import { MachinePresetName } from "@trigger.dev/core/v3"; +import { TaskTriggerSourceIcon } from "~/components/runs/v3/TaskTriggerSource"; +import { TaskRunTemplateService } from "~/v3/services/taskRunTemplate.server"; +import { DeleteTaskRunTemplateService } from "~/v3/services/deleteTaskRunTemplate.server"; +import { DeleteTaskRunTemplateData, RunTemplateData } from "~/v3/taskRunTemplate"; +import { Dialog, DialogContent, DialogHeader, DialogTrigger } from "~/components/primitives/Dialog"; +import { DialogClose, DialogDescription } from "@radix-ui/react-dialog"; +import { FormButtons } from "~/components/primitives/FormButtons"; +import { $replica } from "~/db.server"; +import { clickhouseClient } from "~/services/clickhouseInstance.server"; + +type FormAction = "create-template" | "delete-template" | "run-scheduled" | "run-standard"; export const loader = async ({ request, params }: LoaderFunctionArgs) => { const userId = await requireUserId(request); @@ -74,7 +98,7 @@ export const loader = async ({ request, params }: LoaderFunctionArgs) => { }); } - const presenter = new TestTaskPresenter(); + const presenter = new TestTaskPresenter($replica, clickhouseClient); try { const result = await presenter.call({ userId, @@ -95,14 +119,7 @@ export const loader = async ({ request, params }: LoaderFunctionArgs) => { export const action: ActionFunction = async ({ request, params }) => { const userId = await requireUserId(request); - const { organizationSlug, projectParam, envParam, taskParam } = v3TaskParamsSchema.parse(params); - - const formData = await request.formData(); - const submission = parse(formData, { schema: TestTaskData }); - - if (!submission.value) { - return json(submission); - } + const { organizationSlug, projectParam, envParam } = v3TaskParamsSchema.parse(params); const project = await findProjectBySlug(organizationSlug, projectParam, userId); if (!project) { @@ -115,46 +132,115 @@ export const action: ActionFunction = async ({ request, params }) => { return redirectBackWithErrorMessage(request, "Environment not found"); } - if (environment.archivedAt) { - return redirectBackWithErrorMessage(request, "Can't run a test on an archived environment"); - } + const formData = await request.formData(); + const formAction = formData.get("formAction") as FormAction; - const testService = new TestTaskService(); - try { - const run = await testService.call(environment, submission.value); + switch (formAction) { + case "create-template": { + const submission = parse(formData, { schema: RunTemplateData }); + if (!submission.value) { + return json({ + ...submission, + formAction, + }); + 
} - if (!run) { - return redirectBackWithErrorMessage( - request, - "Unable to start a test run: Something went wrong" - ); + const templateService = new TaskRunTemplateService(); + try { + const template = await templateService.call(environment, submission.value); + + return json({ + ...submission, + success: true, + templateLabel: template.label, + formAction, + }); + } catch (e) { + logger.error("Failed to create template", { error: e instanceof Error ? e.message : e }); + return redirectBackWithErrorMessage(request, "Failed to create template"); + } } + case "delete-template": { + const submission = parse(formData, { schema: DeleteTaskRunTemplateData }); - return redirectWithSuccessMessage( - v3RunSpanPath( - { slug: organizationSlug }, - { slug: projectParam }, - { slug: envParam }, - { friendlyId: run.friendlyId }, - { spanId: run.spanId } - ), - request, - "Test run created" - ); - } catch (e) { - if (e instanceof OutOfEntitlementError) { - return redirectBackWithErrorMessage( - request, - "Unable to start a test run: You have exceeded your free credits" - ); + if (!submission.value) { + return json({ + ...submission, + formAction, + }); + } + + const deleteService = new DeleteTaskRunTemplateService(); + try { + await deleteService.call(environment, submission.value.templateId); + + return json({ + ...submission, + success: true, + formAction, + }); + } catch (e) { + logger.error("Failed to delete template", { error: e instanceof Error ? e.message : e }); + return redirectBackWithErrorMessage(request, "Failed to delete template"); + } } + case "run-scheduled": + case "run-standard": { + const submission = parse(formData, { schema: TestTaskData }); - logger.error("Failed to start a test run", { error: e instanceof Error ? e.message : e }); + if (!submission.value) { + return json({ + ...submission, + formAction, + }); + } - return redirectBackWithErrorMessage( - request, - "Unable to start a test run: Something went wrong" - ); + if (environment.archivedAt) { + return redirectBackWithErrorMessage(request, "Can't run a test on an archived environment"); + } + + const testService = new TestTaskService(); + try { + const run = await testService.call(environment, submission.value); + + if (!run) { + return redirectBackWithErrorMessage( + request, + "Unable to start a test run: Something went wrong" + ); + } + + return redirectWithSuccessMessage( + v3RunSpanPath( + { slug: organizationSlug }, + { slug: projectParam }, + { slug: envParam }, + { friendlyId: run.friendlyId }, + { spanId: run.spanId } + ), + request, + "Test run created" + ); + } catch (e) { + if (e instanceof OutOfEntitlementError) { + return redirectBackWithErrorMessage( + request, + "Unable to start a test run: You have exceeded your free credits" + ); + } + + logger.error("Failed to start a test run", { error: e instanceof Error ? e.message : e }); + + return redirectBackWithErrorMessage( + request, + "Unable to start a test run: Something went wrong" + ); + } + } + default: { + formAction satisfies never; + return redirectBackWithErrorMessage(request, "Failed to process request"); + } } }; @@ -165,41 +251,106 @@ export default function Page() { return
; } - switch (result.task.triggerSource) { + const params = useParams(); + const queueFetcher = useFetcher(); + + useEffect(() => { + if (params.organizationSlug && params.projectParam && params.envParam) { + const searchParams = new URLSearchParams(); + searchParams.set("type", "custom"); + searchParams.set("per_page", "100"); + + queueFetcher.load( + `/resources/orgs/${params.organizationSlug}/projects/${params.projectParam}/env/${ + params.envParam + }/queues?${searchParams.toString()}` + ); + } + }, [params.organizationSlug, params.projectParam, params.envParam]); + + const defaultTaskQueue = result.queue; + const queues = useMemo(() => { + const customQueues = queueFetcher.data?.queues ?? []; + + return defaultTaskQueue && !customQueues.some((q) => q.id === defaultTaskQueue.id) + ? [defaultTaskQueue, ...customQueues] + : customQueues; + }, [queueFetcher.data?.queues, defaultTaskQueue]); + + const { triggerSource } = result; + + switch (triggerSource) { case "STANDARD": { - return ; + return ( + + ); } case "SCHEDULED": { return ( ); } + default: { + return triggerSource satisfies never; + } } } const startingJson = "{\n\n}"; +const machinePresets = Object.values(MachinePresetName.enum); -function StandardTaskForm({ task, runs }: { task: TestTask["task"]; runs: StandardRun[] }) { +function StandardTaskForm({ + task, + queues, + runs, + versions, + templates, + disableVersionSelection, + allowArbitraryQueues, +}: { + task: StandardTaskResult["task"]; + queues: Required["queue"][]; + runs: StandardRun[]; + versions: string[]; + templates: RunTemplate[]; + disableVersionSelection: boolean; + allowArbitraryQueues: boolean; +}) { const environment = useEnvironment(); const { value, replace } = useSearchParams(); const tab = value("tab"); - //form submission const submit = useSubmit(); - const lastSubmission = useActionData(); + const actionData = useActionData(); + const lastSubmission = + actionData && + typeof actionData === "object" && + "formAction" in actionData && + actionData.formAction === ("run-standard" satisfies FormAction) + ? actionData + : undefined; - //recent runs - const [selectedCodeSampleId, setSelectedCodeSampleId] = useState(runs.at(0)?.id); - const selectedCodeSample = runs.find((r) => r.id === selectedCodeSampleId); - const selectedCodeSamplePayload = selectedCodeSample?.payload; - const selectedCodeSampleMetadata = selectedCodeSample?.seedMetadata; + const lastRun = runs.at(0); const [defaultPayloadJson, setDefaultPayloadJson] = useState( - selectedCodeSamplePayload ?? startingJson + lastRun?.payload ?? startingJson ); const setPayload = useCallback((code: string) => { setDefaultPayloadJson(code); @@ -208,7 +359,7 @@ function StandardTaskForm({ task, runs }: { task: TestTask["task"]; runs: Standa const currentPayloadJson = useRef(defaultPayloadJson); const [defaultMetadataJson, setDefaultMetadataJson] = useState( - selectedCodeSampleMetadata ?? "{}" + lastRun?.seedMetadata ?? 
startingJson ); const setMetadata = useCallback((code: string) => { setDefaultMetadataJson(code); @@ -216,140 +367,421 @@ function StandardTaskForm({ task, runs }: { task: TestTask["task"]; runs: Standa const currentMetadataJson = useRef(defaultMetadataJson); - const submitForm = useCallback( - (e: React.FormEvent) => { - submit( - { - triggerSource: "STANDARD", - payload: currentPayloadJson.current, - metadata: currentMetadataJson.current, - taskIdentifier: task.taskIdentifier, - environmentId: environment.id, - }, - { - action: "", - method: "post", - } - ); - e.preventDefault(); - }, - [currentPayloadJson, currentMetadataJson, task] + const [ttlValue, setTtlValue] = useState(lastRun?.ttlSeconds); + const [concurrencyKeyValue, setConcurrencyKeyValue] = useState( + lastRun?.concurrencyKey + ); + const [queueValue, setQueueValue] = useState(lastRun?.queue); + const [machineValue, setMachineValue] = useState(lastRun?.machinePreset); + const [maxAttemptsValue, setMaxAttemptsValue] = useState( + lastRun?.maxAttempts ); + const [maxDurationValue, setMaxDurationValue] = useState( + lastRun?.maxDurationInSeconds + ); + const [tagsValue, setTagsValue] = useState(lastRun?.runTags ?? []); + + const queueItems = queues.map((q) => ({ + value: q.type === "task" ? `task/${q.name}` : q.name, + label: q.name, + type: q.type, + paused: q.paused, + })); + + const [showTemplateCreatedSuccessMessage, setShowTemplateCreatedSuccessMessage] = useState(false); - const [form, { environmentId, payload }] = useForm({ + const [ + form, + { + environmentId, + payload, + metadata, + taskIdentifier, + delaySeconds, + ttlSeconds, + idempotencyKey, + idempotencyKeyTTLSeconds, + queue, + concurrencyKey, + maxAttempts, + maxDurationSeconds, + triggerSource, + tags, + version, + machine, + }, + ] = useForm({ id: "test-task", // TODO: type this lastSubmission: lastSubmission as any, + onSubmit(event, { formData }) { + event.preventDefault(); + + formData.set(payload.name, currentPayloadJson.current); + formData.set(metadata.name, currentMetadataJson.current); + + submit(formData, { method: "POST" }); + }, onValidate({ formData }) { return parse(formData, { schema: TestTaskData }); }, }); return ( -
submitForm(e)} - > - - - -
- - { - replace({ tab: "payload" }); - }} - > - Payload - + + + + +
+
+ + + {task.taskIdentifier} + +
+
+ { + setPayload(template.payload ?? ""); + setMetadata(template.metadata ?? ""); + setTtlValue(template.ttlSeconds ?? 0); + setConcurrencyKeyValue(template.concurrencyKey ?? ""); + setMaxAttemptsValue(template.maxAttempts ?? undefined); + setMaxDurationValue(template.maxDurationSeconds ?? 0); + setMachineValue(template.machinePreset ?? undefined); + setTagsValue(template.tags ?? []); + setQueueValue(template.queue ?? undefined); + }} + showTemplateCreatedSuccessMessage={showTemplateCreatedSuccessMessage} + /> + { + setPayload(run.payload); + run.seedMetadata && setMetadata(run.seedMetadata); + setTtlValue(run.ttlSeconds); + setConcurrencyKeyValue(run.concurrencyKey); + setMaxAttemptsValue(run.maxAttempts); + setMaxDurationValue(run.maxDurationInSeconds); + setTagsValue(run.runTags ?? []); + setQueueValue(run.queue); + setMachineValue(run.machinePreset); + }} + /> +
+
- { - replace({ tab: "metadata" }); - }} - > - Metadata - -
+ + +
{ - currentPayloadJson.current = v; - - //deselect the example if it's been edited - if (selectedCodeSampleId) { - if (v !== selectedCodeSamplePayload) { - setDefaultPayloadJson(v); - setSelectedCodeSampleId(undefined); - } + if (!tab || tab === "payload") { + currentPayloadJson.current = v; + setPayload(v); + } else { + currentMetadataJson.current = v; + setMetadata(v); } }} height="100%" - autoFocus={!tab || tab === "payload"} - className={cn("h-full overflow-auto", tab === "metadata" && "hidden")} - /> - { - currentMetadataJson.current = v; - - //deselect the example if it's been edited - if (selectedCodeSampleId) { - if (v !== selectedCodeSampleMetadata) { - setDefaultMetadataJson(v); - setSelectedCodeSampleId(undefined); - } - } - }} - height="100%" - autoFocus={tab === "metadata"} - placeholder="" - className={cn("h-full overflow-auto", tab !== "metadata" && "hidden")} + autoFocus={true} + className={cn("h-full overflow-auto")} + additionalActions={ + +
+ { + replace({ tab: "payload" }); + }} + > + Payload + + { + replace({ tab: "metadata" }); + }} + > + Metadata + +
+
+ } />
- - { - const run = runs.find((r) => r.id === id); - if (!run) return; - setPayload(run.payload); - run.seedMetadata && setMetadata(run.seedMetadata); - setSelectedCodeSampleId(id); - }} - /> + +
+
+ + Options enable you to control the execution behavior of your task.{" "} + Read the docs. + + + + + Overrides the machine preset. + {machine.error} + + + + + {disableVersionSelection ? ( + Only the latest version is available in the development environment. + ) : ( + Runs task on a specific version. + )} + {version.error} + + + + {allowArbitraryQueues ? ( + setQueueValue(e.target.value)} + /> + ) : ( + + )} + Assign run to a specific queue. + {queue.error} + + + + + Add tags to easily filter runs. + {tags.error} + + + + + setMaxAttemptsValue(e.target.value ? parseInt(e.target.value) : undefined) + } + onKeyDown={(e) => { + // only allow entering integers > 1 + if (["-", "+", ".", "e", "E"].includes(e.key)) { + e.preventDefault(); + } + }} + onBlur={(e) => { + const value = parseInt(e.target.value); + if (value < 1 && e.target.value !== "") { + e.target.value = "1"; + } + }} + /> + Retries failed runs up to the specified number of attempts. + {maxAttempts.error} + + + + + Overrides the maximum compute time limit for the run. + {maxDurationSeconds.error} + + + + + {idempotencyKey.error} + + Specify an idempotency key to ensure that a task is only triggered once with the + same key. + + + + + + Keys expire after 30 days by default. + + {idempotencyKeyTTLSeconds.error} + + + + + setConcurrencyKeyValue(e.target.value)} + /> + + Limits concurrency by creating a separate queue for each value of the key. + + {concurrencyKey.error} + + + + + Delays run by a specific duration. + {delaySeconds.error} + + + + + Expires the run if it hasn't started within the TTL. + {ttlSeconds.error} + + {form.error} +
+
-
- - This test will run in - - +
+
+ + This test will run in + + +
+ currentPayloadJson.current} + getCurrentMetadata={() => currentMetadataJson.current} + setShowCreatedSuccessMessage={setShowTemplateCreatedSuccessMessage} + /> +
-
); @@ -359,32 +791,66 @@ function ScheduledTaskForm({ task, runs, possibleTimezones, + queues, + versions, + templates, + disableVersionSelection, + allowArbitraryQueues, }: { - task: TestTask["task"]; + task: ScheduledTaskResult["task"]; runs: ScheduledRun[]; possibleTimezones: string[]; + queues: Required["queue"][]; + versions: string[]; + templates: RunTemplate[]; + disableVersionSelection: boolean; + allowArbitraryQueues: boolean; }) { const environment = useEnvironment(); - const lastSubmission = useActionData(); - const [selectedCodeSampleId, setSelectedCodeSampleId] = useState(runs.at(0)?.id); - const [timestampValue, setTimestampValue] = useState(); - const [lastTimestampValue, setLastTimestampValue] = useState(); - const [externalIdValue, setExternalIdValue] = useState(); - const [timezoneValue, setTimezoneValue] = useState("UTC"); - - //set initial values - useEffect(() => { - const initialRun = runs.find((r) => r.id === selectedCodeSampleId); - if (!initialRun) { - setTimestampValue(new Date()); - return; - } - setTimestampValue(initialRun.payload.timestamp); - setLastTimestampValue(initialRun.payload.lastTimestamp); - setExternalIdValue(initialRun.payload.externalId); - setTimezoneValue(initialRun.payload.timezone); - }, [selectedCodeSampleId]); + const lastRun = runs.at(0); + + const [timestampValue, setTimestampValue] = useState( + lastRun?.payload?.timestamp ?? new Date() + ); + const [lastTimestampValue, setLastTimestampValue] = useState( + lastRun?.payload?.lastTimestamp + ); + const [externalIdValue, setExternalIdValue] = useState( + lastRun?.payload?.externalId + ); + const [timezoneValue, setTimezoneValue] = useState(lastRun?.payload?.timezone ?? "UTC"); + const [ttlValue, setTtlValue] = useState(lastRun?.ttlSeconds); + const [concurrencyKeyValue, setConcurrencyKeyValue] = useState( + lastRun?.concurrencyKey + ); + const [queueValue, setQueueValue] = useState(lastRun?.queue); + const [machineValue, setMachineValue] = useState(lastRun?.machinePreset); + const [maxAttemptsValue, setMaxAttemptsValue] = useState( + lastRun?.maxAttempts + ); + const [maxDurationValue, setMaxDurationValue] = useState( + lastRun?.maxDurationInSeconds + ); + const [tagsValue, setTagsValue] = useState(lastRun?.runTags ?? []); + + const [showTemplateCreatedSuccessMessage, setShowTemplateCreatedSuccessMessage] = useState(false); + + const queueItems = queues.map((q) => ({ + value: q.type === "task" ? `task/${q.name}` : q.name, + label: q.name, + type: q.type, + paused: q.paused, + })); + + const actionData = useActionData(); + const lastSubmission = + actionData && + typeof actionData === "object" && + "formAction" in actionData && + actionData.formAction === ("run-scheduled" satisfies FormAction) + ? actionData + : undefined; const [ form, @@ -396,6 +862,16 @@ function ScheduledTaskForm({ taskIdentifier, environmentId, timezone, + ttlSeconds, + idempotencyKey, + idempotencyKeyTTLSeconds, + queue, + concurrencyKey, + maxAttempts, + maxDurationSeconds, + tags, + version, + machine, }, ] = useForm({ id: "test-task-scheduled", @@ -407,7 +883,7 @@ function ScheduledTaskForm({ }); return ( -
+ - - -
-
- - - - setTimestampValue(val)} - granularity="second" - showNowButton - variant="medium" - utc - /> - - This is the timestamp of the CRON, it will come through to your run in the - payload. - - {timestamp.error} - - - - - setLastTimestampValue(val)} - granularity="second" - showNowButton - showClearButton - variant="medium" - utc - /> - - This is the timestamp of the previous run. You can use this in your code to find - new data since the previous run. This can be undefined if there hasn't been a - previous run. - - {lastTimestamp.error} - - - - - - The Timestamp and Last timestamp are in UTC so this just changes the timezone - string that comes through in the payload. - - {timezone.error} - - - - setExternalIdValue(e.target.value)} - /> - - Optionally, you can specify your own IDs (like a user ID) and then use it inside - the run function of your task. This allows you to have per-user CRON tasks.{" "} - Read the docs. - - {externalId.error} - -
-
-
- - - +
+ + + {task.taskIdentifier} + +
+
+ { + setTtlValue(template.ttlSeconds ?? 0); + setConcurrencyKeyValue(template.concurrencyKey ?? ""); + setMaxAttemptsValue(template.maxAttempts ?? undefined); + setMaxDurationValue(template.maxDurationSeconds ?? 0); + setMachineValue(template.machinePreset ?? undefined); + setTagsValue(template.tags ?? []); + setQueueValue(template.queue ?? undefined); + + setTimestampValue(template.scheduledTaskPayload?.timestamp); + setLastTimestampValue(template.scheduledTaskPayload?.lastTimestamp); + setExternalIdValue(template.scheduledTaskPayload?.externalId); + setTimezoneValue(template.scheduledTaskPayload?.timezone ?? "UTC"); + }} + showTemplateCreatedSuccessMessage={showTemplateCreatedSuccessMessage} + /> + { - const run = runs.find((r) => r.id === id); - if (!run) return; - setSelectedCodeSampleId(id); + onRunSelected={(run) => { setTimestampValue(run.payload.timestamp); setLastTimestampValue(run.payload.lastTimestamp); setExternalIdValue(run.payload.externalId); + setTimezoneValue(run.payload.timezone); + setTtlValue(run.ttlSeconds); + setConcurrencyKeyValue(run.concurrencyKey); + setMaxAttemptsValue(run.maxAttempts); + setMaxDurationValue(run.maxDurationInSeconds); + setTagsValue(run.runTags ?? []); + setQueueValue(run.queue); + setMachineValue(run.machinePreset ?? undefined); }} /> - - -
-
- - This test will run in - -
- +
+
+
+ + + + setTimestampValue(val)} + granularity="second" + showNowButton + variant="small" + utc + /> + + This is the timestamp of the CRON, it will come through to your run in the payload. + + {timestamp.error} + + + + + setLastTimestampValue(val)} + granularity="second" + showNowButton + showClearButton + variant="small" + utc + /> + + This is the timestamp of the previous run. You can use this in your code to find new + data since the previous run. + + {lastTimestamp.error} + + + + + + The Timestamp and Last timestamp are in UTC so this just changes the timezone string + that comes through in the payload. + + {timezone.error} + + + + setExternalIdValue(e.target.value)} + variant="small" + /> + + Optionally, you can specify your own IDs (like a user ID) and then use it inside the + run function of your task.{" "} + Read the docs. + + {externalId.error} + +
+ + Options enable you to control the execution behavior of your task.{" "} + Read the docs. + + + + + Overrides the machine preset. + {machine.error} + + + + + {disableVersionSelection ? ( + Only the latest version is available in the development environment. + ) : ( + Runs task on a specific version. + )} + {version.error} + + + + {allowArbitraryQueues ? ( + setQueueValue(e.target.value)} + /> + ) : ( + + )} + Assign run to a specific queue. + {queue.error} + + + + + Add tags to easily filter runs. + {tags.error} + + + + + setMaxAttemptsValue(e.target.value ? parseInt(e.target.value) : undefined) + } + onKeyDown={(e) => { + // only allow entering integers > 1 + if (["-", "+", ".", "e", "E"].includes(e.key)) { + e.preventDefault(); + } + }} + onBlur={(e) => { + const value = parseInt(e.target.value); + if (value < 1 && e.target.value !== "") { + e.target.value = "1"; + } + }} + /> + Retries failed runs up to the specified number of attempts. + {maxAttempts.error} + + + + + Overrides the maximum compute time limit for the run. + {maxDurationSeconds.error} + + + + + {idempotencyKey.error} + + Specify an idempotency key to ensure that a task is only triggered once with the same + key. + + + + + + Keys expire after 30 days by default. + + {idempotencyKeyTTLSeconds.error} + + + + + setConcurrencyKeyValue(e.target.value)} + /> + Limits concurrency by creating a separate queue for each value of the key. + {concurrencyKey.error} + + + + + Expires the run if it hasn't started within the TTL. + {ttlSeconds.error} + +
+
+
+
+
+ + This test will run in + + +
+ ""} + getCurrentMetadata={() => ""} + setShowCreatedSuccessMessage={setShowTemplateCreatedSuccessMessage} + /> + +
); } -function RecentPayloads({ +function RecentRunsPopover({ runs, - selectedId, - onSelected, + onRunSelected, +}: { + runs: T[]; + onRunSelected: (run: T) => void; +}) { + const [isRecentRunsPopoverOpen, setIsRecentRunsPopoverOpen] = useState(false); + + return ( + + + + + +
+
+ {runs.map((run) => ( + + ))} +
+
+
+
+ ); +} + +function RunTemplatesPopover({ + templates, + onTemplateSelected, + showTemplateCreatedSuccessMessage, }: { - runs: { - id: string; - createdAt: Date; - number: number; - status: TaskRunStatus; - }[]; - selectedId?: string; - onSelected: (id: string) => void; + templates: RunTemplate[]; + onTemplateSelected: (run: RunTemplate) => void; + showTemplateCreatedSuccessMessage: boolean; }) { + const [isPopoverOpen, setIsPopoverOpen] = useState(false); + const [isDeleteDialogOpen, setIsDeleteDialogOpen] = useState(false); + const [templateIdToDelete, setTemplateIdToDelete] = useState(); + + const actionData = useActionData(); + const lastSubmission = + actionData && + typeof actionData === "object" && + "formAction" in actionData && + actionData.formAction === ("delete-template" satisfies FormAction) + ? actionData + : undefined; + + useEffect(() => { + if (lastSubmission && "success" in lastSubmission && lastSubmission.success === true) { + setIsDeleteDialogOpen(false); + } + }, [lastSubmission]); + + const [deleteForm, { templateId }] = useForm({ + id: "delete-template", + onValidate({ formData }) { + return parse(formData, { schema: DeleteTaskRunTemplateData }); + }, + }); + return ( -
-
- Recent payloads -
- {runs.length === 0 ? ( - - Recent payloads will show here once you've completed a Run. - - ) : ( -
- {runs.map((run) => ( - + + +
+
+ {templates.map((template) => ( +
+ +
+ ))} +
+
+
+ + + + {showTemplateCreatedSuccessMessage && ( + + Template saved + successfully + + )} + + + + + Delete template + + Are you sure you want to delete the template? This can't be reversed. + +
+ - ))} -
- )} + Cancel + +
+ + +
+
+ +
); } + +function CreateTemplateModal({ + rawTestTaskFormData, + getCurrentPayload, + getCurrentMetadata, + setShowCreatedSuccessMessage, +}: { + rawTestTaskFormData: { + environmentId: string; + taskIdentifier: string; + triggerSource: string; + delaySeconds?: string; + ttlSeconds?: string; + queue?: string; + concurrencyKey?: string; + maxAttempts?: string; + maxDurationSeconds?: string; + tags?: string; + machine?: string; + externalId?: string; + timestamp?: string; + timezone?: string; + lastTimestamp?: string; + }; + getCurrentPayload: () => string; + getCurrentMetadata: () => string; + setShowCreatedSuccessMessage: (value: boolean) => void; +}) { + const submit = useSubmit(); + const [isModalOpen, setIsModalOpen] = useState(false); + + const actionData = useActionData(); + const lastSubmission = + actionData && + typeof actionData === "object" && + "formAction" in actionData && + actionData.formAction === ("create-template" satisfies FormAction) + ? actionData + : undefined; + + useEffect(() => { + if (lastSubmission && "success" in lastSubmission && lastSubmission.success === true) { + setIsModalOpen(false); + setShowCreatedSuccessMessage(true); + setTimeout(() => { + setShowCreatedSuccessMessage(false); + }, 2000); + } + }, [lastSubmission]); + + const [ + form, + { + label, + environmentId, + payload, + metadata, + taskIdentifier, + delaySeconds, + ttlSeconds, + queue, + concurrencyKey, + maxAttempts, + maxDurationSeconds, + triggerSource, + tags, + machine, + externalId, + timestamp, + lastTimestamp, + timezone, + }, + ] = useForm({ + id: "save-template", + lastSubmission: lastSubmission as any, + onSubmit(event, { formData }) { + event.preventDefault(); + + formData.set(payload.name, getCurrentPayload()); + formData.set(metadata.name, getCurrentMetadata()); + + submit(formData, { method: "POST" }); + }, + onValidate({ formData }) { + return parse(formData, { schema: RunTemplateData }); + }, + shouldRevalidate: "onInput", + }); + + return ( + + + + } + cancelButton={ + + + + } + /> + + +
+ + + ); +} diff --git a/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.$projectParam.env.$envParam.test/route.tsx b/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.$projectParam.env.$envParam.test/route.tsx index 4d33289493..9bd9443e95 100644 --- a/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.$projectParam.env.$envParam.test/route.tsx +++ b/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.$projectParam.env.$envParam.test/route.tsx @@ -138,7 +138,7 @@ function TaskSelector({
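// A minimal sketch (not part of this diff) of the multi-form action pattern the test route
// above uses: one Remix action serves several forms, discriminated by a hidden "formAction"
// field and validated per branch with Conform's parse(). The schema names and service calls
// below are placeholders, not identifiers from this codebase.
import { json, type ActionFunctionArgs } from "@remix-run/server-runtime";
import { parse } from "@conform-to/zod";
import { z } from "zod";

// Placeholder schemas standing in for the route's CreateTaskRunTemplateData / TestTaskData.
const CreateExampleSchema = z.object({ label: z.string().min(1) });
const RunExampleSchema = z.object({ payload: z.string() });

export async function action({ request }: ActionFunctionArgs) {
  const formData = await request.formData();
  // The submitting form sets this hidden field, so one route action can handle many forms.
  const formAction = formData.get("formAction");

  switch (formAction) {
    case "create-template": {
      const submission = parse(formData, { schema: CreateExampleSchema });
      if (!submission.value) {
        // Return the failed submission so Conform can render field errors on the client.
        return json({ ...submission, formAction });
      }
      // ...call the create service here, then echo formAction so useActionData consumers
      // can match this result to the form that produced it...
      return json({ ...submission, success: true, formAction });
    }
    case "run-standard": {
      const submission = parse(formData, { schema: RunExampleSchema });
      if (!submission.value) {
        return json({ ...submission, formAction });
      }
      // ...trigger the test run here...
      return json({ ...submission, success: true, formAction });
    }
    default: {
      return json({ error: "Unknown formAction" }, { status: 400 });
    }
  }
}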
diff --git a/apps/webapp/app/routes/_app.orgs.$organizationSlug.settings.billing-alerts/route.tsx b/apps/webapp/app/routes/_app.orgs.$organizationSlug.settings.billing-alerts/route.tsx new file mode 100644 index 0000000000..eb0a8138b0 --- /dev/null +++ b/apps/webapp/app/routes/_app.orgs.$organizationSlug.settings.billing-alerts/route.tsx @@ -0,0 +1,312 @@ +import { conform, list, requestIntent, useFieldList, useForm } from "@conform-to/react"; +import { parse } from "@conform-to/zod"; +import { Form, useActionData, type MetaFunction } from "@remix-run/react"; +import { json, type ActionFunction, type LoaderFunctionArgs } from "@remix-run/server-runtime"; +import { tryCatch } from "@trigger.dev/core"; +import { Fragment, useEffect, useRef, useState } from "react"; +import { redirect, typedjson, useTypedLoaderData } from "remix-typedjson"; +import { z } from "zod"; +import { AdminDebugTooltip } from "~/components/admin/debugTooltip"; +import { + MainHorizontallyCenteredContainer, + PageBody, + PageContainer, +} from "~/components/layout/AppLayout"; +import { Button } from "~/components/primitives/Buttons"; +import { CheckboxWithLabel } from "~/components/primitives/Checkbox"; +import { Fieldset } from "~/components/primitives/Fieldset"; +import { FormButtons } from "~/components/primitives/FormButtons"; +import { FormError } from "~/components/primitives/FormError"; +import { Header2 } from "~/components/primitives/Headers"; +import { Input } from "~/components/primitives/Input"; +import { InputGroup } from "~/components/primitives/InputGroup"; +import { Label } from "~/components/primitives/Label"; +import { NavBar, PageAccessories, PageTitle } from "~/components/primitives/PageHeader"; +import { Paragraph } from "~/components/primitives/Paragraph"; +import { TextLink } from "~/components/primitives/TextLink"; +import { prisma } from "~/db.server"; +import { featuresForRequest } from "~/features.server"; +import { redirectWithErrorMessage, redirectWithSuccessMessage } from "~/models/message.server"; +import { getBillingAlerts, setBillingAlert } from "~/services/platform.v3.server"; +import { requireUserId } from "~/services/session.server"; +import { formatCurrency } from "~/utils/numberFormatter"; +import { + docsPath, + OrganizationParamsSchema, + organizationPath, + v3BillingAlertsPath, +} from "~/utils/pathBuilder"; +import { useCurrentPlan } from "../_app.orgs.$organizationSlug/route"; + +export const meta: MetaFunction = () => { + return [ + { + title: `Billing alerts | Trigger.dev`, + }, + ]; +}; + +export async function loader({ params, request }: LoaderFunctionArgs) { + const userId = await requireUserId(request); + const { organizationSlug } = OrganizationParamsSchema.parse(params); + + const { isManagedCloud } = featuresForRequest(request); + if (!isManagedCloud) { + return redirect(organizationPath({ slug: organizationSlug })); + } + + const organization = await prisma.organization.findFirst({ + where: { slug: organizationSlug, members: { some: { userId } } }, + }); + + if (!organization) { + throw new Response(null, { status: 404, statusText: "Organization not found" }); + } + + const [error, alerts] = await tryCatch(getBillingAlerts(organization.id)); + if (error) { + throw new Response(null, { status: 404, statusText: `Billing alerts error: ${error}` }); + } + + if (!alerts) { + throw new Response(null, { status: 404, statusText: "Billing alerts not found" }); + } + + return typedjson({ + alerts: { + ...alerts, + amount: alerts.amount / 100, + }, + }); +} + +const schema = 
z.object({ + amount: z + .number({ invalid_type_error: "Not a valid amount" }) + .min(0, "Amount must be greater than 0"), + emails: z.preprocess((i) => { + if (typeof i === "string") return [i]; + + if (Array.isArray(i)) { + const emails = i.filter((v) => typeof v === "string" && v !== ""); + if (emails.length === 0) { + return [""]; + } + return emails; + } + + return [""]; + }, z.string().email().array().nonempty("At least one email is required")), + alertLevels: z.preprocess((i) => { + if (typeof i === "string") return [i]; + return i; + }, z.coerce.number().array().nonempty("At least one alert level is required")), +}); + +export const action: ActionFunction = async ({ request, params }) => { + const userId = await requireUserId(request); + const { organizationSlug } = OrganizationParamsSchema.parse(params); + + const formData = await request.formData(); + const submission = parse(formData, { schema }); + + if (!submission.value || submission.intent !== "submit") { + return json(submission); + } + + try { + const organization = await prisma.organization.findFirst({ + where: { slug: organizationSlug, members: { some: { userId } } }, + }); + + if (!organization) { + return redirectWithErrorMessage( + v3BillingAlertsPath({ slug: organizationSlug }), + request, + "You are not authorized to update billing alerts" + ); + } + + const [error, updatedAlert] = await tryCatch( + setBillingAlert(organization.id, { + ...submission.value, + amount: submission.value.amount * 100, + }) + ); + if (error) { + return redirectWithErrorMessage( + v3BillingAlertsPath({ slug: organizationSlug }), + request, + "Failed to update billing alert" + ); + } + + if (!updatedAlert) { + return redirectWithErrorMessage( + v3BillingAlertsPath({ slug: organizationSlug }), + request, + "Failed to update billing alert" + ); + } + + return redirectWithSuccessMessage( + v3BillingAlertsPath({ slug: organizationSlug }), + request, + "Billing alert updated" + ); + } catch (error: any) { + return json({ errors: { body: error.message } }, { status: 400 }); + } +}; + +export default function Page() { + const { alerts } = useTypedLoaderData(); + const plan = useCurrentPlan(); + const [dollarAmount, setDollarAmount] = useState(alerts.amount.toFixed(2)); + + const lastSubmission = useActionData(); + + const [form, { emails, amount, alertLevels }] = useForm({ + id: "invite-members", + // TODO: type this + lastSubmission: lastSubmission as any, + onValidate({ formData }) { + return parse(formData, { schema }); + }, + defaultValue: { + emails: [""], + }, + }); + + const fieldValues = useRef(alerts.emails); + const emailFields = useFieldList(form.ref, { ...emails, defaultValue: alerts.emails }); + + const checkboxLevels = [0.75, 0.9, 1.0, 2.0, 5.0]; + + useEffect(() => { + if (alerts.emails.length > 0) { + requestIntent(form.ref.current ?? undefined, list.append(emails.name)); + } + }, [emails.name, form.ref]); + const isFree = !plan?.v3Subscription?.isPaying; + + return ( + + + + + + + + + +
+
+ Billing alerts + + Receive an email when your compute spend crosses different thresholds. You can also + learn how to{" "} + + reduce your compute spend + + . + +
+
+
+ + + {isFree ? ( + <> + + ${dollarAmount} + + + + ) : ( + { + const numberValue = Number(e.target.value); + if (numberValue < 0) { + setDollarAmount(""); + return; + } + setDollarAmount(e.target.value); + }} + step={0.01} + min={0} + placeholder="Enter an amount" + icon={ + $ + } + className="pl-px" + fullWidth + readOnly={isFree} + /> + )} + {amount.error} + + + + {checkboxLevels.map((level) => ( + + {level * 100}%{" "} + + ({formatCurrency(Number(dollarAmount) * level, false)}) + + + } + defaultChecked={alerts.alertLevels.includes(level)} + className="pr-0" + readOnly={level === 1.0} + /> + ))} + {alertLevels.error} + + + + {emailFields.map((email, index) => ( + + { + fieldValues.current[index] = e.target.value; + if ( + emailFields.length === fieldValues.current.length && + fieldValues.current.every((v) => v !== "") + ) { + requestIntent(form.ref.current ?? undefined, list.append(emails.name)); + } + }} + fullWidth + /> + {email.error} + + ))} + + + Update + + } + /> +
+
+
+
+
+
+ ); +} diff --git a/apps/webapp/app/routes/_app.orgs.$organizationSlug.settings.billing/route.tsx b/apps/webapp/app/routes/_app.orgs.$organizationSlug.settings.billing/route.tsx index f42c77ad50..1e579908a9 100644 --- a/apps/webapp/app/routes/_app.orgs.$organizationSlug.settings.billing/route.tsx +++ b/apps/webapp/app/routes/_app.orgs.$organizationSlug.settings.billing/route.tsx @@ -1,6 +1,6 @@ import { CalendarDaysIcon, StarIcon } from "@heroicons/react/20/solid"; import { type LoaderFunctionArgs } from "@remix-run/server-runtime"; -import { type PlanDefinition } from "@trigger.dev/platform/v3"; +import { type PlanDefinition } from "@trigger.dev/platform"; import { redirect, typedjson, useTypedLoaderData } from "remix-typedjson"; import { PageBody, PageContainer } from "~/components/layout/AppLayout"; import { LinkButton } from "~/components/primitives/Buttons"; @@ -28,7 +28,7 @@ export const meta: MetaFunction = () => { }; export async function loader({ params, request }: LoaderFunctionArgs) { - await requireUserId(request); + const userId = await requireUserId(request); const { organizationSlug } = OrganizationParamsSchema.parse(params); const { isManagedCloud } = featuresForRequest(request); @@ -41,8 +41,8 @@ export async function loader({ params, request }: LoaderFunctionArgs) { throw new Response(null, { status: 404, statusText: "Plans not found" }); } - const organization = await prisma.organization.findUnique({ - where: { slug: organizationSlug }, + const organization = await prisma.organization.findFirst({ + where: { slug: organizationSlug, members: { some: { userId } } }, }); if (!organization) { @@ -83,6 +83,7 @@ export async function loader({ params, request }: LoaderFunctionArgs) { export default function ChoosePlanPage() { const { plans, + addOnPricing, v3Subscription, organizationSlug, periodStart, @@ -141,6 +142,7 @@ export default function ChoosePlanPage() {
{ }; export async function loader({ params, request }: LoaderFunctionArgs) { - await requireUserId(request); + const userId = await requireUserId(request); const { organizationSlug } = OrganizationParamsSchema.parse(params); const { isManagedCloud } = featuresForRequest(request); @@ -54,8 +54,8 @@ export async function loader({ params, request }: LoaderFunctionArgs) { return redirect(organizationPath({ slug: organizationSlug })); } - const organization = await prisma.organization.findUnique({ - where: { slug: organizationSlug }, + const organization = await prisma.organization.findFirst({ + where: { slug: organizationSlug, members: { some: { userId } } }, }); if (!organization) { diff --git a/apps/webapp/app/routes/_app.orgs.$organizationSlug_.projects.new/route.tsx b/apps/webapp/app/routes/_app.orgs.$organizationSlug_.projects.new/route.tsx index 3cd51e2641..68c3306e28 100644 --- a/apps/webapp/app/routes/_app.orgs.$organizationSlug_.projects.new/route.tsx +++ b/apps/webapp/app/routes/_app.orgs.$organizationSlug_.projects.new/route.tsx @@ -7,7 +7,9 @@ import { Form, useActionData, useNavigation } from "@remix-run/react"; import { redirect, typedjson, useTypedLoaderData } from "remix-typedjson"; import invariant from "tiny-invariant"; import { z } from "zod"; -import { MainCenteredContainer } from "~/components/layout/AppLayout"; +import { BackgroundWrapper } from "~/components/BackgroundWrapper"; +import { Feedback } from "~/components/Feedback"; +import { AppContainer, MainCenteredContainer } from "~/components/layout/AppLayout"; import { Button, LinkButton } from "~/components/primitives/Buttons"; import { Callout } from "~/components/primitives/Callout"; import { Fieldset } from "~/components/primitives/Fieldset"; @@ -20,15 +22,15 @@ import { Label } from "~/components/primitives/Label"; import { ButtonSpinner } from "~/components/primitives/Spinner"; import { prisma } from "~/db.server"; import { featuresForRequest } from "~/features.server"; -import { useFeatures } from "~/hooks/useFeatures"; -import { redirectWithSuccessMessage } from "~/models/message.server"; -import { createProject } from "~/models/project.server"; +import { redirectWithErrorMessage, redirectWithSuccessMessage } from "~/models/message.server"; +import { createProject, ExceededProjectLimitError } from "~/models/project.server"; import { requireUserId } from "~/services/session.server"; import { + newProjectPath, OrganizationParamsSchema, organizationPath, - v3ProjectPath, selectPlanPath, + v3ProjectPath, } from "~/utils/pathBuilder"; export async function loader({ params, request }: LoaderFunctionArgs) { @@ -114,8 +116,29 @@ export const action: ActionFunction = async ({ request, params }) => { request, `${submission.value.projectName} created` ); - } catch (error: any) { - return json({ errors: { body: error.message } }, { status: 400 }); + } catch (error) { + if (error instanceof ExceededProjectLimitError) { + return redirectWithErrorMessage( + newProjectPath({ slug: organizationSlug }), + request, + error.message, + { + title: "Failed to create project", + action: { + label: "Request more projects", + variant: "secondary/small", + action: { type: "help", feedbackType: "help" }, + }, + } + ); + } + + return redirectWithErrorMessage( + newProjectPath({ slug: organizationSlug }), + request, + error instanceof Error ? 
error.message : "Something went wrong", + { ephemeral: false } + ); } }; @@ -138,57 +161,62 @@ export default function Page() { const isLoading = navigation.state === "submitting" || navigation.state === "loading"; return ( - -
- } - title="Create a new project" - description={`This will create a new project in your "${organization.title}" organization.`} - /> -
- {message && ( - - {message} - - )} -
- - - - {projectName.error} - - {canCreateV3Projects ? ( - - ) : ( - - )} - - {isLoading ? "Creatingโ€ฆ" : "Create"} - - } - cancelButton={ - organization.projectsCount > 0 ? ( - - Cancel - - ) : undefined - } + + + +
+ } + title="Create a new project" + description={`This will create a new project in your "${organization.title}" organization.`} /> -
-
-
-
+
+ {message && ( + + {message} + + )} +
+ + + + {projectName.error} + + {canCreateV3Projects ? ( + + ) : ( + + )} + + {isLoading ? "Creatingโ€ฆ" : "Create"} + + } + cancelButton={ + organization.projectsCount > 0 ? ( + + Cancel + + ) : undefined + } + /> +
+
+
+ } /> + + + ); } diff --git a/apps/webapp/app/routes/_app.orgs.$organizationSlug_.select-plan/route.tsx b/apps/webapp/app/routes/_app.orgs.$organizationSlug_.select-plan/route.tsx index e1561becf4..f5402559bd 100644 --- a/apps/webapp/app/routes/_app.orgs.$organizationSlug_.select-plan/route.tsx +++ b/apps/webapp/app/routes/_app.orgs.$organizationSlug_.select-plan/route.tsx @@ -1,6 +1,7 @@ -import { LoaderFunctionArgs } from "@remix-run/server-runtime"; +import { type LoaderFunctionArgs } from "@remix-run/server-runtime"; import { redirect, typedjson, useTypedLoaderData } from "remix-typedjson"; -import { MainCenteredContainer } from "~/components/layout/AppLayout"; +import { BackgroundWrapper } from "~/components/BackgroundWrapper"; +import { AppContainer, MainBody, PageBody } from "~/components/layout/AppLayout"; import { Header1 } from "~/components/primitives/Headers"; import { prisma } from "~/db.server"; import { featuresForRequest } from "~/features.server"; @@ -44,20 +45,29 @@ export async function loader({ params, request }: LoaderFunctionArgs) { } export default function ChoosePlanPage() { - const { plans, v3Subscription, organizationSlug, periodEnd } = + const { plans, v3Subscription, organizationSlug, periodEnd, addOnPricing } = useTypedLoaderData(); return ( - - Subscribe for full access - - + + + +
+ Subscribe for full access +
+ +
+
+
+
+
); } diff --git a/apps/webapp/app/routes/_app.orgs.new/route.tsx b/apps/webapp/app/routes/_app.orgs.new/route.tsx index a171153510..a677782eae 100644 --- a/apps/webapp/app/routes/_app.orgs.new/route.tsx +++ b/apps/webapp/app/routes/_app.orgs.new/route.tsx @@ -7,7 +7,8 @@ import { json, redirect } from "@remix-run/node"; import { Form, useActionData, useNavigation } from "@remix-run/react"; import { typedjson, useTypedLoaderData } from "remix-typedjson"; import { z } from "zod"; -import { MainCenteredContainer } from "~/components/layout/AppLayout"; +import { BackgroundWrapper } from "~/components/BackgroundWrapper"; +import { AppContainer, MainCenteredContainer } from "~/components/layout/AppLayout"; import { Button, LinkButton } from "~/components/primitives/Buttons"; import { Fieldset } from "~/components/primitives/Fieldset"; import { FormButtons } from "~/components/primitives/FormButtons"; @@ -94,85 +95,92 @@ export default function NewOrganizationPage() { const isLoading = navigation.state === "submitting" || navigation.state === "loading"; return ( - - } - title="Create an Organization" - /> -
-
- - - - E.g. your company name or your workspace name. - {orgName.error} - - {isManagedCloud && ( - <> - - - - - - - - - + + + + } + title="Create an Organization" + /> + +
- -