diff --git a/astro.config.mjs b/astro.config.mjs
index 8365c6c..d502a22 100644
--- a/astro.config.mjs
+++ b/astro.config.mjs
@@ -4,11 +4,88 @@ import mdx from '@astrojs/mdx';
import sitemap from '@astrojs/sitemap';
import tailwindcss from "@tailwindcss/vite";
import netlify from '@astrojs/netlify';
+import starlight from '@astrojs/starlight';
// https://astro.build/config
export default defineConfig({
site: 'https://serverlessworkflow.io/',
- integrations: [mdx(), sitemap()],
+ integrations: [
+ sitemap(),
+ starlight({
+ title: 'Serverless Workflow Docs',
+ logo: {
+ src: './public/icons/logo.svg'
+ },
+ favicon: '/icons/favicon-32x32.png',
+ customCss: [
+ './src/styles/docs.css',
+ ],
+ social: [
+ { icon: 'github', label: 'GitHub', href: 'https://github.com/serverlessworkflow/specification' },
+ ],
+ editLink: {
+ baseUrl: 'https://github.com/serverlessworkflow/serverlessworkflow.github.io/edit/main/',
+ },
+ lastUpdated: true,
+ sidebar: [
+ { label: 'Introduction', slug: 'docs' },
+ { label: 'Workflow Definition Examples', slug: 'docs/workflow-definition-examples' },
+ {
+ label: 'Core Concepts',
+ autogenerate: { directory: 'docs/core-concepts' },
+ /* Or list items manually -- probably preferable, since it makes it easy to insert items or change their order:
+ items: [
+ // Each item here is one entry in the navigation menu.
+ { label: 'Example Guide', slug: 'docs/guides/example' },
+ ],
+ */
+ },
+ {
+ label: 'Control Flow',
+ autogenerate: { directory: 'docs/control-flow' },
+ },
+ { label: 'Wait', slug: 'docs/wait' },
+ { label: 'Set', slug: 'docs/set' },
+ {
+ label: 'Error Handling',
+ autogenerate: { directory: 'docs/error-handling' },
+ },
+ {
+ label: 'Call Tasks',
+ autogenerate: { directory: 'docs/call-tasks' },
+ },
+ {
+ label: 'Run Tasks',
+ autogenerate: { directory: 'docs/run-tasks' },
+ },
+ {
+ label: 'Event Tasks',
+ autogenerate: { directory: 'docs/event-tasks' },
+ },
+ {
+ label: 'Resources & Configuration',
+ autogenerate: { directory: 'docs/resources-configuration' },
+ },
+ {
+ label: 'Reference',
+ autogenerate: { directory: 'docs/reference' },
+ },
+ ],
+ components: {
+ ThemeSelect: './src/overrides/ThemeSelect.astro'
+ },
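+ // Loads the version selector web component (added below under public/webcomponents/) on every page.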
+ head: [
+ {
+ tag: 'script',
+ attrs: {
+ src: '/webcomponents/version-select/version-select.js',
+ defer: true
+ }
+ }
+ ]
+ }),
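+ // Note: the Starlight docs recommend listing mdx() after starlight(), since Starlight ships its own MDX handling for doc pages.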
+ mdx(),
+ ],
markdown: {
syntaxHighlight: 'shiki',
shikiConfig: {
diff --git a/package-lock.json b/package-lock.json
index 80a8206..8c4bb6b 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -8,12 +8,14 @@
"name": "website",
"version": "0.0.1",
"dependencies": {
- "@astrojs/mdx": "^4.2.2",
- "@astrojs/netlify": "^6.2.4",
+ "@astrojs/mdx": "^4.2.6",
+ "@astrojs/netlify": "^6.3.2",
"@astrojs/rss": "^4.0.11",
- "@astrojs/sitemap": "^3.3.0",
+ "@astrojs/sitemap": "^3.3.1",
+ "@astrojs/starlight": "^0.34.2",
+ "@astrojs/starlight-tailwind": "^4.0.1",
"@tailwindcss/vite": "^4.0.13",
- "astro": "^5.5.5",
+ "astro": "^5.7.10",
"tailwindcss": "^4.0.13"
},
"devDependencies": {
@@ -62,9 +64,10 @@
}
},
"node_modules/@astrojs/mdx": {
- "version": "4.2.2",
- "resolved": "https://registry.npmjs.org/@astrojs/mdx/-/mdx-4.2.2.tgz",
- "integrity": "sha512-nWDvuCPenxoxbog3YK3yVWF3Jw7Lq1+ziWSAOc9fy6zAUbPDSr2bt3c6r6+oa1ll0miCQByj5UVts6eJvN/y+g==",
+ "version": "4.2.6",
+ "resolved": "https://registry.npmjs.org/@astrojs/mdx/-/mdx-4.2.6.tgz",
+ "integrity": "sha512-0i/GmOm6d0qq1/SCfcUgY/IjDc/bS0i42u7h85TkPFBmlFOcBZfkYhR5iyz6hZLwidvJOEq5yGfzt9B1Azku4w==",
+ "license": "MIT",
"dependencies": {
"@astrojs/markdown-remark": "6.3.1",
"@mdx-js/mdx": "^3.1.0",
@@ -88,18 +91,19 @@
}
},
"node_modules/@astrojs/netlify": {
- "version": "6.2.4",
- "resolved": "https://registry.npmjs.org/@astrojs/netlify/-/netlify-6.2.4.tgz",
- "integrity": "sha512-Pk1ZRDgH4cmrIqIuYQn1DJjNKRLNjybHoYWMoZnE+zP2NLQFirR15FQFKGbg8S3+zWQ1EptOBAb/ETnXpbwepA==",
+ "version": "6.3.2",
+ "resolved": "https://registry.npmjs.org/@astrojs/netlify/-/netlify-6.3.2.tgz",
+ "integrity": "sha512-LmYZr3LMZjl4xgXbUfR45TmDsqiQiifN7+BRFCYPhEguPQsnFwLmyB/LY6Gj/RfKF97og35ZqQDyaor5tcBHrg==",
+ "license": "MIT",
"dependencies": {
"@astrojs/internal-helpers": "0.6.1",
- "@astrojs/underscore-redirects": "0.6.0",
+ "@astrojs/underscore-redirects": "0.6.1",
"@netlify/blobs": "^8.1.1",
"@netlify/functions": "^3.0.1",
"@vercel/nft": "^0.29.2",
"esbuild": "^0.25.0",
"tinyglobby": "^0.2.12",
- "vite": "^6.2.3"
+ "vite": "^6.3.4"
},
"peerDependencies": {
"astro": "^5.3.0"
@@ -127,23 +131,72 @@
}
},
"node_modules/@astrojs/sitemap": {
- "version": "3.3.0",
- "resolved": "https://registry.npmjs.org/@astrojs/sitemap/-/sitemap-3.3.0.tgz",
- "integrity": "sha512-nYE4lKQtk+Kbrw/w0G0TTgT724co0jUsU4tPlHY9au5HmTBKbwiCLwO/15b1/y13aZ4Kr9ZbMeMHlXuwn0ty4Q==",
+ "version": "3.3.1",
+ "resolved": "https://registry.npmjs.org/@astrojs/sitemap/-/sitemap-3.3.1.tgz",
+ "integrity": "sha512-GRnDUCTviBSNfXJ0Jmur+1/C+z3g36jy79VyYggfe1uNyEYSTcmAfTTCmbytrRvJRNyJJnSfB/77Gnm9PiXRRg==",
+ "license": "MIT",
"dependencies": {
"sitemap": "^8.0.0",
"stream-replace-string": "^2.0.0",
"zod": "^3.24.2"
}
},
+ "node_modules/@astrojs/starlight": {
+ "version": "0.34.2",
+ "resolved": "https://registry.npmjs.org/@astrojs/starlight/-/starlight-0.34.2.tgz",
+ "integrity": "sha512-/XwBXQ4ZyQBEETZIPw2pa+UsKawkpRVO7CF6QVpOmteGfUxCrsD3n26G0OsnWHh0Gfo67QkAqM0FsGmDuNUVHw==",
+ "license": "MIT",
+ "dependencies": {
+ "@astrojs/markdown-remark": "^6.3.1",
+ "@astrojs/mdx": "^4.2.3",
+ "@astrojs/sitemap": "^3.3.0",
+ "@pagefind/default-ui": "^1.3.0",
+ "@types/hast": "^3.0.4",
+ "@types/js-yaml": "^4.0.9",
+ "@types/mdast": "^4.0.4",
+ "astro-expressive-code": "^0.41.1",
+ "bcp-47": "^2.1.0",
+ "hast-util-from-html": "^2.0.1",
+ "hast-util-select": "^6.0.2",
+ "hast-util-to-string": "^3.0.0",
+ "hastscript": "^9.0.0",
+ "i18next": "^23.11.5",
+ "js-yaml": "^4.1.0",
+ "klona": "^2.0.6",
+ "mdast-util-directive": "^3.0.0",
+ "mdast-util-to-markdown": "^2.1.0",
+ "mdast-util-to-string": "^4.0.0",
+ "pagefind": "^1.3.0",
+ "rehype": "^13.0.1",
+ "rehype-format": "^5.0.0",
+ "remark-directive": "^3.0.0",
+ "ultrahtml": "^1.6.0",
+ "unified": "^11.0.5",
+ "unist-util-visit": "^5.0.0",
+ "vfile": "^6.0.2"
+ },
+ "peerDependencies": {
+ "astro": "^5.5.0"
+ }
+ },
+ "node_modules/@astrojs/starlight-tailwind": {
+ "version": "4.0.1",
+ "resolved": "https://registry.npmjs.org/@astrojs/starlight-tailwind/-/starlight-tailwind-4.0.1.tgz",
+ "integrity": "sha512-AOOEWTGqJ7fG66U04xTmZQZ40oZnUYe4Qljpr+No88ozKywtsD1DiXOrGTeHCnZu0hRtMbRtBGB1fZsf0L62iw==",
+ "license": "MIT",
+ "peerDependencies": {
+ "@astrojs/starlight": ">=0.34.0",
+ "tailwindcss": "^4.0.0"
+ }
+ },
"node_modules/@astrojs/telemetry": {
- "version": "3.2.0",
- "resolved": "https://registry.npmjs.org/@astrojs/telemetry/-/telemetry-3.2.0.tgz",
- "integrity": "sha512-wxhSKRfKugLwLlr4OFfcqovk+LIFtKwLyGPqMsv+9/ibqqnW3Gv7tBhtKEb0gAyUAC4G9BTVQeQahqnQAhd6IQ==",
+ "version": "3.2.1",
+ "resolved": "https://registry.npmjs.org/@astrojs/telemetry/-/telemetry-3.2.1.tgz",
+ "integrity": "sha512-SSVM820Jqc6wjsn7qYfV9qfeQvePtVc1nSofhyap7l0/iakUKywj3hfy3UJAOV4sGV4Q/u450RD4AaCaFvNPlg==",
"license": "MIT",
"dependencies": {
- "ci-info": "^4.1.0",
- "debug": "^4.3.7",
+ "ci-info": "^4.2.0",
+ "debug": "^4.4.0",
"dlv": "^1.1.3",
"dset": "^3.1.4",
"is-docker": "^3.0.0",
@@ -155,9 +208,9 @@
}
},
"node_modules/@astrojs/underscore-redirects": {
- "version": "0.6.0",
- "resolved": "https://registry.npmjs.org/@astrojs/underscore-redirects/-/underscore-redirects-0.6.0.tgz",
- "integrity": "sha512-dnJgFpaM955IFNIkEEmMaaIdWXRdeZs1ID6mlGBqdjh6NiCXfKmOdq7L4fd9Dd/tr18fkLrOJ25IUJSxRAEhjQ==",
+ "version": "0.6.1",
+ "resolved": "https://registry.npmjs.org/@astrojs/underscore-redirects/-/underscore-redirects-0.6.1.tgz",
+ "integrity": "sha512-4bMLrs2KW+8/vHEE5Ffv2HbxCbbgXO+2N6MpoCsMXUlUoi7pgEEx8kbkzMXJ2dZtWF3gvwm9lvgjnFeanC2LGg==",
"license": "MIT"
},
"node_modules/@babel/helper-string-parser": {
@@ -193,6 +246,17 @@
"node": ">=6.0.0"
}
},
+ "node_modules/@babel/runtime": {
+ "version": "7.27.0",
+ "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.27.0.tgz",
+ "integrity": "sha512-VtPOkrdPHZsKc/clNqyi9WUA8TINkZ4cGk63UUE3u4pmB2k+ZMQRDuIOagv8UVd6j7k0T3+RRIb7beKTebNbcw==",
+ "dependencies": {
+ "regenerator-runtime": "^0.14.0"
+ },
+ "engines": {
+ "node": ">=6.9.0"
+ }
+ },
"node_modules/@babel/types": {
"version": "7.26.10",
"resolved": "https://registry.npmjs.org/@babel/types/-/types-7.26.10.tgz",
@@ -206,6 +270,24 @@
"node": ">=6.9.0"
}
},
+ "node_modules/@capsizecss/unpack": {
+ "version": "2.4.0",
+ "resolved": "https://registry.npmjs.org/@capsizecss/unpack/-/unpack-2.4.0.tgz",
+ "integrity": "sha512-GrSU71meACqcmIUxPYOJvGKF0yryjN/L1aCuE9DViCTJI7bfkjgYDPD1zbNDcINJwSSP6UaBZY9GAbYDO7re0Q==",
+ "dependencies": {
+ "blob-to-buffer": "^1.2.8",
+ "cross-fetch": "^3.0.4",
+ "fontkit": "^2.0.2"
+ }
+ },
+ "node_modules/@ctrl/tinycolor": {
+ "version": "4.1.0",
+ "resolved": "https://registry.npmjs.org/@ctrl/tinycolor/-/tinycolor-4.1.0.tgz",
+ "integrity": "sha512-WyOx8cJQ+FQus4Mm4uPIZA64gbk3Wxh0so5Lcii0aJifqwoVOlfFtorjLE0Hen4OYyHZMXDWqMmaQemBhgxFRQ==",
+ "engines": {
+ "node": ">=14"
+ }
+ },
"node_modules/@emnapi/runtime": {
"version": "1.3.1",
"resolved": "https://registry.npmjs.org/@emnapi/runtime/-/runtime-1.3.1.tgz",
@@ -616,6 +698,47 @@
"node": ">=18"
}
},
+ "node_modules/@expressive-code/core": {
+ "version": "0.41.2",
+ "resolved": "https://registry.npmjs.org/@expressive-code/core/-/core-0.41.2.tgz",
+ "integrity": "sha512-AJW5Tp9czbLqKMzwudL9Rv4js9afXBxkSGLmCNPq1iRgAYcx9NkTPJiSNCesjKRWoVC328AdSu6fqrD22zDgDg==",
+ "dependencies": {
+ "@ctrl/tinycolor": "^4.0.4",
+ "hast-util-select": "^6.0.2",
+ "hast-util-to-html": "^9.0.1",
+ "hast-util-to-text": "^4.0.1",
+ "hastscript": "^9.0.0",
+ "postcss": "^8.4.38",
+ "postcss-nested": "^6.0.1",
+ "unist-util-visit": "^5.0.0",
+ "unist-util-visit-parents": "^6.0.1"
+ }
+ },
+ "node_modules/@expressive-code/plugin-frames": {
+ "version": "0.41.2",
+ "resolved": "https://registry.npmjs.org/@expressive-code/plugin-frames/-/plugin-frames-0.41.2.tgz",
+ "integrity": "sha512-pfy0hkJI4nbaONjmksFDcuHmIuyPTFmi1JpABe4q2ajskiJtfBf+WDAL2pg595R9JNoPrrH5+aT9lbkx2noicw==",
+ "dependencies": {
+ "@expressive-code/core": "^0.41.2"
+ }
+ },
+ "node_modules/@expressive-code/plugin-shiki": {
+ "version": "0.41.2",
+ "resolved": "https://registry.npmjs.org/@expressive-code/plugin-shiki/-/plugin-shiki-0.41.2.tgz",
+ "integrity": "sha512-xD4zwqAkDccXqye+235BH5bN038jYiSMLfUrCOmMlzxPDGWdxJDk5z4uUB/aLfivEF2tXyO2zyaarL3Oqht0fQ==",
+ "dependencies": {
+ "@expressive-code/core": "^0.41.2",
+ "shiki": "^3.2.2"
+ }
+ },
+ "node_modules/@expressive-code/plugin-text-markers": {
+ "version": "0.41.2",
+ "resolved": "https://registry.npmjs.org/@expressive-code/plugin-text-markers/-/plugin-text-markers-0.41.2.tgz",
+ "integrity": "sha512-JFWBz2qYxxJOJkkWf96LpeolbnOqJY95TvwYc0hXIHf9oSWV0h0SY268w/5N3EtQaD9KktzDE+VIVwb9jdb3nw==",
+ "dependencies": {
+ "@expressive-code/core": "^0.41.2"
+ }
+ },
"node_modules/@img/sharp-darwin-arm64": {
"version": "0.33.5",
"resolved": "https://registry.npmjs.org/@img/sharp-darwin-arm64/-/sharp-darwin-arm64-0.33.5.tgz",
@@ -1145,6 +1268,71 @@
"integrity": "sha512-70wQhgYmndg4GCPxPPxPGevRKqTIJ2Nh4OkiMWmDAVYsTQ+Ta7Sq+rPevXyXGdzr30/qZBnyOalCszoMxlyldQ==",
"license": "MIT"
},
+ "node_modules/@pagefind/darwin-arm64": {
+ "version": "1.3.0",
+ "resolved": "https://registry.npmjs.org/@pagefind/darwin-arm64/-/darwin-arm64-1.3.0.tgz",
+ "integrity": "sha512-365BEGl6ChOsauRjyVpBjXybflXAOvoMROw3TucAROHIcdBvXk9/2AmEvGFU0r75+vdQI4LJdJdpH4Y6Yqaj4A==",
+ "cpu": [
+ "arm64"
+ ],
+ "optional": true,
+ "os": [
+ "darwin"
+ ]
+ },
+ "node_modules/@pagefind/darwin-x64": {
+ "version": "1.3.0",
+ "resolved": "https://registry.npmjs.org/@pagefind/darwin-x64/-/darwin-x64-1.3.0.tgz",
+ "integrity": "sha512-zlGHA23uuXmS8z3XxEGmbHpWDxXfPZ47QS06tGUq0HDcZjXjXHeLG+cboOy828QIV5FXsm9MjfkP5e4ZNbOkow==",
+ "cpu": [
+ "x64"
+ ],
+ "optional": true,
+ "os": [
+ "darwin"
+ ]
+ },
+ "node_modules/@pagefind/default-ui": {
+ "version": "1.3.0",
+ "resolved": "https://registry.npmjs.org/@pagefind/default-ui/-/default-ui-1.3.0.tgz",
+ "integrity": "sha512-CGKT9ccd3+oRK6STXGgfH+m0DbOKayX6QGlq38TfE1ZfUcPc5+ulTuzDbZUnMo+bubsEOIypm4Pl2iEyzZ1cNg=="
+ },
+ "node_modules/@pagefind/linux-arm64": {
+ "version": "1.3.0",
+ "resolved": "https://registry.npmjs.org/@pagefind/linux-arm64/-/linux-arm64-1.3.0.tgz",
+ "integrity": "sha512-8lsxNAiBRUk72JvetSBXs4WRpYrQrVJXjlRRnOL6UCdBN9Nlsz0t7hWstRk36+JqHpGWOKYiuHLzGYqYAqoOnQ==",
+ "cpu": [
+ "arm64"
+ ],
+ "optional": true,
+ "os": [
+ "linux"
+ ]
+ },
+ "node_modules/@pagefind/linux-x64": {
+ "version": "1.3.0",
+ "resolved": "https://registry.npmjs.org/@pagefind/linux-x64/-/linux-x64-1.3.0.tgz",
+ "integrity": "sha512-hAvqdPJv7A20Ucb6FQGE6jhjqy+vZ6pf+s2tFMNtMBG+fzcdc91uTw7aP/1Vo5plD0dAOHwdxfkyw0ugal4kcQ==",
+ "cpu": [
+ "x64"
+ ],
+ "optional": true,
+ "os": [
+ "linux"
+ ]
+ },
+ "node_modules/@pagefind/windows-x64": {
+ "version": "1.3.0",
+ "resolved": "https://registry.npmjs.org/@pagefind/windows-x64/-/windows-x64-1.3.0.tgz",
+ "integrity": "sha512-BR1bIRWOMqkf8IoU576YDhij1Wd/Zf2kX/kCI0b2qzCKC8wcc2GQJaaRMCpzvCCrmliO4vtJ6RITp/AnoYUUmQ==",
+ "cpu": [
+ "x64"
+ ],
+ "optional": true,
+ "os": [
+ "win32"
+ ]
+ },
"node_modules/@pkgjs/parseargs": {
"version": "0.11.0",
"resolved": "https://registry.npmjs.org/@pkgjs/parseargs/-/parseargs-0.11.0.tgz",
@@ -1431,55 +1619,55 @@
]
},
"node_modules/@shikijs/core": {
- "version": "3.2.1",
- "resolved": "https://registry.npmjs.org/@shikijs/core/-/core-3.2.1.tgz",
- "integrity": "sha512-FhsdxMWYu/C11sFisEp7FMGBtX/OSSbnXZDMBhGuUDBNTdsoZlMSgQv5f90rwvzWAdWIW6VobD+G3IrazxA6dQ==",
+ "version": "3.3.0",
+ "resolved": "https://registry.npmjs.org/@shikijs/core/-/core-3.3.0.tgz",
+ "integrity": "sha512-CovkFL2WVaHk6PCrwv6ctlmD4SS1qtIfN8yEyDXDYWh4ONvomdM9MaFw20qHuqJOcb8/xrkqoWQRJ//X10phOQ==",
"dependencies": {
- "@shikijs/types": "3.2.1",
+ "@shikijs/types": "3.3.0",
"@shikijs/vscode-textmate": "^10.0.2",
"@types/hast": "^3.0.4",
"hast-util-to-html": "^9.0.5"
}
},
"node_modules/@shikijs/engine-javascript": {
- "version": "3.2.1",
- "resolved": "https://registry.npmjs.org/@shikijs/engine-javascript/-/engine-javascript-3.2.1.tgz",
- "integrity": "sha512-eMdcUzN3FMQYxOmRf2rmU8frikzoSHbQDFH2hIuXsrMO+IBOCI9BeeRkCiBkcLDHeRKbOCtYMJK3D6U32ooU9Q==",
+ "version": "3.3.0",
+ "resolved": "https://registry.npmjs.org/@shikijs/engine-javascript/-/engine-javascript-3.3.0.tgz",
+ "integrity": "sha512-XlhnFGv0glq7pfsoN0KyBCz9FJU678LZdQ2LqlIdAj6JKsg5xpYKay3DkazXWExp3DTJJK9rMOuGzU2911pg7Q==",
"dependencies": {
- "@shikijs/types": "3.2.1",
+ "@shikijs/types": "3.3.0",
"@shikijs/vscode-textmate": "^10.0.2",
- "oniguruma-to-es": "^4.1.0"
+ "oniguruma-to-es": "^4.2.0"
}
},
"node_modules/@shikijs/engine-oniguruma": {
- "version": "3.2.1",
- "resolved": "https://registry.npmjs.org/@shikijs/engine-oniguruma/-/engine-oniguruma-3.2.1.tgz",
- "integrity": "sha512-wZZAkayEn6qu2+YjenEoFqj0OyQI64EWsNR6/71d1EkG4sxEOFooowKivsWPpaWNBu3sxAG+zPz5kzBL/SsreQ==",
+ "version": "3.3.0",
+ "resolved": "https://registry.npmjs.org/@shikijs/engine-oniguruma/-/engine-oniguruma-3.3.0.tgz",
+ "integrity": "sha512-l0vIw+GxeNU7uGnsu6B+Crpeqf+WTQ2Va71cHb5ZYWEVEPdfYwY5kXwYqRJwHrxz9WH+pjSpXQz+TJgAsrkA5A==",
"dependencies": {
- "@shikijs/types": "3.2.1",
+ "@shikijs/types": "3.3.0",
"@shikijs/vscode-textmate": "^10.0.2"
}
},
"node_modules/@shikijs/langs": {
- "version": "3.2.1",
- "resolved": "https://registry.npmjs.org/@shikijs/langs/-/langs-3.2.1.tgz",
- "integrity": "sha512-If0iDHYRSGbihiA8+7uRsgb1er1Yj11pwpX1c6HLYnizDsKAw5iaT3JXj5ZpaimXSWky/IhxTm7C6nkiYVym+A==",
+ "version": "3.3.0",
+ "resolved": "https://registry.npmjs.org/@shikijs/langs/-/langs-3.3.0.tgz",
+ "integrity": "sha512-zt6Kf/7XpBQKSI9eqku+arLkAcDQ3NHJO6zFjiChI8w0Oz6Jjjay7pToottjQGjSDCFk++R85643WbyINcuL+g==",
"dependencies": {
- "@shikijs/types": "3.2.1"
+ "@shikijs/types": "3.3.0"
}
},
"node_modules/@shikijs/themes": {
- "version": "3.2.1",
- "resolved": "https://registry.npmjs.org/@shikijs/themes/-/themes-3.2.1.tgz",
- "integrity": "sha512-k5DKJUT8IldBvAm8WcrDT5+7GA7se6lLksR+2E3SvyqGTyFMzU2F9Gb7rmD+t+Pga1MKrYFxDIeyWjMZWM6uBQ==",
+ "version": "3.3.0",
+ "resolved": "https://registry.npmjs.org/@shikijs/themes/-/themes-3.3.0.tgz",
+ "integrity": "sha512-tXeCvLXBnqq34B0YZUEaAD1lD4lmN6TOHAhnHacj4Owh7Ptb/rf5XCDeROZt2rEOk5yuka3OOW2zLqClV7/SOg==",
"dependencies": {
- "@shikijs/types": "3.2.1"
+ "@shikijs/types": "3.3.0"
}
},
"node_modules/@shikijs/types": {
- "version": "3.2.1",
- "resolved": "https://registry.npmjs.org/@shikijs/types/-/types-3.2.1.tgz",
- "integrity": "sha512-/NTWAk4KE2M8uac0RhOsIhYQf4pdU0OywQuYDGIGAJ6Mjunxl2cGiuLkvu4HLCMn+OTTLRWkjZITp+aYJv60yA==",
+ "version": "3.3.0",
+ "resolved": "https://registry.npmjs.org/@shikijs/types/-/types-3.3.0.tgz",
+ "integrity": "sha512-KPCGnHG6k06QG/2pnYGbFtFvpVJmC3uIpXrAiPrawETifujPBv0Se2oUxm5qYgjCvGJS9InKvjytOdN+bGuX+Q==",
"dependencies": {
"@shikijs/vscode-textmate": "^10.0.2",
"@types/hast": "^3.0.4"
@@ -1490,6 +1678,14 @@
"resolved": "https://registry.npmjs.org/@shikijs/vscode-textmate/-/vscode-textmate-10.0.2.tgz",
"integrity": "sha512-83yeghZ2xxin3Nj8z1NMd/NCuca+gsYXswywDy5bHvwlWL8tpTQmzGeUuHd9FC3E/SBEMvzJRwWEOz5gGes9Qg=="
},
+ "node_modules/@swc/helpers": {
+ "version": "0.5.17",
+ "resolved": "https://registry.npmjs.org/@swc/helpers/-/helpers-0.5.17.tgz",
+ "integrity": "sha512-5IKx/Y13RsYd+sauPb2x+U/xZikHjolzfuDgTAl/Tdf3Q8rslRvC19NKDLgAJQ6wsqADk10ntlv08nPFw/gO/A==",
+ "dependencies": {
+ "tslib": "^2.8.0"
+ }
+ },
"node_modules/@tailwindcss/node": {
"version": "4.0.13",
"resolved": "https://registry.npmjs.org/@tailwindcss/node/-/node-4.0.13.tgz",
@@ -1772,6 +1968,11 @@
"@types/unist": "*"
}
},
+ "node_modules/@types/js-yaml": {
+ "version": "4.0.9",
+ "resolved": "https://registry.npmjs.org/@types/js-yaml/-/js-yaml-4.0.9.tgz",
+ "integrity": "sha512-k4MGaQl5TGo/iipqb2UDG2UwjXziSWkh0uysQelTlJpX1qGlpUZYm8PnO4DxG1qBomtJUdYJ6qR6xdIah10JLg=="
+ },
"node_modules/@types/mdast": {
"version": "4.0.4",
"resolved": "https://registry.npmjs.org/@types/mdast/-/mdast-4.0.4.tgz",
@@ -2051,14 +2252,16 @@
}
},
"node_modules/astro": {
- "version": "5.5.5",
- "resolved": "https://registry.npmjs.org/astro/-/astro-5.5.5.tgz",
- "integrity": "sha512-fdnnK5dhWNIQT/cXzvaGs9il4T5noi4jafobdntbuNOrRxI1JnOxDfrtBadUo6cknCRCFhYrXh4VndCqj1a4Sg==",
+ "version": "5.7.10",
+ "resolved": "https://registry.npmjs.org/astro/-/astro-5.7.10.tgz",
+ "integrity": "sha512-9TQcFZqP2w6//JXXUHfw8/5PX7KUx9EkG5O3m+hISuyeUztvjY1q5+p7+C5HiXyg24Zs3KkpieoL5BGRXGCAGA==",
+ "license": "MIT",
"dependencies": {
"@astrojs/compiler": "^2.11.0",
"@astrojs/internal-helpers": "0.6.1",
"@astrojs/markdown-remark": "6.3.1",
- "@astrojs/telemetry": "3.2.0",
+ "@astrojs/telemetry": "3.2.1",
+ "@capsizecss/unpack": "^2.4.0",
"@oslojs/encoding": "^1.1.0",
"@rollup/pluginutils": "^5.1.4",
"acorn": "^8.14.1",
@@ -2091,26 +2294,27 @@
"neotraverse": "^0.6.18",
"p-limit": "^6.2.0",
"p-queue": "^8.1.0",
- "package-manager-detector": "^1.0.0",
+ "package-manager-detector": "^1.1.0",
"picomatch": "^4.0.2",
"prompts": "^2.4.2",
"rehype": "^13.0.2",
"semver": "^7.7.1",
- "shiki": "^3.0.0",
+ "shiki": "^3.2.1",
"tinyexec": "^0.3.2",
"tinyglobby": "^0.2.12",
"tsconfck": "^3.1.5",
- "ultrahtml": "^1.5.3",
+ "ultrahtml": "^1.6.0",
+ "unifont": "~0.4.1",
"unist-util-visit": "^5.0.0",
"unstorage": "^1.15.0",
"vfile": "^6.0.3",
- "vite": "^6.2.3",
+ "vite": "^6.3.4",
"vitefu": "^1.0.6",
"xxhash-wasm": "^1.1.0",
"yargs-parser": "^21.1.1",
"yocto-spinner": "^0.2.1",
"zod": "^3.24.2",
- "zod-to-json-schema": "^3.24.3",
+ "zod-to-json-schema": "^3.24.5",
"zod-to-ts": "^1.2.0"
},
"bin": {
@@ -2129,6 +2333,17 @@
"sharp": "^0.33.3"
}
},
+ "node_modules/astro-expressive-code": {
+ "version": "0.41.2",
+ "resolved": "https://registry.npmjs.org/astro-expressive-code/-/astro-expressive-code-0.41.2.tgz",
+ "integrity": "sha512-HN0jWTnhr7mIV/2e6uu4PPRNNo/k4UEgTLZqbp3MrHU+caCARveG2yZxaZVBmxyiVdYqW5Pd3u3n2zjnshixbw==",
+ "dependencies": {
+ "rehype-expressive-code": "^0.41.2"
+ },
+ "peerDependencies": {
+ "astro": "^4.0.0-beta || ^5.0.0-beta || ^3.3.0"
+ }
+ },
"node_modules/async-sema": {
"version": "3.1.1",
"resolved": "https://registry.npmjs.org/async-sema/-/async-sema-3.1.1.tgz",
@@ -2166,6 +2381,48 @@
"integrity": "sha512-kwDPIFCGx0NZHog36dj+tHiwP4QMzsZ3AgMViUBKI0+V5n4U0ufTCUMhnQ04diaRI8EX/QcPfql7zlhZ7j4zgg==",
"license": "MIT"
},
+ "node_modules/base64-js": {
+ "version": "1.5.1",
+ "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz",
+ "integrity": "sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==",
+ "funding": [
+ {
+ "type": "github",
+ "url": "https://github.com/sponsors/feross"
+ },
+ {
+ "type": "patreon",
+ "url": "https://www.patreon.com/feross"
+ },
+ {
+ "type": "consulting",
+ "url": "https://feross.org/support"
+ }
+ ]
+ },
+ "node_modules/bcp-47": {
+ "version": "2.1.0",
+ "resolved": "https://registry.npmjs.org/bcp-47/-/bcp-47-2.1.0.tgz",
+ "integrity": "sha512-9IIS3UPrvIa1Ej+lVDdDwO7zLehjqsaByECw0bu2RRGP73jALm6FYbzI5gWbgHLvNdkvfXB5YrSbocZdOS0c0w==",
+ "dependencies": {
+ "is-alphabetical": "^2.0.0",
+ "is-alphanumerical": "^2.0.0",
+ "is-decimal": "^2.0.0"
+ },
+ "funding": {
+ "type": "github",
+ "url": "https://github.com/sponsors/wooorm"
+ }
+ },
+ "node_modules/bcp-47-match": {
+ "version": "2.0.3",
+ "resolved": "https://registry.npmjs.org/bcp-47-match/-/bcp-47-match-2.0.3.tgz",
+ "integrity": "sha512-JtTezzbAibu8G0R9op9zb3vcWZd9JF6M0xOYGPn0fNCd7wOpRB1mU2mH9T8gaBGbAAyIIVgB2G7xG0GP98zMAQ==",
+ "funding": {
+ "type": "github",
+ "url": "https://github.com/sponsors/wooorm"
+ }
+ },
"node_modules/bindings": {
"version": "1.5.0",
"resolved": "https://registry.npmjs.org/bindings/-/bindings-1.5.0.tgz",
@@ -2175,6 +2432,30 @@
"file-uri-to-path": "1.0.0"
}
},
+ "node_modules/blob-to-buffer": {
+ "version": "1.2.9",
+ "resolved": "https://registry.npmjs.org/blob-to-buffer/-/blob-to-buffer-1.2.9.tgz",
+ "integrity": "sha512-BF033y5fN6OCofD3vgHmNtwZWRcq9NLyyxyILx9hfMy1sXYy4ojFl765hJ2lP0YaN2fuxPaLO2Vzzoxy0FLFFA==",
+ "funding": [
+ {
+ "type": "github",
+ "url": "https://github.com/sponsors/feross"
+ },
+ {
+ "type": "patreon",
+ "url": "https://www.patreon.com/feross"
+ },
+ {
+ "type": "consulting",
+ "url": "https://feross.org/support"
+ }
+ ]
+ },
+ "node_modules/boolbase": {
+ "version": "1.0.0",
+ "resolved": "https://registry.npmjs.org/boolbase/-/boolbase-1.0.0.tgz",
+ "integrity": "sha512-JZOSA7Mo9sNGB8+UjSgzdLtokWAky1zbztM3WRLCbZ70/3cTANmQmOdR7y2g+J0e2WXywy1yS468tY+IruqEww=="
+ },
"node_modules/boxen": {
"version": "8.0.1",
"resolved": "https://registry.npmjs.org/boxen/-/boxen-8.0.1.tgz",
@@ -2206,6 +2487,14 @@
"balanced-match": "^1.0.0"
}
},
+ "node_modules/brotli": {
+ "version": "1.3.3",
+ "resolved": "https://registry.npmjs.org/brotli/-/brotli-1.3.3.tgz",
+ "integrity": "sha512-oTKjJdShmDuGW94SyyaoQvAjf30dZaHnjJ8uAF+u2/vGJkJbJPJAT1gDiOJP5v1Zb6f9KEyW/1HpuaWIXtGHPg==",
+ "dependencies": {
+ "base64-js": "^1.1.2"
+ }
+ },
"node_modules/camelcase": {
"version": "8.0.0",
"resolved": "https://registry.npmjs.org/camelcase/-/camelcase-8.0.0.tgz",
@@ -2316,6 +2605,14 @@
"url": "https://github.com/sponsors/sindresorhus"
}
},
+ "node_modules/clone": {
+ "version": "2.1.2",
+ "resolved": "https://registry.npmjs.org/clone/-/clone-2.1.2.tgz",
+ "integrity": "sha512-3Pe/CF1Nn94hyhIYpjtiLhdCoEoz0DqQ+988E9gmeEdQZlojxnOb74wctFyuwWQHzqyf9X7C7MG8juUpqBJT8w==",
+ "engines": {
+ "node": ">=0.8"
+ }
+ },
"node_modules/clsx": {
"version": "2.1.1",
"resolved": "https://registry.npmjs.org/clsx/-/clsx-2.1.1.tgz",
@@ -2417,6 +2714,14 @@
"integrity": "sha512-+W7VmiVINB+ywl1HGXJXmrqkOhpKrIiVZV6tQuV54ZyQC7MMuBt81Vc336GMLoHBq5hV/F9eXgt5Mnx0Rha5Fg==",
"license": "MIT"
},
+ "node_modules/cross-fetch": {
+ "version": "3.2.0",
+ "resolved": "https://registry.npmjs.org/cross-fetch/-/cross-fetch-3.2.0.tgz",
+ "integrity": "sha512-Q+xVJLoGOeIMXZmbUK4HYk+69cQH6LudR0Vu/pRm2YlU/hDV9CiS0gKUMaWY5f2NeUH9C1nV3bsTlCo0FsTV1Q==",
+ "dependencies": {
+ "node-fetch": "^2.7.0"
+ }
+ },
"node_modules/cross-spawn": {
"version": "7.0.6",
"resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.6.tgz",
@@ -2440,6 +2745,34 @@
"uncrypto": "^0.1.3"
}
},
+ "node_modules/css-selector-parser": {
+ "version": "3.1.2",
+ "resolved": "https://registry.npmjs.org/css-selector-parser/-/css-selector-parser-3.1.2.tgz",
+ "integrity": "sha512-WfUcL99xWDs7b3eZPoRszWVfbNo8ErCF15PTvVROjkShGlAfjIkG6hlfj/sl6/rfo5Q9x9ryJ3VqVnAZDA+gcw==",
+ "funding": [
+ {
+ "type": "github",
+ "url": "https://github.com/sponsors/mdevils"
+ },
+ {
+ "type": "patreon",
+ "url": "https://patreon.com/mdevils"
+ }
+ ]
+ },
+ "node_modules/css-tree": {
+ "version": "3.1.0",
+ "resolved": "https://registry.npmjs.org/css-tree/-/css-tree-3.1.0.tgz",
+ "integrity": "sha512-0eW44TGN5SQXU1mWSkKwFstI/22X2bG1nYzZTYMAWjylYURhse752YgbE4Cx46AC+bAvI+/dYTPRk1LqSUnu6w==",
+ "license": "MIT",
+ "dependencies": {
+ "mdn-data": "2.12.2",
+ "source-map-js": "^1.0.1"
+ },
+ "engines": {
+ "node": "^10 || ^12.20.0 || ^14.13.0 || >=15.0.0"
+ }
+ },
"node_modules/cssesc": {
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/cssesc/-/cssesc-3.0.0.tgz",
@@ -2553,6 +2886,11 @@
"url": "https://github.com/sponsors/wooorm"
}
},
+ "node_modules/dfa": {
+ "version": "1.2.0",
+ "resolved": "https://registry.npmjs.org/dfa/-/dfa-1.2.0.tgz",
+ "integrity": "sha512-ED3jP8saaweFTjeGX8HQPjeC1YYyZs98jGNZx6IiBvxW7JG5v492kamAQB3m2wop07CvU/RQmzcKr6bgcC5D/Q=="
+ },
"node_modules/diff": {
"version": "5.2.0",
"resolved": "https://registry.npmjs.org/diff/-/diff-5.2.0.tgz",
@@ -2562,6 +2900,18 @@
"node": ">=0.3.1"
}
},
+ "node_modules/direction": {
+ "version": "2.0.1",
+ "resolved": "https://registry.npmjs.org/direction/-/direction-2.0.1.tgz",
+ "integrity": "sha512-9S6m9Sukh1cZNknO1CWAr2QAWsbKLafQiyM5gZ7VgXHeuaoUwffKN4q6NC4A/Mf9iiPlOXQEKW/Mv/mh9/3YFA==",
+ "bin": {
+ "direction": "cli.js"
+ },
+ "funding": {
+ "type": "github",
+ "url": "https://github.com/sponsors/wooorm"
+ }
+ },
"node_modules/dlv": {
"version": "1.1.3",
"resolved": "https://registry.npmjs.org/dlv/-/dlv-1.1.3.tgz",
@@ -2806,12 +3156,28 @@
"integrity": "sha512-GWkBvjiSZK87ELrYOSESUYeVIc9mvLLf/nXalMOS5dYrgZq9o5OVkbZAVM06CVxYsCwH9BDZFPlQTlPA1j4ahA==",
"license": "MIT"
},
+ "node_modules/expressive-code": {
+ "version": "0.41.2",
+ "resolved": "https://registry.npmjs.org/expressive-code/-/expressive-code-0.41.2.tgz",
+ "integrity": "sha512-aLZiZaqorRtNExtGpUjK9zFH9aTpWeoTXMyLo4b4IcuXfPqtLPPxhRm/QlPb8QqIcMMXnSiGRHSFpQfX0m7HJw==",
+ "dependencies": {
+ "@expressive-code/core": "^0.41.2",
+ "@expressive-code/plugin-frames": "^0.41.2",
+ "@expressive-code/plugin-shiki": "^0.41.2",
+ "@expressive-code/plugin-text-markers": "^0.41.2"
+ }
+ },
"node_modules/extend": {
"version": "3.0.2",
"resolved": "https://registry.npmjs.org/extend/-/extend-3.0.2.tgz",
"integrity": "sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g==",
"license": "MIT"
},
+ "node_modules/fast-deep-equal": {
+ "version": "3.1.3",
+ "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz",
+ "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q=="
+ },
"node_modules/fast-xml-parser": {
"version": "4.5.3",
"resolved": "https://registry.npmjs.org/fast-xml-parser/-/fast-xml-parser-4.5.3.tgz",
@@ -2831,9 +3197,9 @@
}
},
"node_modules/fdir": {
- "version": "6.4.3",
- "resolved": "https://registry.npmjs.org/fdir/-/fdir-6.4.3.tgz",
- "integrity": "sha512-PMXmW2y1hDDfTSRc9gaXIuCCRpuoz3Kaz8cUelp3smouvfT632ozg2vrT6lJsHKKOF59YLbOGfAWGUcKEfRMQw==",
+ "version": "6.4.4",
+ "resolved": "https://registry.npmjs.org/fdir/-/fdir-6.4.4.tgz",
+ "integrity": "sha512-1NZP+GK4GfuAv3PqKvxQRDMjdSRZjnkq7KfhlNrCNNlZ0ygQFpebfrnfnq/W7fpUnAv9aGWmY1zKx7FYL3gwhg==",
"license": "MIT",
"peerDependencies": {
"picomatch": "^3 || ^4"
@@ -2859,6 +3225,22 @@
"node": ">=8"
}
},
+ "node_modules/fontkit": {
+ "version": "2.0.4",
+ "resolved": "https://registry.npmjs.org/fontkit/-/fontkit-2.0.4.tgz",
+ "integrity": "sha512-syetQadaUEDNdxdugga9CpEYVaQIxOwk7GlwZWWZ19//qW4zE5bknOKeMBDYAASwnpaSHKJITRLMF9m1fp3s6g==",
+ "dependencies": {
+ "@swc/helpers": "^0.5.12",
+ "brotli": "^1.3.2",
+ "clone": "^2.1.2",
+ "dfa": "^1.2.0",
+ "fast-deep-equal": "^3.1.3",
+ "restructure": "^3.0.0",
+ "tiny-inflate": "^1.0.3",
+ "unicode-properties": "^1.4.0",
+ "unicode-trie": "^2.0.0"
+ }
+ },
"node_modules/foreground-child": {
"version": "3.3.1",
"resolved": "https://registry.npmjs.org/foreground-child/-/foreground-child-3.3.1.tgz",
@@ -2949,6 +3331,37 @@
"uncrypto": "^0.1.3"
}
},
+ "node_modules/hast-util-embedded": {
+ "version": "3.0.0",
+ "resolved": "https://registry.npmjs.org/hast-util-embedded/-/hast-util-embedded-3.0.0.tgz",
+ "integrity": "sha512-naH8sld4Pe2ep03qqULEtvYr7EjrLK2QHY8KJR6RJkTUjPGObe1vnx585uzem2hGra+s1q08DZZpfgDVYRbaXA==",
+ "dependencies": {
+ "@types/hast": "^3.0.0",
+ "hast-util-is-element": "^3.0.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/unified"
+ }
+ },
+ "node_modules/hast-util-format": {
+ "version": "1.1.0",
+ "resolved": "https://registry.npmjs.org/hast-util-format/-/hast-util-format-1.1.0.tgz",
+ "integrity": "sha512-yY1UDz6bC9rDvCWHpx12aIBGRG7krurX0p0Fm6pT547LwDIZZiNr8a+IHDogorAdreULSEzP82Nlv5SZkHZcjA==",
+ "dependencies": {
+ "@types/hast": "^3.0.0",
+ "hast-util-embedded": "^3.0.0",
+ "hast-util-minify-whitespace": "^1.0.0",
+ "hast-util-phrasing": "^3.0.0",
+ "hast-util-whitespace": "^3.0.0",
+ "html-whitespace-sensitive-tag-names": "^3.0.0",
+ "unist-util-visit-parents": "^6.0.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/unified"
+ }
+ },
"node_modules/hast-util-from-html": {
"version": "2.0.3",
"resolved": "https://registry.npmjs.org/hast-util-from-html/-/hast-util-from-html-2.0.3.tgz",
@@ -2987,6 +3400,30 @@
"url": "https://opencollective.com/unified"
}
},
+ "node_modules/hast-util-has-property": {
+ "version": "3.0.0",
+ "resolved": "https://registry.npmjs.org/hast-util-has-property/-/hast-util-has-property-3.0.0.tgz",
+ "integrity": "sha512-MNilsvEKLFpV604hwfhVStK0usFY/QmM5zX16bo7EjnAEGofr5YyI37kzopBlZJkHD4t887i+q/C8/tr5Q94cA==",
+ "dependencies": {
+ "@types/hast": "^3.0.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/unified"
+ }
+ },
+ "node_modules/hast-util-is-body-ok-link": {
+ "version": "3.0.1",
+ "resolved": "https://registry.npmjs.org/hast-util-is-body-ok-link/-/hast-util-is-body-ok-link-3.0.1.tgz",
+ "integrity": "sha512-0qpnzOBLztXHbHQenVB8uNuxTnm/QBFUOmdOSsEn7GnBtyY07+ENTWVFBAnXd/zEgd9/SUG3lRY7hSIBWRgGpQ==",
+ "dependencies": {
+ "@types/hast": "^3.0.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/unified"
+ }
+ },
"node_modules/hast-util-is-element": {
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/hast-util-is-element/-/hast-util-is-element-3.0.0.tgz",
@@ -2999,6 +3436,22 @@
"url": "https://opencollective.com/unified"
}
},
+ "node_modules/hast-util-minify-whitespace": {
+ "version": "1.0.1",
+ "resolved": "https://registry.npmjs.org/hast-util-minify-whitespace/-/hast-util-minify-whitespace-1.0.1.tgz",
+ "integrity": "sha512-L96fPOVpnclQE0xzdWb/D12VT5FabA7SnZOUMtL1DbXmYiHJMXZvFkIZfiMmTCNJHUeO2K9UYNXoVyfz+QHuOw==",
+ "dependencies": {
+ "@types/hast": "^3.0.0",
+ "hast-util-embedded": "^3.0.0",
+ "hast-util-is-element": "^3.0.0",
+ "hast-util-whitespace": "^3.0.0",
+ "unist-util-is": "^6.0.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/unified"
+ }
+ },
"node_modules/hast-util-parse-selector": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/hast-util-parse-selector/-/hast-util-parse-selector-4.0.0.tgz",
@@ -3012,6 +3465,22 @@
"url": "https://opencollective.com/unified"
}
},
+ "node_modules/hast-util-phrasing": {
+ "version": "3.0.1",
+ "resolved": "https://registry.npmjs.org/hast-util-phrasing/-/hast-util-phrasing-3.0.1.tgz",
+ "integrity": "sha512-6h60VfI3uBQUxHqTyMymMZnEbNl1XmEGtOxxKYL7stY2o601COo62AWAYBQR9lZbYXYSBoxag8UpPRXK+9fqSQ==",
+ "dependencies": {
+ "@types/hast": "^3.0.0",
+ "hast-util-embedded": "^3.0.0",
+ "hast-util-has-property": "^3.0.0",
+ "hast-util-is-body-ok-link": "^3.0.0",
+ "hast-util-is-element": "^3.0.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/unified"
+ }
+ },
"node_modules/hast-util-raw": {
"version": "9.1.0",
"resolved": "https://registry.npmjs.org/hast-util-raw/-/hast-util-raw-9.1.0.tgz",
@@ -3037,6 +3506,32 @@
"url": "https://opencollective.com/unified"
}
},
+ "node_modules/hast-util-select": {
+ "version": "6.0.4",
+ "resolved": "https://registry.npmjs.org/hast-util-select/-/hast-util-select-6.0.4.tgz",
+ "integrity": "sha512-RqGS1ZgI0MwxLaKLDxjprynNzINEkRHY2i8ln4DDjgv9ZhcYVIHN9rlpiYsqtFwrgpYU361SyWDQcGNIBVu3lw==",
+ "dependencies": {
+ "@types/hast": "^3.0.0",
+ "@types/unist": "^3.0.0",
+ "bcp-47-match": "^2.0.0",
+ "comma-separated-tokens": "^2.0.0",
+ "css-selector-parser": "^3.0.0",
+ "devlop": "^1.0.0",
+ "direction": "^2.0.0",
+ "hast-util-has-property": "^3.0.0",
+ "hast-util-to-string": "^3.0.0",
+ "hast-util-whitespace": "^3.0.0",
+ "nth-check": "^2.0.0",
+ "property-information": "^7.0.0",
+ "space-separated-tokens": "^2.0.0",
+ "unist-util-visit": "^5.0.0",
+ "zwitch": "^2.0.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/unified"
+ }
+ },
"node_modules/hast-util-to-estree": {
"version": "3.1.3",
"resolved": "https://registry.npmjs.org/hast-util-to-estree/-/hast-util-to-estree-3.1.3.tgz",
@@ -3144,6 +3639,18 @@
"url": "https://github.com/sponsors/wooorm"
}
},
+ "node_modules/hast-util-to-string": {
+ "version": "3.0.1",
+ "resolved": "https://registry.npmjs.org/hast-util-to-string/-/hast-util-to-string-3.0.1.tgz",
+ "integrity": "sha512-XelQVTDWvqcl3axRfI0xSeoVKzyIFPwsAGSLIsKdJKQMXDYJS4WYrBNF/8J7RdhIcFI2BOHgAifggsvsxp/3+A==",
+ "dependencies": {
+ "@types/hast": "^3.0.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/unified"
+ }
+ },
"node_modules/hast-util-to-text": {
"version": "4.0.2",
"resolved": "https://registry.npmjs.org/hast-util-to-text/-/hast-util-to-text-4.0.2.tgz",
@@ -3205,6 +3712,15 @@
"url": "https://github.com/sponsors/wooorm"
}
},
+ "node_modules/html-whitespace-sensitive-tag-names": {
+ "version": "3.0.1",
+ "resolved": "https://registry.npmjs.org/html-whitespace-sensitive-tag-names/-/html-whitespace-sensitive-tag-names-3.0.1.tgz",
+ "integrity": "sha512-q+310vW8zmymYHALr1da4HyXUQ0zgiIwIicEfotYPWGN0OJVEN/58IJ3A4GBYcEq3LGAZqKb+ugvP0GNB9CEAA==",
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/unified"
+ }
+ },
"node_modules/http-cache-semantics": {
"version": "4.1.1",
"resolved": "https://registry.npmjs.org/http-cache-semantics/-/http-cache-semantics-4.1.1.tgz",
@@ -3224,6 +3740,28 @@
"node": ">= 14"
}
},
+ "node_modules/i18next": {
+ "version": "23.16.8",
+ "resolved": "https://registry.npmjs.org/i18next/-/i18next-23.16.8.tgz",
+ "integrity": "sha512-06r/TitrM88Mg5FdUXAKL96dJMzgqLE5dv3ryBAra4KCwD9mJ4ndOTS95ZuymIGoE+2hzfdaMak2X11/es7ZWg==",
+ "funding": [
+ {
+ "type": "individual",
+ "url": "https://locize.com"
+ },
+ {
+ "type": "individual",
+ "url": "https://locize.com/i18next.html"
+ },
+ {
+ "type": "individual",
+ "url": "https://www.i18next.com/how-to/faq#i18next-is-awesome.-how-can-i-support-the-project"
+ }
+ ],
+ "dependencies": {
+ "@babel/runtime": "^7.23.2"
+ }
+ },
"node_modules/import-meta-resolve": {
"version": "4.1.0",
"resolved": "https://registry.npmjs.org/import-meta-resolve/-/import-meta-resolve-4.1.0.tgz",
@@ -3418,6 +3956,14 @@
"node": ">=6"
}
},
+ "node_modules/klona": {
+ "version": "2.0.6",
+ "resolved": "https://registry.npmjs.org/klona/-/klona-2.0.6.tgz",
+ "integrity": "sha512-dhG34DXATL5hSxJbIexCft8FChFXtmskoZYnoPWjXQuebWYCNkVeV3KkGegCK9CP1oswI/vQibS2GY7Em/sJJA==",
+ "engines": {
+ "node": ">= 8"
+ }
+ },
"node_modules/lightningcss": {
"version": "1.29.2",
"resolved": "https://registry.npmjs.org/lightningcss/-/lightningcss-1.29.2.tgz",
@@ -3739,6 +4285,26 @@
"url": "https://opencollective.com/unified"
}
},
+ "node_modules/mdast-util-directive": {
+ "version": "3.1.0",
+ "resolved": "https://registry.npmjs.org/mdast-util-directive/-/mdast-util-directive-3.1.0.tgz",
+ "integrity": "sha512-I3fNFt+DHmpWCYAT7quoM6lHf9wuqtI+oCOfvILnoicNIqjh5E3dEJWiXuYME2gNe8vl1iMQwyUHa7bgFmak6Q==",
+ "dependencies": {
+ "@types/mdast": "^4.0.0",
+ "@types/unist": "^3.0.0",
+ "ccount": "^2.0.0",
+ "devlop": "^1.0.0",
+ "mdast-util-from-markdown": "^2.0.0",
+ "mdast-util-to-markdown": "^2.0.0",
+ "parse-entities": "^4.0.0",
+ "stringify-entities": "^4.0.0",
+ "unist-util-visit-parents": "^6.0.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/unified"
+ }
+ },
"node_modules/mdast-util-find-and-replace": {
"version": "3.0.2",
"resolved": "https://registry.npmjs.org/mdast-util-find-and-replace/-/mdast-util-find-and-replace-3.0.2.tgz",
@@ -4026,6 +4592,12 @@
"url": "https://opencollective.com/unified"
}
},
+ "node_modules/mdn-data": {
+ "version": "2.12.2",
+ "resolved": "https://registry.npmjs.org/mdn-data/-/mdn-data-2.12.2.tgz",
+ "integrity": "sha512-IEn+pegP1aManZuckezWCO+XZQDplx1366JoVhTpMpBB1sPey/SbveZQUosKiKiGYjg1wH4pMlNgXbCiYgihQA==",
+ "license": "CC0-1.0"
+ },
"node_modules/micromark": {
"version": "4.0.2",
"resolved": "https://registry.npmjs.org/micromark/-/micromark-4.0.2.tgz",
@@ -4095,6 +4667,24 @@
"micromark-util-types": "^2.0.0"
}
},
+ "node_modules/micromark-extension-directive": {
+ "version": "3.0.2",
+ "resolved": "https://registry.npmjs.org/micromark-extension-directive/-/micromark-extension-directive-3.0.2.tgz",
+ "integrity": "sha512-wjcXHgk+PPdmvR58Le9d7zQYWy+vKEU9Se44p2CrCDPiLr2FMyiT4Fyb5UFKFC66wGB3kPlgD7q3TnoqPS7SZA==",
+ "dependencies": {
+ "devlop": "^1.0.0",
+ "micromark-factory-space": "^2.0.0",
+ "micromark-factory-whitespace": "^2.0.0",
+ "micromark-util-character": "^2.0.0",
+ "micromark-util-symbol": "^2.0.0",
+ "micromark-util-types": "^2.0.0",
+ "parse-entities": "^4.0.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/unified"
+ }
+ },
"node_modules/micromark-extension-gfm": {
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/micromark-extension-gfm/-/micromark-extension-gfm-3.0.0.tgz",
@@ -4919,6 +5509,17 @@
"node": ">=0.10.0"
}
},
+ "node_modules/nth-check": {
+ "version": "2.1.1",
+ "resolved": "https://registry.npmjs.org/nth-check/-/nth-check-2.1.1.tgz",
+ "integrity": "sha512-lqjrjmaOoAnWfMmBPL+XNnynZh2+swxiX3WUE0s4yEHI6m+AwrK2UZOimIRl3X/4QctVqS8AiZjFqyOGrMXb/w==",
+ "dependencies": {
+ "boolbase": "^1.0.0"
+ },
+ "funding": {
+ "url": "https://github.com/fb55/nth-check?sponsor=1"
+ }
+ },
"node_modules/ofetch": {
"version": "1.4.1",
"resolved": "https://registry.npmjs.org/ofetch/-/ofetch-1.4.1.tgz",
@@ -4930,18 +5531,24 @@
"ufo": "^1.5.4"
}
},
+ "node_modules/ohash": {
+ "version": "2.0.11",
+ "resolved": "https://registry.npmjs.org/ohash/-/ohash-2.0.11.tgz",
+ "integrity": "sha512-RdR9FQrFwNBNXAr4GixM8YaRZRJ5PUWbKYbE5eOsrwAjJW0q2REGcf79oYPsLyskQCZG1PLN+S/K1V00joZAoQ==",
+ "license": "MIT"
+ },
"node_modules/oniguruma-parser": {
- "version": "0.5.4",
- "resolved": "https://registry.npmjs.org/oniguruma-parser/-/oniguruma-parser-0.5.4.tgz",
- "integrity": "sha512-yNxcQ8sKvURiTwP0mV6bLQCYE7NKfKRRWunhbZnXgxSmB1OXa1lHrN3o4DZd+0Si0kU5blidK7BcROO8qv5TZA=="
+ "version": "0.11.2",
+ "resolved": "https://registry.npmjs.org/oniguruma-parser/-/oniguruma-parser-0.11.2.tgz",
+ "integrity": "sha512-F7Ld4oDZJCI5/wCZ8AOffQbqjSzIRpKH7I/iuSs1SkhZeCj0wS6PMZ4W6VA16TWHrAo0Y9bBKEJOe7tvwcTXnw=="
},
"node_modules/oniguruma-to-es": {
- "version": "4.1.0",
- "resolved": "https://registry.npmjs.org/oniguruma-to-es/-/oniguruma-to-es-4.1.0.tgz",
- "integrity": "sha512-SNwG909cSLo4vPyyPbU/VJkEc9WOXqu2ycBlfd1UCXLqk1IijcQktSBb2yRQ2UFPsDhpkaf+C1dtT3PkLK/yWA==",
+ "version": "4.2.0",
+ "resolved": "https://registry.npmjs.org/oniguruma-to-es/-/oniguruma-to-es-4.2.0.tgz",
+ "integrity": "sha512-MDPs6KSOLS0tKQ7joqg44dRIRZUyotfTy0r+7oEEs6VwWWP0+E2PPDYWMFN0aqOjRyWHBYq7RfKw9GQk2S2z5g==",
"dependencies": {
"emoji-regex-xs": "^1.0.0",
- "oniguruma-parser": "^0.5.4",
+ "oniguruma-parser": "^0.11.0",
"regex": "^6.0.1",
"regex-recursion": "^6.0.2"
}
@@ -4996,10 +5603,29 @@
"license": "BlueOak-1.0.0"
},
"node_modules/package-manager-detector": {
- "version": "1.0.0",
- "resolved": "https://registry.npmjs.org/package-manager-detector/-/package-manager-detector-1.0.0.tgz",
- "integrity": "sha512-7elnH+9zMsRo7aS72w6MeRugTpdRvInmEB4Kmm9BVvPw/SLG8gXUGQ+4wF0Mys0RSWPz0B9nuBbDe8vFeA2sfg==",
- "license": "MIT"
+ "version": "1.2.0",
+ "resolved": "https://registry.npmjs.org/package-manager-detector/-/package-manager-detector-1.2.0.tgz",
+ "integrity": "sha512-PutJepsOtsqVfUsxCzgTTpyXmiAgvKptIgY4th5eq5UXXFhj5PxfQ9hnGkypMeovpAvVshFRItoFHYO18TCOqA=="
+ },
+ "node_modules/pagefind": {
+ "version": "1.3.0",
+ "resolved": "https://registry.npmjs.org/pagefind/-/pagefind-1.3.0.tgz",
+ "integrity": "sha512-8KPLGT5g9s+olKMRTU9LFekLizkVIu9tes90O1/aigJ0T5LmyPqTzGJrETnSw3meSYg58YH7JTzhTTW/3z6VAw==",
+ "bin": {
+ "pagefind": "lib/runner/bin.cjs"
+ },
+ "optionalDependencies": {
+ "@pagefind/darwin-arm64": "1.3.0",
+ "@pagefind/darwin-x64": "1.3.0",
+ "@pagefind/linux-arm64": "1.3.0",
+ "@pagefind/linux-x64": "1.3.0",
+ "@pagefind/windows-x64": "1.3.0"
+ }
+ },
+ "node_modules/pako": {
+ "version": "0.2.9",
+ "resolved": "https://registry.npmjs.org/pako/-/pako-0.2.9.tgz",
+ "integrity": "sha512-NUcwaKxUxWrZLpDG+z/xZaCgQITkA/Dv4V/T6bw7VON6l1Xz/VnrBqrYjZQ12TamKHzITTfOEIYUj48y2KXImA=="
},
"node_modules/parse-entities": {
"version": "4.0.2",
@@ -5127,6 +5753,42 @@
"node": "^10 || ^12 || >=14"
}
},
+ "node_modules/postcss-nested": {
+ "version": "6.2.0",
+ "resolved": "https://registry.npmjs.org/postcss-nested/-/postcss-nested-6.2.0.tgz",
+ "integrity": "sha512-HQbt28KulC5AJzG+cZtj9kvKB93CFCdLvog1WFLf1D+xmMvPGlBstkpTEZfK5+AN9hfJocyBFCNiqyS48bpgzQ==",
+ "funding": [
+ {
+ "type": "opencollective",
+ "url": "https://opencollective.com/postcss/"
+ },
+ {
+ "type": "github",
+ "url": "https://github.com/sponsors/ai"
+ }
+ ],
+ "dependencies": {
+ "postcss-selector-parser": "^6.1.1"
+ },
+ "engines": {
+ "node": ">=12.0"
+ },
+ "peerDependencies": {
+ "postcss": "^8.2.14"
+ }
+ },
+ "node_modules/postcss-nested/node_modules/postcss-selector-parser": {
+ "version": "6.1.2",
+ "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-6.1.2.tgz",
+ "integrity": "sha512-Q8qQfPiZ+THO/3ZrOrO0cJJKfpYCagtMUkXbnEfmgUjwXg6z/WBeOyS9APBBPCTSiDV+s4SwQGu8yFsiMRIudg==",
+ "dependencies": {
+ "cssesc": "^3.0.0",
+ "util-deprecate": "^1.0.2"
+ },
+ "engines": {
+ "node": ">=4"
+ }
+ },
"node_modules/postcss-selector-parser": {
"version": "6.0.10",
"resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-6.0.10.tgz",
@@ -5251,6 +5913,11 @@
"url": "https://opencollective.com/unified"
}
},
+ "node_modules/regenerator-runtime": {
+ "version": "0.14.1",
+ "resolved": "https://registry.npmjs.org/regenerator-runtime/-/regenerator-runtime-0.14.1.tgz",
+ "integrity": "sha512-dYnhHh0nJoMfnkZs6GmmhFknAGRrLznOu5nc9ML+EJxGvrx6H7teuevqVqCuPcPK//3eDrrjQhehXVx9cnkGdw=="
+ },
"node_modules/regex": {
"version": "6.0.1",
"resolved": "https://registry.npmjs.org/regex/-/regex-6.0.1.tgz",
@@ -5288,6 +5955,27 @@
"url": "https://opencollective.com/unified"
}
},
+ "node_modules/rehype-expressive-code": {
+ "version": "0.41.2",
+ "resolved": "https://registry.npmjs.org/rehype-expressive-code/-/rehype-expressive-code-0.41.2.tgz",
+ "integrity": "sha512-vHYfWO9WxAw6kHHctddOt+P4266BtyT1mrOIuxJD+1ELuvuJAa5uBIhYt0OVMyOhlvf57hzWOXJkHnMhpaHyxw==",
+ "dependencies": {
+ "expressive-code": "^0.41.2"
+ }
+ },
+ "node_modules/rehype-format": {
+ "version": "5.0.1",
+ "resolved": "https://registry.npmjs.org/rehype-format/-/rehype-format-5.0.1.tgz",
+ "integrity": "sha512-zvmVru9uB0josBVpr946OR8ui7nJEdzZobwLOOqHb/OOD88W0Vk2SqLwoVOj0fM6IPCCO6TaV9CvQvJMWwukFQ==",
+ "dependencies": {
+ "@types/hast": "^3.0.0",
+ "hast-util-format": "^1.0.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/unified"
+ }
+ },
"node_modules/rehype-parse": {
"version": "9.0.1",
"resolved": "https://registry.npmjs.org/rehype-parse/-/rehype-parse-9.0.1.tgz",
@@ -5348,6 +6036,21 @@
"url": "https://opencollective.com/unified"
}
},
+ "node_modules/remark-directive": {
+ "version": "3.0.1",
+ "resolved": "https://registry.npmjs.org/remark-directive/-/remark-directive-3.0.1.tgz",
+ "integrity": "sha512-gwglrEQEZcZYgVyG1tQuA+h58EZfq5CSULw7J90AFuCTyib1thgHPoqQ+h9iFvU6R+vnZ5oNFQR5QKgGpk741A==",
+ "dependencies": {
+ "@types/mdast": "^4.0.0",
+ "mdast-util-directive": "^3.0.0",
+ "micromark-extension-directive": "^3.0.0",
+ "unified": "^11.0.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/unified"
+ }
+ },
"node_modules/remark-gfm": {
"version": "4.0.1",
"resolved": "https://registry.npmjs.org/remark-gfm/-/remark-gfm-4.0.1.tgz",
@@ -5452,6 +6155,11 @@
"node": ">=8"
}
},
+ "node_modules/restructure": {
+ "version": "3.0.2",
+ "resolved": "https://registry.npmjs.org/restructure/-/restructure-3.0.2.tgz",
+ "integrity": "sha512-gSfoiOEA0VPE6Tukkrr7I0RBdE0s7H1eFCDBk05l1KIQT1UIKNc5JZy6jdyW6eYH3aR3g5b3PuL77rq0hvwtAw=="
+ },
"node_modules/retext": {
"version": "9.0.0",
"resolved": "https://registry.npmjs.org/retext/-/retext-9.0.0.tgz",
@@ -5646,16 +6354,16 @@
}
},
"node_modules/shiki": {
- "version": "3.2.1",
- "resolved": "https://registry.npmjs.org/shiki/-/shiki-3.2.1.tgz",
- "integrity": "sha512-VML/2o1/KGYkEf/stJJ+s9Ypn7jUKQPomGLGYso4JJFMFxVDyPNsjsI3MB3KLjlMOeH44gyaPdXC6rik2WXvUQ==",
- "dependencies": {
- "@shikijs/core": "3.2.1",
- "@shikijs/engine-javascript": "3.2.1",
- "@shikijs/engine-oniguruma": "3.2.1",
- "@shikijs/langs": "3.2.1",
- "@shikijs/themes": "3.2.1",
- "@shikijs/types": "3.2.1",
+ "version": "3.3.0",
+ "resolved": "https://registry.npmjs.org/shiki/-/shiki-3.3.0.tgz",
+ "integrity": "sha512-j0Z1tG5vlOFGW8JVj0Cpuatzvshes7VJy5ncDmmMaYcmnGW0Js1N81TOW98ivTFNZfKRn9uwEg/aIm638o368g==",
+ "dependencies": {
+ "@shikijs/core": "3.3.0",
+ "@shikijs/engine-javascript": "3.3.0",
+ "@shikijs/engine-oniguruma": "3.3.0",
+ "@shikijs/langs": "3.3.0",
+ "@shikijs/themes": "3.3.0",
+ "@shikijs/types": "3.3.0",
"@shikijs/vscode-textmate": "^10.0.2",
"@types/hast": "^3.0.4"
}
@@ -5930,6 +6638,11 @@
"node": ">=18"
}
},
+ "node_modules/tiny-inflate": {
+ "version": "1.0.3",
+ "resolved": "https://registry.npmjs.org/tiny-inflate/-/tiny-inflate-1.0.3.tgz",
+ "integrity": "sha512-pkY1fj1cKHb2seWDy0B16HeWyczlJA9/WW3u3c4z/NiWDsO3DOU5D7nhTLE9CF0yXv/QZFY7sEJmj24dK+Rrqw=="
+ },
"node_modules/tinyexec": {
"version": "0.3.2",
"resolved": "https://registry.npmjs.org/tinyexec/-/tinyexec-0.3.2.tgz",
@@ -5937,12 +6650,12 @@
"license": "MIT"
},
"node_modules/tinyglobby": {
- "version": "0.2.12",
- "resolved": "https://registry.npmjs.org/tinyglobby/-/tinyglobby-0.2.12.tgz",
- "integrity": "sha512-qkf4trmKSIiMTs/E63cxH+ojC2unam7rJ0WrauAzpT3ECNTxGRMlaXxVbfxMUC/w0LaYk6jQ4y/nGR9uBO3tww==",
+ "version": "0.2.13",
+ "resolved": "https://registry.npmjs.org/tinyglobby/-/tinyglobby-0.2.13.tgz",
+ "integrity": "sha512-mEwzpUgrLySlveBwEVDMKk5B57bhLPYovRfPAXD5gA/98Opn0rCDj3GtLwFvCvH5RK9uPCExUROW5NjDwvqkxw==",
"license": "MIT",
"dependencies": {
- "fdir": "^6.4.3",
+ "fdir": "^6.4.4",
"picomatch": "^4.0.2"
},
"engines": {
@@ -6002,8 +6715,7 @@
"version": "2.8.1",
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.1.tgz",
"integrity": "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w==",
- "license": "0BSD",
- "optional": true
+ "license": "0BSD"
},
"node_modules/type-fest": {
"version": "4.37.0",
@@ -6038,10 +6750,9 @@
"license": "MIT"
},
"node_modules/ultrahtml": {
- "version": "1.5.3",
- "resolved": "https://registry.npmjs.org/ultrahtml/-/ultrahtml-1.5.3.tgz",
- "integrity": "sha512-GykOvZwgDWZlTQMtp5jrD4BVL+gNn2NVlVafjcFUJ7taY20tqYdwdoWBFy6GBJsNTZe1GkGPkSl5knQAjtgceg==",
- "license": "MIT"
+ "version": "1.6.0",
+ "resolved": "https://registry.npmjs.org/ultrahtml/-/ultrahtml-1.6.0.tgz",
+ "integrity": "sha512-R9fBn90VTJrqqLDwyMph+HGne8eqY1iPfYhPzZrvKpIfwkWZbcYlfpsb8B9dTvBfpy1/hqAD7Wi8EKfP9e8zdw=="
},
"node_modules/uncrypto": {
"version": "0.1.3",
@@ -6055,6 +6766,24 @@
"integrity": "sha512-Ny6QZ2Nju20vw1SRHe3d9jVu6gJ+4e3+MMpqu7pqE5HT6WsTSlce++GQmK5UXS8mzV8DSYHrQH+Xrf2jVcuKNg==",
"license": "MIT"
},
+ "node_modules/unicode-properties": {
+ "version": "1.4.1",
+ "resolved": "https://registry.npmjs.org/unicode-properties/-/unicode-properties-1.4.1.tgz",
+ "integrity": "sha512-CLjCCLQ6UuMxWnbIylkisbRj31qxHPAurvena/0iwSVbQ2G1VY5/HjV0IRabOEbDHlzZlRdCrD4NhB0JtU40Pg==",
+ "dependencies": {
+ "base64-js": "^1.3.0",
+ "unicode-trie": "^2.0.0"
+ }
+ },
+ "node_modules/unicode-trie": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/unicode-trie/-/unicode-trie-2.0.0.tgz",
+ "integrity": "sha512-x7bc76x0bm4prf1VLg79uhAzKw8DVboClSN5VxJuQ+LKDOVEW9CdH+VY7SP+vX7xCYQqzzgQpFqz15zeLvAtZQ==",
+ "dependencies": {
+ "pako": "^0.2.5",
+ "tiny-inflate": "^1.0.0"
+ }
+ },
"node_modules/unified": {
"version": "11.0.5",
"resolved": "https://registry.npmjs.org/unified/-/unified-11.0.5.tgz",
@@ -6074,6 +6803,16 @@
"url": "https://opencollective.com/unified"
}
},
+ "node_modules/unifont": {
+ "version": "0.4.1",
+ "resolved": "https://registry.npmjs.org/unifont/-/unifont-0.4.1.tgz",
+ "integrity": "sha512-zKSY9qO8svWYns+FGKjyVdLvpGPwqmsCjeJLN1xndMiqxHWBAhoWDMYMG960MxeV48clBmG+fDP59dHY1VoZvg==",
+ "license": "MIT",
+ "dependencies": {
+ "css-tree": "^3.0.0",
+ "ohash": "^2.0.0"
+ }
+ },
"node_modules/unist-util-find-after": {
"version": "5.0.0",
"resolved": "https://registry.npmjs.org/unist-util-find-after/-/unist-util-find-after-5.0.0.tgz",
@@ -6332,7 +7071,6 @@
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz",
"integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==",
- "dev": true,
"license": "MIT"
},
"node_modules/vfile": {
@@ -6378,13 +7116,17 @@
}
},
"node_modules/vite": {
- "version": "6.2.3",
- "resolved": "https://registry.npmjs.org/vite/-/vite-6.2.3.tgz",
- "integrity": "sha512-IzwM54g4y9JA/xAeBPNaDXiBF8Jsgl3VBQ2YQ/wOY6fyW3xMdSoltIV3Bo59DErdqdE6RxUfv8W69DvUorE4Eg==",
+ "version": "6.3.4",
+ "resolved": "https://registry.npmjs.org/vite/-/vite-6.3.4.tgz",
+ "integrity": "sha512-BiReIiMS2fyFqbqNT/Qqt4CVITDU9M9vE+DKcVAsB+ZV0wvTKd+3hMbkpxz1b+NmEDMegpVbisKiAZOnvO92Sw==",
+ "license": "MIT",
"dependencies": {
"esbuild": "^0.25.0",
+ "fdir": "^6.4.4",
+ "picomatch": "^4.0.2",
"postcss": "^8.5.3",
- "rollup": "^4.30.1"
+ "rollup": "^4.34.9",
+ "tinyglobby": "^0.2.13"
},
"bin": {
"vite": "bin/vite.js"
@@ -6708,10 +7450,9 @@
}
},
"node_modules/zod-to-json-schema": {
- "version": "3.24.3",
- "resolved": "https://registry.npmjs.org/zod-to-json-schema/-/zod-to-json-schema-3.24.3.tgz",
- "integrity": "sha512-HIAfWdYIt1sssHfYZFCXp4rU1w2r8hVVXYIlmoa0r0gABLs5di3RCqPU5DDROogVz1pAdYBaz7HK5n9pSUNs3A==",
- "license": "ISC",
+ "version": "3.24.5",
+ "resolved": "https://registry.npmjs.org/zod-to-json-schema/-/zod-to-json-schema-3.24.5.tgz",
+ "integrity": "sha512-/AuWwMP+YqiPbsJx5D6TfgRTc4kTLjsh5SOcd4bLsfUg2RcEXrFMJl1DGgdHy2aCfsIA/cr/1JM0xcB2GZji8g==",
"peerDependencies": {
"zod": "^3.24.1"
}
diff --git a/package.json b/package.json
index bb1e002..d1c9f0c 100644
--- a/package.json
+++ b/package.json
@@ -9,12 +9,14 @@
"astro": "astro"
},
"dependencies": {
- "@astrojs/mdx": "^4.2.2",
- "@astrojs/netlify": "^6.2.4",
+ "@astrojs/mdx": "^4.2.6",
+ "@astrojs/netlify": "^6.3.2",
"@astrojs/rss": "^4.0.11",
- "@astrojs/sitemap": "^3.3.0",
+ "@astrojs/sitemap": "^3.3.1",
+ "@astrojs/starlight": "^0.34.2",
+ "@astrojs/starlight-tailwind": "^4.0.1",
"@tailwindcss/vite": "^4.0.13",
- "astro": "^5.5.5",
+ "astro": "^5.7.10",
"tailwindcss": "^4.0.13"
},
"devDependencies": {
diff --git a/public/versions.json b/public/versions.json
new file mode 100644
index 0000000..6fb4107
--- /dev/null
+++ b/public/versions.json
@@ -0,0 +1,10 @@
+[
+ {
+ "label": "current",
+ "url": "https://www.serverlessworkflow.io"
+ },
+ {
+ "label": "v0.9",
+ "url": "https://0-9.serverlessworkflow.io"
+ }
+]
\ No newline at end of file
diff --git a/public/webcomponents/version-select/version-select.js b/public/webcomponents/version-select/version-select.js
new file mode 100644
index 0000000..5b1edfe
--- /dev/null
+++ b/public/webcomponents/version-select/version-select.js
@@ -0,0 +1,179 @@
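+// Custom element that lists the published documentation versions from
+// /versions.json and navigates to the selected deployment.
+// Usage (for example inside a Starlight component override):
+//   <version-select></version-select>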
+class VersionSelect extends HTMLElement {
+ constructor() {
+ super();
+ this.attachShadow({ mode: 'open' });
+ }
+
+ connectedCallback() {
+ this.#render();
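+ // Render a disabled "loading..." placeholder first, then re-render once the versions arrive.
+ // The canonical URL is tried first; a network/CORS failure falls back to the copy served by the current deployment.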
+ fetch('https://serverlessworkflow.io/versions.json')
+ .catch(_ => fetch('/versions.json'))
+ .then(res => res.json())
+ .then(versions => this.#render(versions))
+ .catch(err => console.error(`Unable to fetch versions: ${err}`));
+ }
+
+ #createOptionElement(version) {
+ const option = document.createElement('option');
+ option.value = version.url;
+ option.textContent = version.label;
+ option.selected = version.selected ?? false;
+ option.disabled = version.disabled ?? false;
+ return option;
+ }
+
+ #render(versions) {
+ const shadow = this.shadowRoot;
+ while (shadow.firstChild) {
+ shadow.removeChild(shadow.firstChild);
+ }
+ const select = document.createElement('select');
+ if (!versions) {
+ select.appendChild(this.#createOptionElement({
+ label: 'loading...',
+ url: '',
+ selected: true,
+ disabled: true,
+ }));
+ select.disabled = true;
+ }
+ else {
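+ // Pre-select the entry whose deployment origin matches the current page.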
+ versions.forEach(version => select.appendChild(this.#createOptionElement({
+ ...version,
+ selected: version.url.startsWith(window.location.origin)
+ })));
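+ // On change, navigate to the same path (and hash) on the chosen deployment.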
+ select.addEventListener('change', e => {
+ const url = e.target.value;
+ if (!url.startsWith(window.location.origin)) {
+ window.location.assign(
+ window.location.pathname
+ ? url + window.location.pathname + window.location.hash
+ : url,
+ );
+ }
+ });
+ }
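+ // Wrap the select in a label with a visually hidden caption and two inline SVG icons, mirroring the markup of Starlight's own Select component.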
+ const containerLabel = document.createElement('label');
+ const label = select.options[select.selectedIndex].text;
+ const span = document.createElement('span');
+ span.textContent = label;
+ span.classList.add('sr-only');
+ containerLabel.appendChild(span);
+ this.#appendIcon(
+ containerLabel,
+ `M11.328 0.198L11.328 0.198L12.924 0.198Q13.260 0.240 13.848 0.303Q14.436 0.366 14.772 0.450L14.772 0.450Q17.124 1.038 19.098 2.424L19.098 2.424Q20.694 3.642 21.576 5.028L21.576 5.028Q23.298 7.296 23.676 10.278L23.676 10.278Q23.676 10.404 23.739 10.698Q23.802 10.992 23.802 11.202L23.802 11.202L23.802 12.924Q23.760 13.176 23.697 13.659Q23.634 14.142 23.550 14.352L23.550 14.352Q22.920 16.914 21.597 18.804Q20.274 20.694 18.048 22.122L18.048 22.122Q16.788 22.794 15.822 23.130L15.822 23.130Q14.604 23.592 13.428 23.676L13.428 23.676Q13.302 23.676 13.050 23.739Q12.798 23.802 12.672 23.802L12.672 23.802L10.950 23.802Q10.698 23.760 10.215 23.697Q9.732 23.634 9.522 23.550L9.522 23.550Q6.960 22.920 5.070 21.597Q3.180 20.274 1.752 18.048L1.752 18.048Q1.080 16.788 0.744 15.822L0.744 15.822Q0.282 14.604 0.198 13.428L0.198 13.428Q0.282 13.344 0.240 13.071Q0.198 12.798 0.198 12.672L0.198 12.672L0.198 11.076Q0.240 10.740 0.303 10.152Q0.366 9.564 0.450 9.228L0.450 9.228Q1.038 6.876 2.424 4.902L2.424 4.902Q3.642 3.138 5.952 1.752L5.952 1.752Q8.052 0.492 10.278 0.324L10.278 0.324Q10.488 0.324 10.824 0.261Q11.160 0.198 11.328 0.198ZM2.172 12.252L2.172 12.252Q2.214 14.898 3.579 17.124Q4.944 19.350 7.212 20.673Q9.480 21.996 12.168 21.975Q14.856 21.954 17.145 20.589Q19.434 19.224 20.736 16.872Q22.038 14.520 21.996 11.748L21.996 11.748Q21.912 9.144 20.526 6.918Q19.140 4.692 16.872 3.432L16.872 3.432Q14.478 2.088 11.748 2.172L11.748 2.172Q9.144 2.214 6.918 3.558Q4.692 4.902 3.432 7.170L3.432 7.170Q2.088 9.522 2.172 12.252ZM12.546 11.958L12.546 12Q12.672 11.874 12.987 11.685Q13.302 11.496 13.428 11.328L13.428 11.328Q15.822 9.774 16.872 8.976L16.872 8.976L16.998 8.892Q17.124 8.808 17.124 8.724L17.124 8.724Q17.292 8.514 17.565 8.577Q17.838 8.640 18.027 8.829Q18.216 9.018 18.174 9.291Q18.132 9.564 17.922 9.774L17.922 9.774L15.780 11.244Q14.352 12.210 13.722 12.798L13.722 12.798L12.252 13.848Q11.958 14.016 11.643 13.890Q11.328 13.764 11.328 13.428L11.328 13.428L11.328 4.902Q11.328 4.272 11.874 4.272L11.874 4.272Q12.084 4.146 12.315 4.314Q12.546 4.482 12.546 4.776L12.546 4.776L12.546 12L12.546 11.958Z`,
+ 'label-icon'
+ );
+ containerLabel.appendChild(select);
+ this.#appendIcon(
+ containerLabel,
+ `M17 9.17a1 1 0 0 0-1.41 0L12 12.71 8.46 9.17a1 1 0 1 0-1.41 1.42l4.24 4.24a1.002 1.002 0 0 0 1.42 0L17 10.59a1.002 1.002 0 0 0 0-1.42Z`,
+ 'caret'
+ );
+ this.#appendStyle(shadow);
+ shadow.appendChild(containerLabel);
+ }
+
+ #appendIcon(container, definition, cssClass) {
+ const icon = document.createElementNS('http://www.w3.org/2000/svg', 'svg');
+ icon.classList.add('icon');
+ icon.classList.add(cssClass);
+ icon.setAttribute('width', 16);
+ icon.setAttribute('height', 16);
+ icon.setAttribute('viewBox', '0 0 24 24');
+ icon.setAttribute('fill', 'currentColor');
+ const path = document.createElementNS('http://www.w3.org/2000/svg', 'path');
+ path.setAttribute('d', definition);
+ icon.appendChild(path);
+ container.appendChild(icon);
+ }
+
+ // Style taken from Starlight's select: https://github.com/withastro/starlight/blob/ca693feb4b6aa9f26b3d536d284288773b788ac6/packages/starlight/components/Select.astro
+ #appendStyle(container) {
+ const style = document.createElement('style');
+ style.textContent = `
+ @layer starlight.core {
+ label {
+ --sl-label-icon-size: 0.875rem;
+ --sl-caret-size: 1.25rem;
+ --sl-inline-padding: 0.5rem;
+ position: relative;
+ display: flex;
+ align-items: center;
+ gap: 0.25rem;
+ color: var(--sl-color-gray-1);
+ }
+
+ label:hover {
+ color: var(--sl-color-gray-2);
+ }
+
+ .sr-only {
+ position: absolute;
+ width: 1px;
+ height: 1px;
+ padding: 0;
+ margin: -1px;
+ overflow: hidden;
+ clip: rect(0, 0, 0, 0);
+ white-space: nowrap;
+ border-width: 0;
+ }
+
+ .icon {
+ position: absolute;
+ top: 50%;
+ transform: translateY(-50%);
+ pointer-events: none;
+ }
+
+ select {
+ border: 0;
+ padding-block: 0.625rem;
+ padding-inline: calc(var(--sl-label-icon-size) + var(--sl-inline-padding) + 0.25rem)
+ calc(var(--sl-caret-size) + var(--sl-inline-padding) + 0.25rem);
+ margin-inline: calc(var(--sl-inline-padding) * -1);
+ width: calc(var(--sl-select-width) + var(--sl-inline-padding) * 2);
+ background-color: transparent;
+ text-overflow: ellipsis;
+ color: inherit;
+ cursor: pointer;
+ appearance: none;
+ }
+
+ option {
+ background-color: var(--sl-color-bg-nav);
+ color: var(--sl-color-gray-1);
+ }
+
+ @media (min-width: 50rem) {
+ select {
+ font-size: var(--sl-text-sm);
+ }
+ }
+ }
+
+ @layer starlight.components {
+ .label-icon {
+ font-size: var(--sl-label-icon-size);
+ inset-inline-start: 0;
+ }
+
+ .caret {
+ font-size: var(--sl-caret-size);
+ inset-inline-end: 0;
+ }
+
+ svg {
+ color: var(--sl-icon-color);
+ font-size: var(--sl-icon-size, 1em);
+ width: 1em;
+ height: 1em;
+ }
+ }`;
+ container.appendChild(style);
+ }
+}
+customElements.define('version-select', VersionSelect);
\ No newline at end of file
diff --git a/src/components/Header.astro b/src/components/Header.astro
index 6eac2cb..705b8bd 100644
--- a/src/components/Header.astro
+++ b/src/components/Header.astro
@@ -11,6 +11,7 @@ import ThemeIcon from './ThemeIcon.astro';
diff --git a/src/components/ThemeIcon.astro b/src/components/ThemeIcon.astro
index 13bcc2e..7f39cd3 100644
--- a/src/components/ThemeIcon.astro
+++ b/src/components/ThemeIcon.astro
@@ -22,9 +22,10 @@
\ No newline at end of file
diff --git a/src/content.config.ts b/src/content.config.ts
index 25631fc..934a68b 100644
--- a/src/content.config.ts
+++ b/src/content.config.ts
@@ -1,5 +1,7 @@
import { glob } from 'astro/loaders';
import { defineCollection, z } from 'astro:content';
+import { docsLoader } from '@astrojs/starlight/loaders';
+import { docsSchema } from '@astrojs/starlight/schema';
const mdExtensions = ['markdown', 'mdown', 'mkdn', 'mkd', 'mdwn', 'md', 'mdx'];
const getGlobLoader = (contentType: string, extensions: string) => glob({
@@ -39,20 +41,14 @@ const example = defineCollection({
loader: getGlobLoader('examples', `{${mdExtensions.join(',')}}`),
schema: exampleSchema
});
-/*
-const docs = defineCollection({
- loader: getLoader('docs'),
- schema: z.object({
- title: z.string(),
- description: z.string()
- })
-});
-*/
+
+const docs = defineCollection({ loader: docsLoader(), schema: docsSchema() });
+
export const collections = {
blog,
specErrorV1,
example,
- //docs,
+ docs,
};
export type CollectionContent = {
diff --git a/src/content/docs/docs/call-tasks/async-api.mdx b/src/content/docs/docs/call-tasks/async-api.mdx
new file mode 100644
index 0000000..1fdbefa
--- /dev/null
+++ b/src/content/docs/docs/call-tasks/async-api.mdx
@@ -0,0 +1,123 @@
+---
+title: "AsyncAPI Call Task (`call: asyncapi`)"
+sidebar:
+ order: 50
+ label: AsyncAPI Call
+---
+{/* Examples are validated */}
+
+## Purpose
+
+The AsyncAPI Call task enables workflows to interact with message brokers and event-driven services described by an [AsyncAPI](https://www.asyncapi.com/) specification document.
+
+This allows workflows to publish messages to channels or subscribe to messages from channels defined in the AsyncAPI document.
+
+## Usage Examples
+
+### Example: Publishing a Message
+
+```yaml
+document:
+  dsl: '1.0.0' # Workflow DSL version
+ namespace: test
+ name: asyncapi-publish-example
+ version: '0.1.0'
+do:
+ - publishGreeting:
+ call: asyncapi
+ with:
+ # Reference to the AsyncAPI document
+ document:
+ endpoint: https://broker.example.com/docs/asyncapi.json
+ operation: sendGreeting
+ server:
+ name: productionBroker
+ variables:
+ environment: prod
+ # Define the message to publish
+ message:
+ payload:
+ greeting: "Hello from workflow ${ $workflow.id }"
+ headers:
+ traceId: "${ $context.traceId }"
+ # Output typically confirms publish success/failure, specifics vary
+ - afterPublish:
+ # ...
+```
+
+### Example: Subscribing to Messages
+
+```yaml
+document:
+ dsl: '1.0.0' # Workflow DSL version
+ namespace: test # Namespace for the workflow
+ name: asyncapi-subscribe-example # Name of the workflow
+ version: '0.1.0' # Version of this workflow
+do:
+  - subscribeToOrderEvents: # Task name for the subscription
+    call: asyncapi # Use the AsyncAPI task type
+    with:
+      # Reference to the AsyncAPI specification document
+      document:
+        endpoint: https://orders.example.com/api/asyncapi.yaml
+
+      # Operation ID for subscribing (must match one defined in the AsyncAPI doc)
+      operation: receiveOrderUpdates
+
+ # Specify protocol to select appropriate server
+ protocol: ws # WebSocket protocol
+
+ subscription:
+        # Optional: Filter messages based on payload content
+        filter: '${ .customerId == $context.targetCustomerId }'
+
+ # Define consumption limits
+ consume:
+ amount: 10 # Max 10 messages
+ for: { minutes: 5 } # Or max 5 minutes, whichever comes first
+
+ # Process each consumed message
+ foreach:
+ item: msg # Variable name for the current message
+ do:
+ # Log each order update
+ - logUpdate:
+ call: logMessage
+ with:
+ message: "Received order update: ${ .msg.payload.orderId }"
+
+ # Conditionally notify shipping if status is SHIPPED
+ - checkStatus:
+ call: notifyShipping
+ if: "${ .msg.payload.status == 'SHIPPED' }"
+
+ # Define the output of the foreach loop (and thus the task)
+ output:
+ as:
+          processedCount: "${ . | length }" # Total messages processed
+ lastOrderId: "${ .[-1]?.msg.payload.orderId }" # Last order ID
+
+ - afterSubscription:
+ # ...
+```
+
+## Configuration Options
+
+The configuration for an AsyncAPI call is provided within the `with` property of the `call: asyncapi` task.
+
+### `with` (Object, Required)
+
+* **`document`** (Object, Required): Defines the location of the AsyncAPI specification document (JSON or YAML). Contains:
+ * `endpoint` (Object, Required): Specifies the location with `uri` (String | Object, Required) and optional `authentication` (String | Object).
+* **`operation`** (String, Required): The operation (publish or subscribe) to invoke, as defined within the AsyncAPI `document`.
+* **`server`** (Object, Optional): Configuration for connecting to a specific server defined in the AsyncAPI document. If omitted, the runtime selects a suitable server based on the operation and `protocol`. Contains:
+ * `name` (String, Required): The name of the server (must match a server name defined in the AsyncAPI document under the specified operation/channel).
+ * `variables` (Object, Optional): A key/value map to override [Server Variables](https://www.asyncapi.com/docs/reference/specification/v3.0.0#serverVariableObject) defined in the AsyncAPI document for the selected server.
+* **`protocol`** (String, Optional): The protocol to use, helping select the target server if `server` is not specified or if multiple servers support the operation. Supported values include: `amqp`, `amqp1`, `anypointmq`, `googlepubsub`, `http`, `ibmmq`, `jms`, `kafka`, `mercure`, `mqtt`, `mqtt5`, `nats`, `pulsar`, `redis`, `sns`, `solace`, `sqs`, `stomp`, `ws`.
+* **`message`** (Object, Conditionally Required): Defines the message to be published. Required if the `operation` represents a *publish* action. Contains details matching the [AsyncAPI Message Object](https://www.asyncapi.com/docs/reference/specification/v3.0.0#messageObject), such as:
+ * `payload` (Any, Optional): The main content/body of the message.
+ * `headers` (Object, Optional): Application-specific headers for the message.
+ * `correlationId` (String, Optional): ID used for message correlation.
+ * (Other properties like `contentType`, `name`, `title`, `summary`, `description`, `tags`, `externalDocs`, `bindings`, `examples`, `traits` may be supported depending on runtime capabilities).
+* **`subscription`** (Object, Conditionally Required): Defines how to subscribe to and consume messages. Required if the `operation` represents a *subscribe* action. Contains:
+  * `filter` (String, Optional): A [Runtime Expression](/docs/core-concepts/runtime-expressions/) used to filter consumed messages based on their content (see the subscription example above).
+  * `consume` (Object, Required): Defines the consumption limits, such as `amount` (maximum number of messages) and/or `for` (maximum duration).
+  * `foreach` (Object, Optional): Defines how each consumed message is processed, including the variable name for the current message (`item`) and a `do` block of sub-tasks.
\ No newline at end of file
diff --git a/src/content/docs/docs/call-tasks/function.mdx b/src/content/docs/docs/call-tasks/function.mdx
new file mode 100644
index 0000000..a33eb0b
--- /dev/null
+++ b/src/content/docs/docs/call-tasks/function.mdx
@@ -0,0 +1,131 @@
+---
+title: Function Call Task (`call`)
+sidebar:
+ order: 10
+ label: Function Call
+---
+
+import CommonTaskDataFlow from '../../shared/common-task-data-flow.mdx';
+import CommonTaskFlowControl from '../../shared/common-task-flow-control.mdx';
+
+{/* Examples are validated */}
+
+## Purpose
+
+The generic `Call` task enables the execution of a specified, named function within a workflow. This allows seamless integration with custom business logic, reusable workflow components, or functions defined externally (e.g., in a [Resource Catalog](/docs/resources-configuration/resource-catalog/)).
+
+*Note: This describes the generic function call using a function name. For specific protocol interactions like HTTP, gRPC, OpenAPI, or AsyncAPI, refer to their dedicated task pages.*
+
+## Usage Example
+
+```yaml
+document:
+ dsl: '1.0.0'
+ namespace: custom-functions
+ name: call-custom-example
+ version: '1.0.0'
+use:
+ functions:
+ validateAddress:
+ call: expression
+ with:
+ code: |
+ function validateAddress(street, city, zipCode) {
+ // Basic validation logic
+ if (!street || !city || !zipCode) {
+ return { valid: false, error: "Missing required fields" };
+ }
+ // Additional validation logic here
+ return { valid: true, normalized: { street, city, zipCode } };
+ }
+do:
+ - checkAddress:
+ call: validateAddress
+ with:
+ street: "${ .customer.address.street }"
+ city: "${ .customer.address.city }"
+ zipCode: "${ .customer.address.zip }"
+ # Output of the 'validateAddress' function becomes output of this task
+ - processValidationResult:
+ switch:
+ - caseValid:
+          when: "${ .valid }"
+ then: setNormalizedAddress
+ - default:
+ then: raiseValidationError
+ - setNormalizedAddress:
+ set:
+        normalizedAddress: "${ .normalized }"
+ then: exit
+ - raiseValidationError:
+ raise:
+ error:
+ type: "https://example.com/errors/validation"
+ status: 400
+          detail: "${ .error }"
+ then: exit
+
+```
+
+In this example, the `checkAddress` task calls a function named `validateAddress`, passing arguments derived from the current context or input using the `with` property.
+
+## Additional Examples
+
+### Example: Calling a Function with No Arguments
+
+```yaml
+do:
+ - triggerProcessing:
+ # Assumes 'startBackgroundJob' function requires no specific arguments
+ call: startBackgroundJob
+ # No 'with' property needed
+ - monitorJob:
+ # ...
+```
+
+### Example: Calling a Catalog Function
+
+```yaml
+# Assumes 'globalUtils' catalog is imported via workflow.use.catalogs
+# Assumes 'logMessage:1.0' function exists in that catalog
+do:
+ - recordInfo:
+ call: logMessage:1.0@globalUtils # Function:Version@CatalogName
+ with:
+ level: INFO
+ message: "Processed item ${ .itemId }"
+```
+
+## Configuration Options
+
+### `call` (String, Required)
+
+Specifies the **name** of the function to execute. This name must correspond to:
+
+* A function defined in the workflow's `use.functions` section.
+* A function available from an imported [Resource Catalog](/docs/resources-configuration/resource-catalog/).
+* A built-in function provided by the runtime (like `http`, `grpc`, etc., although using their specific task pages is recommended).
+
+### `with` (Object, Optional)
+
+A simple key/value map defining the arguments to pass to the called function. Values can be static or derived using [Runtime Expressions](/docs/core-concepts/runtime-expressions/).
+
+### Authentication
+
+The generic function `call` task itself does not have a dedicated `authentication` property within its `with` block. If the function being called requires authentication to perform its internal operations, that logic must be handled *inside* the function's implementation (potentially using secrets or context passed via `with`).
+
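+For example, a minimal sketch of passing a credential to a function as an ordinary argument (the `chargeCard` function and `stripeApiKey` secret are hypothetical; the secret is assumed to be declared under `use.secrets`):
+
+```yaml
+do:
+  - chargePayment:
+      call: chargeCard # Hypothetical custom function
+      with:
+        amount: "${ .order.total }"
+        # The secret is passed as a regular argument; the function itself
+        # uses it to authenticate against the external service
+        apiKey: "${ $secrets.stripeApiKey }"
+```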
+If the function *definition* itself is hosted in a protected location (like a secured [Resource Catalog](/docs/resources-configuration/resource-catalog/)), authentication would be configured on the catalog's `endpoint` definition, not on the `call` task.
+
+**Error Handling**: If authentication fails while trying to *access the function definition* from a protected catalog, the runtime should raise an `Authentication` or `Authorization` error related to accessing the catalog endpoint. Errors related to authentication *within* the called function's logic are the responsibility of the function itself to handle or raise appropriately.
+
+### Data Flow
+
+<CommonTaskDataFlow />
+
+**Note**:
+* The `transformedInput` to the `Call` task is available for use within runtime expressions in the `with` arguments.
+* The `rawOutput` of the `Call` task is the result returned by the executed function.
+* Standard `output.as` and `export.as` process this function result.
+
+### Flow Control
+
+<CommonTaskFlowControl />
diff --git a/src/content/docs/docs/call-tasks/grpc.mdx b/src/content/docs/docs/call-tasks/grpc.mdx
new file mode 100644
index 0000000..9ff0204
--- /dev/null
+++ b/src/content/docs/docs/call-tasks/grpc.mdx
@@ -0,0 +1,123 @@
+---
+title: "gRPC Call Task (`call: grpc`)"
+sidebar:
+ order: 30
+ label: gRPC Call
+---
+
+import CommonTaskDataFlow from '../../shared/common-task-data-flow.mdx';
+import CommonTaskFlowControl from '../../shared/common-task-flow-control.mdx';
+
+{/* Examples are validated */}
+
+## Purpose
+
+The gRPC Call task enables workflows to interact with external systems using the high-performance [gRPC](https://grpc.io/) protocol.
+
+## Usage Example
+
+```yaml
+document:
+ dsl: '1.0.0'
+ namespace: test
+ name: grpc-example
+ version: '0.1.0'
+do:
+ - greetUser:
+ call: grpc
+ with:
+ # Reference to the Protobuf definition file
+ proto:
+ endpoint: file://app/protos/greet.proto
+ # Service details
+ service:
+ name: GreeterApi.Greeter # Service name defined in proto
+ host: localhost
+ port: 5011
+ authentication:
+ use: myGrpcAuth # Optional authentication
+ # Method and arguments
+ method: SayHello # Method name defined in proto
+ arguments:
+ name: "${ .user.preferredDisplayName }"
+ # Output is the response message from the SayHello method
+ - processGreeting:
+ # Store the greeting response
+ set:
+        greeting: "${ . }" # Task input here is the SayHello response message
+```
+
+## Additional Examples
+
+### Example: Using Authentication
+
+```yaml
+document:
+ dsl: '1.0.0'
+ namespace: test
+ name: grpc-auth-example
+ version: '0.1.0'
+use:
+ authentications:
+ myGrpcAuth:
+ oauth2:
+ authority: "https://auth.example.com"
+ client:
+ id: "client-id"
+ secret: "client-secret"
+ grant: "client_credentials"
+do:
+ - getAccountDetails:
+ call: grpc
+ with:
+ proto:
+ endpoint: "file://protos/account.proto"
+ service:
+ name: AccountService
+ host: secure-grpc
+ port: 50051
+ authentication:
+ use: myGrpcAuth
+ method: GetAccount
+ arguments:
+ accountId: "${ .user.id }"
+ then: processAccount
+ - processAccount:
+      # ...
+```
+
+## Configuration Options
+
+The configuration for a gRPC call is provided within the `with` property of the `call: grpc` task.
+
+### `with` (Object, Required)
+
+* **`proto`** (Object, Required): Defines the location of the Protobuf (`.proto`) file. Contains:
+ * `endpoint` (Object, Required): Specifies the location with `uri` (String | Object, Required) and optional `authentication` (String | Object).
+* **`service`** (Object, Required): Defines the target gRPC service.
+ * **`name`** (String, Required): The fully qualified name of the gRPC service (e.g., `package.ServiceName`) as defined in the `.proto` file.
+ * **`host`** (String, Required): The hostname or IP address of the gRPC server.
+ * **`port`** (Integer, Optional): The port number of the gRPC server.
+ * **`authentication`** (String | Object, Optional): Authentication details (inline definition or reference by name from `use.authentications`) needed to connect to the service.
+* **`method`** (String, Required): The name of the specific RPC method to call within the service, as defined in the `.proto` file.
+* **`arguments`** (Object, Optional): A key/value map representing the arguments required by the specified gRPC `method`. Values can be static or derived using [Runtime Expressions](/docs/core-concepts/runtime-expressions/).
+
+### Authentication
+
+The `service` object contains an optional `authentication` property where you can specify the authentication details needed to connect to the gRPC service. This can be an inline definition or a reference (by name) to a policy defined in `use.authentications`.
+
+See the main [Authentication](/docs/resources-configuration/authentication/) page for details on supported schemes like OAuth2, Bearer, etc.
+
+**Error Handling**: If the specified authentication fails during the connection attempt to the gRPC service (e.g., invalid token, credentials rejected), the runtime should typically raise an `Authentication` (401) or `Authorization` (403) error. This prevents the RPC method call and halts the task unless the error is caught by a `Try` block.
+
+### Data Flow
+
+<CommonTaskDataFlow />
+
+**Note**:
+* The `transformedInput` to the task is available for use within runtime expressions in the `with.arguments`.
+* The `rawOutput` of the task is the response message returned by the invoked gRPC method.
+* Standard `output.as` and `export.as` process this resulting response message.
+
+### Flow Control
+
+<CommonTaskFlowControl />
+
+**Note**: If the gRPC call fails (e.g., connection error, method not found, server error), a `Communication` error is typically raised, and the `then` directive is *not* followed (unless caught by `Try`).
diff --git a/src/content/docs/docs/call-tasks/http.mdx b/src/content/docs/docs/call-tasks/http.mdx
new file mode 100644
index 0000000..21923e2
--- /dev/null
+++ b/src/content/docs/docs/call-tasks/http.mdx
@@ -0,0 +1,149 @@
+---
+title: "HTTP Call Task (`call: http`)"
+sidebar:
+ order: 20
+ label: HTTP Call
+---
+
+import CommonTaskDataFlow from '../../shared/common-task-data-flow.mdx';
+import CommonTaskFlowControl from '../../shared/common-task-flow-control.mdx';
+
+{/* Examples are validated */}
+
+## Purpose
+
+The HTTP Call task enables workflows to interact with external services and APIs over the standard HTTP/HTTPS protocol.
+
+## Usage Example
+
+```yaml
+document:
+  dsl: '1.0.0' # Workflow DSL version
+ namespace: test
+ name: http-example
+ version: '0.1.0'
+do:
+ - getPetById:
+ call: http
+ with:
+ method: get
+ # URI Template used here for dynamic petId
+ endpoint:
+ uri: https://petstore.swagger.io/v2/pet/{petId}
+ headers:
+ Accept: "application/json"
+ # Assume petId is available in the task input or context
+ # Authentication would typically be defined on the endpoint or globally
+ # Output by default is the deserialized response body
+ then: processPetData
+ - processPetData:
+ # ... uses the pet data received in the response body ...
+```
+
+## Additional Examples
+
+### Example: POST Request with Body and Query Parameters
+
+```yaml
+do:
+ - createUser:
+ call: http
+ with:
+ method: post
+ endpoint: https://api.example.com/users
+ query:
+ source: workflow
+ headers:
+ Content-Type: application/json
+ X-Request-Id: "${ $workflow.id }"
+ body:
+ name: "${ .userDetails.name }"
+ email: "${ .userDetails.email }"
+ role: viewer
+ then: handleUserCreation
+```
+
+### Example: Getting the Full Response Object
+
+```yaml
+do:
+ - checkServiceStatus:
+ call: http
+ with:
+ method: get
+ endpoint:
+ uri: https://status.example.com/health
+ # Get the full response including status code and headers
+ output: response
+ # Allow redirects to be considered successful
+ redirect: true
+ then: analyzeStatus
+ - analyzeStatus:
+ # Task input will be like: { "status": 200, "headers": { ... }, "body": { "status": "UP" } }
+ set:
+ isHealthy: "${ .body.status == 'UP' }"
+ statusCode: "${ .status }"
+```
+
+### Example: Getting Raw Response (e.g., for Binary Data)
+
+```yaml
+do:
+ - downloadImage:
+ call: http
+ with:
+ method: get
+ endpoint:
+ uri: https://images.example.com/logo.png
+ # Get the raw response bytes (likely base64 encoded)
+ output: raw
+ # output.as could decode the base64 if needed
+ output:
+ as: "${ base64decode(.) }" # Example - assumes a base64decode function
+```
+
+## Configuration Options
+
+The configuration for an HTTP call is provided within the `with` property of the `call: http` task.
+
+### `with` (Object, Required)
+
+* **`method`** (String, Required): The HTTP request method (e.g., `get`, `post`, `put`, `delete`, `patch`).
+* **`endpoint`** (String | Object, Required): The target URL. Can be:
+ * A simple string URL.
+ * A [URI Template](/docs/resources-configuration/uri-templates/) string for simple variable substitution (e.g., `https://.../items/{itemId}`).
+ * An `Endpoint` object (inline definition or reference by name from `use.resources`) allowing specification of `uri` (String | Object, Required) and optional `authentication` (String | Object).
+* **`headers`** (Object, Optional): A key/value map of HTTP headers to include in the request.
+* **`body`** (Any, Optional): The HTTP request body. Can be any valid JSON/YAML structure or a string. The runtime typically serializes this based on the `Content-Type` header.
+* **`query`** (Object, Optional): A key/value map of query parameters to append to the URI.
+* **`output`** (String - `content` | `response` | `raw`, Optional, Default: `content`): Specifies the desired format of the task's `rawOutput`:
+ * `content` (Default): The deserialized body of the HTTP response.
+ * `response`: An object containing the full HTTP response details. It typically includes:
+ * `status` (Integer): The HTTP status code.
+ * `headers` (Object): A map of response headers.
+ * `body` (Any): The deserialized response body.
+ * `raw`: The raw response body, usually Base64 encoded if not plain text.
+* **`redirect`** (Boolean, Optional, Default: `false`): Controls handling of HTTP redirect status codes (3xx):
+ * `false` (Default): Only 2xx status codes are considered successful. 3xx codes will cause an error.
+ * `true`: Both 2xx and 3xx status codes are considered successful (implying redirects are followed, although the runtime behavior might vary). Errors are raised for other statuses (4xx, 5xx).
+
+### Authentication
+
+Authentication for HTTP calls is typically defined within the `Endpoint` object if you are using the object structure for the `endpoint` property. This allows you to associate an authentication policy (defined globally in `use.authentications` or inline) directly with the target URI.
+
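+For illustration, here is a sketch of an `Endpoint` object referencing a named policy (the `secureApiAuth` policy name, URL, and `apiToken` secret are assumptions):
+
+```yaml
+use:
+  authentications:
+    secureApiAuth: # Hypothetical named policy
+      bearer:
+        token: "${ $secrets.apiToken }"
+do:
+  - getReport:
+      call: http
+      with:
+        method: get
+        endpoint:
+          uri: https://api.example.com/reports/latest
+          # Reference the policy defined under use.authentications
+          authentication:
+            use: secureApiAuth
+```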
+See the main [Authentication](/docs/resources-configuration/authentication/) page for details on defining different authentication schemes (Basic, Bearer, OAuth2, etc.).
+
+**Error Handling**: If the authentication process itself fails (e.g., invalid credentials, unable to obtain OAuth2 token, token rejected by the server *before* the main request is processed), the runtime should typically raise an `Authentication` (401) or `Authorization` (403) error. This would prevent the actual HTTP request from being sent and halt the task unless the error is caught by a `Try` block.
+
+### Data Flow
+
+<CommonTaskDataFlow />
+
+**Note**:
+* The `transformedInput` to the task is available for use within runtime expressions in the `with` properties (`endpoint`, `headers`, `body`, `query`).
+* The `rawOutput` of the task depends on the `with.output` setting (`content`, `response`, or `raw`).
+* Standard `output.as` and `export.as` process this resulting `rawOutput`.
+
+### Flow Control
+
+<CommonTaskFlowControl />
+
+**Note**: If the HTTP call results in an unhandled error status code (e.g., 4xx, 5xx, or 3xx if `redirect: false`), a `Communication` error is typically raised, and the `then` directive is *not* followed (unless caught by `Try`).
\ No newline at end of file
diff --git a/src/content/docs/docs/call-tasks/open-api.mdx b/src/content/docs/docs/call-tasks/open-api.mdx
new file mode 100644
index 0000000..b11a03d
--- /dev/null
+++ b/src/content/docs/docs/call-tasks/open-api.mdx
@@ -0,0 +1,145 @@
+---
+title: "OpenAPI Call Task (`call: openapi`)"
+sidebar:
+ order: 40
+ label: OpenAPI Call
+---
+
+import CommonTaskDataFlow from '../../shared/common-task-data-flow.mdx';
+import CommonTaskFlowControl from '../../shared/common-task-flow-control.mdx';
+
+{/* Examples are validated */}
+
+## Purpose
+
+The OpenAPI Call task enables workflows to interact with external RESTful APIs that are described by
+an [OpenAPI](https://www.openapis.org/) specification document.
+
+This allows for type-safe interactions and leverages the API definition for validation and potentially generating client
+code or configurations.
+
+## Usage Example
+
+```yaml
+document:
+  dsl: '1.0.0' # Workflow DSL version
+ namespace: test
+ name: openapi-example
+ version: '0.1.0'
+do:
+ - findAvailablePets:
+ call: openapi
+ with:
+ # Reference to the OpenAPI document
+ document:
+ endpoint: https://petstore.swagger.io/v2/swagger.json
+ # ID of the operation to call (defined within the OpenAPI doc)
+ operationId: findPetsByStatus
+ # Parameters required by the operation
+ parameters:
+ status: available # Passed as query/path/header param based on OpenAPI spec
+ # authentication: myApiAuth # Optional authentication
+ # Output by default is the deserialized response body
+ then: processAvailablePets
+ - processAvailablePets:
+ # ... uses the list of available pets from the response ...
+```
+
+## Additional Examples
+
+### Example: Sending Different Parameter Types
+
+```yaml
+do:
+ - updatePetDetails:
+ call: openapi
+ with:
+ document:
+ endpoint:
+ uri: "https://petstore.swagger.io/v2/swagger.json"
+ operationId: "updatePetWithForm"
+ parameters:
+ petId: 123
+ name: "Fluffy Updated"
+ status: "pending"
+ authentication:
+ bearer:
+          token: "${ $secrets.petstore_auth }"
+ then: checkUpdateStatus
+```
+
+### Example: Getting the Full HTTP Response
+
+```yaml
+do:
+ - getPetAndCheckHeaders:
+ call: openapi
+ with:
+ document:
+ endpoint:
+ uri: "https://petstore.swagger.io/v2/swagger.json"
+ operationId: "getPetById"
+ parameters:
+ petId: "${ .targetPetId }"
+ output: response
+ then: analyzeHttpResponse
+ - analyzeHttpResponse:
+ set:
+ petData: "${ .body }"
+        contentType: '${ .headers["Content-Type"] }'
+```
+
+## Configuration Options
+
+The configuration for an OpenAPI call is provided within the `with` property of the `call: openapi` task.
+
+### `with` (Object, Required)
+
+* **`document`** (Object, Required): Defines the location of the OpenAPI specification document (JSON or YAML).
+ Contains:
+ * `endpoint` (Object, Required): Specifies the location with `uri` (String | Object, Required) and optional
+ `authentication` (String | Object).
+* **`operationId`** (String, Required): The unique identifier (`operationId`) of the specific API operation to invoke,
+ as defined within the OpenAPI `document`.
+* **`parameters`** (Object, Optional): A key/value map representing the parameters required by the specified
+ `operationId`. The runtime uses the OpenAPI document to determine how each parameter should be sent (e.g., path,
+ query, header, cookie, request body). Values can be static or derived
+ using [Runtime Expressions](/docs/core-concepts/runtime-expressions/).
+* **`authentication`** (String | Object, Optional): Authentication details (inline definition or reference by name from
+ `use.authentications`) needed to access the API, often corresponding to security schemes defined in the OpenAPI
+ document.
+* **`output`** (String - `content` | `response` | `raw`, Optional, Default: `content`): Specifies the desired format of
+ the task's `rawOutput` (same behavior as the `output` property in the [HTTP Call Task](/docs/call-tasks/http/)).
+ * When `response` is chosen, the output is an object typically containing:
+ * `status` (Integer): The HTTP status code.
+ * `headers` (Object): A map of response headers.
+ * `body` (Any): The deserialized response body.
+* **`redirect`** (Boolean, Optional, Default: `false`): Controls handling of HTTP redirect status codes (3xx) (same
+ behavior as the `redirect` property in the [HTTP Call Task](/docs/call-tasks/http/)).
+
+### Authentication
+
+The `with` object contains an optional `authentication` property. You can provide an inline definition or reference (by
+name) a policy from `use.authentications`. This authentication mechanism is used to access the API described by the
+OpenAPI document, often corresponding to `securitySchemes` defined within that document.
+
+See the main [Authentication](/docs/resources-configuration/authentication/) page for details on defining authentication policies.
+
+**Error Handling**: If the authentication specified fails *before* the underlying HTTP request is made (e.g., cannot
+retrieve OAuth2 token) or if the server rejects the credentials provided with a 401/403 status *specifically due to
+authentication*, the runtime should raise an `Authentication` (401) or `Authorization` (403) error. This halts the task
+unless caught by a `Try` block. General API errors (4xx/5xx) unrelated to the initial auth rejection are typically
+raised as `Communication` errors.
+
+### Data Flow
+
+<CommonTaskDataFlow />
+
+**Note**:
+* The `transformedInput` to the task is available for use within runtime expressions in the `with.parameters`.
+* The `rawOutput` of the task depends on the `with.output` setting (`content`, `response`, or `raw`), derived from the HTTP response of the underlying API call.
+* Standard `output.as` and `export.as` process this resulting `rawOutput`.
+
+### Flow Control
+
+<CommonTaskFlowControl />
+
+**Note**: If the underlying API call fails (e.g., validation error based on OpenAPI spec, unhandled error status code), a `Communication` or potentially `Validation` error is raised, and the `then` directive is *not* followed (unless caught by `Try`).
\ No newline at end of file
diff --git a/src/content/docs/docs/call-tasks/overview.mdx b/src/content/docs/docs/call-tasks/overview.mdx
new file mode 100644
index 0000000..b0fd318
--- /dev/null
+++ b/src/content/docs/docs/call-tasks/overview.mdx
@@ -0,0 +1,58 @@
+---
+title: Call Task Overview
+sidebar:
+ order: 0
+ label: Overview
+---
+{/* Examples are validated */}
+
+## Purpose & Concept
+
+The core concept of a "Call" task in the Serverless Workflow DSL is to enable interaction with external services, functions, or APIs. These tasks are the primary mechanism for orchestrating communication beyond the workflow's internal state management and control flow.
+
+Different types of `call` tasks cater to specific protocols or interaction patterns, providing a structured way to define how the workflow communicates with the outside world.
+
+Think of `call` tasks as the workflow's way of making requests or invoking functionality that resides outside of its own definition. This is crucial for integrating with existing systems, leveraging microservices, or accessing third-party APIs.
+
+## Types of Call Tasks
+
+* **`call: <functionName>`**:
+ * Invokes a named function defined either within the workflow (`use.functions`), imported from a [Resource Catalog](/docs/resources-configuration/resource-catalog/), or potentially provided by the runtime.
+ * This is the most generic call type, suitable for custom logic or reusable components.
+ * Configuration and arguments are passed via the `with` property.
+ * See: [Function Call Task](/docs/call-tasks/function/)
+
+* **`call: http`**:
+ * Performs standard HTTP/HTTPS requests (GET, POST, PUT, etc.) to interact with web services or REST APIs.
+ * Essential for web-based integrations.
+ * Configuration (method, endpoint, headers, body, query parameters, etc.) is provided within the `with` property.
+ * See: [HTTP Call Task](/docs/call-tasks/http/)
+
+* **`call: grpc`**:
+ * Interacts with services using the high-performance gRPC protocol, typically requiring a Protobuf definition.
+ * Suitable for efficient, low-latency communication between microservices.
+ * Configuration (proto definition, service details, method, arguments) is provided within the `with` property.
+ * See: [gRPC Call Task](/docs/call-tasks/grpc/)
+
+* **`call: openapi`**:
+ * Interacts with RESTful APIs described by an OpenAPI specification document.
+ * Leverages the OpenAPI document for operation details, validation, and parameter handling, promoting type safety.
+ * Configuration (OpenAPI document reference, operationId, parameters) is provided within the `with` property.
+ * See: [OpenAPI Call Task](/docs/call-tasks/open-api/)
+
+* **`call: asyncapi`**:
+ * Interacts with message brokers or event-driven services described by an AsyncAPI specification document.
+ * Used for publishing messages to channels or subscribing to messages from channels in event-driven architectures.
+ * Configuration (AsyncAPI document reference, operationId, message/subscription details) is provided within the `with` property.
+ * See: [AsyncAPI Call Task](/docs/call-tasks/async-api/)
+
+## Common Concepts
+
+While each call type has specific configuration options detailed on its respective page, they share common structural patterns and workflow interactions:
+
+* **`with` property**: This object is universally used to contain the specific configuration parameters and arguments required by the call type. It acts as the payload defining the details of the external interaction.
+* **Data Flow**: They generally follow the standard [Task Data Flow](/docs/core-concepts/data-flow-management/), where `input.from` prepares data for the task, the call is made using configurations from `with` (often utilizing the transformed input), and the result (`rawOutput`) is processed by `output.as` and `export.as`.
+* **Error Handling**: Failures during communication (e.g., network errors, non-success status codes, connection timeouts) typically raise a `Communication` error. Failures related to invalid requests based on specifications (like OpenAPI) might raise a `Validation` error. These errors can be caught and handled using a [Try Task](/docs/error-handling/try/), as shown in the sketch after this list.
+* **Authentication**: Most call types support an `authentication` property within their `with` block (or on referenced resources like `endpoint`), allowing secure interaction with protected services using mechanisms defined in the workflow's `use.authentications` section.
+
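+As an illustrative sketch (the endpoint is hypothetical; the error type URI follows the spec's standard error types), a `Try` task catching a `Communication` error raised by an HTTP call:
+
+```yaml
+do:
+  - fetchWithFallback:
+      try:
+        - fetchData:
+            call: http
+            with:
+              method: get
+              endpoint: https://api.example.com/data # Hypothetical endpoint
+      catch:
+        errors:
+          with:
+            type: https://serverlessworkflow.io/spec/1.0.0/errors/communication
+        do:
+          - useFallback:
+              # Fall back to an empty result instead of faulting the workflow
+              set:
+                data: []
+```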
+Please refer to the individual task pages linked above for detailed configuration options, examples, and specific behaviors.
\ No newline at end of file
diff --git a/src/content/docs/docs/control-flow/do.mdx b/src/content/docs/docs/control-flow/do.mdx
new file mode 100644
index 0000000..c24aa9f
--- /dev/null
+++ b/src/content/docs/docs/control-flow/do.mdx
@@ -0,0 +1,79 @@
+---
+title: Do
+sidebar:
+ order: 10
+---
+
+import CommonTaskDataFlow from '../../shared/common-task-data-flow.mdx';
+import CommonTaskFlowControl from '../../shared/common-task-flow-control.mdx';
+
+## Purpose
+
+The `Do` task is a fundamental control flow construct in the Serverless Workflow DSL. Its primary purpose is to define a
+sequence of one or more sub-tasks that are executed in the order they are declared.
+
+The output of one task in the sequence becomes the input for the next, facilitating data flow through the
+defined steps.
+
+## Basic Usage
+
+Here's a simple example of a `Do` task executing two sub-tasks sequentially:
+
+```yaml
+document:
+ dsl: '1.0.0'
+ namespace: examples
+ name: sequential-tasks
+ version: '1.0.0'
+do: # This is the main 'Do'
+ - step1:
+ do: # This is a `Do` task containing sub-tasks
+ - taskA:
+ set:
+ result: "Value from Task A"
+ - taskB:
+ set:
+ # Input to taskB is the output of taskA
+ resultB: "${ .result + ' and Task B' }"
+
+# Workflow output will be { "resultB": "Value from Task A and Task B" }
+```
+
+In this example, `taskA` executes first, setting the `result` field. Its output is then implicitly passed as input to
+`taskB`, which uses that result to compute its own `resultB`.
+
+## Configuration Options
+
+The `Do` task itself has standard task configuration options like `input`, `output`, `if`, and `then`. Its main defining
+characteristic is the `do` block containing the sequence of sub-tasks.
+
+### `do` (List\<TaskItem\>, Required)
+
+This mandatory property contains the list of sub-tasks to execute. Each entry pairs a unique sub-task name with the
+task definition to run (e.g., `set`, `call`, another `do`, etc.). The sub-tasks execute sequentially, in the order
+they appear in the list.
+
+```yaml
+do:
+ - myDoTask: # Name of the parent Do task
+ do: # The 'do' property containing the list of sub-tasks
+ - firstSubTask:
+ call: http
+ with:
+            endpoint: https://api.example.com/data
+ method: get
+ - secondSubTask:
+ set:
+            processedData: "${ . }" # Default HTTP call output is the response body
+```
+
+## Data Flow
+
+<CommonTaskDataFlow />
+
+## Flow Control
+
+<CommonTaskFlowControl />
+
diff --git a/src/content/docs/docs/control-flow/for.mdx b/src/content/docs/docs/control-flow/for.mdx
new file mode 100644
index 0000000..3b13525
--- /dev/null
+++ b/src/content/docs/docs/control-flow/for.mdx
@@ -0,0 +1,131 @@
+---
+title: For
+sidebar:
+ order: 20
+---
+
+import CommonTaskDataFlow from '../../shared/common-task-data-flow.mdx';
+import CommonTaskFlowControl from '../../shared/common-task-flow-control.mdx';
+
+## Purpose
+
+The `For` task provides a mechanism to iterate over a collection of items (an array or list). For each item in the
+collection, it can conditionally execute a defined block of sub-tasks (defined within a `do` block).
+
+This is useful for processing arrays or lists of data, applying the same set of operations to each element, and
+potentially filtering which elements are processed based on a condition.
+
+## Basic Usage
+
+Here's an example of iterating over an array of numbers and performing an action for each:
+
+```yaml
+document:
+ dsl: '1.0.0'
+ namespace: examples
+ name: for-loop-basic
+ version: '1.0.0'
+do:
+ - setup:
+ set:
+ numbers: [ 1, 2, 3, 4, 5 ]
+ - processNumbers:
+ for:
+ in: "${ .numbers }" # Expression evaluating to the collection to iterate
+ # each: item # Default variable name for the current item
+ # at: index # Default variable name for the current index
+ do:
+ - logItem:
+ # Inside the 'do' block, 'item' and 'index' are available in the scope
+ call: log # Assuming a 'log' function exists
+ with:
+ message: "Processing item ${ $index }: ${ $item }"
+
+# Workflow output after 'processNumbers' will typically be the output of 'setup':
+# { "numbers": [1, 2, 3, 4, 5] }
+# The 'For' task itself doesn't aggregate results from the loop by default.
+```
+
+In this example, the `processNumbers` task iterates over the `numbers` array provided by the `setup` task. For each
+number, it calls a hypothetical `log` function, accessing the current number via the default `$item` variable and its
+index via the default `$index` variable within the runtime expression scope.
+
+## Configuration Options
+
+### `for` (Object, Required)
+
+This object defines the core iteration parameters.
+
+* **`in`** (String, Required): A [Runtime Expression](/docs/core-concepts/runtime-expressions/) that **must** evaluate to an
+ array/list. The workflow iterates over the elements of this array.
+* **`each`** (String, Optional): Specifies the variable name used within the `do` block's scope to access the current
+ item being processed. Defaults to `item` (accessible as `$item` in expressions).
+* **`at`** (String, Optional): Specifies the variable name used within the `do` block's scope to access the zero-based
+ index of the current item being processed. Defaults to `index` (accessible as `$index` in expressions).
+
+```yaml
+for:
+ in: "${ .users }" # Iterate over the 'users' array from the task's input
+ each: "currentUser" # Access the current user as $currentUser
+ at: "userIndex" # Access the current index as $userIndex
+```
+
+### `do` (List\<TaskItem\>, Required)
+
+Defines the block of tasks to execute for each item in the collection (subject to the `while` condition).
+
+The tasks within this block execute sequentially for each iteration. They have access to the current item and index via
+the variables defined by `for.each` and `for.at`, which are injected into the runtime expression scope.
+
+```yaml
+for:
+ in: "${ .products }"
+ each: product
+do:
+ - checkStock:
+ call: inventoryService
+ with:
+ productId: "${ $product.id }" # Access the 'id' field of the current product
+ - updatePrice:
+ # ... task definition that can use $product ...
+```
+
+### `while` (String, Optional)
+
+An optional [Runtime Expression](/docs/core-concepts/runtime-expressions/) evaluated *before* executing the `do` block for each item (
+including the first item).
+
+If the expression evaluates to `false`, the `do` block for the current item is skipped, and the loop terminates.
+The expression is evaluated against the current task state, including
+the loop variables (`$item`, `$index` by default) available in this scope.
+
+```yaml
+for:
+ in: "${ .dataPoints }"
+  # Keep executing the 'do' block while the current item's value > 10; stop at the first item where it is not
+ while: "${ $item.value > 10 }"
+do:
+ - processHighValueItem:
+ # ... task definition ...
+```
+
+## Data Flow
+
+<CommonTaskDataFlow />
+
+**Note**:
+
+* The `transformedOutput` of each iteration becomes the `rawInput` of the next one (the `For` task's
+  `transformedInput` serves as the `rawInput` of the first iteration).
+* The final `rawOutput` of the `For` task (which feeds into its `output.as`/`export.as`) is the
+ `transformedOutput` of the *last* successfully executed iteration's `do` block, or the `For` task's own
+ `transformedInput` if the loop didn't execute at all (e.g., empty `for.in` or initial `while` was false).
+
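+To make this chaining concrete, here is a minimal sketch (field names are illustrative) in which each iteration reads the running total produced by the previous one:
+
+```yaml
+do:
+  - sumNumbers:
+      input:
+        from: '${ { values: .values, total: 0 } }'
+      for:
+        in: '${ .values }'
+      do:
+        - accumulate:
+            set:
+              # Each iteration receives the previous iteration's output,
+              # so 'total' carries the running sum across the loop
+              values: '${ .values }'
+              total: '${ .total + $item }'
+      output:
+        as: '${ .total }' # The raw output is the last iteration's output
+```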
+## Flow Control
+
+<CommonTaskFlowControl />
+
+**Note**: A `then: exit` directive within the `do` block of a `For` task will terminate the loop immediately,
+skipping any remaining items in the `for.in` collection.
+
+
diff --git a/src/content/docs/docs/control-flow/fork.mdx b/src/content/docs/docs/control-flow/fork.mdx
new file mode 100644
index 0000000..f12b305
--- /dev/null
+++ b/src/content/docs/docs/control-flow/fork.mdx
@@ -0,0 +1,151 @@
+---
+title: Fork
+sidebar:
+ order: 40
+---
+
+import CommonTaskDataFlow from '../../shared/common-task-data-flow.mdx';
+import CommonTaskFlowControl from '../../shared/common-task-flow-control.mdx';
+
+## Purpose
+
+The `Fork` task allows workflows to execute multiple defined subtasks (branches) concurrently.
+
+This enables parallel processing, potentially improving the overall efficiency and speed of the workflow by executing
+independent tasks simultaneously.
+
+## Usage Example
+
+```yaml
+document:
+  dsl: '1.0.0' # Workflow DSL version
+ namespace: test
+ name: fork-example
+ version: '0.1.0'
+do:
+ - getData:
+ # ... task to fetch initial data ...
+ then: processConcurrently
+ - processConcurrently:
+ fork:
+ # Optional: Make branches compete; first one finished provides the output
+ # compete: true
+ branches:
+ - processUserData:
+ # This branch runs concurrently with processProductData
+ call: processUserMicroservice
+ with:
+ userId: "${ .userId }"
+ - processProductData:
+ # This branch runs concurrently with processUserData
+ call: processProductMicroservice
+ with:
+ productId: "${ .productId }"
+ # Default: compete=false, waits for both branches
+ # Output is an array: [output_of_processUserData, output_of_processProductData]
+ then: combineResults
+ - combineResults:
+ # ... task that uses the array output from the fork ...
+```
+
+In this example, after `getData`, the tasks `processUserData` and `processProductData` are executed in parallel. Since
+`compete` is false (by default), the workflow waits for both branches to complete. The output passed to `combineResults`
+is an array containing the results from both branches in the order they were defined.
+
+### Additional Examples
+
+#### Example: Competing Branches (`compete: true`)
+
+```yaml
+do:
+ - fetchFastestQuote:
+ fork:
+ compete: true # Branches race, first one to complete wins
+ branches:
+ - getQuoteSourceA:
+ call: http
+ with:
+          endpoint: https://api.sourceA.com/quote
+ method: get
+ # Assume returns { "provider": "A", "price": 100 }
+ - getQuoteSourceB:
+ call: http
+ with:
+          endpoint: https://api.sourceB.com/quote
+ method: get
+ # Assume returns { "provider": "B", "price": 105 }
+ # Output is the result of whichever branch finished first
+ # e.g., { "provider": "A", "price": 100 } if Source A was faster
+ then: processBestQuote
+ - processBestQuote:
+ # ... task uses the single quote object from the winner ...
+```
+
+Here, `getQuoteSourceA` and `getQuoteSourceB` run concurrently. Because `compete: true`, the `Fork` task finishes as
+soon as *one* of them completes successfully. The output passed to `processBestQuote` will be the result object from
+only the faster source.
+
+#### Example: Output Array Structure (`compete: false`)
+
+```yaml
+do:
+ - gatherInfo:
+ fork:
+ # compete: false is the default
+ branches:
+ - getUserProfile:
+ set: { profile: { name: "Alice", id: 123 } }
+ - getUserOrders:
+ set: { orders: [ { orderId: "A" }, { orderId: "B" } ] }
+ # Output is an array: [ { profile: { ... } }, { orders: [ ... ] } ]
+ then: displayInfo
+ - displayInfo:
+ # Input to this task is the array from the fork
+ # Access results via index: ${ .[0].profile.name }, ${ .[1].orders[0].orderId }
+ call: log
+ with:
+ message: "User: ${ .[0].profile.name }, First Order: ${ .[1].orders[0].orderId }"
+```
+
+This demonstrates the output when `compete` is false. The `rawOutput` of the `gatherInfo` task is an array where the
+first element is the output of `getUserProfile` and the second element is the output of `getUserOrders`. The
+`displayInfo` task then accesses elements within this array using index notation (`.[0]`, `.[1]`).
+
+## Configuration Options
+
+### `fork` (Fork, Required)
+
+This object defines the concurrent execution.
+
+* **`branches`** (List\<TaskItem\>, Required): A list of task items, each defining a named branch containing a task to
+ be executed concurrently with the others. Each branch operates on the same `transformedInput` that was passed to the
+ `Fork` task.
+ * *Note: The type in the reference `map[string, task][]` seems slightly unusual; it likely represents a list where
+ each item implicitly has a name (key) and a task definition (value), similar to how tasks are defined in a `Do`
+ block.*
+
+* **`compete`** (Boolean, Optional, Default: `false`): Determines the completion and output behavior:
+ * `false` (Default): The `Fork` task completes only after **all** branches have successfully completed. The task's
+ `rawOutput` is an array containing the `transformedOutput` from each branch, in the order the branches are
+ declared in the `branches` list.
+ * `true`: The branches race against each other. The `Fork` task completes as soon as the **first** branch
+ successfully completes. The task's `rawOutput` is the `transformedOutput` of only that single winning branch. The
+ execution of other, slower branches might be implicitly cancelled by the runtime.
+
+### Data Flow
+
+<CommonTaskDataFlow />
+
+**Note**:
+* The `transformedInput` to the `Fork` task is passed identically to each branch when it starts execution.
+* The `rawOutput` of the `Fork` task depends on the `compete` flag:
+ * `compete: false`: An array of the `transformedOutput` from all completed branches, in declaration order.
+ * `compete: true`: The `transformedOutput` of the single branch that completed first.
+* Standard `output.as` and `export.as` process this resulting `rawOutput`.
+
+### Flow Control
+
+<CommonTaskFlowControl />
+
+**Note**:
+* The `if` condition is evaluated *before* the `Fork` task starts any branches. If false, the entire task is skipped, and its `then` directive is followed immediately.
+* The `then` directive is followed only *after* the `Fork` task completes successfully based on the `compete` flag (either all branches finish, or the first competing branch finishes).
+* If any branch faults *before* the `Fork` task completes according to its `compete` mode, the entire `Fork` task typically faults immediately, and its `then` directive is *not* followed (unless the error is caught by an outer `Try` task).
\ No newline at end of file
diff --git a/src/content/docs/docs/control-flow/overview.mdx b/src/content/docs/docs/control-flow/overview.mdx
new file mode 100644
index 0000000..c87f3fe
--- /dev/null
+++ b/src/content/docs/docs/control-flow/overview.mdx
@@ -0,0 +1,322 @@
+---
+title: Control Flow Overview
+sidebar:
+ order: 0
+---
+
+Flow tasks are fundamental components that control the execution path of your serverless workflows. They determine the
+order in which tasks run, manage conditional branching, handle iterations, and orchestrate parallel execution paths.
+
+## Flow Task Types
+
+| Task | Purpose |
+|------------------------------|------------------------------------------------------|
+| [Do](/docs/control-flow/do/) | Define sequential execution of one or more tasks |
+| [For](/docs/control-flow/for/) | Iterate over a collection of items |
+| [Switch](/docs/control-flow/switch/) | Implement conditional branching based on data values |
+| [Fork](/docs/control-flow/fork/) | Execute multiple branches in parallel |
+
+## When to Use Flow Tasks
+
+### Sequential Execution with Do
+
+Use the **Do** task when you need to:
+
+- Define a sequence of operations that should run in a specific order
+- Group related tasks together for better organization
+- Create nested execution blocks for complex workflows
+- Implement a series of steps that build on each other
+
+```yaml
+document:
+ dsl: '1.0.0'
+ namespace: examples
+ name: order-processing
+ version: '1.0.0'
+use:
+ functions:
+ orderValidator:
+ ## define function here...
+ paymentProcessor:
+ ## define function here...
+ inventoryManager:
+ ## define function here...
+do:
+ - validateOrder:
+      call: orderValidator
+      with:
+        order: ${ .input.order }
+
+  - processPayment:
+      call: paymentProcessor
+      with:
+        amount: ${ .input.order.totalAmount }
+        paymentDetails: ${ .input.paymentInfo }
+
+  - updateInventory:
+      call: inventoryManager
+      with:
+        items: ${ .input.order.items }
+        operation: "reserve"
+```
+
+### Iterative Processing with For
+
+Use the **For** task when you need to:
+
+- Process each item in a collection or array
+- Execute the same operation multiple times with different inputs
+- Break down bulk operations into individual item processing
+- Create data transformation pipelines that operate on collections
+
+```yaml
+document:
+ dsl: '1.0.0'
+ namespace: examples
+ name: process-order-items
+ version: '1.0.0'
+use:
+ functions:
+ inventoryUpdater:
+ ## define function here...
+ orderItemTracker:
+ ## define function here...
+do:
+ - processItems:
+ for:
+ in: ${ .input.order.items }
+ each: currentItem
+ do:
+ - updateInventory:
+ call: inventoryUpdater
+            with:
+              productId: ${ $currentItem.productId }
+              quantity: ${ $currentItem.quantity }
+              operation: "DECREMENT"
+
+        - recordItemFulfillment:
+            call: orderItemTracker
+            with:
+              orderId: ${ .input.order.id }
+              itemId: ${ $currentItem.id }
+              status: "FULFILLED"
+```
+
+### Conditional Logic with Switch
+
+Use the **Switch** task when you need to:
+
+- Implement decision-making logic based on data values
+- Create different execution paths based on conditions
+- Handle multiple possible values with distinct processing for each
+- Implement business rules with clear branching logic
+
+```yaml
+document:
+ dsl: '1.0.0'
+ namespace: examples
+ name: payment-processor
+ version: '1.0.0'
+use:
+ functions:
+ creditCardProcessor:
+ ## define function here...
+ paypalProcessor:
+ ## define function here...
+ bankTransferProcessor:
+ ## define function here...
+do:
+ - determinePaymentMethod:
+ switch:
+ - creditCard:
+ when: ${ .input.paymentMethod == 'creditCard' }
+ then: processCreditCard
+
+ - paypal:
+ when: ${ .input.paymentMethod == 'paypal' }
+ then: processPayPal
+
+ - bankTransfer:
+ when: ${ .input.paymentMethod == 'bankTransfer' }
+ then: processBankTransfer
+
+ - default:
+ then: handleUnsupportedPaymentMethod
+
+ - processCreditCard:
+ call: creditCardProcessor
+      with:
+        cardDetails: ${ .input.paymentDetails }
+        amount: ${ .input.amount }
+ then: exit
+
+ - processPayPal:
+ call: paypalProcessor
+      with:
+        paypalAccount: ${ .input.paymentDetails }
+        amount: ${ .input.amount }
+ then: exit
+
+ - processBankTransfer:
+ call: bankTransferProcessor
+      with:
+        bankDetails: ${ .input.paymentDetails }
+        amount: ${ .input.amount }
+ then: exit
+
+ - handleUnsupportedPaymentMethod:
+ set:
+ result:
+ success: false
+ errorCode: "UNSUPPORTED_PAYMENT_METHOD"
+ message: ${ "Payment method " + .input.paymentMethod + " is not supported" }
+ then: exit
+
+```
+
+### Parallel Execution with Fork
+
+Use the **Fork** task when you need to:
+
+- Execute multiple independent tasks simultaneously
+- Improve workflow performance by parallelizing operations
+- Process multiple streams of data concurrently
+- Implement fan-out patterns for workload distribution
+
+```yaml
+document:
+ dsl: '1.0.0'
+ namespace: examples
+ name: product-data-enrichment
+ version: '1.0.0'
+use:
+ functions:
+ pricingService:
+ ## define function here...
+ inventoryService:
+ ## define function here...
+ reviewService:
+ ## define function here...
+do:
+ - enrichProductData:
+ fork:
+ compete: false
+ branches:
+ - fetchPricing:
+ call: pricingService
+            with:
+              productId: ${ .input.productId }
+
+        - fetchInventory:
+            call: inventoryService
+            with:
+              productId: ${ .input.productId }
+
+        - fetchReviews:
+            call: reviewService
+            with:
+              productId: ${ .input.productId }
+```
+
+## Combining Flow Tasks
+
+Flow tasks are often combined to create sophisticated workflows that accurately reflect complex business processes:
+
+```yaml
+document:
+ dsl: '1.0.0'
+ namespace: examples
+ name: order-fulfillment-workflow
+ version: '1.0.0'
+do:
+ - validateOrder:
+      call: orderValidator
+      with:
+        order: ${ .input.order }
+      output:
+        as: '${ { validationResult: . } }'
+
+  - checkOrderValidity:
+      switch:
+        - invalid:
+            when: ${ .validationResult.valid == false }
+            then: handleInvalidOrder
+        - valid:
+            then: processValidOrder
+
+  - handleInvalidOrder:
+      do:
+        - setFailureResult:
+            set:
+              result:
+                success: false
+                message: ${ .validationResult.reason }
+                code: "INVALID_ORDER"
+        - notifyCustomer:
+            try:
+              - sendEmail:
+                  call: emailService
+                  with:
+                    to: ${ .input.customer.email }
+                    subject: "Order Validation Failed"
+                    message: ${ .validationResult.reason }
+            catch:
+              errors:
+                with:
+                  type: "email-service-error"
+              do:
+                - logEmailFailure:
+                    call: logger
+                    with:
+                      level: "WARNING"
+                      message: "Failed to send validation failure email"
+                      orderId: ${ .input.order.id }
+      then: exit
+
+ - processValidOrder:
+ fork:
+ compete: false
+ branches:
+ - handlePayment:
+            call: paymentProcessor
+            with:
+              amount: ${ .input.order.totalAmount }
+              currency: ${ .input.order.currency }
+              paymentMethod: ${ .input.paymentInfo }
+
+ - reserveInventory:
+ for:
+ in: ${ .input.order.items }
+ each: item
+ do:
+ - reserveItem:
+ call: inventoryManager
+ with:
+ productId: ${ .item.productId }
+ quantity: ${ .item.quantity }
+ operation: "reserve"
+
+ - prepareOrderResponse:
+ set:
+ result:
+ orderId: ${ .input.order.id }
+ status: "PROCESSING"
+ paymentStatus: ${ .processingResults.handlePayment.paymentResult.status }
+ inventoryStatus: ${ .processingResults.reserveInventory.inventoryReservations }
+ estimatedShipDate: ${ new Date(Date.now() + 86400000 * 2).toISOString() }
+```
+
+Flow tasks are the building blocks of workflow orchestration, enabling you to create maintainable workflows that
+reflect your business processes while separating orchestration logic from the implementation of business logic.
\ No newline at end of file
diff --git a/src/content/docs/docs/control-flow/switch.mdx b/src/content/docs/docs/control-flow/switch.mdx
new file mode 100644
index 0000000..435a0f9
--- /dev/null
+++ b/src/content/docs/docs/control-flow/switch.mdx
@@ -0,0 +1,116 @@
+---
+title: Switch
+sidebar:
+ order: 30
+---
+
+import CommonTaskDataFlow from '../../shared/common-task-data-flow.mdx';
+import CommonTaskFlowControl from '../../shared/common-task-flow-control.mdx';
+
+## Purpose
+
+The `Switch` task provides conditional branching based on evaluating a series of conditions. It allows the workflow to
+dynamically select one execution path from multiple alternatives.
+
+It evaluates conditions (defined by `when` expressions) sequentially. The `then` directive associated with the *first*
+condition that evaluates to `true` is executed, determining the next step in the workflow. If no conditions evaluate to
+`true`, a default path (the case without a `when` or the `Switch` task's own `then` directive) is taken.
+
+It's primarily used for:
+
+* Implementing decision logic (if-elseif-else patterns).
+* Routing workflow execution based on input data or context state.
+* Selecting different processing paths for different types of data.
+
+## Basic Usage
+
+Here's an example of routing based on an input status field:
+
+```yaml
+document:
+ dsl: '1.0.0'
+ namespace: examples
+ name: switch-basic
+ version: '1.0.0'
+do:
+ - decideNextStep:
+ # Assume input is { "status": "Approved" } or { "status": "Rejected" } etc.
+ switch:
+ - caseApproved:
+ when: "${ .status == \"Approved\" }"
+ then: processApproved # Go to the 'processApproved' task
+ - caseRejected:
+ when: "${ .status == \"Rejected\" }"
+ then: processRejected # Go to the 'processRejected' task
+ - caseDefault: # Default case (no 'when')
+ then: handleOtherStatus # Go to 'handleOtherStatus' if neither matched
+ - processApproved:
+ # ... task definition ...
+ then: continue # Or specify next step
+ - processRejected:
+ # ... task definition ...
+ then: continue
+ - handleOtherStatus:
+ # ... task definition ...
+ then: continue
+```
+
+In this example, the `decideNextStep` task inspects the `.status` field of its input. If it's "Approved", execution
+jumps to `processApproved`. If it's "Rejected", it jumps to `processRejected`. Otherwise, the default case triggers,
+jumping to `handleOtherStatus`.
+
+## Configuration Options
+
+### `switch` (List\<Case\>, Required)
+
+This mandatory property contains a list of cases, each representing a potential execution branch.
+
+The runtime evaluates the items in the order they appear in the list.
+
+Each case contains:
+
+* **A unique name** for the case (e.g., `caseApproved`).
+* **An object** containing:
+ * **`when`** (String, Optional): A [Runtime Expression](/docs/core-concepts/runtime-expressions/). If present, this expression is
+ evaluated against the *transformed input* of the `Switch` task. If it evaluates to `true`, this case is selected,
+ and its `then` directive is followed. If omitted, this case acts as the default branch (executed only if no
+ preceding `when` condition was met).
+ * **`then`** (String, Required): Defines the next step if this case is selected. It follows the
+ standard [Flow Control](/docs/core-concepts/flow-control/) rules (`continue`, `exit`, `end`, or a task name).
+
+```yaml
+do:
+ - switch:
+ - checkHighPriority:
+ when: "${ .priority > 5 }"
+ then: handleHighPriority
+ - checkMediumPriority:
+ when: "${ .priority > 2 }" # Only checked if priority <= 5
+ then: handleMediumPriority
+ - defaultCase: # No 'when', acts as default
+ then: handleLowPriority
+ - handleHighPriority:
+ # ... task definition ...
+ then: exit
+ - handleMediumPriority:
+ # ... task definition ...
+ then: exit
+ - handleLowPriority:
+ # ... task definition ...
+ then: exit
+```
+
+**Important**: Only the case of the *first* `when` condition that evaluates to `true` is selected; subsequent cases
+are ignored. If a default case (no `when`) exists, it should typically be placed last.
+Targeted tasks typically end with `then: exit` to prevent execution from falling through to the tasks declared after them.
+
+## Data Flow
+
+<CommonTaskDataFlow />
+
+**Note**: The `rawOutput` of the `Switch` task (feeding into its `output.as`/`export.as`) is its own `transformedInput`. The `Switch` itself doesn't modify the data; it only directs the flow.
+
+## Flow Control
+
+<CommonTaskFlowControl />
+
+**Note**: The `Switch` task has unique flow control. Instead of using its own `then` property, flow continues based on the `then` property of the *matched* `case` within the `switch` block. If no case matches, flow follows the `Switch` task's own `then` property.
+
diff --git a/src/content/docs/docs/core-concepts/data-flow-management.mdx b/src/content/docs/docs/core-concepts/data-flow-management.mdx
new file mode 100644
index 0000000..2a014a7
--- /dev/null
+++ b/src/content/docs/docs/core-concepts/data-flow-management.mdx
@@ -0,0 +1,304 @@
+---
+title: Data Flow Management
+sidebar:
+ order: 30
+---
+
+## Purpose
+
+Effective data flow management is crucial for building robust and maintainable workflows. The Serverless Workflow DSL
+provides specific constructs (`input`, `output`, `export`) at both the workflow and individual task levels to control
+how data is validated, transformed, and passed between steps and into the workflow's shared context.
+
+This allows you to:
+
+* Ensure tasks receive only the data they need in the correct format
+* Validate data structures at key points to prevent errors
+* Shape the final output of tasks and the entire workflow
+* Maintain a shared state (`$context`) across tasks in a controlled manner
+
+## Key Concepts and Keywords
+
+Data flows through a sequence of validation and transformation steps; a compact end-to-end sketch follows the list:
+
+1. **Workflow Input Processing**:
+ * `workflow.input.schema`: Validates the initial `raw input` provided when the workflow starts
+ * `workflow.input.from`: Transforms the `raw input` of the workflow.
+ The result (workflow's `transformed input`) becomes the initial `raw input` of the *first* task
+2. **Task Input Processing** (for each task):
+ * `task.input.schema`: Validates the task's `raw input`
+ (which is either the `transformed output` of the previous task or the workflow's `transformed input` for the first task)
+ * `task.input.from`: Transforms the task's `raw input`.
+ The result (task's `transformed input`) is available as `$input` within the task's execution scope
+ and is used for evaluating expressions within the task definition
+3. **Task Execution**: The task performs its action (e.g., calls an API, runs a script, sets data).
+ The result of this action is the task's `raw output`
+4. **Task Output Processing** (for each task):
+ * `task.output.as`: Transforms the `raw output` of the task.
+ The result (task's `transformed output`) is available as `$output` in subsequent steps and
+ becomes the `raw input` for the *next* task (or the workflow's `raw output` if it's the last task)
+ * `task.output.schema`: Validates the task's `transformed output`
+5. **Task Context Export** (for each task):
+ * `task.export.as`: Transforms the task's `transformed output` to update the shared
+ context (`$context`) of the workflow
+ * `task.export.schema`: Validates the data produced by `export.as` *before* it updates the current
+ `$context`
+6. **Workflow Output Processing**:
+ * `workflow.output.as`: Transforms the workflow's `raw output` (the `transformed output` of the *last* task executed).
+ This defines the workflow's `transformed output`, which becomes the final result returned by the workflow execution
+ * `workflow.output.schema`: Validates the final `transformed output` of the workflow
+
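+As referenced above, here is a compact end-to-end sketch of that pipeline (the `orderService` function and the data
+shapes are illustrative assumptions):
+
+```yaml
+document:
+  dsl: '1.0.0'
+  namespace: examples
+  name: data-flow-pipeline
+  version: '1.0.0'
+input:
+  from: "${ { orderId: .order.id } }"              # 1. transform the raw workflow input
+do:
+  - fetchOrder:
+      input:
+        from: "${ . }"                             # 2. transform the raw task input (becomes $input)
+      call: orderService                           # 3. execution produces the raw task output
+      with:
+        args:
+          id: ${ .orderId }
+      output:
+        as: "${ .body }"                           # 4. transform the raw task output (becomes $output)
+      export:
+        as: "${ $context + { order: $output } }"   # 5. update the shared $context
+output:
+  as: "${ { order: $context.order } }"             # 6. transform the last task's output into the final result
+```
+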
+## Workflow Level Data Handling
+
+These properties are defined at the top level of the workflow document.
+
+### `input` (Object, Optional)
+
+Controls processing of the initial data the workflow receives.
+
+* **`from`** (String | Object | Array | ..., Optional): A [Runtime Expression](/docs/core-concepts/runtime-expressions/) or literal
+ value defining how to transform the raw workflow input. The result initializes `$context` and is passed as raw input
+  to the first task. Defaults to identity (`${ . }`)
+* **`schema`** (Schema Definition, Optional): A [JSON Schema](https://json-schema.org/) used to validate the *raw*
+ workflow input *before* `input.from` is applied. If validation fails, the workflow faults immediately
+
+```yaml
+document:
+ dsl: '1.0.0'
+ # ... workflow metadata ...
+input:
+ schema:
+ type: object
+ required: ["user", "payload"]
+ properties:
+ user:
+ type: object
+ properties:
+ id:
+ type: string
+ payload:
+ type: object
+ from: "${ { userId: .user.id, orderDetails: .payload } }" # Select and restructure
+do:
+ - firstTask: # Receives { userId: ..., orderDetails: ... } as raw input
+ # ...
+```
+
+### `output` (Object, Optional)
+
+Controls processing of the final data returned by the workflow.
+
+* **`as`** (String | Object | Array | ..., Optional): A [Runtime Expression](/docs/core-concepts/runtime-expressions/) or literal
+  value defining how to transform the *transformed output* of the *last* task. Defaults to identity (`${ . }`)
+* **`schema`** (Schema Definition, Optional): A [JSON Schema](https://json-schema.org/) used to validate the *final
+ transformed workflow output* (after `output.as` is applied). If validation fails, the workflow faults
+
+```yaml
+document:
+# ... workflow definition ...
+do:
+ - # ... tasks ...
+ - lastTask:
+ set:
+ confirmation: "ABC-123"
+ internalStatus: "Complete"
+output:
+ # Only return the confirmation field from the last task's output
+ as: "${ { confirmationId: .confirmation } }"
+ schema:
+ type: object
+ required: ["confirmationId"]
+ properties:
+ confirmationId:
+ type: string
+```
+
+## Task Level Data Handling
+
+These properties can be defined within individual task definitions.
+
+### `input` (Object, Optional)
+
+Controls processing of data entering a specific task.
+
+* **`from`** (String | Object | Array | ..., Optional): Transforms the task's *raw input*. The result is available as
+ `$input` within the task's scope. Defaults to identity (`${ . }`)
+* **`schema`** (Schema Definition, Optional): Validates the task's *raw input* *before* `input.from` is applied
+
+```yaml
+- taskA:
+ # Assume previous task output was { "data": { "value": 10 }, "meta": ... }
+ input:
+    schema:
+      type: object
+      required: ["data"]
+      properties:
+        data:
+          type: object
+          properties:
+            value:
+              type: number
+ from: "${ .data }" # Pass only the 'data' part to this task
+ set:
+ doubled: "${ $input.value * 2 }" # Use the transformed input ($input)
+```
+
+### `output` (Object, Optional)
+
+Controls processing of data produced by a specific task.
+
+* **`as`** (String | Object | Array | ..., Optional): Transforms the task's *raw output* (the direct result of its
+ action, e.g., HTTP response body, script return value). The result becomes the raw input for the next task and is
+  available as `$output` for `export.as`. Defaults to identity (`${ . }`)
+* **`schema`** (Schema Definition, Optional): Validates the *transformed output* (after `output.as` is applied)
+
+```yaml
+- callApi:
+ call: http
+ with:
+ # ... call definition ...
+ # Assume API returns { "result": { "payload": ..., "debug": ... }, "status": ... }
+ output:
+ # Select only the payload from the API response
+ as: "${ .result.payload }"
+ schema:
+ # Define expected payload structure
+ type: object
+ properties:
+ # ... payload schema ...
+- nextTask:
+ # Raw input here will be the value of 'result.payload' from the API call
+ # ...
+```
+
+### `export` (Object, Optional)
+
+Controls how the task's results update the shared workflow context (`$context`).
+
+* **`as`** (String | Object | Array | ..., Optional): A [Runtime Expression](/docs/core-concepts/runtime-expressions/) evaluated
+ against the task's *transformed output* (`$output`). The result of this expression **updates** the current value of
+ `$context`. Use `$context + { newField: ... }` (jq syntax) to merge with existing context
+* **`schema`** (Schema Definition, Optional): Validates the data structure produced by `export.as` *before* it updates
+ `$context`
+
+```yaml
+- updateUser:
+ call: http # Assume API call returns user ID { "userId": "user-xyz" }
+ # ...
+ export:
+ # Add/update the lastUserId in the context
+ as: "${ $context + { lastUserId: $output.userId } }"
+ schema:
+ type: object
+ required: ["lastUserId"]
+ properties:
+ lastUserId:
+ type: string
+- subsequentTask:
+ # Can now access $context.lastUserId
+ set:
+ message: "Processed user: ${ $context.lastUserId }"
+```
+
+## Visualization
+
+The following diagram illustrates the flow of data through validation and transformation stages for both workflow and
+tasks:
+
+```mermaid
+flowchart TD
+
+ subgraph Legend
+ legend_data{{Data}}
+ legend_schema[\Schema/]
+ legend_transformation[Transformation]
+ legend_arg([Runtime Argument])
+ end
+
+ context_arg([$context])
+ input_arg([$input])
+ output_arg([$output])
+
+ workflow_raw_input{{Raw Workflow Input}}
+ workflow_input_schema[\Workflow: input.schema/]
+ workflow_input_from[Workflow: input.from]
+ workflow_transformed_input{{Transformed Workflow Input}}
+
+ task_raw_input{{Raw Task Input}}
+ task_if[Task: if]
+ task_input_schema[\Task: input.schema/]
+ task_input_from[Task: input.from]
+ task_transformed_input{{Transformed Task Input}}
+ task_definition[Task definition/execution]
+ task_raw_output{{Raw Task output}}
+ task_output_as[Task: output.as]
+ task_transformed_output{{Transformed Task output}}
+ task_output_schema[\Task: output.schema/]
+ task_export_as[Task: export.as]
+ task_export_schema[\Task: export.schema/]
+
+ new_context{{New $context value}}
+
+    workflow_raw_output{{"Raw Workflow Output (from last task)"}}
+ workflow_output_as[Workflow: output.as]
+ workflow_transformed_output{{Transformed Workflow Output}}
+ workflow_output_schema[\Workflow: output.schema/]
+ final_output{{Final Workflow Result}}
+
+ workflow_raw_input -- Validate --> workflow_input_schema
+ workflow_input_schema -- Transform --> workflow_input_from
+ workflow_input_from -- Produces --> workflow_transformed_input
+ workflow_transformed_input -- Passed as Raw Input to First Task --> task_raw_input
+
+ subgraph Task Execution Cycle
+ task_raw_input -- Validate --> task_input_schema
+ task_input_schema -- Transform --> task_input_from
+ task_input_from -- Produces --> task_transformed_input
+ task_transformed_input -- Used by --> task_if
+ task_if -- if true (default) --> input_arg
+ task_if -- if false --> next_task
+ input_arg -- Used in --> task_definition
+
+ task_definition -- Produces --> task_raw_output
+ task_raw_output -- Transform --> task_output_as
+ task_output_as -- Produces --> task_transformed_output
+    task_transformed_output -- Available as --> output_arg
+ task_transformed_output -- Validate --> task_output_schema
+ output_arg -- Used by --> task_export_as
+ task_export_as -- Produces --> new_context
+ new_context -- Validate --> task_export_schema
+ task_export_schema -- Updates --> context_arg
+ task_output_schema -- Passed as Raw Input to Next Task --> next_task
+ next_task --> task_raw_input
+ end
+
+ subgraph next_task [Next Task / Workflow Output Stage]
+ end
+
+ next_task -- Transformed Output becomes --> workflow_raw_output
+ workflow_raw_output -- Transform --> workflow_output_as
+ workflow_output_as -- Produces --> workflow_transformed_output
+ workflow_transformed_output -- Validated by --> workflow_output_schema
+ workflow_output_schema -- Returns --> final_output
+```
+
+## Potential Errors
+
+Several types of errors can occur during data flow processing:
+
+1. **Schema Validation Errors (`ValidationError`)**:
+ * **When**: Occurs if data fails validation against any schema (`input.schema`, `output.schema`, or `export.schema`)
+ at either workflow or task level
+ * **Type**: `https://serverlessworkflow.io/spec/1.0.0/errors/validation`
+ * **Status**: `400` (Bad Request)
+ * **Effect**: The workflow faults immediately unless handled by a `Try` task
+
+2. **Expression Evaluation Errors (`ExpressionError`)**:
+ * **When**: Occurs if a runtime expression within `input.from`, `output.as`, or `export.as` fails to evaluate.
+ Common causes include:
+ - Syntax errors in the expression
+ - References to missing data
+ - Type mismatches during evaluation
+ * **Type**: `https://serverlessworkflow.io/spec/1.0.0/errors/expression`
+ * **Status**: `400` (Bad Request)
+ * **Effect**: The workflow faults immediately unless handled by a `Try` task
+
+These errors are critical and will cause the workflow execution to halt in a `faulted` state unless properly handled
+using error handling mechanisms like `Try` tasks.
diff --git a/src/content/docs/docs/core-concepts/error-handling.mdx b/src/content/docs/docs/core-concepts/error-handling.mdx
new file mode 100644
index 0000000..fccaca7
--- /dev/null
+++ b/src/content/docs/docs/core-concepts/error-handling.mdx
@@ -0,0 +1,156 @@
+---
+title: Error Handling
+sidebar:
+ order: 50
+---
+{/* Examples are validated */}
+
+## Purpose
+
+Failures and exceptional conditions are inevitable in any complex system. The Serverless Workflow DSL provides robust
+mechanisms to define, raise, identify, and handle errors gracefully.
+
+This ensures workflows can:
+
+* Recover from anticipated issues
+* Perform compensation logic
+* Implement retries for transient problems
+* Avoid abrupt termination for manageable faults
+
+## Error Definition (Problem Details)
+
+Errors in Serverless Workflow are structured based on
+the [RFC 7807 Problem Details for HTTP APIs](https://datatracker.ietf.org/doc/html/rfc7807). This provides a
+standardized way to communicate error information.
+
+The core fields of a workflow error object are:
+
+* **`type`** (String, Required): A URI reference that identifies the specific type of error. The specification defines
+ several [Standard Error Types](#standard-error-types)
+* **`status`** (Integer, Required): The status code associated with this occurrence of the error (often analogous to
+ HTTP status codes)
+* **`title`** (String, Optional): A short, human-readable summary of the error
+* **`detail`** (String, Optional): A human-readable explanation specific to this occurrence of the error
+* **`instance`** (String, Required): A URI reference that identifies the specific occurrence of the problem. In
+ Serverless Workflow, this is typically a [JSON Pointer](https://datatracker.ietf.org/doc/html/rfc6901) indicating the
+ specific task or component within the workflow definition where the error originated (e.g., `/do/1/callApi`)
+
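+For illustration, a communication error raised by a failed HTTP call might be represented as follows (the values are
+hypothetical):
+
+```json
+{
+  "type": "https://serverlessworkflow.io/spec/1.0.0/errors/communication",
+  "status": 503,
+  "title": "Service Unavailable",
+  "detail": "POST https://api.example.com/payments returned 503",
+  "instance": "/do/2/processPayment"
+}
+```
+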
+## Standard Error Types
+
+The specification defines standard error `type` URIs for common issues. Runtimes **must** use these types when
+applicable to ensure consistent behavior. Authors should use these when defining custom errors for similar conditions.
+
+Base URI: `https://serverlessworkflow.io/spec/1.0.0/errors/`
+
+| Type Suffix      | Default Status | Description                                                                   |
+|:-----------------|:---------------|:------------------------------------------------------------------------------|
+| `configuration`  | 400            | Problem with the workflow definition itself (e.g., invalid `then` target)     |
+| `validation`     | 400            | Input/output data failed schema validation (`input.schema`, `output.schema`)  |
+| `expression`     | 400            | Evaluation of a runtime expression failed (e.g., syntax error, bad access)    |
+| `authentication` | 401            | Authentication failed when accessing a protected resource                     |
+| `authorization`  | 403            | Insufficient permissions to access a resource (e.g., secrets)                 |
+| `timeout`        | 408            | A configured workflow or task timeout was exceeded                            |
+| `communication`  | 500            | Error during communication with an external service (e.g., HTTP call fail)    |
+| `runtime`        | 500            | General runtime error not covered by other types                              |
+
+## Raising Errors
+
+Errors can enter the system in two main ways:
+
+1. **Implicitly by the Runtime**: The workflow engine raises standard errors automatically when certain conditions
+ occur:
+ * Expression evaluation fails (`expression` error)
+ * Schema validation fails (`validation` error)
+ * A task or workflow times out (`timeout` error)
+ * A `call` task fails due to network issues (`communication` error)
+ * Authentication/Authorization issues arise (`authentication`/`authorization` errors)
+ * Workflow definition issues are detected (`configuration` error)
+
+2. **Explicitly using the `Raise` Task**: Authors can use the [`Raise` task](/docs/error-handling/raise/) to explicitly throw a
+ specific error (either predefined in `use.errors` or defined inline) based on workflow logic:
+ ```yaml
+ - checkInventory:
+ # ... check logic ...
+ if: "${ .stock < .needed }"
+ raise:
+ error: outOfStockError # Defined in use.errors
+ with:
+ detail: "Needed ${ .needed }, only ${ .stock } available"
+ ```
+
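+For reference, the `outOfStockError` used above could be predefined under `use.errors` like this (a sketch; the type
+URI and status are illustrative):
+
+```yaml
+use:
+  errors:
+    outOfStockError:
+      type: https://example.com/errors/out-of-stock
+      status: 422
+      title: Out of Stock
+```
+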
+## Catching and Handling Errors (`Try...Catch`)
+
+The primary mechanism for handling errors is the [`Try` task](/docs/error-handling/try/). A `Try` task consists of:
+
+1. A `try` block containing the tasks that might raise errors
+2. A `catch` block defining how to handle those errors
+
+```yaml
+- myTryTask:
+ try:
+ # Block of tasks to attempt
+ - taskToTry:
+ # ... might raise an error ...
+ catch:
+ # Block defining how to handle errors from 'try'
+ errors:
+ with:
+ type: ".../specificErrorType" # 1. Filter which errors to catch
+      when: "${ $myError.instance == '/do/0/taskToTry' }" # 2. Conditional catch (uses the variable named by 'as')
+ as: myError # 3. Store error object
+ retry: myRetryPolicy # 4. Optionally retry
+ do: # 5. Execute compensation logic
+ - logError:
+ set:
+ errorMessage: "${ $myError.detail }"
+ - setFallback:
+ set:
+ status: "error"
+ message: "Operation failed: ${ $myError.detail }"
+```
+
+### Error Handling Components
+
+1. **Filtering (`catch.errors.with`)**:
+ * Specify criteria (`type`, `status`, etc.) to only catch specific errors
+ * If omitted, all errors are candidates for catching (subject to `when`/`exceptWhen`)
+
+2. **Conditional Catch (`catch.when`, `catch.exceptWhen`)**:
+ * Further refine catching logic using runtime expressions
+ * Expressions are evaluated against the error object (available via `catch.as` variable)
+
+3. **Error Access (`catch.as`)**:
+ * The caught error object is made available within the `catch` block scope
+ * Used in `when`, `exceptWhen`, `retry`, and `do` blocks
+ * Default variable name is `$error` if not specified
+
+4. **Retries (`catch.retry`)**:
+ * If an error is caught, a retry policy can be defined
+ * The retry policy determines how many times to retry and with what delay
+ * The *entire* `try` block is re-executed after the delay
+   * See [`Try` task documentation](/docs/error-handling/try/) for retry policy details (a sketch follows this list)
+
+5. **Compensation (`catch.do`)**:
+ * Executed if an error is caught and not retried (or retries are exhausted)
+ * Allows for cleanup, logging, or setting fallback values
+ * The output of this block becomes the output of the `Try` task
+
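+As referenced above, a retry policy might be sketched like this (assuming the retry schema from the DSL reference;
+exact fields may vary by runtime):
+
+```yaml
+catch:
+  errors:
+    with:
+      type: https://serverlessworkflow.io/spec/1.0.0/errors/communication
+  retry:
+    delay:
+      seconds: 2      # initial delay between attempts
+    backoff:
+      exponential: {} # increase the delay on each attempt
+    limit:
+      attempt:
+        count: 3      # give up after three attempts
+```
+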
+## Error Propagation
+
+Error propagation follows these rules:
+
+1. An error propagates upwards if:
+ * It occurs outside any `Try` task, OR
+ * It occurs within a `try` block but is *not* matched by the corresponding `catch` filters, OR
+ * It is caught but retries are exhausted and there is no `catch.do` block
+
+2. The runtime searches the parent scope for another enclosing `Try` task:
+ * If found, the error is handled by the outer `Try` task's logic
+ * If not found, the error continues propagating
+
+3. If the error propagates to the top level without being caught:
+ * The entire workflow execution **faults**
+ * The workflow terminates unsuccessfully
+ * The uncaught error is reported in the workflow result
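+
+A minimal sketch of these rules with nested `Try` tasks (the endpoint and task names are illustrative): the inner
+`catch` only matches communication errors, so any other error (e.g. a validation error) bubbles up to the outer `Try`.
+
+```yaml
+do:
+  - outerGuard:
+      try:
+        - innerGuard:
+            try:
+              - riskyCall:
+                  call: http
+                  with:
+                    method: get
+                    endpoint: https://api.example.com/resource
+            catch:
+              errors:
+                with:
+                  type: https://serverlessworkflow.io/spec/1.0.0/errors/communication
+              do:
+                - handleCommunication:
+                    set:
+                      status: "degraded"
+      catch:
+        # Errors not matched by the inner catch land here
+        do:
+          - handleAnyOther:
+              set:
+                status: "failed"
+```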
\ No newline at end of file
diff --git a/src/content/docs/docs/core-concepts/flow-control.mdx b/src/content/docs/docs/core-concepts/flow-control.mdx
new file mode 100644
index 0000000..d4a30f4
--- /dev/null
+++ b/src/content/docs/docs/core-concepts/flow-control.mdx
@@ -0,0 +1,204 @@
+---
+title: Flow Control and Directives
+sidebar:
+ order: 20
+---
+
+{/* Examples are validated */}
+
+## Purpose
+
+Flow control determines the order in which tasks are executed within a Serverless Workflow. While the default behavior
+is simple sequential execution, the DSL provides explicit mechanisms, primarily through the `then` directive, to create
+more complex and conditional execution paths.
+
+## Default Flow: Sequential Execution
+
+By default, if no explicit flow control is specified, tasks within a sequence (like the top-level `do` block or the
+block within a `Do` task) execute in the order they are declared in the YAML file.
+
+```yaml
+document:
+ dsl: '1.0.0'
+ # ...
+do:
+ - validateOrder:
+ # ...
+
+ - checkInventory:
+ # ...
+
+ - processPayment:
+ # ...
+```
+
+In this example, `validateOrder`, `checkInventory`, and `processPayment` run one after the other.
+
+## Conditional Execution: The `if` Property
+
+All tasks support an optional `if` property, which allows for conditional execution based on the current state.
+
+* **Type**: `string` (Runtime Expression)
+* **Required**: No
+
+The value of the `if` property is a [Runtime Expression](/docs/core-concepts/runtime-expressions/) that **must** evaluate to a
+boolean (`true` or `false`).
+
+* **Evaluation**: The expression is evaluated *before* the main logic of the task begins, using the task's *transformed
+ input* and the current context.
+* **Effect**:
+ * If the expression evaluates to `true` (or if the `if` property is omitted), the task executes normally.
+  * If the expression evaluates to `false`, the entire task is skipped and the workflow continues as if the task did
+    not exist (e.g., the workflow *does not follow* the skipped task's `then` directive to determine the next step).
+
+```yaml
+document:
+ dsl: '1.0.0'
+ # ...
+do:
+ - checkInput:
+ set:
+ # Assume input might be { "process": true } or { "process": false }
+ message: "Checking input..."
+ - conditionalTask:
+ if: "${ .process }" # Only run if input field 'process' is true
+ call: doSomethingImportant
+    # 'then' is not followed if 'if' is false
+ then: nextStep
+ - taskAfterConditional: # This task is skipped if conditionalTask runs
+ call: log
+ with:
+ message: "Conditional task was skipped."
+ then: nextStep
+ - nextStep:
+ # ... execution continues here ...
+```
+
+## Continuation: The `then` Directive
+
+The `then` property can be added to most task definitions to explicitly control which task executes next after the
+current task completes successfully.
+
+* **Type**: `string` (Task Name) | (`continue`, `exit`, `end`)
+* **Required**: No
+
+### Targeting a Specific Task
+
+You can provide the **name** of another task within the *same scope* (i.e., at the same level in the `do` list) as the
+value for `then`. This creates a jump or branch in the execution path.
+
+```yaml
+do:
+ - start:
+ set: { value: 1 }
+ then: processValue # Jump to processValue after start
+ - skippedTask:
+ # This task is skipped because 'start' jumps over it
+ set: { value: 2 }
+ - processValue:
+ set: { processed: "${ .value * 10 }" }
+ # Default: continues sequentially to 'finish'
+ - finish:
+ call: log
+ with: { result: "${ .processed }" }
+```
+
+A critical rule is that a `then` directive specifying a task name **can only target tasks declared within the same
+scope (level)**. You cannot use `then` to jump *into* or *out of* nested `Do` blocks or other flow constructs directly
+by name.
+
+```yaml
+do:
+ - outerTask1:
+ do: # Inner Scope 1
+ - innerTaskA:
+ # ...
+ # then: outerTask2 # INVALID: Cannot target task outside inner scope
+ - innerTaskB:
+ # ...
+ then: exit # VALID: Exits Inner Scope 1
+ - outerTask2:
+ # ...
+```
+
+To achieve jumps across scopes, you typically use `exit` to return control to the parent scope, which can then direct
+the flow using its own sequence or `then` directives.
+
+### Using Flow Directives
+
+Instead of a task name, `then` can use specific keywords (Flow Directives) to control the flow in predefined ways:
+
+1. **`continue`** (Default):
+ * **Meaning**: Explicitly specifies the default behavior - proceed to the next task in the declaration order.
+ * **Usage**: Rarely needed unless overriding a default behavior in a specific context or for clarity.
+ ```yaml
+ - taskX:
+ # ...
+ then: continue # Explicitly go to taskY next
+ - taskY:
+ # ...
+ ```
+   * **Data Flow**: as usual, the `raw input` of the next task is set to the `transformed output` of the task that
+     called `continue`.
+
+2. **`exit`**:
+ * **Meaning**: Stops processing the *current* sequence of tasks (e.g., the current `Do` block or `For` loop
+ iteration) and transfers control back to the parent flow construct. The parent then determines its next step (
+ often based on its *own* `then` directive or sequence).
+ * **Usage**: Useful for early termination of a sub-flow or loop based on a condition met within it.
+ ```yaml
+ - parentTask:
+ do:
+ - checkCondition:
+ if: "${ .status == 'done' }"
+ then: exit # Exit this inner 'do' block
+ - processNormally: # Skipped if condition was 'done'
+ # ...
+ - taskAfterExit: # Skipped if condition was 'done'
+ # ...
+ then: finalStep # Parent continues here if inner block exited or completed normally
+ - finalStep:
+ # ...
+ ```
+ * **Data Flow**: The `raw output` of the parent task is set to the `transformed output` of the task that called
+ `exit`.
+
+3. **`end`**:
+ * **Meaning**: Gracefully terminates the execution of the *entire* workflow instance immediately.
+ * **Usage**: Used for scenarios where processing should stop completely based on a certain condition or outcome,
+ without executing any further tasks.
+ ```yaml
+ - criticalCheck:
+ call: getSystemStatus
+ - decideTermination:
+ if: "${ .status == 'FATAL' }"
+ then: end # Stop the whole workflow
+ - continueNormalProcessing: # Skipped if status was FATAL
+ # ...
+ ```
+   * **Data Flow**: the `raw output` of the workflow instance is set to the `transformed output` of the task that
+     called `end`.
+
+## Task-Specific Flow Control
+
+Some tasks inherently control the flow:
+
+* **`Switch`**: Evaluates `when` conditions and uses the `then` directive of the *first matching case* to determine the
+ next task.
+
+* **`Try`**: If an error is caught and handled by a `catch.do` block, the flow continues after the `Try` task based on
+ the `Try` task's own `then` directive. If an error is caught and retried, the flow loops back to the beginning of the
+ `try` block. If an uncaught error occurs, the flow is interrupted.
+
+* **`Raise`**: Immediately interrupts the normal flow and transfers control to the error handling mechanism (searching
+ for a `Try`). Its `then` directive is ignored.
+
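+For instance, a `Raise` task's `then` is never followed (a minimal sketch; the error name is illustrative and assumed
+to be defined in `use.errors`):
+
+```yaml
+- failFast:
+    raise:
+      error: outOfStock  # raising transfers control to error handling
+    then: neverReached   # ignored: a Raise task's 'then' is never followed
+```
+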
+## Potential Errors
+
+* **Target Not Found**: If `then` specifies a task name that does not exist within the same scope, the workflow will
+ raise a `Configuration` error (e.g., `https://serverlessworkflow.io/spec/1.0.0/errors/configuration`).
+
+* **Invalid Directive**: Using an unknown string (that isn't a valid task name or `continue`/`exit`/`end`) in the `then`
+ property will also cause the workflow to raise a `Configuration` error.
diff --git a/src/content/docs/docs/core-concepts/lifecycle-events.mdx b/src/content/docs/docs/core-concepts/lifecycle-events.mdx
new file mode 100644
index 0000000..fcd994c
--- /dev/null
+++ b/src/content/docs/docs/core-concepts/lifecycle-events.mdx
@@ -0,0 +1,354 @@
+---
+title: Lifecycle Events
+sidebar:
+ order: 60
+---
+
+## Introduction
+
+Lifecycle events are standardized [CloudEvents](https://cloudevents.io/) that provide visibility into the state changes
+of workflows and tasks throughout their execution. These events offer valuable insights for monitoring, auditing, and
+building reactive systems that respond to workflow states.
+
+> **Lifecycle Events vs. Regular Events**
+>
+> Lifecycle events are fundamentally different from the regular events used by the `Emit` and `Listen` workflow tasks:
+>
+> - **Lifecycle Events** are automatically emitted by the runtime to track the state of workflows and tasks.
+>   They are meant for monitoring and observability purposes. Do not try to emit lifecycle events using `Emit` tasks.
+> - **Regular Events** are explicitly used in your workflow via `Emit` and `Listen` tasks, for workflow control flow
+>   and inter-workflow communication.
+>
+
+These lifecycle events follow the [CloudEvents specification](https://github.com/cloudevents/spec), promoting
+interoperability with other systems and standardized event handling.
+
+## Event Publishing
+
+Runtimes implementing the Serverless Workflow specification are expected to publish these events upon state changes.
+While the recommended approach is using
+the [HTTP protocol binding](https://github.com/cloudevents/spec/blob/v1.0.2/cloudevents/bindings/http-protocol-binding.md)
+with [structured content mode](https://github.com/cloudevents/spec/blob/v1.0.2/cloudevents/bindings/http-protocol-binding.md#32-structured-content-mode),
+other transports adhering to the CloudEvents specification may be used.
+
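+For example, in structured content mode a runtime might POST each event with a `Content-Type` of
+`application/cloudevents+json` and a body like the following (the field values are illustrative):
+
+```json
+{
+  "specversion": "1.0",
+  "id": "9c6e1f3a-2f3b-4d2e-9c2a-1a2b3c4d5e6f",
+  "source": "https://runtime.example.com/workflows/com.example.orders",
+  "type": "io.serverlessworkflow.workflow.started.v1",
+  "time": "2023-11-15T14:30:00Z",
+  "datacontenttype": "application/json",
+  "data": {
+    "workflowId": "order-processing-12345",
+    "namespace": "com.example.orders",
+    "name": "process-order",
+    "version": "1.2.0",
+    "startedAt": "2023-11-15T14:30:00Z"
+  }
+}
+```
+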
+## Workflow Lifecycle Events
+
+Workflow lifecycle events track the state of workflow instances as they progress from creation to completion. Each event
+carries consistent information including workflow identity, status transitions, timestamps, and relevant metadata.
+
+| Event Type | Description | Required |
+|-----------------------------------------------------------|--------------------------------------------------------|:--------:|
+| `io.serverlessworkflow.workflow.started.v1` | Emitted when a workflow instance starts execution | Yes |
+| `io.serverlessworkflow.workflow.suspended.v1` | Emitted when a workflow execution is suspended | Yes |
+| `io.serverlessworkflow.workflow.resumed.v1` | Emitted when a suspended workflow resumes execution | Yes |
+| `io.serverlessworkflow.workflow.correlation-started.v1` | Emitted when a workflow begins correlating events | Yes |
+| `io.serverlessworkflow.workflow.correlation-completed.v1` | Emitted when a workflow completes an event correlation | Yes |
+| `io.serverlessworkflow.workflow.cancelled.v1` | Emitted when a workflow execution is cancelled | Yes |
+| `io.serverlessworkflow.workflow.faulted.v1` | Emitted when a workflow encounters an unhandled error | Yes |
+| `io.serverlessworkflow.workflow.completed.v1` | Emitted when a workflow successfully completes | Yes |
+| `io.serverlessworkflow.workflow.status-changed.v1` | Emitted when a workflow's status phase changes | No |
+
+### Workflow Started Event
+
+The `io.serverlessworkflow.workflow.started.v1` event is emitted when a workflow instance begins execution. It typically
+includes:
+
+- **Workflow identity** (ID, namespace, name, version)
+- **Timestamp** when the workflow started
+- **Input data** (optional, may be excluded for privacy/security)
+- **Parent workflow** information (if started by another workflow)
+- **Correlation IDs** (if applicable)
+
+Example event data:
+
+```json
+{
+ "workflowId": "order-processing-12345",
+ "namespace": "com.example.orders",
+ "name": "process-order",
+ "version": "1.2.0",
+ "startedAt": "2023-11-15T14:30:00Z",
+ "parentWorkflowId": "customer-onboarding-789",
+ "input": {
+ "orderId": "ORD-12345",
+ "customerId": "CUST-6789"
+ }
+}
+```
+
+### Workflow Completed Event
+
+The `io.serverlessworkflow.workflow.completed.v1` event is emitted when a workflow successfully completes all its tasks.
+It typically includes:
+
+- **Workflow identity** (ID, namespace, name, version)
+- **Timestamps** for both start and completion
+- **Output data** (optional, may be excluded for privacy/security)
+- **Execution duration**
+- **Metrics** about the workflow execution (optional)
+
+Example event data:
+
+```json
+{
+ "workflowId": "order-processing-12345",
+ "namespace": "com.example.orders",
+ "name": "process-order",
+ "version": "1.2.0",
+ "startedAt": "2023-11-15T14:30:00Z",
+ "completedAt": "2023-11-15T14:35:27Z",
+ "durationMs": 327000,
+ "output": {
+ "orderId": "ORD-12345",
+ "status": "COMPLETED",
+ "paymentId": "PAY-87654"
+ },
+ "metrics": {
+ "tasksExecuted": 5,
+ "retries": 1
+ }
+}
+```
+
+### Workflow Correlation Events
+
+The correlation events provide visibility into how workflows interact with external events:
+
+- **Correlation Started**: Emitted when a workflow begins waiting for specific events with correlation criteria.
+- **Correlation Completed**: Emitted when a workflow receives all required correlated events.
+
+The Correlation Completed event is particularly useful as it includes `correlationKeys` that show which specific data
+values were matched.
+
+Example correlation completed event data:
+
+```json
+{
+ "workflowId": "payment-processing-67890",
+ "namespace": "com.example.payments",
+ "name": "process-payment",
+ "version": "1.0.0",
+ "correlationKeys": {
+ "matchOrderId": "ORD-12345",
+ "matchTransactionType": "PURCHASE"
+ },
+ "receivedAt": "2023-11-15T14:32:15Z"
+}
+```
+
+## Task Lifecycle Events
+
+Task lifecycle events provide detailed information about the execution of individual tasks within a workflow. These
+events help in monitoring task progress, identifying bottlenecks, and troubleshooting issues.
+
+| Event Type | Description | Required |
+|------------------------------------------------|----------------------------------------------|:--------:|
+| `io.serverlessworkflow.task.created.v1` | Emitted when a task is created | Yes |
+| `io.serverlessworkflow.task.started.v1` | Emitted when a task begins execution | Yes |
+| `io.serverlessworkflow.task.suspended.v1` | Emitted when a task is suspended | Yes |
+| `io.serverlessworkflow.task.resumed.v1` | Emitted when a suspended task resumes | Yes |
+| `io.serverlessworkflow.task.retried.v1` | Emitted when a task is retried after failure | Yes |
+| `io.serverlessworkflow.task.cancelled.v1` | Emitted when a task is cancelled | Yes |
+| `io.serverlessworkflow.task.faulted.v1` | Emitted when a task encounters an error | Yes |
+| `io.serverlessworkflow.task.completed.v1` | Emitted when a task successfully completes | Yes |
+| `io.serverlessworkflow.task.status-changed.v1` | Emitted when a task's status phase changes | No |
+
+### Task Started Event
+
+The `io.serverlessworkflow.task.started.v1` event is emitted when a task begins execution. It typically includes:
+
+- **Task identity** (reference, type)
+- **Workflow identity** (ID, namespace, name, version)
+- **Timestamp** when the task started
+- **Input data** (optional, may be excluded for privacy/security)
+
+Example event data:
+
+```json
+{
+ "taskReference": "validateOrder",
+ "taskType": "function",
+ "workflowId": "order-processing-12345",
+ "namespace": "com.example.orders",
+ "name": "process-order",
+ "version": "1.2.0",
+ "startedAt": "2023-11-15T14:30:05Z",
+ "input": {
+ "order": {
+ "id": "ORD-12345",
+ "items": [
+ {
+ "id": "ITEM-1",
+ "quantity": 2
+ },
+ {
+ "id": "ITEM-2",
+ "quantity": 1
+ }
+ ]
+ }
+ }
+}
+```
+
+### Task Completed Event
+
+The `io.serverlessworkflow.task.completed.v1` event is emitted when a task successfully completes. It typically
+includes:
+
+- **Task identity** (reference, type)
+- **Workflow identity** (ID, namespace, name, version)
+- **Timestamps** for both start and completion
+- **Output data** (optional, may be excluded for privacy/security)
+- **Execution duration**
+
+Example event data:
+
+```json
+{
+ "taskReference": "validateOrder",
+ "taskType": "function",
+ "workflowId": "order-processing-12345",
+ "namespace": "com.example.orders",
+ "name": "process-order",
+ "version": "1.2.0",
+ "startedAt": "2023-11-15T14:30:05Z",
+ "completedAt": "2023-11-15T14:30:07Z",
+ "durationMs": 2000,
+ "output": {
+ "isValid": true,
+ "validatedItems": [
+ {
+ "id": "ITEM-1",
+ "quantity": 2
+ },
+ {
+ "id": "ITEM-2",
+ "quantity": 1
+ }
+ ]
+ }
+}
+```
+
+### Task Faulted Event
+
+The `io.serverlessworkflow.task.faulted.v1` event is emitted when a task encounters an error. This event is particularly
+valuable for monitoring and troubleshooting. It typically includes:
+
+- **Task identity** (reference, type)
+- **Workflow identity** (ID, namespace, name, version)
+- **Error details** (type, status, message)
+- **Timestamps** for start and fault occurrence
+- **Input data** that caused the fault (optional)
+
+Example event data:
+
+```json
+{
+ "taskReference": "processPayment",
+ "taskType": "function",
+ "workflowId": "order-processing-12345",
+ "namespace": "com.example.orders",
+ "name": "process-order",
+ "version": "1.2.0",
+ "startedAt": "2023-11-15T14:32:10Z",
+ "faultedAt": "2023-11-15T14:32:12Z",
+ "error": {
+ "type": "https://serverlessworkflow.io/spec/1.0.0/errors/validation",
+ "status": 400,
+ "detail": "Invalid payment information: Credit card expired",
+ "instance": "/do/processPayment"
+ }
+}
+```
+
+### Task Retried Event
+
+The `io.serverlessworkflow.task.retried.v1` event is emitted when a task is retried after a failure. It includes:
+
+- **Task identity** (reference, type)
+- **Workflow identity** (ID, namespace, name, version)
+- **Error details** that triggered the retry
+- **Retry information** (attempt number, delay)
+
+Example event data:
+
+```json
+{
+ "taskReference": "sendNotification",
+ "taskType": "function",
+ "workflowId": "order-processing-12345",
+ "namespace": "com.example.orders",
+ "name": "process-order",
+ "version": "1.2.0",
+ "retriedAt": "2023-11-15T14:33:15Z",
+ "attemptNumber": 2,
+ "delayMs": 1000,
+ "previousError": {
+ "type": "https://serverlessworkflow.io/spec/1.0.0/errors/communication",
+ "status": 503,
+ "detail": "Notification service temporarily unavailable"
+ }
+}
+```
+
+## Use Cases for Lifecycle Events
+
+Lifecycle events enable several important use cases:
+
+1. **Workflow Monitoring and Observability**
+ - Real-time dashboards showing workflow execution status
+ - Identifying bottlenecks or slow-running tasks
+ - Tracking workflow completion rates and durations
+
+2. **Auditing and Compliance**
+ - Recording a complete history of workflow and task executions
+ - Tracking who or what initiated workflows
+ - Providing evidence for regulatory compliance requirements
+
+3. **Event-Driven Reactions**
+ - Triggering compensating workflows when a workflow faults
+ - Notifying systems or users when workflows reach certain states
+ - Updating external systems based on workflow progress
+
+4. **Debugging and Troubleshooting**
+ - Investigating the root cause of workflow failures
+ - Tracking the execution path through complex workflow definitions
+ - Identifying patterns in task retries or failures
+
+## Consuming Lifecycle Events
+
+Lifecycle events can be consumed by:
+
+1. **Event Processors** - Systems that subscribe to and process these events can perform a wide range of actions, from
+ generating metrics to initiating other workflows.
+
+2. **Monitoring Tools** - Specialized monitoring tools can provide dashboards and alerts based on lifecycle events.
+
+3. **Audit Repositories** - Events can be stored in audit repositories for historical record-keeping and analysis.
+
+4. **Custom Applications** - Applications can subscribe to specific events to perform business-specific actions.
+
+## Status Changed Events
+
+The `status-changed` events (`io.serverlessworkflow.workflow.status-changed.v1` and
+`io.serverlessworkflow.task.status-changed.v1`) are optional convenience events. They:
+
+- Contain minimal data (primarily just the status transition)
+- Provide a lightweight alternative for consumers who only need to track status changes
+- May be emitted in addition to the more specific lifecycle events
+
+Since these events add additional overhead for runtime implementations and most status changes are already covered by
+the main lifecycle events, their implementation is optional.
+
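+A hypothetical payload for such an event might be as small as the following (the field names are illustrative, not
+mandated by the specification):
+
+```json
+{
+  "workflowId": "order-processing-12345",
+  "previousStatus": "running",
+  "newStatus": "suspended",
+  "changedAt": "2023-11-15T14:31:00Z"
+}
+```
+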
+## Conclusion
+
+Lifecycle events provide a comprehensive way to gain visibility into workflow and task execution. By leveraging these
+standardized events, developers can build more observable, auditable, and reactive systems that respond intelligently to
+workflow state changes. Runtime implementations adhering to the Serverless Workflow specification provide these events
+as part of their standard behavior, enabling consistent monitoring and integration patterns across different
+environments.
\ No newline at end of file
diff --git a/src/content/docs/docs/core-concepts/overview.mdx b/src/content/docs/docs/core-concepts/overview.mdx
new file mode 100644
index 0000000..b8bdf52
--- /dev/null
+++ b/src/content/docs/docs/core-concepts/overview.mdx
@@ -0,0 +1,25 @@
+---
+title: Core Concepts Overview
+sidebar:
+ order: 0
+ label: Overview
+---
+{/* Examples are validated */}
+
+## Introduction
+
+Before diving into specific workflow tasks, it's essential to understand the fundamental concepts that underpin the Serverless Workflow DSL. These core concepts define how data moves, how dynamic values are evaluated, how execution proceeds, and how errors are managed within a workflow.
+
+Mastering these concepts will enable you to design more effective, robust, and maintainable workflows that follow best practices and industry standards.
+
+## Concepts Covered
+
+This section covers the following critical mechanisms:
+
+* **[Data Flow](/docs/core-concepts/data-flow-management/):** Explains how data is initialized, manipulated, passed between tasks, and ultimately influences the workflow's outcome. Covers the workflow context, task inputs/outputs, and data filtering/transformation.
+* **[Runtime Expressions](/docs/core-concepts/runtime-expressions/):** Details the syntax and usage of expressions evaluated *during* workflow execution. These are crucial for accessing data, making decisions, and dynamically configuring tasks based on the current context.
+* **[Flow Control](/docs/core-concepts/flow-control/):** Describes the mechanisms that dictate the sequence of execution, including conditional branching (`Switch`), looping (`For`), parallel execution (`Fork`), and sequential progression (`Do`, `then`).
+* **[Error Handling](/docs/core-concepts/error-handling/):** Covers how workflows detect, manage, and recover from errors using mechanisms like `Try/Catch` blocks, standard error types, and custom error definitions (`Raise`).
+* **[Lifecycle Events](/docs/core-concepts/lifecycle-events/):** Explains the standardized events emitted during workflow and task execution, providing observability into state changes and enabling monitoring, auditing, and event-driven reactions to workflow status.
+
+Understanding these building blocks provides the foundation necessary to effectively utilize the various [Tasks](/docs/control-flow/overview/) and [Event](/docs/event-tasks/overview/) handling capabilities of the DSL.
\ No newline at end of file
diff --git a/src/content/docs/docs/core-concepts/runtime-expressions.mdx b/src/content/docs/docs/core-concepts/runtime-expressions.mdx
new file mode 100644
index 0000000..55f1c9a
--- /dev/null
+++ b/src/content/docs/docs/core-concepts/runtime-expressions.mdx
@@ -0,0 +1,131 @@
+---
+title: Runtime Expressions
+sidebar:
+ order: 40
+---
+
+## Purpose
+
+Runtime expressions are dynamic elements within the Serverless Workflow DSL that enable flexible and adaptable workflow
+behaviors. They provide a powerful way to:
+
+* Access and manipulate data during workflow execution (e.g., input data, context data, task outputs).
+* Evaluate conditions for tasks like `if`, `while` (in `For` loops), and `when` (in `Switch` and `Try` tasks).
+* Transform data structures using `input.from`, `output.as`, and `export.as`.
+* Dynamically configure task parameters (e.g., construct API request bodies or URIs).
+
+## Syntax and Evaluation
+
+Expressions are written using the syntax of the configured expression language; by default, this is jq.
+
+* **jq Syntax**: Expressions typically access data using dot notation (e.g., `.fieldName`, `.user.id`).
+* **DSL Embedding**: Within the YAML workflow definition, expressions are embedded within strings wrapped in
+  `${ ... }`. The runtime automatically identifies and evaluates these.
+  * Example: `message: "${ .user.name } processed item ${ .item.id }"`
+* **Strict vs. Loose Mode**: In the default `strict` mode, only strings enclosed in `${ ... }` are treated as
+  expressions. In `loose` mode, the runtime attempts to evaluate any string value as an expression. The mode is
+  configured via the workflow's top-level `evaluate.mode` property.
+
+## Expression Language
+
+* **Default**: The default expression language is **jq**
+  ([`jq` documentation](https://jqlang.github.io/jq/)).
+* **Configuration**: Runtimes *may* support other languages. If so, the language for a workflow can typically be
+ specified using the top-level `evaluate.language` property in the workflow definition (refer to the
+ [Resources Configuration Overview](/docs/resources-configuration/overview/)).
+
+## Available Arguments
+
+During evaluation, the runtime makes several predefined variables (arguments) available within the expression scope.
+These provide access to the current state of the workflow and task execution.
+
+* **`.` (Dot)**: In JQ, the single dot (`.`) usually represents the **primary input data** being evaluated by the
+ expression. For example:
+ * In `task.input.from`, `.` is the task's *raw input*.
+ * In `task.output.as`, `.` is the task's *raw output*.
+ * In `task.export.as`, `.` is the task's *transformed output*.
+ * In `task.if`, `.` is the task's *transformed input*.
+ * *Avoid confusion*: While `.` represents the immediate data, use the explicit arguments like `$input`, `$output`,
+ `$context` when you need to be certain about which piece of data you are accessing, especially in complex
+ expressions.
+
+* **`$context`**: (Object) The workflow's current shared context data. This is modified by `task.export.as`.
+* **`$input`**: (Any) The task's **transformed input** (i.e., after `task.input.from` has been applied).
+* **`$output`**: (Any) The task's **transformed output** (i.e., after `task.output.as` has been applied).
+* **`$secrets`**: (Object) A key/value map of secrets available to the workflow. **Caution**: Use with care, as
+ embedding secrets directly in expressions or passing them as inputs might expose sensitive data in logs or outputs.
+* **`$authorization`**: (AuthorizationDescriptor) Describes the resolved authorization details if the task uses
+ authentication.
+* **`$task`**: (TaskDescriptor) Describes the current task being executed.
+* **`$workflow`**: (WorkflowDescriptor) Describes the current workflow instance.
+* **`$runtime`**: (RuntimeDescriptor) Describes the runtime environment executing the workflow.
+
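+A small sketch using several arguments together (the data shapes are illustrative):
+
+```yaml
+- buildAuditRecord:
+    set:
+      user: "${ $input.user.name }"                 # transformed task input
+      correlationId: "${ $context.correlationId }"  # shared workflow context
+      taskName: "${ $task.name }"                   # current task descriptor
+      startedAt: "${ $workflow.startedAt.iso8601 }" # workflow descriptor
+```
+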
+### Argument Structures
+
+These structures define the data available within the descriptor arguments.
+
+#### Runtime Descriptor (`$runtime`)
+
+* `name`: (String) Runtime name (e.g., "my-runtime").
+* `version`: (String) Runtime version (e.g., "1.0.0").
+* `metadata`: (Object) Implementation-specific key-value pairs.
+
+#### Workflow Descriptor (`$workflow`)
+
+* `id`: (String) Unique ID of the current workflow execution.
+* `definition`: (Object) The parsed workflow definition.
+* `input`: (Any) The *raw* workflow input (before `workflow.input.from`).
+* `startedAt`: (DateTimeDescriptor) Workflow start time.
+
+#### Task Descriptor (`$task`)
+
+* `name`: (String) The task's name (e.g., "processData").
+* `reference`: (String) The task's unique path identifier (e.g., "/do/1/myTask").
+* `definition`: (Object) The parsed task definition.
+* `input`: (Any) The task's *raw* input (before `task.input.from`).
+* `output`: (Any) The task's *raw* output (before `task.output.as`). Note: This is only available *after* the task
+ executes, primarily for `output.as` and `export.as` expressions.
+* `startedAt`: (DateTimeDescriptor) Task start time.
+
+#### Authorization Descriptor (`$authorization`)
+
+* `scheme`: (String) Resolved scheme (e.g., "Bearer").
+* `parameter`: (String) Resolved parameter (e.g., the token).
+
+#### DateTime Descriptor (used within `$workflow.startedAt`, `$task.startedAt`)
+
+* `iso8601`: (String) ISO 8601 formatted date-time string (e.g., "2023-10-27T10:00:00Z").
+* `epoch.seconds`: (Integer) Seconds since Unix epoch.
+* `epoch.milliseconds`: (Integer) Milliseconds since Unix epoch.
+
+### Argument Availability
+
+The following table shows which arguments are available when evaluating different DSL properties:
+
+| Runtime Expression | Evaluated On Data (`.`) | Produces | `$context` | `$input` | `$output` | `$secrets` | `$task` | `$workflow` | `$runtime` | `$authorization` |
+|:----------------------|:-------------------------------------------|:---------------------------|:-----------|:---------|:----------|:-----------|:--------|:------------|:-----------|:-----------------|
+| Workflow `input.from` | Raw workflow input | Transformed workflow input | | | | ✔ | | ✔ | ✔ | |
+| Task `input.from` | Raw task input (output from previous task) | Transformed task input | ✔ | | | ✔ | ✔ | ✔ | ✔ | |
+| Task `if` | Transformed task input | Boolean | ✔ | ✔ | | ✔ | ✔ | ✔ | ✔ | |
+| Task definition\* | Transformed task input | Task execution parameters | ✔ | ✔ | | ✔ | ✔ | ✔ | ✔ | ✔ |
+| Task `output.as` | Raw task output | Transformed task output | ✔ | ✔ | | ✔ | ✔ | ✔ | ✔ | ✔ |
+| Task `export.as` | Transformed task output | New `$context` value | ✔ | ✔ | ✔ | ✔ | ✔ | ✔ | ✔ | ✔ |
+| Workflow `output.as` | Transformed output of the *last* task | Final workflow output | ✔ | | | ✔ | | ✔ | ✔ | |
+
+\* *Refers to expressions used within a task's configuration, like constructing an HTTP body or URI.*
+
+**Note on `$secrets`**: While available in many places, it's generally safest to restrict usage primarily to
+`task.input.from` or specific configurations (like authentication blocks) where the runtime can handle them securely,
+rather than embedding them directly into general-purpose fields like log messages or HTTP bodies.
+
+## Error Handling
+
+If the evaluation of a runtime expression fails (e.g., due to invalid syntax, accessing a non-existent field in JQ), the
+runtime raises a standard error:
+
+* **Type**: `https://serverlessworkflow.io/spec/1.0.0/errors/expression`
+* **Status**: `400` (Bad Request)
+
+This error can be caught and handled using a [`Try`](/docs/error-handling/try/) task.
\ No newline at end of file
diff --git a/src/content/docs/docs/core-concepts/workflow-definition.mdx b/src/content/docs/docs/core-concepts/workflow-definition.mdx
new file mode 100644
index 0000000..6b70d12
--- /dev/null
+++ b/src/content/docs/docs/core-concepts/workflow-definition.mdx
@@ -0,0 +1,147 @@
+---
+title: Workflow Definition Structure
+sidebar:
+ order: 10
+---
+
+## Introduction
+
+This page describes the top-level structure and properties of a Serverless Workflow definition file, typically written
+in YAML. Understanding this overall structure is the first step in creating your own workflows.
+
+A workflow definition file acts as the blueprint for your automated process, defining metadata, reusable resources,
+scheduling information (if any), and the core execution logic.
+
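+A minimal sketch of a complete definition (the task content is illustrative):
+
+```yaml
+document:
+  dsl: '1.0.0'
+  namespace: examples
+  name: minimal-greeting
+  version: '1.0.0'
+do:
+  - sayHello:
+      set:
+        message: "Hello, world!"
+```
+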
+## Top-Level Properties
+
+A Serverless Workflow definition consists of the following properties. Note that the descriptive properties below
+(`dsl`, `namespace`, `name`, `version`, `title`, `description`) are grouped under the top-level `document` property,
+as shown in the examples throughout these docs:
+
+* **`dsl`** (String, Required): Specifies the version of the Serverless Workflow DSL specification the document conforms
+ to (e.g., `'1.0.0'`). This ensures the workflow engine interprets the syntax correctly.
+
+* **`namespace`** (String, Required): Provides a logical grouping for the workflow, often used for organization,
+ identification, and potentially scoping of resources or events within a runtime environment (e.g.,
+ `'com.example.billing'`).
+
+* **`name`** (String, Required): The unique name of the workflow within its namespace (e.g., `'process-invoice'`).
+ `namespace` + `name` typically forms a unique workflow identifier.
+
+* **`version`** (String, Required): The version of this specific workflow definition (e.g., `'1.0.2'`). This allows for
+ versioning and evolution of workflows over time. In a given environment, the combination of `namespace`, `name`, and `version` must be unique to identify a specific workflow definition.
+
+* **`title`** (String, Optional): A short, human-readable title for the workflow (e.g.,
+ `'Invoice Processing Workflow'`).
+
+* **`description`** (String, Optional): A more detailed human-readable description of the workflow's purpose.
+
+* **`use`** (Optional): Defines reusable resources and definitions used throughout the workflow. This includes
+ things like function definitions, event definitions, authentication configurations, etc.
+ See [Resource Catalog](/docs/resources-configuration/resource-catalog/) for details.
+
+* **`do`** (Array\<Task\>, Required): Defines the core execution logic of the workflow as a sequence (or structure) of
+  Tasks. This is where the main steps, control flow, and actions are specified.
+
+* **`schedule`** (Object, Optional): Defines how the workflow should be triggered based on time
+ schedules or events. The following formats are supported:
+ * **Time-based Schedule (Cron)**: Uses standard cron syntax to execute workflows on a time-based schedule.
+ ```yaml
+ schedule:
+ cron: '0 0 * * *' # Run daily at midnight
+ ```
+ * **Event-based Schedule**: Specifies an event that triggers the workflow.
+ ```yaml
+ schedule:
+ - when:
+ event: EventName # References an event defined in use.events
+ start: firstTaskName # Optional: specifies which task to start with
+ ```
+ * **Multiple Trigger Schedules**: An array of scheduling conditions.
+ ```yaml
+ schedule:
+ - cron: '0 0 * * *' # Time-based trigger
+ - when: # Event-based trigger
+ event: EventName
+ ```
+
+ > **Multiple Workflow Instances and Scheduling**
+ >
+ > When a workflow definition includes a `schedule` property and multiple instances of this workflow are started:
+ >
+ > - Each workflow instance maintains its own independent schedule
+> - For time-based schedules with `cron` or `every`, each instance will execute at the specified times, potentially
+>   resulting in parallel executions
+> - For event-based schedules, each instance will independently listen for and react to the specified events
+> - When using `after` scheduling (which runs the workflow again after completion), each instance manages its own
+>   restart cycle
+
+
+* **`timeout`** (Optional): Defines the maximum duration the entire workflow instance is allowed to execute
+ before being timed out. This can prevent runaway workflows, resource leaks, or deadlock situations.
+ ```yaml
+ timeout:
+ hours: 1
+ minutes: 30 # Total timeout of 1 hour and 30 minutes
+ ```
+ See [Timeouts](/docs/resources-configuration/timeouts/) for more details on timeout configurations.
+
+* **`evaluate`** (Optional): Configures how runtime expressions are evaluated within the workflow. This allows
+ customization of the expression language and evaluation mode.
+ ```yaml
+ evaluate:
+ language: jq # The language used for runtime expressions (defaults to 'jq')
+ mode: strict # Evaluation mode: 'strict' requires expressions to be enclosed in ${ },
+ # while 'loose' evaluates any value (defaults to 'strict')
+ ```
+ These settings affect how all runtime expressions throughout the workflow are processed.
+
+* **`metadata`** (Map\<String, Any\>, Optional): An optional map of custom key-value pairs that can be used to attach arbitrary
+ metadata to the workflow definition (e.g., author, team, deployment environment tags). These values are typically used
+ for documentation, governance, or filtering workflows in management UIs, but don't affect runtime behavior.
+ ```yaml
+ metadata:
+ author: "Jane Doe"
+ team: "Order Processing Team"
+ environment: "production"
+ priority: "high"
+ lastReviewDate: "2023-10-15"
+ ```
+
+* **`extensions`** (Map\<String, Extension\>, Optional): Defines extensions that enhance or modify the behavior of tasks
+  in the workflow. Extensions can be used to implement cross-cutting concerns like logging, monitoring, or mocking. Each
+ extension consists of a name, specification for which tasks it extends, and the tasks to execute before and/or after
+ the extended task.
+ ```yaml
+ extensions:
+ - logging:
+ extend: all # Apply to all tasks
+ before:
+ - logTaskStart:
+ # Task to run before each extended task
+ after:
+ - logTaskEnd:
+ # Task to run after each extended task
+ ```
+
+* **`input`** (Object, Optional): Configures the validation and transformation of data coming into the workflow. This
+ allows for ensuring the workflow only receives properly structured input and can transform it into the format required
+ by the workflow.
+ ```yaml
+ input:
+ schema: # JSON Schema for validating incoming data
+ type: object
+ required: ["orderId", "customerId"]
+ properties:
+ orderId: { type: string }
+ customerId: { type: string }
+ items: { type: array }
+ from: "${ . | select(.items != null) }" # Transform the input before processing
+ ```
+
+* **`output`** (Object, Optional): Configures the filtering and transformation of data that the workflow will return.
+ This ensures the workflow produces consistent and properly formatted results.
+ ```yaml
+ output:
+ as: "${ { orderId: .orderId, status: .status, processedItems: .items | length } }"
+ ```
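+
+Putting the required pieces together, a minimal definition might look like the following sketch. Note that, as in the
+examples throughout this documentation, the identifying properties (`dsl`, `namespace`, `name`, `version`, `title`) sit
+under the `document` key:
+
+```yaml
+document:
+  dsl: '1.0.0'
+  namespace: com.example.billing
+  name: process-invoice
+  version: '1.0.2'
+  title: Invoice Processing Workflow
+do:
+  - recordInvoice:
+      set:
+        status: received
+```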
diff --git a/src/content/docs/docs/error-handling/overview.mdx b/src/content/docs/docs/error-handling/overview.mdx
new file mode 100644
index 0000000..d73e5d0
--- /dev/null
+++ b/src/content/docs/docs/error-handling/overview.mdx
@@ -0,0 +1,379 @@
+---
+title: Error Handling Overview
+sidebar:
+ order: 0
+ label: Overview
+---
+
+Error handling tasks provide mechanisms to manage exceptions, failures, and unexpected conditions in your workflows.
+They enable you to create resilient workflows that can detect, respond to, and recover from errors gracefully.
+
+## Error Handling Task Types
+
+| Task | Purpose |
+|----------------------------|----------------------------------------------------------|
+| [Try](/docs/error-handling/try/) | Execute tasks with error handling and recovery logic |
+| [Raise](/docs/error-handling/raise/) | Explicitly throw errors to signal exceptional conditions |
+
+## When to Use Error Handling Tasks
+
+### Exception Handling with Try
+
+Use the **Try** task when you need to:
+
+- Catch and handle potential errors from risky operations
+- Provide alternative execution paths when errors occur
+- Implement retry strategies for transient failures
+- Clean up resources after errors
+- Log and report error details
+- Structure error recovery workflows
+
+### Error Signaling with Raise
+
+Use the **Raise** task when you need to:
+
+- Signal that an exceptional condition has occurred
+- Abort the current execution path when validation fails
+- Create custom error types for specific failure scenarios
+- Provide detailed error context to upstream error handlers
+- Implement business rule validations that may fail
+
+## Try Task Examples
+
+### Basic Error Handling
+
+```yaml
+document:
+ dsl: '1.0.0'
+ namespace: examples
+ name: basic-error-handling
+ version: '1.0.0'
+do:
+ - processOrder:
+ try:
+ - validateOrder:
+ call: http
+ with:
+ endpoint: "https://api.example.com/validate-order"
+ method: "POST"
+ body: ${ .input.order }
+ headers:
+ Content-Type: "application/json"
+      - processPayment:
+          call: processPayment # A custom function, assumed to be defined under use.functions
+          with:
+            order: ${ .input.order }
+            paymentMethod: ${ .input.paymentMethod }
+ catch:
+ as: error
+ do:
+ - logError:
+ call: http
+ with:
+ endpoint: "https://api.example.com/log-error"
+ method: "POST"
+ body:
+                error: ${ $error } # The caught error is exposed via the $error variable
+ context: "Order processing"
+ headers:
+ Content-Type: "application/json"
+ - notifyCustomer:
+ call: http
+ with:
+ endpoint: "https://api.example.com/send-email"
+ method: "POST"
+ body:
+ to: ${ .input.order.customerEmail }
+ subject: "Order Processing Failed"
+ body: "${ 'We apologize, but we could not process your order. Error: ' + .error.message }"
+ headers:
+ Content-Type: "application/json"
+```
+
+### Retry Logic for Transient Errors
+
+```yaml
+document:
+ dsl: '1.0.0'
+ namespace: examples
+ name: retry-logic
+ version: '1.0.0'
+do:
+  - retryTask:
+      try:
+        - makeHttpCall:
+            call: http
+            with:
+              method: "POST"
+              endpoint: "https://external-api.example.com/process"
+              headers:
+                Content-Type: "application/json"
+                Authorization: ${ "Bearer " + $secrets.apiKey }
+              body: ${ .requestData }
+      catch:
+        as: error
+        retry:
+          when: ${ $error.status == 503 or $error.status == 429 } # Retry only transient errors
+          delay: PT2S
+          backoff:
+            exponential: { } # Exponential backoff (2s, 4s, 8s...)
+          limit:
+            attempt:
+              count: 3
+        do:
+          - refreshCredentials:
+              run:
+                script:
+                  language: javascript
+                  code: |
+                    // Hypothetical credential-refresh logic; prints the new
+                    // credentials as JSON so they become the task's raw output
+                    console.log(JSON.stringify({ apiKey: "new-api-key" }));
+                  arguments:
+                    currentKey: ${ .input.apiKey }
+                return: stdout
+          - retryHttpCall:
+              call: http
+              with:
+                method: "POST"
+                endpoint: "https://external-api.example.com/process"
+                headers:
+                  Content-Type: "application/json"
+                  Authorization: ${ "Bearer " + .apiKey } # Assumes the script's JSON stdout is parsed into this task's input
+                body: ${ .input.requestData }
+
+## Raise Task Examples
+
+### Input Validation
+
+```yaml
+document:
+ dsl: '1.0.0'
+ namespace: examples
+ name: input-validation
+ version: '1.0.0'
+do:
+  - validateInput:
+      do:
+        - checkRequiredFields:
+            if: ${ (.input.email == null) or (.input.username == null) or (.input.password == null) }
+            raise:
+              error:
+                type: "https://serverlessworkflow.io/errors/validation/invalid-argument"
+                status: 400
+                title: "Missing required fields"
+                detail: "${ \"The following fields are required but missing: \" + ([ (if .input.email == null then \"email\" else empty end), (if .input.username == null then \"username\" else empty end), (if .input.password == null then \"password\" else empty end) ] | join(\", \")) }"
+
+        - validateEmail:
+            if: ${ .input.email | test("^[a-zA-Z0-9._%+-]+@[a-zA-Z0-9.-]+\\.[a-zA-Z]{2,}$") | not }
+            raise:
+              error:
+                type: "https://serverlessworkflow.io/errors/validation/invalid-argument"
+                status: 400
+                title: "Invalid email format"
+                detail: "${ \"Email address '\" + .input.email + \"' does not match the required format: username@domain.tld\" }"
+
+        - validatePassword:
+            if: ${ (.input.password | length) < 8 }
+            raise:
+              error:
+                type: "https://serverlessworkflow.io/errors/validation/invalid-argument"
+                status: 400
+                title: "Password too short"
+                detail: "${ \"Password must be at least 8 characters long. Current length: \" + (.input.password | length | tostring) }"
+
+```
+
+### Business Rule Validation
+
+```yaml
+document:
+ dsl: '1.0.0'
+ namespace: examples
+ name: business-rule-validation
+ version: '1.0.0'
+do:
+ - checkInventory:
+ call: http
+ with:
+ method: "GET"
+ endpoint: "https://api.example.com/inventory"
+ query:
+ productId: ${ .input.productId }
+ headers:
+ Content-Type: "application/json"
+
+ - validateOrder:
+ do:
+ - checkProductAvailability:
+ if: ${ .inventoryData.quantityAvailable < .input.quantity }
+ raise:
+ error:
+ type: "https://serverlessworkflow.io/errors/validation/failed-precondition"
+ status: 412
+ title: "Insufficient inventory"
+ detail: "${ \"Product '\" + .input.productId + \"' has insufficient inventory. Requested: \" + .input.quantity + \", Available: \" + .inventoryData.quantityAvailable + \". Next restock date: \" + .inventoryData.nextDeliveryDate }"
+
+ - checkOrderLimit:
+          if: ${ .input.quantity > 10 and .input.customerType != "wholesale" }
+ raise:
+ error:
+ type: "https://serverlessworkflow.io/errors/authorization/permission-denied"
+ status: 403
+ title: "Retail customers are limited to 10 units per order"
+ detail: "${ \"Customer type '\" + .input.customerType + \"' is limited to \" + 10 + \" units per order. Requested quantity: \" + .input.quantity + \". Resolution: Apply for a wholesale account or reduce order quantity.\" }"
+```
+
+## Combining Try and Raise
+
+A powerful pattern is combining Try and Raise tasks for comprehensive error handling:
+
+```yaml
+document:
+ dsl: '1.0.0'
+ namespace: examples
+ name: comprehensive-error-handling
+ version: '1.0.0'
+use:
+  functions:
+    logger: {} # ... logger implementation omitted
+    supportSystem: {} # ... support system implementation omitted
+do:
+ - processOrder:
+ try:
+ - validateOrder:
+ do:
+ - checkOrderData:
+                if: ${ (.input.order.items == null) or ((.input.order.items | length) == 0) }
+ raise:
+ error:
+ type: "https://serverlessworkflow.io/errors/validation/invalid-argument"
+ status: 400
+ title: "Order must contain at least one item"
+ detail: "The order must contain at least one item to be processed"
+
+ - verifyPaymentInfo:
+                if: ${ (.input.paymentMethod == null) or (.input.paymentMethod.type == null) }
+ raise:
+ error:
+ type: "https://serverlessworkflow.io/errors/validation/invalid-argument"
+ status: 400
+ title: "Payment validation failed"
+ detail: "Payment information is required"
+
+ - processPayment:
+ call: http
+ with:
+ method: "POST"
+ endpoint:
+ uri: "https://api.example.com/payments/process"
+ authentication:
+ basic:
+ username: "${ .user }"
+ password: "${ $secrets.pass }"
+ headers:
+ Content-Type: "application/json"
+ body: ${ .input }
+
+ - verifyPaymentSuccess:
+          if: ${ .paymentResult.success | not }
+ raise:
+ error:
+ type: "https://api.example.com/payments/process/errors/ABORTED""
+ status: 503
+ detail: ${ "Payment failed: " + .paymentResult.reason }
+
+ catch:
+ as: orderError
+ do:
+        - handleOrderError:
+            switch:
+              - validationError:
+                  when: ${ $orderError.status == 400 }
+                  then: handleValidationError
+              - paymentError:
+                  when: ${ $orderError.status == 503 }
+                  then: handlePaymentError
+              - default:
+                  then: handleGenericError
+
+ - handleValidationError:
+ set:
+ result:
+ success: false
+ type: "VALIDATION_ERROR"
+                message: ${ $orderError.title }
+                details: ${ $orderError.detail // "" }
+ then: exit
+
+        - handlePaymentError:
+            do:
+              - logPaymentIssue:
+                  call: logger
+                  with:
+                    level: "WARNING"
+                    message: ${ "Payment processing failed for order: " + .input.order.id }
+                    data: ${ $orderError }
+
+ - suggestAlternativePayment:
+ set:
+ result:
+ success: false
+ type: "PAYMENT_ERROR"
+                  message: ${ $orderError.detail }
+ suggestedActions: [
+ "Try a different payment method",
+ "Verify your payment details",
+ "Contact your payment provider"
+ ]
+ then: exit
+
+ - handleGenericError:
+ do:
+ - logError:
+                  call: logger
+                  with:
+                    level: "ERROR"
+                    message: ${ "Unexpected error processing order: " + .input.order.id }
+                    error: ${ $orderError }
+
+ - createSupportTicket:
+ call: supportSystem
+ with:
+                    issueType: "ORDER_PROCESSING_FAILURE"
+                    orderId: ${ .input.order.id }
+                    customerEmail: ${ .input.order.customerEmail }
+                    errorDetails: ${ $orderError }
+
+ - setErrorResponse:
+ set:
+ result:
+ success: false
+ type: "SYSTEM_ERROR"
+ message: "We encountered an unexpected issue processing your order"
+                  supportTicket: ${ .ticketId } # Assumes the support system call returns a ticketId
+ supportEmail: "support@example.com"
+ then: exit
+```
+
+Error handling tasks are essential for building robust, production-ready workflows. The Try and Raise tasks work
+together to provide comprehensive error management capabilities, allowing you to create workflows that gracefully handle
+exceptions, implement recovery strategies, and maintain system reliability even when unexpected conditions occur.
\ No newline at end of file
diff --git a/src/content/docs/docs/error-handling/raise.mdx b/src/content/docs/docs/error-handling/raise.mdx
new file mode 100644
index 0000000..e58fd4a
--- /dev/null
+++ b/src/content/docs/docs/error-handling/raise.mdx
@@ -0,0 +1,148 @@
+---
+title: Raise
+sidebar:
+ order: 20
+---
+
+import CommonTaskDataFlow from '../../shared/common-task-data-flow.mdx';
+import CommonTaskFlowControl from '../../shared/common-task-flow-control.mdx';
+
+## Purpose
+
+The `Raise` task is used to explicitly signal an error condition within the workflow.
+It constructs and throws a specific [Workflow Error](/docs/core-concepts/error-handling/) object,
+which then interrupts the normal flow and triggers the error handling mechanism
+(typically caught by an enclosing [`Try`](/docs/error-handling/try/) task).
+
+It's primarily used for:
+
+* Signaling business-level errors or exceptional conditions discovered during workflow logic.
+* Manually triggering fault handling paths.
+* Converting non-standard error conditions into standardized workflow errors.
+* Terminating a specific execution path due to an unrecoverable state.
+
+## Basic Usage
+
+Here's an example of raising a custom error based on input data:
+
+```yaml
+document:
+ dsl: '1.0.0'
+ namespace: examples
+ name: raise-basic
+ version: '1.0.0'
+use:
+ errors:
+ # Define a reusable error type
+ invalidInputError:
+ type: https://mycompany.com/errors/invalid-input
+ title: "Invalid Input Received"
+ status: 400 # Bad Request
+do:
+ - validateInput:
+ if: "${ .value < 0 }" # Check if input value is negative
+ raise:
+ error: invalidInputError # Reference the defined error
+ - processValue: # Only executed if value >= 0
+ # ...
+```
+
+In this example, if the `validateInput` task receives an input object where `.value` is less than 0, it raises the
+predefined `invalidInputError`. This raised error
+would then typically be caught by a surrounding `Try` task.
+
+You can also define the error inline:
+
+```yaml
+do:
+ - checkInventory:
+ # ... logic to check stock ...
+ if: "${ .stockCount < .requestedAmount }"
+ raise:
+ # Define the error directly within the raise task
+ error:
+ type: https://mycompany.com/errors/out-of-stock
+ title: "Insufficient Stock"
+ status: 409 # Conflict
+ detail: "Requested ${ .requestedAmount }, but only ${ .stockCount } available."
+```
+
+## Additional Examples
+
+### Example: Raising Error with Dynamic Detail (Inline)
+
+```yaml
+do:
+ - processItem:
+ # ... some logic ...
+ if: "${ .itemStatus == \"FAILED\" }"
+ raise:
+ error:
+ type: "https://myapp.com/errors/processing-failure"
+ title: "Item Processing Failed"
+ status: 500
+ # Construct detail dynamically using workflow context/input
+ detail: "Failed to process item ID ${ .itemId }. Error code: ${ $context.errorCodeFromPreviousStep }"
+ instance: "${ $task.reference }" # Include task path for context
+```
+
+This example shows how to define an error completely inline, using runtime expressions within the `detail` and `instance` fields to provide context-specific information when the error is raised.
+
+### Example: Raising Error to be Caught by Try
+
+```yaml
+do:
+ - outerTask:
+ try:
+ - riskyOperation:
+ if: "${ .needsSpecialHandling == false }"
+ # This error will be caught by the 'catch' block below
+ raise:
+ error: specialHandlingRequiredError # Defined in workflow.use.errors
+ - normalProcessing: # Skipped if error was raised
+ # ...
+ catch:
+ errors:
+ with: { type: "https://myapp.com/errors/special-handling" } # Matches the type from specialHandlingRequiredError
+ do:
+ - handleSpecialCase:
+ # ... logic for the special case ...
+ then: continueAfterTry
+ - continueAfterTry:
+ # ...
+```
+
+Here, the `riskyOperation` might raise a specific error. The surrounding `Try` task is configured to `catch` errors of that specific type (`specialHandlingRequiredError` presumably has the type `https://myapp.com/errors/special-handling`). Instead of faulting the workflow, control transfers to the `catch.do` block (`handleSpecialCase`).
+
+## Configuration Options
+
+### `raise` (Object, Required)
+
+This mandatory object defines the error to be raised.
+
+* **`error`** (String | Error, Required): Specifies the error to raise. This can be:
+    * A **String**: The name of an [Error definition](/docs/core-concepts/error-handling/)
+      pre-defined in the `workflow.use.errors` section.
+    * An **Inline Error Object**: A complete [Error object](/docs/core-concepts/error-handling/)
+      defined directly within the `raise` task, specifying `type`, `status`, `title`, `detail`, etc.
+
+## Data Flow
+
+
+
+**Note on `Raise` Data Flow**:
+* Standard `input.from` and `input.schema` are processed before the task attempts to raise the error.
+* The resulting `transformedInput` is available if needed by expressions within the `raise.error` definition (though the error definition itself is often static).
+* Crucially, if the `Raise` task executes (i.e., its `if` condition is met or absent), it **never produces an output** and **does not process `output.as` or `export.as` logic**. Its sole purpose upon execution is to interrupt the flow by raising the specified error.
+
+## Flow Control
+
+
+
+**Note on `Raise` Flow Control**:
+* The standard `if` condition is evaluated first. If `false`, the `Raise` task is skipped entirely, and its `then` directive is followed as usual.
+* However, if the `if` condition is `true` (or absent), the `Raise` task *will* execute.
+* Upon execution, it **immediately raises the specified error** and transfers control to the [Error Handling](/docs/core-concepts/error-handling/) mechanism (searching for a suitable `Try` block).
+* Consequently, the `Raise` task's `then` directive is **completely ignored** when the task executes and raises its error.
+
+
diff --git a/src/content/docs/docs/error-handling/try.mdx b/src/content/docs/docs/error-handling/try.mdx
new file mode 100644
index 0000000..188fa8a
--- /dev/null
+++ b/src/content/docs/docs/error-handling/try.mdx
@@ -0,0 +1,318 @@
+---
+title: Try
+sidebar:
+ order: 10
+---
+
+import CommonTaskDataFlow from '../../shared/common-task-data-flow.mdx';
+import CommonTaskFlowControl from '../../shared/common-task-flow-control.mdx';
+
+## Purpose
+
+The `Try` task provides robust error handling and optional retry mechanisms within a workflow. It allows you to attempt
+the execution of a block of tasks and define how to react if an error occurs during that execution.
+
+It's primarily used for:
+
+* Gracefully catching specific errors raised by tasks within the `try` block.
+* Executing alternative compensation or cleanup tasks when an error is caught (using the `catch.do` block).
+* Implementing automatic retries for transient errors (like network timeouts or temporary service unavailability) with
+ configurable delays, backoff strategies, and limits.
+* Preventing specific errors from halting the entire workflow.
+
+## Basic Usage
+
+Here's a simple example of trying to call an external service and catching a potential communication error:
+
+```yaml
+document:
+ dsl: '1.0.0'
+ namespace: examples
+ name: try-catch-basic
+ version: '1.0.0'
+do:
+ - attemptServiceCall:
+ try:
+ # Tasks to attempt inside the 'try' block (must be a list)
+ - callMyApi:
+ call: http # See [HTTP Call Task](/docs/call-tasks/http/) for details
+ with:
+ method: get
+            endpoint: https://api.unreliable.com/data
+ # This call might fail with a communication error
+ catch:
+ # Define how to handle errors caught from the 'try' block
+ errors:
+ # Catch specific error types (optional)
+ with:
+ type: https://serverlessworkflow.io/spec/1.0.0/errors/communication
+ as: "apiError" # Store the caught error details in $apiError variable (default is $error)
+ do:
+ # Tasks to execute ONLY if an error matching the filter is caught
+ - logFailure:
+ call: log
+ with:
+ level: "warn"
+ message: "API call failed. Error: ${ $apiError }"
+ - setDefaultValue:
+ set: # Provide a default output if the try block failed
+ data: null
+ status: "failed"
+ - continueProcessing: # Executes after attemptServiceCall (either success or caught error)
+ # Input is the output of 'callMyApi' if successful,
+ # or the output of 'setDefaultValue' if an error was caught.
+ # ...
+```
+
+In this example, if `callMyApi` fails with a standard `communication` error, the `catch` block is activated. The error
+details are stored in the `$apiError` variable (accessible within the `catch.do` block), the failure is logged, and a
+default output is set. Execution then proceeds normally to `continueProcessing`. If `callMyApi` succeeds, the `catch`
+block is skipped entirely.
+
+Here's an example demonstrating automatic retries:
+
+```yaml
+do:
+ - attemptWithRetry:
+ try:
+ - fetchCrucialData:
+ call: http
+ with: # ... service details ...
+ # Might temporarily fail with 503
+ catch:
+ errors:
+ with:
+ status: 503 # Only catch 'Service Unavailable' errors
+ retry:
+ delay: PT2S # Initial delay 2 seconds
+ backoff:
+ exponential: { } # Exponential backoff (2s, 4s, 8s...)
+ limit:
+ attempt:
+ count: 4 # Max 1 initial attempt + 3 retries = 4 total
+ # No 'catch.do' means if all retries fail, the error is re-thrown
+ - processData: # Only reached if fetchCrucialData succeeds eventually
+ # ...
+```
+
+This example attempts `fetchCrucialData`. If it fails with a 503 status, it waits 2 seconds and retries. If it fails
+again, it waits 4 seconds, then 8 seconds. If it still fails after the 4th total attempt, the 503 error is *not*
+caught (because there's no `catch.do`), and the workflow likely faults unless a parent `Try` catches it.
+
+## Configuration Options
+
+### `try` (List\<Task\>, Required)
+
+This mandatory property contains a list defining the sequence of tasks to be attempted.
+
+If any task within this block raises an error, the runtime checks the corresponding `catch` block to see if the error
+should be handled.
+
+### `catch` (Object, Required)
+
+This mandatory object defines how errors originating from the `try` block are handled.
+
+
+For an error raised in the `try` block to be considered "caught" by this `catch` block, it must sequentially pass **all** applicable filters in this order:
+
+1. **Match `errors.with`**: If `errors.with` is defined, the error's properties must exactly match all specified
+ criteria.
+2. **Pass `when`**: If `when` is defined, its runtime expression (evaluated against the error, accessible via the
+ variable named in `as`) must return `true`.
+3. **Pass `exceptWhen`**: If `exceptWhen` is defined, its runtime expression (evaluated against the error) must return
+ `false`.
+4. `retry` is configured and attempts are not exhausted (the `try` task is then retried) OR `catch.do` is defined (the
+ `catch.do` is then processed).
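+
+Taken together, a `catch` block exercising all of these filters might look like the following sketch (the error type,
+status checks, and fallback task are illustrative):
+
+```yaml
+catch:
+  errors:
+    with:
+      type: https://serverlessworkflow.io/spec/1.0.0/errors/communication
+  as: err
+  when: "${ $err.status >= 500 }"       # Only server-side failures
+  exceptWhen: "${ $err.status == 501 }" # But never 'Not Implemented'
+  retry:
+    delay: { seconds: 2 }
+    limit:
+      attempt:
+        count: 3
+  do:
+    - fallback:
+        set:
+          degraded: true
+```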
+
+
+
+The `catch` block can contain the following properties:
+
+* **`errors`** (Object, Optional): Filters which errors are potentially caught.
+ * **`with`**: (Object, Optional) Defines specific properties that an error raised within the `try` block must have
+ to be considered for catching by this `catch` block. This allows for fine-grained filtering based on the error's
+ characteristics.
+        * You can specify one or more standard [Error object](/docs/core-concepts/error-handling/)
+          fields: `type`, `status`, `instance`, `title`, `detail`.
+ * **Matching Logic**: An incoming error matches the `with` filter **only if *all* fields specified within
+ the `with` object exactly match the corresponding fields in the raised error object.** It acts as a logical
+ AND condition.
+ * If the `with` object is omitted, this specific property-based filtering step is skipped, and all errors are
+ considered potentially catchable (subject to further filtering by `when`/`exceptWhen`).
+ * **Example**:
+ ```yaml
+ catch:
+ errors:
+ with:
+ # Only catch errors of this specific type AND status
+ type: "https://serverlessworkflow.io/spec/1.0.0/errors/communication"
+ status: 503
+ # instance: /do/0/callApi # Optionally match instance too
+ # ... other catch properties (as, when, retry, do) ...
+ ```
+* **`as`** (String, Optional): Specifies the variable name used to store the caught error object within the scope of the
+ `catch` block (`when`, `exceptWhen`, `retry`, `do`). Defaults to `error` (accessible as `$error` in expressions).
+* **`when`** (String, Optional): A [Runtime Expression](/docs/core-concepts/runtime-expressions/) evaluated *if* an error matches the
+ `errors.with` filter. The expression has access to the caught error via the variable named by `as`. The error is only
+ caught if this expression evaluates to `true`.
+ * **Example**: Catch communication errors only if they occurred for a specific instance:
+ ```yaml
+ catch:
+ errors:
+ with: { type: ".../communication" }
+ as: "commErr" # Optional, using 'commErr' instead of default 'error'
+ when: "${ $commErr.instance == '/do/0/unreliableApiCall' }"
+ # ... retry or do ...
+ ```
+* **`exceptWhen`** (String, Optional): A [Runtime Expression](/docs/core-concepts/runtime-expressions/) evaluated *if* an error
+ matches the `errors.with` filter *and* the `when` condition (if present) was true. The expression has access to the
+ caught error. The error is *not* caught if this expression evaluates to `true`.
+ * **Example**: Catch all validation errors *except* those related to a specific field:
+ ```yaml
+ catch:
+ errors:
+ with: { type: ".../validation" }
+        # Uses default 'error' variable -> $error
+        exceptWhen: "${ $error.detail | contains(\"userEmail\") }" # Don't catch if the detail mentions userEmail
+        do:
+          # Handle other validation errors
+ ```
+* **`retry`** (String | Object, Optional): Defines the retry strategy if an error is caught (passes filters). Can be a
+  string referencing a named `RetryPolicy` defined in `workflow.use.retries`, or an inline `RetryPolicy` object.
+    * **`when`** / **`exceptWhen`** (String, Optional): Runtime expressions evaluated *before* calculating the retry
+      delay to conditionally decide *if* a retry should occur for the caught error.
+    * **`delay`** (String | Object, Required in Policy): Base delay before the first retry (an ISO 8601 duration or an
+      object like `{ seconds: 5 }`).
+    * **`backoff`** (Object, Optional): Strategy for increasing the delay between retries.
+        * `constant: {}`: No increase (default if `backoff` is omitted).
+        * `linear: {}`: Delay increases linearly (`delay * (1 + attemptIndex)`).
+        * `exponential: {}`: Delay increases exponentially with each attempt (e.g., 1s, 2s, 4s...).
+    * **`limit`** (Object, Optional): Defines limits for retrying.
+        * `attempt`: (Object, Optional) Limits related to individual attempts.
+            * `count` (Integer, Optional): The maximum number of *total* attempts allowed (the initial attempt plus
+              all retries). If this limit is reached, retrying stops.
+            * `duration` (String | Object - Duration, Optional): The maximum allowed duration for any *single* attempt
+              (initial or retry), measured from the start of that specific attempt's execution until its completion
+              (success or error). If any individual attempt exceeds this duration, it's considered a failure
+              (potentially triggering a `Timeout` error or the next retry if attempts remain), even if the overall
+              retry duration limit (`limit.duration`) hasn't been reached. This duration does *not* include the
+              `delay` time preceding the attempt.
+        * `duration` (String | Object - Duration, Optional): The maximum *total* duration allowed for the entire retry
+          process, measured from the start of the *initial* attempt and encompassing all subsequent attempt execution
+          times *and* the delay periods between them. If this overall duration is exceeded, retrying stops
+          immediately, even if the attempt count (`limit.attempt.count`) hasn't been reached.
+    * **`jitter`** (Object, Optional): Introduces randomness to the retry delay to help prevent simultaneous retries
+      from multiple workflow instances causing a "thundering herd" problem on downstream services. Contains the
+      following properties:
+        * **`from`** (`duration`, Required): The minimum duration value for the random jitter range.
+        * **`to`** (`duration`, Required): The maximum duration value for the random jitter range.
+        * **How it works**: After the base delay (and any backoff multiplication) is calculated, a random duration
+          chosen uniformly from the range [`from`, `to`] (inclusive) is added to it. The actual delay before the next
+          retry will be `calculated_delay + random(jitter.from, jitter.to)`.
+        * **Example**: If the calculated delay (after backoff) is 10 seconds and `jitter` is
+          `{ from: { seconds: 1 }, to: { seconds: 3 } }`, the actual delay before the next retry will be a random
+          value between 11.0 and 13.0 seconds.
+
+ #### Examples
+
+ **1. Simple Retry Count:** Retry up to 3 times with a fixed 5-second delay.
+
+ ```yaml
+ retry:
+ delay: { seconds: 5 }
+ limit:
+ attempt:
+ count: 4 # 1 initial + 3 retries
+ ```
+
+    **2. Linear Backoff with Total Duration Limit:** Retry with delay increasing linearly (2s, 4s, 6s...), stopping
+    after a total of 1 minute.
+
+ ```yaml
+ retry:
+ delay: { seconds: 2 }
+ backoff:
+ linear: { }
+ limit:
+ duration: { minutes: 1 }
+ ```
+
+    **3. Exponential Backoff with Jitter:** Retry with exponentially increasing delay (1s, 2s, 4s...) plus random
+    jitter between 0.5 and 1.5 seconds.
+
+ ```yaml
+ retry:
+ delay: { seconds: 1 }
+ backoff:
+ exponential: { }
+ jitter:
+ from: { milliseconds: 500 }
+ to: { seconds: 1, milliseconds: 500 }
+ limit:
+ attempt:
+ count: 5
+ ```
+
+ **4. Conditional Retry:** Only retry if the error details mention a timeout, using the default error variable
+ `$error`.
+
+ ```yaml
+ retry:
+ when: "${ $error.details | contains(\"timeout\") }" # Only retry on timeouts
+ delay: { seconds: 10 }
+ limit:
+ attempt:
+ count: 3
+ ```
+* **`do`** (List\<Task\>, Optional): A block of tasks to execute sequentially *only if* an error is successfully
+ caught (passes `errors.with`, `when`, and `exceptWhen` filters).
+ * **Output**: If the `catch.do` block executes, its final transformed output becomes the `rawOutput` of the entire
+ `Try` task.
+ * **Omission**: If `catch.do` is omitted and retries are exhausted, the error propagates upwards.
+
+
+ If a `retry` policy is also defined for the caught error, the `catch.do` block will **only** execute if **all retry
+ attempts are exhausted without success**.
+
+ * If any retry attempt *succeeds*, the `catch.do` block is skipped entirely, the `Try` task completes successfully
+ using the output of the successful retry attempt, and execution proceeds via the `Try` task's `then` directive.
+ * If all retries *fail* (due to limits or persistent errors), *then* the `catch.do` block (if present) is executed.
+
+
+
+## Data Flow
+
+
+**Note on `Try` Data Flow**:
+* The `rawOutput` used by `output.as` and `export.as` is determined as follows:
+ * If the `try` block completes successfully, `rawOutput` is the `transformedOutput` of the last task within `try`.
+ * If an error is caught and handled by executing the `catch.do` block, `rawOutput` is the `transformedOutput` of the last task within `catch.do`.
+ * If an error is caught, retries are exhausted (or not configured), and there is **no** `catch.do` block, the error propagates upwards. The `Try` task does not produce an output in this case, as it doesn't complete successfully.
+
+## Flow Control
+
+
+**Note on `Try` Flow Control**:
+* The `Try` task's `then` directive is followed **if and only if** the task completes successfully. Successful completion occurs under these conditions:
+ 1. The `try` block finishes without raising any error that is caught by the `catch` filters.
+ 2. An error *is* caught, a `retry` policy is triggered, and **any retry attempt succeeds**.
+ 3. An error *is* caught, all `retry` attempts (if configured) are exhausted without success, **and** the `catch.do` block (if present) executes successfully.
+* In all other scenarios, the `Try` task does not complete successfully, and its `then` directive is **not** followed. This includes:
+ * An error raised in `try` that is not caught by the `catch` filters.
+ * An error that is caught, exhausts all retries (or has no retry policy), and has **no** `catch.do` block to handle it (the error propagates).
+ * The `catch.do` block itself raises an error.
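+
+For example, in the following sketch the `then` directive is followed both when `risky` succeeds and when the error is
+caught and `recover` completes (the endpoint is illustrative):
+
+```yaml
+do:
+  - attempt:
+      try:
+        - risky:
+            call: http
+            with:
+              method: get
+              endpoint: https://api.example.com/flaky
+      catch:
+        do:
+          - recover:
+              set:
+                ok: false
+      then: afterAttempt
+  - afterAttempt:
+      set:
+        done: true
+```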
+
+
diff --git a/src/content/docs/docs/event-tasks/emit.mdx b/src/content/docs/docs/event-tasks/emit.mdx
new file mode 100644
index 0000000..1738464
--- /dev/null
+++ b/src/content/docs/docs/event-tasks/emit.mdx
@@ -0,0 +1,78 @@
+---
+title: Emit
+sidebar:
+ order: 20
+---
+{/* Examples are validated */}
+
+## Purpose
+
+The `Emit` task allows workflows to publish [CloudEvents](https://cloudevents.io/) to event brokers or messaging
+systems.
+
+This facilitates communication and coordination between different components and services. With the `Emit` task,
+workflows can seamlessly integrate with event-driven architectures, enabling real-time processing, event-driven
+decision-making, and reactive behavior based on external systems consuming these events.
+
+## Usage Example
+
+```yaml
+document:
+  dsl: '1.0.0'
+ namespace: test
+ name: emit-example
+ version: '0.1.0'
+use:
+ secrets:
+ - orderServiceApiKey
+ authentications:
+ orderServiceAuth:
+ bearer:
+ token: ${ $secrets.orderServiceApiKey }
+do:
+  - placeOrder:
+      try:
+        - callOrderService:
+            call: http
+            with:
+              method: POST
+              endpoint:
+                uri: "http://orders-service/api/v1/orders"
+                authentication:
+                  use: orderServiceAuth
+              headers:
+                Content-Type: "application/json"
+              body:
+                customerId: ${ .customerId }
+                items: ${ .items }
+            output:
+              as: "${ { orderResult: . } }" # Wrap the response so the next task can reference .orderResult
+        - emitOrderPlacedEvent:
+            emit:
+              event:
+                with:
+                  source: "https://petstore.com/orders"
+                  type: "com.petstore.order.placed.v1"
+                  subject: "${ .orderResult.orderId }"
+                  data:
+                    client:
+                      firstName: "${ $context.customer.first }"
+                      lastName: "${ $context.customer.last }"
+                    orderId: "${ .orderResult.orderId }"
+                    items: "${ .orderResult.items }"
+      catch:
+        as: error
+        do:
+          - handleOrderError:
+              emit:
+                event:
+                  with:
+                    source: "https://petstore.com/orders"
+                    type: "com.petstore.order.error.v1"
+                    subject: "order-error"
+                    data:
+                      error: "${ $error.detail }"
+                      customerId: "${ .customerId }"
\ No newline at end of file
diff --git a/src/content/docs/docs/event-tasks/listen.mdx b/src/content/docs/docs/event-tasks/listen.mdx
new file mode 100644
index 0000000..b9ffb8f
--- /dev/null
+++ b/src/content/docs/docs/event-tasks/listen.mdx
@@ -0,0 +1,160 @@
+---
+title: Listen
+sidebar:
+ order: 10
+---
+
+import CommonTaskDataFlow from '../../shared/common-task-data-flow.mdx';
+import CommonTaskFlowControl from '../../shared/common-task-flow-control.mdx';
+
+{/* Examples are validated */}
+
+## Purpose
+
+The `Listen` task pauses the workflow execution until one or more specific external events are received, based on
+defined conditions. It allows workflows to react to asynchronous occurrences from external systems or other parts of the
+application.
+
+This is crucial for building event-driven workflows where progression depends on external signals.
+
+## Usage Example
+
+```yaml
+document:
+  dsl: '1.0.0'
+ namespace: examples
+ name: listen-for-vitals
+ version: '1.0.0'
+do:
+ - startMonitoring:
+ # ... task that initiates monitoring ...
+ - waitForVitalSignAlert:
+ listen:
+ to:
+ any: # Complete when any of the following conditions are met
+ - with: # Condition 1: High Temperature
+ source: "http://vitals-service"
+ type: "com.fake-hospital.vitals.measurements.temperature"
+ data: '${ .temperature > 38 }'
+ - with: # Condition 2: Abnormal BPM
+ source: "http://vitals-service"
+ type: "com.fake-hospital.vitals.measurements.bpm"
+ data: '${ .bpm < 60 or .bpm > 100 }'
+ read: data # Read only the event data (default)
+ timeout:
+ after: PT1H
+ then: processAlert
+ - processAlert:
+ # ... task that handles the alert based on the received event data ...
+ # The task output here is an array containing the event(s) that met the condition
+```
+
+In this example, the workflow pauses at `waitForVitalSignAlert` until an event matching either the high temperature
+condition or the abnormal BPM condition arrives, or until the 1-hour timeout is reached. The output will be an array
+containing the data of the event(s) that triggered completion.
+
+## Configuration Options
+
+### `listen` (Listen, Required)
+
+This object defines the core parameters for the listening behavior.
+
+* **`to`** (`eventConsumptionStrategy`, Required): Configures how event conditions are defined and correlated. Common
+ strategies include:
+ * `any`: Defines a list of conditions (using `with`). The task completes as soon as *any single event matching any*
+ listed condition is received.
+        * **Use Case**: Waiting for one of several possible outcomes or signals (e.g., 'approved' OR 'rejected',
+          'payment succeeded' OR 'payment failed').
+ * **Example**:
+ ```yaml
+ listen:
+ to:
+ any:
+ - with: { type: com.example.approval } # Match event type
+ - with: { source: /orders/urgent } # Match event source
+ ```
+ * `all`: Defines a list of conditions (using `with`). The task completes only when *at least one event matching
+ each* listed condition has been received.
+ * **Use Case**: Waiting for multiple distinct signals or acknowledgments before proceeding (e.g., confirmation
+ from inventory AND confirmation from shipping).
+ * **Example**:
+ ```yaml
+ listen:
+ to:
+ all:
+ - with: { type: inventory.checked }
+ - with: { type: shipping.ready }
+ ```
+ * `one`: Defines a *single* condition (using `with`). The task completes only when an event matching this specific
+ condition is received.
+ * **Use Case**: Waiting for a specific, singular event (e.g., 'order shipped' notification, 'user verified'
+ signal).
+ * **Example**:
+ ```yaml
+ listen:
+ to:
+ one:
+ with: { type: user.verified, subject: "user-123" }
+ ```
+ * Each condition is typically defined using a `with` object specifying:
+ * CloudEvent attributes to match (e.g., `type`, `source`).
+ * An optional `data` property containing a [Runtime Expression](/docs/core-concepts/runtime-expressions/) evaluated against
+ the *event data* for further filtering.
+ * The strategy may also include an optional `until` clause, defining a separate event condition.
+ * **Use Case**: Providing an alternative, event-based exit condition for the listening state, often used for
+ cancellation or timeouts triggered by specific events (e.g., 'order cancelled', 'timeout notification').
+ If an event matching the `until` condition is received, the `Listen` task terminates *immediately*, regardless
+ of whether the main `any`/`all`/`one` conditions were satisfied.
+ * Events matching the `until` condition are **not** included in the task's output array.
+ * **Example**:
+ ```yaml
+ listen:
+ to:
+ any: # Wait for approval OR rejection
+ - with: { type: com.example.approval }
+ - with: { type: com.example.rejection }
+ until: # But stop immediately if a cancellation event arrives
+ with: { type: com.example.cancellation }
+ ```
+ * *(Note: The exact structure for defining `any`, `all`, `one`, and `until` depends on the specific definition of
+ the `eventConsumptionStrategy` type in the full DSL reference.)*
+
+* **`read`** (String - `data` | `envelope` | `raw`, Optional, Default: `data`): Specifies what part of the consumed
+ event(s) should be included in the task's output array:
+ * `data`: Include only the event's data payload.
+ * `envelope`: Include the full event envelope (context attributes + data).
+ * `raw`: Include the event's raw data as received.
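+
+For instance, to keep the full CloudEvent envelope instead of just the payload (a sketch):
+
+```yaml
+listen:
+  to:
+    one:
+      with: { type: com.example.order.shipped }
+  read: envelope # Output array items include context attributes + data
+```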
+
+* **`foreach`** (`subscriptionIterator`, Optional): If
+ specified, configures how to process each consumed event individually using a sub-flow (often defined in
+ `foreach.output.as` or a `do` block if the iterator type allows). Consumed events are processed sequentially (FIFO).
+
+* **`timeout`** (Object, Optional): Specifies the maximum time to wait for the required events based on the
+  `listen.to` strategy, via its `after` property (an ISO 8601 duration, as in the usage example above). If the timeout
+  is reached before the conditions are met, the task **must** fault with a `Timeout` error
+  (`https://serverlessworkflow.io/spec/1.0.0/errors/timeout`).
+
+### Data Flow
+
+
+
+**Note**:
+
+* The `transformed input` to the `Listen` task is available for use within the `data` expressions defined under
+ `listen.to` (e.g., referencing `$context`).
+* The `raw output` of the `Listen` task is **always** a sequentially ordered array containing the content specified by
+ `listen.read` (e.g., event data, envelope, or raw) for **all** the event(s) consumed to satisfy the `listen.to`
+ condition.
+* If `foreach` is used, the transformation configured within the iterator (e.g., `foreach.output.as`) is applied to each
+ item *before* it's added to the final output array.
+* Standard `output.as` and `export.as` process this resulting `rawOutput` array.
+
+> **Warning**: Events consumed solely to satisfy an `until` clause are **not** included in the task's output array.
+
+### Flow Control
+
+
+
+**Note**:
+* The `if` condition is evaluated *before* the task starts listening. If false, the task is skipped, and its `then` directive is followed immediately.
+* The `then` directive is followed only *after* the required event(s) are successfully received (and processed, if `foreach` is used) before the `timeout`. If the task times out, it faults, and `then` is *not* followed.
\ No newline at end of file
diff --git a/src/content/docs/docs/event-tasks/overview.mdx b/src/content/docs/docs/event-tasks/overview.mdx
new file mode 100644
index 0000000..454ca4a
--- /dev/null
+++ b/src/content/docs/docs/event-tasks/overview.mdx
@@ -0,0 +1,278 @@
+---
+title: Event Tasks Overview
+sidebar:
+ order: 0
+ label: Overview
+---
+
+{/* Examples are validated */}
+
+
+Event tasks enable workflows to interact with event-driven systems, allowing workflows to listen for external events and emit events to other systems, facilitating asynchronous and reactive patterns in your applications.
+
+## Common Use Cases
+
+Leveraging events in your workflows unlocks several powerful patterns:
+
+* **Waiting for External Triggers:** A workflow can start or resume based on an external event, such as a new file
+ arriving in storage, a message landing on a queue, or a webhook notification. (Often implemented using `Listen` or
+ platform-specific triggers).
+* **Reacting to System Changes:** Workflows can listen for events indicating changes in other systems (e.g., inventory
+ updates, user profile changes) and trigger appropriate actions.
+* **Asynchronous Task Completion:** A workflow can initiate a long-running operation via a call task and then use
+ `Listen` to wait for a completion event from that operation, rather than blocking synchronously.
+* **Inter-Workflow Communication:** One workflow can `Emit` an event that triggers or provides data to another workflow
+ instance via `Listen`.
+* **Saga Pattern / Compensating Transactions:** Events can signal the success or failure of steps in a distributed
+ transaction, allowing other services or workflows to react and perform compensating actions if necessary.
+* **Decoupled Integration:** Services can communicate via events without needing direct knowledge of each other,
+ promoting loose coupling and independent evolution.
+
+
+## Available Event Tasks
+
+| Task | Purpose |
+|------|---------|
+| [Listen](/docs/event-tasks/listen/) | Wait for events from external sources and optionally filter them |
+| [Emit](/docs/event-tasks/emit/) | Send events to external systems or trigger other workflows |
+
+## When to Use Event Tasks
+
+- Use **Listen** when you need to:
+ - Start or resume workflows based on external events
+ - Wait for specific conditions signaled by events
+ - Implement event-driven patterns like event sourcing
+ - Build reactive workflows that respond to system changes
+ - Coordinate long-running processes across distributed systems
+
+- Use **Emit** when you need to:
+ - Notify other systems about workflow state changes
+ - Trigger parallel workflows or microservices
+ - Implement pub/sub patterns for loose coupling
+ - Broadcast completion or progress updates
+ - Signal transitions in business processes
+
+## Examples
+
+### Order Processing with Event Handling
+
+```yaml
+document:
+ dsl: '1.0.0'
+ namespace: examples
+ name: event-driven-order-processing
+ version: '1.0.0'
+use:
+  functions:
+    reserveInventory:
+      call: http # A hypothetical inventory service; mirrors the function definitions in the next example
+      with:
+        method: POST
+        endpoint: "http://inventory-service/api/v1/reservations"
+        body:
+          orderId: ${ .orderId }
+          items: ${ .items }
+do:
+ - waitForNewOrder:
+ listen:
+ to:
+ one:
+ with:
+ source: "http://order-service"
+ type: "com.example.order.created"
+ read: "data"
+
+ - reserveInventoryItems:
+ call: "reserveInventory"
+ with:
+        orderId: ${ .waitForNewOrder.data.orderId }
+        items: ${ .waitForNewOrder.data.items }
+
+ - waitForPayment:
+ listen:
+ to:
+ one:
+ with:
+ source: "http://payment-service"
+ type: "com.example.payment.processed"
+            correlate:
+              orderId:
+                from: ${ .data.orderId }
+                expect: ${ .waitForNewOrder.data.orderId }
+ read: "data"
+ timeout:
+ after: PT1H
+
+ - checkPaymentStatus:
+ switch:
+        - paymentSucceeded:
+            when: ${ .waitForPayment.data.status == "SUCCESS" }
+            then: "confirmOrder"
+ - default:
+ then: "handleFailedPayment"
+
+ - confirmOrder:
+ emit:
+ event:
+ with:
+ source: "http://order-system"
+ type: "com.example.order.confirmed"
+ data:
+ orderId: ${ .waitForNewOrder.data.orderId }
+ status: "CONFIRMED"
+ paymentId: ${ .waitForPayment.data.paymentId }
+              confirmedAt: ${ now | todate } # jq: current UTC time as ISO 8601
+ then: exit
+
+ - handleFailedPayment:
+ emit:
+ event:
+ with:
+ source: "http://order-system"
+ type: "com.example.order.failed"
+ data:
+ orderId: ${ .waitForNewOrder.data.orderId }
+ status: "PAYMENT_FAILED"
+ reason: "Payment processing failed"
+              failedAt: ${ now | todate }
+ then: exit
+```
+
+
+### Event-Based Microservice Coordination
+
+```yaml
+document:
+ dsl: '1.0.0'
+ namespace: examples
+ name: microservice-coordination
+ version: '1.0.0'
+use:
+ functions:
+ createUserProfile:
+ call: http
+ with:
+ method: POST
+ endpoint: "http://profile-service/api/v1/profiles"
+ headers:
+ Content-Type: "application/json"
+ body:
+ userId: ${ .userId }
+ email: ${ .email }
+ username: ${ .username }
+ sendWelcomeEmail:
+ call: http
+ with:
+ method: POST
+ endpoint: "http://email-service/api/v1/emails"
+ headers:
+ Content-Type: "application/json"
+ body:
+ to: ${ .to }
+ template: "welcome-email"
+ data:
+ name: ${ .name }
+ language: ${ .language }
+do:
+ - waitForNewUser:
+ listen:
+ to:
+ one:
+ with:
+ source: "http://user-service"
+ type: "com.example.user.registered"
+ read: data
+ timeout:
+ after: PT30M
+
+ - processNewUser:
+ fork:
+ branches:
+ - createProfile:
+ do:
+ - initiateProfileCreation:
+ call: createUserProfile
+ with:
+                    userId: ${ .waitForNewUser.data.userId }
+                    email: ${ .waitForNewUser.data.email }
+                    username: ${ .waitForNewUser.data.username }
+
+ - notifyProfileCreated:
+ emit:
+ event:
+ with:
+ source: "http://profile-service"
+ type: "com.example.profile.created"
+ data:
+ userId: ${ .waitForNewUser.data.userId }
+ profileId: ${ .initiateProfileCreation.profileId }
+                          createdAt: ${ now | todate }
+
+ - sendWelcome:
+ do:
+ - initiateWelcomeEmail:
+ call: sendWelcomeEmail
+ with:
+                    to: ${ .waitForNewUser.data.email }
+                    name: ${ .waitForNewUser.data.firstName }
+                    language: ${ .waitForNewUser.data.preferences.language // "en" } # jq alternative operator
+
+ - notifyEmailSent:
+ emit:
+ event:
+ with:
+ source: "http://email-service"
+ type: "com.example.email.sent"
+ data:
+ userId: ${ .waitForNewUser.data.userId }
+ emailId: ${ .initiateWelcomeEmail.emailId }
+                          sentAt: ${ now | todate }
+
+ - completeOnboarding:
+ listen:
+ to:
+ all:
+            - with:
+                source: "http://profile-service"
+                type: "com.example.profile.created"
+              correlate:
+                userId:
+                  from: ${ .data.userId }
+                  expect: ${ .waitForNewUser.data.userId }
+            - with:
+                source: "http://email-service"
+                type: "com.example.email.sent"
+              correlate:
+                userId:
+                  from: ${ .data.userId }
+                  expect: ${ .waitForNewUser.data.userId }
+ read: data
+ timeout:
+ after: PT30M
+
+ - notifyOnboardingCompletion:
+ emit:
+ event:
+ with:
+ source: "http://onboarding-service"
+ type: "com.example.onboarding.completed"
+ data:
+ userId: ${ .waitForNewUser.data.userId }
+ status: "COMPLETE"
+ profileId: ${ .completeOnboarding.data[0].profileId }
+ emailSent: true
+              completedAt: ${ now | todate }
+```
+
+Event tasks form the foundation of event-driven architecture within workflows,
+enabling responsive, loosely-coupled systems that can react to changes across distributed environments.
+By using Listen and Emit tasks appropriately, workflows can participate in complex event ecosystems,
+coordinating business processes that span multiple services while maintaining resilience and scalability.
+
+
diff --git a/src/content/docs/docs/index.mdx b/src/content/docs/docs/index.mdx
new file mode 100644
index 0000000..69bff72
--- /dev/null
+++ b/src/content/docs/docs/index.mdx
@@ -0,0 +1,85 @@
+---
+title: Introduction to the Serverless Workflow DSL
+---
+
+The Serverless Workflow DSL (Domain Specific Language) provides a powerful and flexible way to define workflows in a
+declarative manner. This documentation will guide you through the key concepts and components of the DSL.
+
+## Documentation Structure
+
+This documentation is organized into several main sections:
+
+1. **Workflow Definition**: Learn how to define and structure your workflows
+ - [Workflow Definition](/docs/core-concepts/workflow-definition/)
+ - [Workflow Examples](/docs/workflow-definition-examples/)
+
+2. **Tasks**: Explore the different types of tasks available
+ - [Flow Control Tasks](/docs/control-flow/overview/)
+ - [Data Tasks](/docs/core-concepts/data-flow-management/)
+ - [Set Task](/docs/set/) - For data manipulation and transformation
+ - [Data Flow Management](/docs/core-concepts/data-flow-management/) - For input/output handling and validation
+ - [Error Handling Tasks](/docs/core-concepts/error-handling/)
+ - [Time Tasks](/docs/wait/)
+ - [Event Tasks](/docs/event-tasks/overview/)
+ - [Call Tasks](/docs/call-tasks/overview/)
+ - [Run Tasks](/docs/run-tasks/overview/)
+
+3. **Events**: Understand how to work with events
+ - [Event Tasks](/docs/event-tasks/overview/)
+ - [Event Correlation](/docs/resources-configuration/event-correlation/)
+
+4. **Functions**: Learn about function definitions and usage
+ - [Function Definition](/docs/call-tasks/function/)
+ - [Function Types](/docs/call-tasks/overview/)
+
+5. **Advanced Features**: Explore advanced workflow capabilities
+ - [Retries](/docs/error-handling/try/)
+ - [Secrets](/docs/resources-configuration/secrets/)
+
+## What is the Serverless Workflow DSL?
+
+The Serverless Workflow DSL allows you to define workflows using a structured, human-readable format (typically YAML).
+Instead of writing imperative code to manage state, handle retries, coordinate tasks, and react to events, you *declare*
+the desired flow and behaviour. A compliant workflow engine then interprets this definition and executes the process
+reliably.
+
+## Key Principles and Benefits
+
+* **Declarative:** Define *what* your workflow should do, not the low-level *how*. This leads to clearer, more
+ maintainable definitions.
+* **Vendor-Neutral:** Based on a CNCF (Cloud Native Computing Foundation) specification, promoting portability across
+ different runtime platforms and cloud providers.
+* **Human-Readable:** Primarily uses YAML, making workflow definitions easy to read, understand, and version control.
+* **Comprehensive:** Supports a wide range of constructs, including sequential and parallel execution, conditional
+ logic, error handling, timeouts, retries, event handling, and integration with various external systems (functions,
+ APIs, message brokers).
+* **Extensible:** Designed to integrate custom logic and external services seamlessly through well-defined task types.
+
+## Who is this Documentation For?
+
+This documentation is intended for:
+
+* **Developers:** Building applications that require orchestration or automation of processes.
+* **Architects:** Designing distributed systems and event-driven architectures.
+* **Operations Engineers:** Automating infrastructure tasks and operational procedures.
+* Anyone interested in learning and applying the Serverless Workflow standard.
+
+## Getting Started
+
+To get started with the Serverless Workflow DSL:
+
+1. Read this **Introduction** page thoroughly.
+2. Explore the **Core Concepts** section, paying particular attention to [Data Flow](/docs/core-concepts/data-flow-management/)
+ and [Runtime Expressions](/docs/core-concepts/runtime-expressions/), as they are used extensively.
+3. Review the **Tasks** and **Events** sections to understand the available building blocks.
+4. Dive into the specific **Task** or **Concept** pages relevant to your immediate needs. The examples within each page
+ are designed to illustrate practical usage.
+
+## Additional Resources
+
+- [Serverless Workflow Specification](https://serverlessworkflow.io/)
+- [Serverless Workflow Examples](https://github.com/serverlessworkflow/specification/tree/main/examples)
+{/*- [Serverless Workflow Tools](https://serverlessworkflow.io/tools/)*/}
+
+We hope this documentation helps you effectively leverage the power and flexibility of the Serverless Workflow DSL.
+Happy orchestrating!
\ No newline at end of file
diff --git a/src/content/docs/docs/reference/dsl.mdx b/src/content/docs/docs/reference/dsl.mdx
new file mode 100644
index 0000000..19e0825
--- /dev/null
+++ b/src/content/docs/docs/reference/dsl.mdx
@@ -0,0 +1,5 @@
+---
+title: DSL Reference
+---
+
+The DSL reference can be found on our [GitHub](https://github.com/serverlessworkflow/specification/blob/main/dsl-reference.md).
\ No newline at end of file
diff --git a/src/content/docs/docs/resources-configuration/authentication.mdx b/src/content/docs/docs/resources-configuration/authentication.mdx
new file mode 100644
index 0000000..bbb7c71
--- /dev/null
+++ b/src/content/docs/docs/resources-configuration/authentication.mdx
@@ -0,0 +1,172 @@
+---
+title: Authentication
+sidebar:
+ order: 20
+---
+{/* Examples are validated */}
+
+## Purpose
+
+Authentication definitions in Serverless Workflow specify the methods and credentials required for a workflow to
+securely access protected external resources or services. This allows workflows to interact with APIs, databases,
+message brokers, or other systems that require identity verification.
+
+Authentication can be defined in two main places:
+
+1. **Globally**: Under the `use.authentications` section of the workflow definition. This creates named, reusable
+ authentication policies.
+2. **Inline**: Directly within the configuration of a task or resource that requires authentication (e.g., inside an
+ `endpoint` object for an HTTP call or a Resource Catalog).
+
+## Defining Reusable Authentications (`use.authentications`)
+
+Defining authentications globally under `use.authentications` promotes reusability and separates sensitive details (
+potentially using secrets) from the task logic.
+
+```yaml
+document:
+ dsl: '1.0.0'
+ namespace: auth-examples
+ name: reusable-auth-workflow
+ version: '1.0.0'
+use:
+ secrets:
+ - myApiKeySecret
+ - myOauthClientSecret
+ # Define named authentication policies
+ authentications:
+ # Basic Auth using secrets
+ mySystemBasicAuth:
+ basic:
+ username: service-account
+ password: ${ $secrets.myApiKeySecret }
+ # OAuth2 Client Credentials
+ myApiClientOAuth:
+ oauth2:
+ authority: https://auth.example.com
+ grant: client_credentials
+ client:
+ id: workflow-client-id
+ secret: ${ $secrets.myOauthClientSecret }
+ scopes: [ read:data, write:data ]
+ # Simple Bearer Token (potentially from context)
+ userBearerTokenAuth:
+ bearer:
+ token: ${ $secrets.userProvidedToken }
+ # Digest Auth (Requires username/password, often from secrets)
+ myDigestServiceAuth:
+ digest:
+ username: digest-user
+ password: ${ $secrets.digestPassword }
+ # OIDC (Conceptual - structure similar to OAuth2, often relies on discovery)
+ myOidcProviderAuth:
+ oidc:
+ authority: https://oidc.example.com/.well-known/openid-configuration
+ grant: client_credentials # Or other appropriate grant
+ client:
+ id: workflow-oidc-client
+ secret: ${ $secrets.oidcClientSecret }
+ scopes: [ openid, profile, email ]
+ # Certificate Auth (Conceptual - structure highly runtime dependent)
+ # Runtimes might expect paths to cert/key files, or references to secrets
+ # containing the certificate/key data.
+ myCertAuth:
+ certificate:
+ # Example: Reference secrets containing PEM-encoded cert/key
+ clientCertSecret: clientCertificatePem
+ clientKeySecret: clientPrivateKeyPem
+ # Example: Or maybe file paths accessible to the runtime
+ # clientCertPath: /etc/certs/client.crt
+ # clientKeyPath: /etc/certs/client.key
+ # caCertPath: /etc/certs/ca.crt # Optional CA cert path
+do:
+ - getData:
+ call: http
+ with:
+ method: get
+ endpoint:
+ uri: https://api.example.com/data
+ # Reference the named authentication policy
+ authentication: myApiClientOAuth
+ then: processData
+ - updateSystem:
+ call: http
+ with:
+ method: post
+        endpoint:
+          uri: https://internalsystem.example.com/update
+          # Reference another named policy
+          authentication: mySystemBasicAuth
+ body:
+ # ...
+```
+
+**Key Properties (`use.authentications`):**
+
+* **`use.authentications`** (Map\<String, Authentication\>): A map where each key is a user-defined name for the
+ authentication policy (e.g., `mySystemBasicAuth`) and the value is an `Authentication` object definition.
+
+## Authentication Object Structure
+
+The `Authentication` object defines the specific mechanism. Only *one* of the scheme-specific properties (`basic`,
+`bearer`, `oauth2`, etc.) should be defined per object.
+
+**Key Properties (`Authentication` Object):**
+
+* **`use`** (String, Optional): *Used only in inline definitions*. References the name of a globally defined
+  authentication policy from `use.authentications` (see the example at the end of this section). Cannot be used
+  within `use.authentications` itself.
+* **`basic`** (Object, Optional): Defines Basic Authentication. Contains:
+ * `username` (String, Required): The username.
+ * `password` (String, Required): The password (often uses a `${ $secrets... }` expression).
+* **`bearer`** (Object, Optional): Defines Bearer Token Authentication. Contains:
+ * `token` (String, Required): The bearer token value (often uses a `${ $secrets... }` or `${ $context... }`
+ expression).
+* **`oauth2`** (Object, Optional): Defines OAuth2 Authentication. Contains properties like:
+ * `authority` (String, Required): The URI of the OAuth2 authorization server.
+ * `grant` (String, Required): The grant type (e.g., `client_credentials`, `password`, `refresh_token`).
+ * `client.id` (String): The client ID.
+ * `client.secret` (String): The client secret.
+ * `client.authentication` (String): Client auth method (e.g., `client_secret_basic`, `client_secret_post`).
+    * `scopes` (List\<String\>): List of requested scopes.
+    * `audiences` (List\<String\>): List of intended audiences.
+ * (Other properties for different grants like `username`, `password`, `subject`, `actor`, specific endpoints - refer
+ to detailed OAuth2 specifications if needed).
+* **`oidc`** (Object, Optional): Defines OpenID Connect Authentication. Often relies on OIDC discovery via the
+ `authority` URL. Contains properties like:
+ * `authority` (String, Required): The URI of the OIDC provider (often pointing to the discovery document).
+ * `grant` (String, Required): The grant type (e.g., `client_credentials`, `authorization_code`, `password`).
+ * `client.id` (String): The client ID.
+ * `client.secret` (String): The client secret.
+ * `client.authentication` (String): Client auth method.
+    * `scopes` (List\<String\>): List of requested OIDC scopes (e.g., `openid`, `profile`, `email`).
+    * `audiences` (List\<String\>): List of intended audiences.
+ * (Other grant-specific properties like `username`, `password`, `subject`, `actor`).
+* **`certificate`** (Object, Optional): Defines Certificate-based Authentication (details depend on runtime support).
+* **`digest`** (Object, Optional): Defines Digest Authentication. Contains:
+ * `username` (String, Required): The username.
+ * `password` (String, Required): The password.
+
+*(Note: Support for `certificate` and the exact properties for `oauth2`/`oidc` may vary slightly depending on the
+specific grant type and runtime implementation. Refer to the runtime documentation for precise details if deviating from
+common flows like client credentials).*
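+
+As a minimal sketch of referencing a reusable policy by name via `use` (reusing `mySystemBasicAuth` from the example
+above; the endpoint URL is illustrative):
+
+```yaml
+do:
+  - fetchReport:
+      call: http
+      with:
+        method: get
+        endpoint:
+          uri: https://reports.example.com/daily
+          authentication:
+            use: mySystemBasicAuth
+```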
+
+## Inline Authentication
+
+Authentication can also be defined directly where it's needed, typically within an `endpoint` object.
+
+```yaml
+do:
+ - callServiceWithInlineAuth:
+ call: http
+ with:
+ method: get
+ endpoint:
+ uri: https://onetime-service.example.com/resource
+ # Define authentication directly here
+ authentication:
+ basic:
+ username: temp_user
+ password: ${ $secrets.tempPassword }
+```
+
+While convenient for simple cases, defining authentication inline is less reusable and can clutter task definitions.
+Using the global `use.authentications` section is generally preferred.
\ No newline at end of file
diff --git a/src/content/docs/docs/resources-configuration/event-correlation.mdx b/src/content/docs/docs/resources-configuration/event-correlation.mdx
new file mode 100644
index 0000000..a89a8f3
--- /dev/null
+++ b/src/content/docs/docs/resources-configuration/event-correlation.mdx
@@ -0,0 +1,164 @@
+---
+title: Event Correlation
+sidebar:
+ order: 40
+---
+
+{/* Examples are validated */}
+
+## Purpose
+
+Event Correlation is a mechanism within the Serverless Workflow DSL used to filter and match incoming events based on **dynamic data values**, rather than just static attributes like event `type` or `source`. It allows a workflow instance to wait for or react to events that are specifically related to its current context or the data it is processing.
+
+This is essential for scenarios such as:
+
+* Matching a specific `orderId` in an incoming event with the `orderId` being processed by the current workflow instance.
+* Ensuring a payment confirmation event corresponds to the correct `transactionId` stored in the workflow context.
+* Linking related events based on a shared `correlationId` present in their data payloads or attributes.
+
+## How it Works
+
+Correlation is configured within an [`Event Filter`](#event-filter-object-structure) using the `correlate` property. This property holds a map of user-defined correlation definitions.
+
+For an incoming event to match the filter based on correlation, **all** defined correlations in the `correlate` map must succeed.
+
+### Event Filter Object Structure
+
+An `Event Filter` object is used in tasks like [Listen](/docs/event-tasks/listen/) and potentially in event-driven `schedule` definitions. It contains:
+
+* **`with`** (Object, Required): Defines matching criteria based on standard event properties (like `type`, `source`, `subject`, `data`). Values can be static strings, regular expressions, or runtime expressions.
+* **`correlate`** (Map\<String, Correlation\>, Optional): A map where each key is a user-defined name for a correlation check (e.g., `matchOrderId`), and the value is a `Correlation` object defining the matching logic.
+
+### Correlation Object Structure
+
+Each `Correlation` object within the `correlate` map defines a single data-based matching rule:
+
+* **`from`** (String, Required): A [Runtime Expression](/docs/core-concepts/runtime-expressions/) evaluated against the **incoming event** (its context attributes and `data` payload) to extract a value. For example, `${ .data.transactionId }`.
+* **`expect`** (String, Optional): Defines the value that the extracted `from` value must match.
+ * Can be a **static (constant) value** (e.g., `expect: "processed"`).
+ * Can be a [Runtime Expression](/docs/core-concepts/runtime-expressions/) evaluated against the **workflow's context** (`$context`, `$secrets`, etc.) *at the time the filter is checked* (e.g., `expect: "${ $context.currentTransactionId }"`).
+ * **If omitted:** The value extracted by the `from` expression from the *first* matching event encountered establishes the expectation for subsequent events within the same filter evaluation (this behavior is primarily relevant for specific `Listen` scenarios where multiple events might be processed sequentially against the same filter instance).
+
+**Matching Logic:** An event satisfies a single correlation definition if the value produced by the `from` expression (evaluated on the event) is equal to the value produced by the `expect` expression (evaluated on the workflow context) or the static `expect` value.
+
+## Usage Examples
+
+### Example 1: Matching Dynamic Context Data (within Listen Task)
+
+Imagine a workflow processing an order needs to wait for a specific payment confirmation event based on the order ID stored in the workflow's context.
+
+```yaml
+document:
+ dsl: '1.0.0'
+ namespace: order-processing
+ name: wait-for-payment
+ version: '1.0.0'
+do:
+ - storeOrderId:
+ set:
+ # Assume input contains orderId, store it in context
+ orderId: "${ .initialOrder.id }"
+ export:
+ as: "${ $context + { currentOrderId: .orderId } }"
+ - waitForPaymentEvent:
+ listen:
+ to:
+ one: # Wait for one specific event
+ with: # Basic filtering on event type
+ type: com.payment.processed.v1
+ # Correlation based on data
+ correlate:
+ # Name this correlation check 'matchOrderId'
+ matchOrderId:
+ # Extract 'transaction.orderRef' from the incoming event's data
+ from: "${ .data.transaction.orderRef }"
+ # Expect it to match the 'currentOrderId' stored in workflow context
+ expect: "${ $context.currentOrderId }"
+ # Optional: Timeout if the event doesn't arrive
+ timeout:
+ after:
+ minutes: 30
+ # Output contains the received event if successful
+ - processPaymentConfirmation:
+ # ... task input is the matched payment event ...
+```
+
+In this example, the `waitForPaymentEvent` task will only proceed if it receives an event with `type: com.payment.processed.v1` AND the value of `data.transaction.orderRef` inside that event matches the `currentOrderId` currently stored in the workflow's context.
+
+### Example 2: Matching a Static Value
+
+Suppose a workflow needs to wait for a system status update event indicating that a specific component (`component-abc`) has become 'Ready'.
+
+```yaml
+document:
+ dsl: '1.0.0'
+ namespace: system-monitor
+ name: wait-for-component-ready
+ version: '1.0.0'
+do:
+ - waitForComponentReady:
+ listen:
+ to:
+ one:
+ with:
+ type: com.system.status.update.v1
+ source: http://systems/monitoring/component-abc # Filter by source
+ correlate:
+ matchComponentStatus:
+ # Extract the status field from the event data
+ from: "${ .data.status }"
+ # Expect the status to be the exact string 'Ready'
+ expect: "Ready"
+ - componentIsReady:
+ # ... task to execute now that the component is ready ...
+```
+
+Here, the correlation `matchComponentStatus` checks if the `status` field within the event's data payload is exactly equal to the static string `"Ready"`.
+
+### Example 3: Requiring Multiple Correlations
+
+Consider a scenario where a workflow orchestrates a travel booking and needs to wait for a confirmation event that matches both the specific `bookingId` and the `provider` ('AcmeAir') involved.
+
+```yaml
+document:
+ dsl: '1.0.0'
+ namespace: travel-booking
+ name: wait-for-flight-confirmation
+ version: '1.0.0'
+do:
+ - storeBookingInfo:
+ set:
+ bookingId: "${ .flightRequest.id }"
+ provider: "AcmeAir"
+ export:
+ as: "${ $context + { currentBookingId: .bookingId, currentProvider: .provider } }"
+ then: waitForConfirmation
+ - waitForConfirmation:
+ listen:
+ to:
+ one:
+ with:
+ type: com.travel.confirmation.flight.v1
+ # Requires BOTH correlations to succeed
+ correlate:
+ matchBookingId:
+ from: "${ .data.confirmation.bookingRef }"
+ expect: "${ $context.currentBookingId }"
+ matchProvider:
+ from: "${ .data.confirmation.providerName }"
+ expect: "${ $context.currentProvider }" # Or could be expect: "AcmeAir"
+ timeout:
+ after:
+ hours: 1
+ - processFlightConfirmation:
+ # ... task input is the confirmation event matching both criteria ...
+```
+
+In this case, the `listen` task will only be satisfied by an event of the correct type where `data.confirmation.bookingRef` matches the `$context.currentBookingId` **AND** `data.confirmation.providerName` matches `$context.currentProvider`.
+
+## Relationship to Lifecycle Events
+
+When a workflow successfully completes an event correlation (e.g., a `Listen` task receives all required correlated events), the runtime typically emits a `Workflow Correlation Completed Event`. This standard lifecycle event includes a `correlationKeys` map containing the names (e.g., `matchOrderId` from the example above) and the resolved values of the correlations that were successfully matched. This provides visibility into *which* specific data values led to the correlation match.
\ No newline at end of file
diff --git a/src/content/docs/docs/resources-configuration/overview.mdx b/src/content/docs/docs/resources-configuration/overview.mdx
new file mode 100644
index 0000000..e408913
--- /dev/null
+++ b/src/content/docs/docs/resources-configuration/overview.mdx
@@ -0,0 +1,23 @@
+---
+title: Resources & Configuration Overview
+sidebar:
+ order: 0
+ label: Overview
+---
+
+## Introduction
+
+Beyond the core workflow logic and task execution, the Serverless Workflow DSL provides mechanisms for managing reusable resources, defining specific syntaxes, and configuring cross-cutting concerns like security and timing. This section covers these essential aspects that help you build more robust, maintainable, and secure workflows.
+
+Understanding these features allows you to effectively manage external dependencies, authentication details, and global settings within your workflow definitions.
+
+## Topics Covered
+
+This section delves into the following key areas:
+
+* **[Resource Catalog](/docs/resources-configuration/resource-catalog/):** Explains how to define and reference reusable components like function definitions, authentication details, event definitions, and more. This promotes modularity and avoids repetition within your workflow files.
+* **[URI Templates](/docs/resources-configuration/uri-templates/):** Describes the syntax for URI templates (RFC 6570) used in certain configuration fields (like error types or potentially within API calls), allowing for standardized and potentially dynamic URI construction.
+* **[Timeouts](/docs/resources-configuration/timeouts/):** Covers how to configure various timeouts within your workflow, including overall workflow duration, task execution limits, and event listening periods, ensuring workflows don't run indefinitely or get stuck.
+* **[Authentication](/docs/resources-configuration/authentication/):** Details how to define and reference authentication configurations (like Basic Auth, OAuth2, Bearer Tokens) needed to securely interact with external services called by your workflow tasks.
+
+These configuration and resource management capabilities are vital for integrating workflows with real-world systems and ensuring they operate securely and efficiently.
\ No newline at end of file
diff --git a/src/content/docs/docs/resources-configuration/resource-catalog.mdx b/src/content/docs/docs/resources-configuration/resource-catalog.mdx
new file mode 100644
index 0000000..ba130ec
--- /dev/null
+++ b/src/content/docs/docs/resources-configuration/resource-catalog.mdx
@@ -0,0 +1,256 @@
+---
+title: Resource Catalogs
+sidebar:
+ order: 60
+---
+
+## Purpose
+
+A **Resource Catalog** is an external collection of reusable components, primarily [Functions](/docs/call-tasks/function/) in
+current usage, that can be referenced and imported into Serverless Workflows.
+
+The main goals of using catalogs are:
+
+* **Reusability**: Define common components (like functions) once and use them across multiple workflows.
+* **Modularity**: Keep complex or domain-specific logic separate from the main workflow definition.
+* **Versioning**: Manage different versions of reusable components independently.
+* **Consistency**: Ensure that multiple workflows use the same implementation of shared logic.
+* **Discovery**: Provide a central place to find available reusable components.
+
+## Defining and Using Catalogs
+
+Catalogs are defined in the workflow's `use.catalogs` section. Each catalog requires a unique name within the workflow
+and an `endpoint` specifying its location.
+
+### Workflow Structure
+
+```yaml
+document:
+ dsl: '1.0.0'
+ namespace: my_workflows
+ name: workflow-using-catalog
+ version: '1.0.0'
+use:
+ catalogs: # Defines named catalogs
+    {catalogName}: # User-defined name for the catalog
+ endpoint: # Definition of where the catalog is located
+ uri: # URI (e.g., URL, file path) of the catalog root
+ authentication: # Optional authentication for the endpoint
+    {anotherCatalogName}:
+ endpoint: # ... another catalog definition ...
+do:
+ - # ... tasks ...
+```
+
+**Key Properties (from [Resources Configuration Overview](/docs/resources-configuration/overview/)):**
+
+* **`use.catalogs`** (Map\<String, Catalog\>): A map where each key is a user-defined name for the catalog (e.g.,
+ `sharedUtils`, `publicApis`) and the value is a `Catalog` object definition.
+* **`Catalog` Object**: Defines a catalog resource.
+ * **`endpoint`** (Object, Required): Defines the location and access method for the catalog. Contains:
+ * `uri` (String | Object, Required): The URI (as a string or URI template object) pointing to the root of the
+ catalog.
+ * `authentication` (String | Object, Optional): Authentication details (inline definition or reference by name)
+ needed to access the catalog's `endpoint` itself.
+
+### Calling Cataloged Functions
+
+While catalogs can potentially hold various resources, the primary use case currently detailed is calling functions. To
+call a function defined within a catalog, you use a specific format in the `call` property of a task:
+
+`{functionName}:{functionVersion}@{catalogName}`
+
+* `functionName`: The name of the function within the catalog.
+* `functionVersion`: The specific semantic version of the function to use.
+* `catalogName`: The name given to the catalog in the `use.catalogs` section.
+
+**Example:**
+
+```yaml
+do:
+ - processSharedLogic:
+ # Calls version '1.2.0' of 'dataProcessor' function
+ # from the catalog named 'sharedUtils'
+ call: dataProcessor:1.2.0@sharedUtils
+ with:
+ config: "${ .processingConfig }"
+ inputData: "${ .rawInput }"
+```
+
+## Catalog Structure
+
+To ensure portability and allow runtimes to consistently locate resources, catalogs hosted externally (especially in Git
+repositories) are expected to follow a specific file structure. The Serverless Workflow specification recommends a
+structure like this (refer to
+the [official catalog structure documentation](https://github.com/serverlessworkflow/catalog?tab=readme-ov-file#structure)
+for precise details):
+
+```
+my-catalog-repo/
+├── functions/
+│ ├── functionA/
+│ │ ├── 1.0.0/
+│ │ │ └── function.yaml # Definition for v1.0.0
+│ │ └── 1.1.0/
+│ │ └── function.yaml # Definition for v1.1.0
+│ └── functionB/
+│ └── 0.5.2/
+│ └── function.yaml
+├── authentications/ # Example for other potential resource types
+│ └── mySharedAuth/
+│ └── authentication.yaml
+└── README.md
+```
+
+### Runtime Resolution (Git Repositories)
+
+When a catalog endpoint points to a Git repository (like GitHub or GitLab), runtimes are expected to resolve the *raw*
+content URLs for the definition files (e.g., `function.yaml`, `authentication.yaml`).
+
+For example, if the catalog endpoint is `https://github.com/my-org/catalog/tree/main` and you call
+`log:1.0.0@myCatalog`, the runtime should look for the definition at a path like `functions/log/1.0.0/function.yaml`
+within that repository and fetch its raw content (e.g., from
+`https://raw.githubusercontent.com/my-org/catalog/main/functions/log/1.0.0/function.yaml`).
+
+## Default Catalog
+
+Runtimes *may* provide a **Default Catalog**. This is a special, implicitly available catalog that doesn't need to be
+explicitly defined in `use.catalogs`. It allows runtime administrators or platform providers to make common functions (
+or potentially other resources) readily available to all workflows without extra configuration.
+
+To call a function from the default catalog, use the reserved name `default` as the catalog name:
+
+`{functionName}:{functionVersion}@default`
+
+**Example:**
+
+```yaml
+do:
+ - logInfo:
+ # Assumes 'logMessage:1.0' exists in the runtime's default catalog
+ call: logMessage:1.0@default
+ with:
+ level: INFO
+ text: "Task completed successfully."
+```
+
+How the runtime manages and resolves resources in the default catalog is implementation-specific (e.g., database, local
+files, pre-configured remote repository).
+
+## Additional Examples
+
+### Example: Defining Multiple Catalogs with Authentication
+
+```yaml
+document:
+ dsl: '1.0.0'
+ namespace: multi_catalog_example
+ name: process-with-shared-and-private
+ version: '1.0.0'
+use:
+ secrets: [ gitHubToken, privateCatalogKey ]
+ catalogs:
+ # Public, well-known catalog (e.g., official Serverless Workflow catalog)
+ swPublic:
+ endpoint:
+ uri: https://github.com/serverlessworkflow/catalog
+ # Maybe requires a token for higher rate limits or private access within org
+ authentication:
+        bearer:
+          token: ${ $secrets.gitHubToken }
+ # Internal, private catalog hosted on a company server
+ internalTools:
+ endpoint:
+ uri: https://git.mycompany.com/workflow-tools
+ authentication:
+ basic:
+ username: workflow-runner
+ password: ${ $secrets.privateCatalogKey }
+do:
+ - initialLog:
+ # Using a function from the public catalog
+ call: log:0.5.2@swPublic
+ with:
+ message: "Starting process"
+ - runInternalTool:
+ # Using a function from the internal catalog
+ call: data-validator:2.1.0@internalTools
+ with:
+ input: "${ .rawData }"
+```
+
+### Example: Using a Local File-Based Catalog
+
+```yaml
+# Assume catalog functions are defined in /opt/workflow-catalogs/local-utils
+document:
+ dsl: '1.0.0'
+ namespace: local_dev
+ name: test-local-catalog
+ version: '1.0.0'
+use:
+ catalogs:
+ local:
+ endpoint:
+ # Path accessible by the workflow runtime
+ uri: file:///opt/workflow-catalogs/local-utils
+do:
+ - formatData:
+ # Call function from the local file system catalog
+ call: formatter:1.0.0@local
+ with:
+ value: "${ .inputString }"
+```
+
+## Creating and Publishing Resources
+
+The process for creating and sharing resources involves defining the resource in a structured way and making it
+accessible via a catalog endpoint.
+
+While the most concrete example provided by the specification is for [Functions](/docs/call-tasks/function/), the general
+steps apply to any resource type potentially supported by a catalog:
+
+1. **Define the Resource**: Create a definition file (e.g., `function.yaml` for functions) that describes the resource
+ according to the Serverless Workflow specification. This file typically includes:
+ * Metadata (like name, version, description).
+ * Input/Output schemas (if applicable, highly recommended for clarity and validation).
+ * The core configuration or logic of the resource (e.g., the `run` definition for a function, the properties of an
+ authentication policy).
+
+2. **Structure the Catalog**: Place the definition file within a directory structure that follows the
+ recommended [Catalog Structure](#catalog-structure). This typically involves grouping resources by type (e.g.,
+ `functions/`, `authentications/`) and then by name and version.
+
+3. **Host the Catalog**: Make the catalog directory structure accessible via a URI (e.g., host it in a Git repository,
+ on a web server, or in a shared file system accessible by the runtime).
+
+4. **Publish (Optional)**:
+ * For broader visibility and reuse, consider contributing your resource definitions to a public catalog, like the
+ official [Serverless Workflow Catalog](https://github.com/serverlessworkflow/catalog).
+ * For internal use, ensure the catalog is hosted where your organization's workflows can access it (e.g., an
+ internal Git server, artifact repository).
+
+**Example: Function Definition File (`function.yaml`)**
+
+```yaml
+# function.yaml - Example structure
+input:
+ schema:
+ document:
+ type: object
+ # ... input properties ...
+output:
+ schema:
+ document:
+ type: object
+ # ... output properties ...
+run: # Defines how the function executes
+ script: # Example: running a script
+ language: javascript
+ code: |
+ // ... function logic ...
+ # ... arguments ...
+```
+
+For a detailed walkthrough of creating a *custom function* and its `function.yaml` file, refer to the DSL concepts
+documentation regarding function creation. The principles outlined there for definition and structure can be adapted for
+other potential resource types based on future catalog specifications.
\ No newline at end of file
diff --git a/src/content/docs/docs/resources-configuration/secrets.mdx b/src/content/docs/docs/resources-configuration/secrets.mdx
new file mode 100644
index 0000000..ce81bf5
--- /dev/null
+++ b/src/content/docs/docs/resources-configuration/secrets.mdx
@@ -0,0 +1,129 @@
+---
+title: Secrets Management
+sidebar:
+ order: 10
+ label: Secrets
+---
+
+## Introduction
+
+Secure handling of sensitive information is critical in modern workflow systems. The Serverless Workflow DSL provides a structured approach to secrets management that allows workflows to securely access protected resources without exposing sensitive information in workflow definitions.
+
+Secrets can include credentials, API keys, tokens, certificates, and other sensitive information required to access external services or systems.
+
+## Declaring Secrets
+
+Secrets are declared in the `use.secrets` section of a workflow definition. This establishes which secrets a workflow requires access to, without containing the actual secret values.
+
+```yaml
+document:
+ dsl: '1.0.0'
+ namespace: examples
+ name: secrets-example
+ version: '1.0.0'
+use:
+ secrets:
+ - api-key-secret
+ - database-credentials
+ - oauth-client-secret
+```
+
+## Secret Resolution
+
+The actual secret values are never stored within workflow definitions. Instead, the workflow runtime is responsible for retrieving secret values at execution time from a secure storage service or vault. Common secret storage solutions include:
+
+- Kubernetes Secrets
+- HashiCorp Vault
+- Cloud provider secret managers (AWS Secrets Manager, Azure Key Vault, Google Secret Manager)
+- Environment variables (for development environments only)
+
+## Accessing Secrets in Workflows
+
+Secrets can be referenced within workflow tasks using the `$secrets` argument available to [Runtime Expressions](/docs/core-concepts/runtime-expressions/). This approach ensures that sensitive values never appear in the workflow definition itself. Note that secret names containing hyphens, like those declared above, must be accessed with bracket notation (e.g., `$secrets["api-key-secret"]`).
+
+### Example: Using a Secret in an HTTP Call
+
+```yaml
+do:
+ - callProtectedAPI:
+ call: http
+ with:
+ method: post
+ endpoint: https://api.example.com/data
+ headers:
+          Authorization: '${ "Bearer " + $secrets["api-key-secret"] }'
+```
+
+### Example: Authentication with Secrets
+
+```yaml
+use:
+ authentications:
+ customerApiAuth:
+ oauth2:
+ authority: https://auth.example.com
+ grant: client_credentials
+ client:
+ id: "client-id"
+ secret: "$SECRETS.oauth-client-secret"
+ secrets:
+ - oauth-client-secret
+```
+
+## Security Considerations and Best Practices
+
+1. **Principle of Least Privilege**: Only request access to secrets that are absolutely necessary for the workflow's operation.
+
+2. **Secret Rotation**: Implement regular rotation of secrets and ensure your workflow can handle updated secret values without disruption.
+
+3. **Auditing**: Ensure your secrets management system maintains logs of which workflows accessed which secrets, for compliance and security monitoring.
+
+4. **No Secret Values in Logs**: Configure your workflow runtime to prevent secret values from appearing in logs or error messages.
+
+5. **Access Control**: Implement proper access controls to restrict which users or systems can deploy workflows that access sensitive secrets.
+
+6. **Different Secrets Per Environment**: Use different secret values across development, testing, and production environments.
+
+## Error Handling
+
+If a workflow attempts to access a secret that doesn't exist or to which it doesn't have access, the workflow runtime will raise an error with the following `type`:
+
+```
+https://serverlessworkflow.io/spec/1.0.0/errors/authorization
+```
+
+This error should be handled appropriately using error handling mechanisms like the `Try` task:
+
+```yaml
+do:
+ - accessProtectedResource:
+      try:
+        - callWithSecret:
+            call: http
+            with:
+              method: get
+              endpoint: https://api.example.com/protected
+              headers:
+                Authorization: '${ "Bearer " + $secrets["api-key-secret"] }'
+      catch:
+        errors:
+          with:
+            type: https://serverlessworkflow.io/spec/1.0.0/errors/authorization
+        as: secretError
+        do:
+          - logSecretError:
+              set:
+                logMessage: "Missing or unauthorized secret access"
+```
+
+## Integration with External Secret Management Systems
+
+Workflow runtimes typically integrate with organization-wide secret management systems. The specific configuration of these integrations is typically handled outside the workflow definition itself, often in the runtime configuration.
+
+The workflow simply references secrets by name, and the runtime handles resolving those references securely at execution time.
+
+## Conclusion
+
+Effective secrets management is a critical aspect of secure workflow design. The Serverless Workflow DSL provides a clean separation between workflow logic and sensitive information through its secrets mechanism. By following best practices for secrets management, you can build workflows that securely access protected resources while maintaining the confidentiality of sensitive information.
\ No newline at end of file
diff --git a/src/content/docs/docs/resources-configuration/timeouts.mdx b/src/content/docs/docs/resources-configuration/timeouts.mdx
new file mode 100644
index 0000000..179cb11
--- /dev/null
+++ b/src/content/docs/docs/resources-configuration/timeouts.mdx
@@ -0,0 +1,76 @@
+---
+title: Timeouts
+sidebar:
+ order: 30
+---
+
+## Purpose
+
+Timeouts provide a crucial mechanism for limiting the maximum execution duration of an entire workflow or individual
+tasks within it.
+
+Setting timeouts helps prevent workflows or tasks from running indefinitely due to unexpected delays, external service
+issues, or infinite loops, ensuring resource efficiency and predictable behavior.
+
+## How Timeouts Work
+
+When a configured timeout duration is reached for a workflow or a task:
+
+1. The execution of the workflow or task is **abruptly interrupted**.
+2. A standard **Timeout Error** is raised.
+3. If this error is not caught by an enclosing `Try` task, the workflow or task transitions immediately to the `faulted`
+ status phase, and the overall workflow execution halts.
+
+## Configuration
+
+Timeouts can be configured at two levels:
+
+1. **Workflow Level**: Set using the top-level `timeout` property in the workflow document.
+2. **Task Level**: Set using the `timeout` property within a specific task definition.
+
+In both cases, the `timeout` property takes an object with the following structure:
+
+* **`after`** (`duration`, Required): Specifies the duration after which the timeout occurs. The duration itself is
+ defined using an object detailing days, hours, minutes, seconds, or milliseconds.
+
+```yaml
+# Workflow-level timeout
+document:
+ dsl: '1.0.0'
+ namespace: examples
+ name: workflow-with-timeout
+ version: '1.0.0'
+ timeout: # Applies to the entire workflow execution
+ after:
+ minutes: 30 # Workflow will fault if it runs longer than 30 minutes
+do:
+ - step1:
+ # ...
+
+---
+# Task-level timeout
+do:
+ - callExternalService:
+ call: http
+ with:
+        endpoint: https://api.externalservice.com/data
+ method: get
+ timeout: # Applies only to the callExternalService task
+ after:
+ seconds: 15 # Task will fault if it takes longer than 15 seconds
+ then: processResult
+ - processResult:
+ # ...
+```
+
+## Timeout Error Details
+
+When a timeout occurs, the specific error raised **must** conform to the following:
+
+* **`type`**: `https://serverlessworkflow.io/spec/1.0.0/errors/timeout`
+* **`status`**: `408` (Request Timeout) is recommended.
+* **`instance`**: Should point to the JSON Pointer of the workflow or task that timed out (e.g., `/`,
+ `/do/callExternalService`).
+
+This standard error format allows for consistent error handling using `Try` tasks specifically targeting timeout
+conditions.
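+
+For instance, a `Try` task can catch the timeout error and fall back gracefully (a sketch; the endpoint and task
+names are illustrative):
+
+```yaml
+do:
+  - fetchWithDeadline:
+      try:
+        - callSlowService:
+            call: http
+            with:
+              method: get
+              endpoint: https://api.example.com/slow
+            timeout:
+              after:
+                seconds: 10
+      catch:
+        errors:
+          with:
+            type: https://serverlessworkflow.io/spec/1.0.0/errors/timeout
+        as: timeoutError
+        do:
+          - useFallback:
+              set:
+                result: "fallback"
+```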
\ No newline at end of file
diff --git a/src/content/docs/docs/resources-configuration/uri-templates.mdx b/src/content/docs/docs/resources-configuration/uri-templates.mdx
new file mode 100644
index 0000000..657f81f
--- /dev/null
+++ b/src/content/docs/docs/resources-configuration/uri-templates.mdx
@@ -0,0 +1,68 @@
+---
+title: URI Templates
+sidebar:
+ order: 50
+---
+
+## Purpose
+
+URI Templates provide a limited mechanism for creating dynamic URIs within the Serverless Workflow DSL, based on
+variable substitution. They are primarily used in places where a URI needs to be constructed based on available data,
+such as defining the endpoint for an HTTP `call`.
+
+This feature is based on [RFC 6570](https://datatracker.ietf.org/doc/html/rfc6570) but supports only a small subset of
+its capabilities.
+
+## Supported Syntax: Simple String Expansion
+
+The only syntax supported is **Simple String Expansion**: `{variableName}`.
+
+* The workflow runtime identifies placeholders enclosed in curly braces `{}`.
+* It looks for a variable with the exact name specified inside the braces (e.g., `variableName`) within the current data
+ context.
+* It replaces the entire `{variableName}` placeholder with the **value** of that variable.
+* If no variable with that exact name is found, the placeholder is replaced with an **empty string**.
+
+```yaml
+# Example in an HTTP call
+call: http
+with:
+ uri: "https://api.example.com/users/{userId}/profile"
+ # Assumes a variable named 'userId' exists in the data context
+ # If userId is "abc", the final URI becomes:
+ # https://api.example.com/users/abc/profile
+```
+
+## Limitations Compared to Runtime Expressions (`${...}`)
+
+URI Templates are **much less powerful** than standard [Runtime Expressions](/docs/core-concepts/runtime-expressions/). Key
+limitations include:
+
+1. **No Dot Notation:** You cannot access nested properties within variables (e.g., `{user.id}` is invalid for accessing
+ `id` within a `user` object). The template processor looks for a variable literally named `user.id`. You must ensure
+ the required value exists as a top-level variable (e.g., use `{userId}` instead).
+2. **Restricted Variable Types:** The variable referenced inside `{}` **must** resolve to a `string`, `number`,
+ `boolean`, or `null`. Using variables that resolve to objects or arrays will result in an error.
+3. **No Access to Workflow Arguments:** You **cannot** use special workflow arguments like `$context`, `$task`,
+ `$input`, `$output`, or `$secrets` inside `{}` placeholders. Use standard Runtime Expressions (`${...}`) when you
+ need access to these.
+
+```yaml
+# --- INVALID USAGE ---
+# uri: "https://api.example.com/data/{$context.requestPath}" # Cannot use $context
+# uri: "https://api.example.com/items/{item.id}" # Cannot use dot notation
+
+# --- VALID USAGE (using Runtime Expression instead) ---
+# uri: "${ \"https://api.example.com/data/\" + $context.requestPath }"
+```
+
+Choose URI Templates only for very simple substitutions where the limitations are acceptable. For more complex URI
+construction or access to workflow context, use standard Runtime Expressions.
+
+## Error Handling
+
+If a variable referenced within a URI Template resolves to an unsupported type (like an object or array), the
+workflow **must** fault with an error:
+
+* **`type`**: `https://serverlessworkflow.io/spec/1.0.0/errors/expression`
+* **`status`**: `400` (Bad Request)
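+
+For example, the following sketch would fault with that error, because `user` resolves to an object rather than a
+string, number, boolean, or null (the endpoint and data shape are illustrative):
+
+```yaml
+# Current data context: { "user": { "id": "abc" } }
+call: http
+with:
+  endpoint: "https://api.example.com/users/{user}/profile" # faults: 'user' is an object
+```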
\ No newline at end of file
diff --git a/src/content/docs/docs/run-tasks/container.mdx b/src/content/docs/docs/run-tasks/container.mdx
new file mode 100644
index 0000000..59f5377
--- /dev/null
+++ b/src/content/docs/docs/run-tasks/container.mdx
@@ -0,0 +1,140 @@
+---
+title: "Run Container Task (`run: container`)"
+sidebar:
+ order: 10
+ label: Run Container
+---
+
+import CommonTaskDataFlow from '../../shared/common-task-data-flow.mdx';
+import CommonTaskFlowControl from '../../shared/common-task-flow-control.mdx';
+
+## Purpose
+
+The `run: container` task allows a workflow to execute an external process encapsulated within a containerized environment (e.g., Docker). This is useful for running complex applications, specific versions of tools, or any process that benefits from isolation and reproducible environments.
+
+## Usage Example
+
+```yaml
+document:
+ dsl: '1.0.0'
+ namespace: container_runs
+ name: run-simple-container
+ version: '1.0.0'
+do:
+ - executeInContainer:
+ run:
+ container:
+ image: busybox:latest # The container image to use
+ command: "echo 'Hello from container'" # Command to run inside
+        # `await: true` (default) waits for completion
+        # `return: stdout` (default) sets task output to the container's stdout
+ then: processContainerOutput
+ - processContainerOutput:
+ # Input will be "Hello from container\n"
+ set:
+ message: "{ trim: . }"
+
+## Additional Examples
+
+### Example: Using Environment Variables and Volumes
+
+```yaml
+do:
+ - processDataWithContainer:
+ run:
+ container:
+ image: my-data-processor:latest
+ # Pass dynamic data via environment variables
+ environment:
+            INPUT_FILE: '${ "/data/" + .inputFile }'
+ OUTPUT_DIR: "/results"
+ API_KEY: "${ $secrets.processorApiKey }"
+ # Mount host volumes into the container
+ volumes:
+ "/mnt/shared/input": "/data:ro" # Read-only input
+ "/mnt/shared/output": "/results" # Writable output
+        return: code # Just check the exit code
+ then: checkProcessingStatus
+```
+
+### Example: Getting All Process Results
+
+```yaml
+do:
+ - runDiagnostics:
+ run:
+ container:
+ image: diagnostics-tool:v2
+ command: "--check-all --verbose"
+        # Get code, stdout, and stderr
+        return: all
+ then: analyzeDiagnostics
+ - analyzeDiagnostics:
+ # Input: { "code": 0, "stdout": "...", "stderr": "..." }
+ set:
+ exitCode: "${ .code }"
+ outputLog: "${ .stdout }"
+ errorLog: "${ .stderr }"
+```
+
+### Example: Running Without Awaiting (Fire and Forget)
+
+```yaml
+do:
+ - triggerBackgroundJob:
+ run:
+ container:
+ image: background-worker:latest
+ environment:
+ JOB_ID: "${ .jobId }"
+        # Don't wait for the container to finish
+        await: false
+        # Task output is its input because await is false
+        # Workflow continues immediately
+ then: recordJobTriggered
+```
+
+## Configuration Options
+
+The configuration is provided under the `run` property, specifically within the nested `container` object.
+
+### `run` (Object, Required)
+
+* **`container`** (Object, Required): Defines the container process configuration.
+ * **`image`** (String, Required): The name and tag of the container image to run (e.g., `ubuntu:latest`, `my-custom-app:1.2`).
+ * **`name`** (String, Optional): A [Runtime Expression](/docs/core-concepts/runtime-expressions/) evaluated to assign a specific name to the running container instance. Useful for identification and management. Runtimes may have a default naming convention (e.g., `{workflow.name}-{uuid}.{workflow.namespace}-{task.name}`).
+ * **`command`** (String, Optional): The command and its arguments to execute inside the container. If not specified, the image's default command (e.g., `ENTRYPOINT` or `CMD` in a Dockerfile) is used.
+  * **`ports`** (Map\<String, String\>, Optional): Defines port mappings between the host and the container (e.g., `{"8080": "80/tcp"}`). The exact format and capabilities may depend on the container runtime.
+  * **`volumes`** (Map\<String, String\>, Optional): Defines volume mappings between the host and the container (e.g., `{"/host/data": "/container/data:ro"}`). The exact format and capabilities may depend on the container runtime.
+  * **`environment`** (Map\<String, String\>, Optional): A key/value map of environment variables to set inside the container. Values can be static strings or evaluated from [Runtime Expressions](/docs/core-concepts/runtime-expressions/).
+ * **`lifetime`** (Object, Optional): Configures the container's lifecycle management after execution. Contains:
+ * `cleanup` (String, Required, Default: `never`): The cleanup policy. Supported values:
+ * `always`: The container is deleted immediately after execution.
+ * `never`: The runtime should never delete the container.
+ * `eventually`: The container is deleted after the duration specified in `after`.
+    * `after` (String | Object, Optional): The duration to wait after execution before deleting the container. Required if `cleanup` is `eventually`. Can be an ISO 8601 duration string or a duration object (see the sketch after this list).
+* **`await`** (Boolean, Optional, Default: `true`):
+ * `true`: The workflow task waits for the container process to complete before proceeding. The task's output is determined by the `return` property.
+ * `false`: The task starts the container and proceeds immediately without waiting for completion. The task's `rawOutput` is its `transformedInput`.
+* **`return`** (String - `stdout` | `stderr` | `code` | `all` | `none`, Optional, Default: `stdout`): Specifies what the task's `rawOutput` should be when `await` is `true`.
+ * `stdout`: The standard output (stdout) stream of the container process.
+ * `stderr`: The standard error (stderr) stream of the container process.
+ * `code`: The integer exit code of the container process.
+ * `all`: An object containing the results from the process. It typically includes:
+ * `code` (Integer): The exit code of the process.
+ * `stdout` (String | Null): The content captured from the standard output stream.
+ * `stderr` (String | Null): The content captured from the standard error stream.
+ * `none`: The task produces no output (evaluates to `null`).
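+
+As a sketch of the `lifetime` options described above (the image name is illustrative), the following would delete
+the container ten minutes after it finishes:
+
+```yaml
+do:
+  - runBatchJob:
+      run:
+        container:
+          image: batch-job:latest
+          lifetime:
+            cleanup: eventually
+            after:
+              minutes: 10
+```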
+
+### Data Flow
+
+<CommonTaskDataFlow />
+
+**Note**:
+* The `transformedInput` to the task is available for use within runtime expressions in the `run.container.environment` and potentially `run.container.name` or `run.container.command`.
+* The `rawOutput` depends on the `run.await` and `run.return` settings. If `await` is `false`, output is the `transformedInput`. Otherwise, it's determined by `return`.
+* Standard `output.as` and `export.as` process this resulting `rawOutput`.
+
+### Flow Control
+
+<CommonTaskFlowControl />
+
+**Note**: If `await` is `true` and the container process exits with a non-zero status code (indicating an error), a `Runtime` error is typically raised, and the `then` directive is *not* followed (unless caught by `Try`). If `await` is `false`, process errors are generally not automatically caught by the workflow task itself.
\ No newline at end of file
diff --git a/src/content/docs/docs/run-tasks/overview.mdx b/src/content/docs/docs/run-tasks/overview.mdx
new file mode 100644
index 0000000..705d37b
--- /dev/null
+++ b/src/content/docs/docs/run-tasks/overview.mdx
@@ -0,0 +1,50 @@
+---
+title: Run Tasks Overview
+sidebar:
+ order: 0
+ label: Overview
+---
+
+## Purpose
+
+The `run` task type in the Serverless Workflow DSL provides a powerful mechanism for executing external processes from within a workflow. This allows workflows to leverage existing scripts, command-line tools, containerized applications, or even trigger other workflows.
+
+This capability is essential for:
+
+* Integrating with legacy systems or tools that don't have modern APIs.
+* Performing complex computations or data processing using specialized environments (containers).
+* Executing custom logic written in various scripting languages.
+* Orchestrating larger processes by breaking them down into reusable sub-workflows.
+
+## Types of Run Tasks
+
+The specific type of process to execute is determined by the nested object provided under the `run` property:
+
+* **`run: container`**:
+ * Executes a command within a specified container image (e.g., Docker).
+ * Ideal for running applications with specific dependencies or isolated environments.
+ * See: [Run Container Task](/docs/run-tasks/container/)
+
+* **`run: script`**:
+ * Executes inline code or an external script file using a supported scripting language interpreter (e.g., JavaScript, Python) available in the runtime.
+ * Useful for custom logic, data manipulation, and simple integrations.
+ * See: [Run Script Task](/docs/run-tasks/script/)
+
+* **`run: shell`**:
+ * Executes a command line instruction using the host system's default shell.
+ * Suitable for simple system interactions and invoking command-line utilities.
+ * See: [Run Shell Command Task](/docs/run-tasks/shell-command/)
+
+* **`run: workflow`**:
+ * Invokes and executes another Serverless Workflow definition as a sub-workflow.
+ * Enables modular design and composition of complex processes.
+ * See: [Run Workflow Task](/docs/run-tasks/workflow/)
+
+## Common Concepts
+
+While each run type executes a different kind of process, they share some common configuration options under the main `run` property:
+
+* **`await`**: Determines if the workflow task should wait for the external process to complete before proceeding.
+* **`return`**: Specifies what information from the completed process (e.g., standard output, standard error, exit code) should become the output of the workflow task (only applicable when `await` is true).
+
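+The generic shape shared by all run types looks like this (a sketch; `container` stands in for any of the four nested objects):
+
+```yaml
+do:
+  - runSomething:
+      run:
+        container: # or script / shell / workflow
+          # ...type-specific configuration...
+        await: true    # wait for the process to finish (default)
+        return: stdout # task output selection, applies when await is true (default)
+```
+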
+Please refer to the individual task pages linked above for detailed configuration options, examples, and specific behaviors related to each process type.
\ No newline at end of file
diff --git a/src/content/docs/docs/run-tasks/script.mdx b/src/content/docs/docs/run-tasks/script.mdx
new file mode 100644
index 0000000..4bbe46b
--- /dev/null
+++ b/src/content/docs/docs/run-tasks/script.mdx
@@ -0,0 +1,165 @@
+---
+title: "Run Script Task (`run: script`)"
+sidebar:
+ order: 20
+ label: Run Script
+---
+
+import CommonTaskDataFlow from '../../shared/common-task-data-flow.mdx';
+import CommonTaskFlowControl from '../../shared/common-task-flow-control.mdx';
+
+## Purpose
+
+The `run: script` task allows a workflow to execute a script written in a supported language (e.g., JavaScript, Python) directly within the runtime environment. This is useful for custom logic, data transformations, or simple integrations that don't warrant a full container or external function call.
+
+## Usage Example
+
+```yaml
+document:
+ dsl: '1.0.0'
+ namespace: script_runs
+ name: run-simple-script
+ version: '1.0.0'
+do:
+ - executeScript:
+ run:
+ script:
+ language: js # Specify the script language
+ code: |
+ // Multi-line JavaScript code
+ function process(input) {
+ let message = `Processed item: ${input.id}`;
+              return { result: message, status: 'complete' };
+            }
+            process(input); // 'input' is supplied via run.script.arguments
+ # Pass arguments to the script environment
+ arguments:
+ input: "${ . }" # Pass the entire task input
+        # `await: true` (default) waits for completion
+        # `return: stdout` (default) - the script's return value is typically delivered via stdout
+        return: stdout # Explicitly capture the script's result from stdout
+ then: processScriptOutput
+ - processScriptOutput:
+ # Input might be { result: "Processed item: 123", status: "complete" }
+ set:
+ finalMessage: "${ .result }"
+```
+
+## Additional Examples
+
+### Example: Loading Script from External Source
+
+```yaml
+do:
+  - runExternalScript:
+      run:
+        script:
+          language: python
+          # Load the code from an external script file
+          source:
+            endpoint:
+              uri: file:///etc/workflow-scripts/validators/email_validator.py
+          arguments:
+            email: "${ .userEmail }"
+        return: stdout # Expect the script to print a JSON result
+      then: processValidation
+```
+
+### Example: Using Python with Environment Variables
+
+```yaml
+do:
+ - processWithPython:
+ run:
+ script:
+ language: python
+ code: |
+ import os
+ import json
+
+ api_key = os.environ.get('API_KEY')
+ input_data = json.loads(os.environ.get('INPUT_JSON'))
+
+ # ... process data using api_key and input_data ...
+ result = { "status": "processed", "items_count": len(input_data['items']) }
+
+            print(json.dumps(result))  # Return JSON via stdout
+ # Pass data via environment variables
+ environment:
+ API_KEY: "${ $secrets.pythonServiceKey }"
+            INPUT_JSON: "${ . | tojson }" # jq's tojson serializes the task input
+        return: stdout
+ then: handlePythonResult
+```
+
+### Example: Returning Exit Code
+
+```yaml
+do:
+ - checkDataIntegrity:
+ run:
+ script:
+ language: js
+ code: |
+            // Script exits with 0 on success, 1 on failure
+            // (checkData is an illustrative helper; 'input' comes from run.script.arguments)
+            const isValid = checkData(input.data);
+            process.exit(isValid ? 0 : 1);
+ arguments: { input: "${ . }" }
+        # Capture the script's exit code
+        return: code
+ then: handleIntegrityCheck
+ - handleIntegrityCheck:
+ # Input will be 0 or 1
+ switch:
+        - valid:
+            when: "${ . == 0 }"
+            then: dataIsValid
+        - otherwise:
+            then: dataIsInvalid
+```
+
+## Configuration Options
+
+The configuration is provided under the `run` property, specifically within the nested `script` object.
+
+### `run` (Object, Required)
+
+* **`script`** (Object, Required): Defines the script process configuration.
+ * **`language`** (String, Required): The language of the script. Supported values are defined by the specification (e.g., `js`, `python`). Check the DSL reference for specific supported versions (e.g., `JavaScript ES2024`, `Python 3.13.x`). Using other versions might require the [Run Container Task](/docs/run-tasks/container/).
+ * **`code`** (String, Conditionally Required): The inline source code of the script to execute. Required if `source` is not provided.
+ * **`source`** (Object, Conditionally Required): An object pointing to an external file containing the script source code. Required if `code` is not provided. It contains:
+ * `endpoint` (Object, Required): Defines the location of the script file.
+ * `uri` (String | Object, Required): The URI (string or URI template object) pointing to the script file.
+ * `authentication` (String | Object, Optional): Authentication details (inline or reference) needed to access the script file URI.
+  * **`arguments`** (Map\<String, Any\>, Optional): A key/value map defining arguments or variables made available to the script's execution environment. Values can be static or derived using [Runtime Expressions](/docs/core-concepts/runtime-expressions/). How these are exposed depends on the language (e.g., global variables in JS, injected context in Python).
+  * **`environment`** (Map\<String, String\>, Optional): A key/value map of environment variables to set for the script execution process. Values can be static strings or evaluated from [Runtime Expressions](/docs/core-concepts/runtime-expressions/).
+* **`await`** (Boolean, Optional, Default: `true`):
+ * `true`: The workflow task waits for the script process to complete before proceeding. The task's output is determined by the `return` property.
+ * `false`: The task starts the script and proceeds immediately. The task's `rawOutput` is its `transformedInput`.
+* **`return`** (String - `stdout` | `stderr` | `code` | `all` | `none`, Optional, Default: `stdout`): Specifies what the task's `rawOutput` should be when `await` is `true`.
+ * `stdout`: The standard output (stdout) stream of the script. *Note: Many scripting runtimes map the script's return value to stdout.*
+ * `stderr`: The standard error (stderr) stream (e.g., `console.error` or unhandled exceptions).
+ * `code`: The integer exit code (usually 0 for success).
+ * `all`: An object containing the results from the script process. It typically includes:
+ * `code` (Integer): The exit code of the script.
+ * `stdout` (String | Null): The content captured from the standard output stream.
+ * `stderr` (String | Null): The content captured from the standard error stream.
+ * `none`: The task produces no output (evaluates to `null`).
+
+### Data Flow
+
+<CommonTaskDataFlow />
+
+**Note**:
+* The `transformedInput` to the task is available for use within runtime expressions in the `run.script.arguments` and `run.script.environment`.
+* The script itself typically accesses data passed via `run.script.arguments`.
+* The `rawOutput` depends on the `run.await` and `run.return` settings.
+* Standard `output.as` and `export.as` process this resulting `rawOutput`.
+
+### Flow Control
+
+<CommonTaskFlowControl />
+
+**Note**: If `await` is `true` and the script process exits with a non-zero status code or throws an unhandled exception, a `Runtime` error is typically raised, and the `then` directive is *not* followed (unless caught by `Try`).
\ No newline at end of file
diff --git a/src/content/docs/docs/run-tasks/shell-command.mdx b/src/content/docs/docs/run-tasks/shell-command.mdx
new file mode 100644
index 0000000..248c38d
--- /dev/null
+++ b/src/content/docs/docs/run-tasks/shell-command.mdx
@@ -0,0 +1,117 @@
+---
+title: "Run Shell Command Task (`run: shell`)"
+sidebar:
+ order: 30
+ label: Run Shell Command
+---
+
+import CommonTaskDataFlow from '../../shared/common-task-data-flow.mdx';
+import CommonTaskFlowControl from '../../shared/common-task-flow-control.mdx';
+
+## Purpose
+
+The `run: shell` task allows a workflow to execute a command line instruction using the host system's default shell (e.g., bash, sh, cmd, powershell). This is useful for simple system interactions, invoking command-line tools, or file system operations.
+
+## Usage Example
+
+```yaml
+document:
+ dsl: '1.0.0'
+ namespace: shell-runs
+ name: run-simple-shell
+ version: '1.0.0'
+do:
+ - executeShellCommand:
+ run:
+ shell:
+ command: 'echo "Processing file: ${fileName}" && grep -c "ERROR" "${filePath}"'
+ # Pass arguments as environment variables
+ environment:
+ fileName: "${ .inputFile.name }"
+ filePath: "${ .inputFile.path }"
+ # `await: true` (default) waits for completion
+ # `return: stdout` (default) captures the standard output
+ return: stdout
+ then: processShellOutput
+ - processShellOutput:
+ # Input might be "Processing file: data.log\n3" (if 3 errors found)
+ set:
+      # Example: extract the error count from the second line of stdout (jq)
+      errorCount: '${ split("\n")[1] | tonumber }'
+```
+
+## Additional Examples
+
+### Example: Capturing Standard Error
+
+```yaml
+do:
+ - attemptRiskyOperation:
+ run:
+ shell:
+ # Command might succeed (stdout) or fail (stderr)
+ command: "/opt/tools/do-something --input /data/input.txt || echo 'Fallback value'"
+ # Capture stderr instead of stdout
+ return: stderr
+ then: handleOperationError
+ - handleOperationError:
+ # Input is the content sent to stderr, or empty if none
+ set:
+ errorMessage: "${ . }"
+```
+
+### Example: Capturing All Outputs
+
+```yaml
+do:
+ - executeUtility:
+ run:
+ shell:
+ command: "utility-tool --process ${ .fileId } --verbose"
+ # Get exit code, stdout, and stderr together
+ return: all
+ then: processUtilityResult
+ - processUtilityResult:
+ # Input: { "code": 0, "stdout": "...", "stderr": "..." }
+ set:
+ exitCode: "${ .code }"
+ outputLog: "${ .stdout }"
+ # Use stderr only if exit code was non-zero
+ errorLog: "${ if .code != 0 then .stderr else null end }"
+```
+
+## Configuration Options
+
+The configuration is provided under the `run` property, specifically within the nested `shell` object.
+
+### `run` (Object, Required)
+
+* **`shell`** (Object, Required): Defines the shell command process configuration.
+ * **`command`** (String, Required): The shell command line to execute. This string is typically passed directly to the system's shell interpreter.
+  * **`arguments`** (Map\<String, Any\>, Optional): A key/value map defining arguments passed to the shell command. *Note: How these are passed (e.g., as command-line arguments appended to `command`, or made available in the environment) may depend on the runtime implementation. Using `environment` is often more explicit.*
+  * **`environment`** (Map\<String, String\>, Optional): A key/value map of environment variables to set specifically for the execution of this shell command. Values can be static strings or evaluated from [Runtime Expressions](/docs/core-concepts/runtime-expressions/). This is a common way to pass dynamic data to the command.
+* **`await`** (Boolean, Optional, Default: `true`):
+ * `true`: The workflow task waits for the shell command process to complete before proceeding. The task's output is determined by the `return` property.
+ * `false`: The task starts the shell command and proceeds immediately. The task's `rawOutput` is its `transformedInput`.
+* **`return`** (String - `stdout` | `stderr` | `code` | `all` | `none`, Optional, Default: `stdout`): Specifies what the task's `rawOutput` should be when `await` is `true`.
+ * `stdout`: The standard output (stdout) stream of the command.
+ * `stderr`: The standard error (stderr) stream of the command.
+ * `code`: The integer exit code of the command (usually 0 for success).
+ * `all`: An object containing the results from the shell process. It typically includes:
+ * `code` (Integer): The exit code of the command.
+ * `stdout` (String | Null): The content captured from the standard output stream.
+ * `stderr` (String | Null): The content captured from the standard error stream.
+ * `none`: The task produces no output (evaluates to `null`).
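+
+For example, when only success matters, `return: code` keeps the task output minimal. A small sketch (the command and task names are illustrative):
+
+```yaml
+do:
+  - verifyArchive:
+      run:
+        shell:
+          command: "tar -tzf /backups/daily.tar.gz > /dev/null"
+        return: code
+      then: recordVerification
+  - recordVerification:
+      # Input is the exit code (0 on success; non-zero exits typically fault the task instead)
+      set:
+        archiveVerified: "${ . == 0 }"
+```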
+
+### Data Flow
+
+<CommonTaskDataFlow />
+
+**Note**:
+* The `transformedInput` to the task is available for use within runtime expressions in the `run.shell.environment`, `run.shell.arguments`, and `run.shell.command`.
+* The shell command typically accesses dynamic data via environment variables (set by `run.shell.environment`) or command-line arguments constructed within `run.shell.command`.
+* The `rawOutput` depends on the `run.await` and `run.return` settings.
+* Standard `output.as` and `export.as` process this resulting `rawOutput`.
+
+### Flow Control
+
+<CommonTaskFlowControl />
+
+**Note**: If `await` is `true` and the shell command exits with a non-zero status code, a `Runtime` error is typically raised, and the `then` directive is *not* followed (unless caught by `Try`).
\ No newline at end of file
diff --git a/src/content/docs/docs/run-tasks/workflow.mdx b/src/content/docs/docs/run-tasks/workflow.mdx
new file mode 100644
index 0000000..b7b2be5
--- /dev/null
+++ b/src/content/docs/docs/run-tasks/workflow.mdx
@@ -0,0 +1,174 @@
+---
+title: "Run Workflow Task (`run: workflow`)"
+sidebar:
+ order: 40
+ label: Run Workflow
+---
+
+import CommonTaskDataFlow from '../../shared/common-task-data-flow.mdx';
+import CommonTaskFlowControl from '../../shared/common-task-flow-control.mdx';
+
+## Purpose
+
+The `run: workflow` task allows a workflow to invoke and execute another Serverless Workflow definition as a sub-workflow. This facilitates modular design, reusability, and the composition of complex processes from smaller, focused workflows.
+
+## Usage Example
+
+```yaml
+document:
+ dsl: '1.0.0'
+ namespace: main_orchestration
+ name: order-processing
+ version: '1.1.0'
+# Assume 'inventory-check' workflow exists in 'shared_services' namespace
+do:
+ - checkInventory:
+ run:
+ workflow:
+ namespace: shared_services # Namespace of the workflow to run
+ name: inventory-check # Name of the workflow to run
+ version: '1.0.0' # Specific version to run
+ # Pass required input to the sub-workflow
+ input:
+ productId: "${ .order.itemId }"
+ quantity: "${ .order.quantity }"
+ # `await: true` (default) waits for the sub-workflow to complete
+ # `return: stdout` (default) captures the sub-workflow's final output
+ return: stdout
+ # Output of this task is the final output of 'inventory-check'
+ then: processInventoryResult
+ - processInventoryResult:
+ # Input might be { available: true, stockLevel: 150 }
+ switch:
+ - if: "${ .available == true }"
+ then: proceedToPayment
+ - then: notifyOutOfStock # Default case
+```
+
+## Additional Examples
+
+### Example: Running Latest Version
+
+```yaml
+do:
+ - runLatestUtility:
+ run:
+ workflow:
+ namespace: shared_utilities
+ name: data-cleanup
+ # version: 'latest' # Implicitly defaults to latest
+ input: { targetDir: "${ .tempDirectory }" }
+ return: stdout # Get the output of the latest cleanup workflow
+ then: processCleanupResult
+```
+
+### Example: Starting Sub-Workflow Without Awaiting
+
+```yaml
+do:
+ - triggerLongProcess:
+ run:
+ workflow:
+ namespace: background_jobs
+ name: monthly-report-generation
+ version: '2.0'
+ input: { month: "${ .currentMonth }", year: "${ .currentYear }" }
+ # Start the sub-workflow but don't wait for it
+ await: false
+ # Task output is its input, workflow continues immediately
+ then: logReportTriggered
+```
+
+### Example: Handling Faulted Sub-Workflow
+
+```yaml
+do:
+ - trySubWorkflow:
+ try:
+ run:
+ workflow:
+ namespace: experimental
+ name: potentially-failing-job
+ version: '0.1-beta'
+ input: "${ . }"
+ # `await: true` is default
+ # `return: stdout` is default, but will fail if sub-workflow faults
+ catch:
+ # Catch any error from the sub-workflow execution itself
+ errors:
+ with: { type: "https://serverlessworkflow.io/spec/1.0.0/errors/runtime" }
+ as: runtimeError
+ do:
+ - logSubWorkflowFailure:
+ call: logError
+ with:
+ message: "Sub-workflow experimental/potentially-failing-job faulted"
+ # Note: Accessing the sub-workflow's specific error might require `return: all` or `return: stderr` in the `run` task and inspecting the result within the catch block.
+ details: "${ $runtimeError }"
+
+```
+
+### Example: Using `return: all` to Get Status and Output/Error
+
+```yaml
+do:
+ - executeSubflowAndCheck:
+ run:
+ workflow:
+ namespace: validation_service
+ name: input-validator
+ version: '1.3'
+ input: { data: "${ .rawData }" }
+ # Get status code, stdout (if completed), and stderr (if faulted)
+ return: all
+ then: routeBasedOnSubflowResult
+ - routeBasedOnSubflowResult:
+ # Input: { "code": 0, "stdout": { "isValid": true }, "stderr": null } OR
+ # Input: { "code": 1, "stdout": null, "stderr": { "type": "...", "title": "Validation Failed", ... } }
+ switch:
+ - if: "${ .code == 0 and .stdout.isValid == true }"
+ then: processValidData
+ - if: "${ .code == 0 and .stdout.isValid == false }"
+ then: handleInvalidData
+ - then: handleSubflowError # If .code is non-zero
+```
+
+## Configuration Options
+
+The configuration is provided under the `run` property, specifically within the nested `workflow` object.
+
+### `run` (Object, Required)
+
+* **`workflow`** (Object, Required): Defines the sub-workflow execution configuration.
+ * **`namespace`** (String, Required): The namespace where the target workflow definition resides.
+ * **`name`** (String, Required): The name of the target workflow definition to execute.
+ * **`version`** (String, Optional, Default: `latest`): The specific version of the target workflow definition to execute. If omitted, the runtime typically uses the version marked as `latest`.
+ * **`input`** (Any, Optional): The input data to pass to the sub-workflow when it starts. This data is processed by the sub-workflow's own `input.from` and validated against its `input.schema`, if defined.
+* **`await`** (Boolean, Optional, Default: `true`):
+ * `true`: The parent workflow task waits for the sub-workflow process to reach a terminal state (e.g., `completed`, `faulted`, `cancelled`) before proceeding. The task's output is determined by the `return` property based on the sub-workflow's outcome.
+ * `false`: The task initiates the sub-workflow and proceeds immediately without waiting for its completion. The task's `rawOutput` is its `transformedInput`.
+* **`return`** (String - `stdout` | `stderr` | `code` | `all` | `none`, Optional, Default: `stdout`): Specifies what the task's `rawOutput` should be when `await` is `true`.
+ * `stdout`: The final output data of the sub-workflow (i.e., the result after its top-level `output.as` transformation), but only if it completed successfully.
+ * `stderr`: The WorkflowError object if the sub-workflow faulted. This object typically includes:
+ * `type` (String): A URI identifying the error type (e.g., `https://serverlessworkflow.io/spec/1.0.0/errors/runtime`).
+ * `status` (Integer): An appropriate status code (often mirroring HTTP status codes).
+ * `instance` (String): A JSON Pointer indicating the component within the sub-workflow where the error originated.
+ * `title` (String, Optional): A short, human-readable summary.
+ * `detail` (String, Optional): A more detailed explanation.
+ * `code`: A representation of the sub-workflow's terminal status (e.g., 0 for `completed`, non-zero for `faulted`/`cancelled` - specific codes may vary by runtime).
+ * `all`: An object containing the sub-workflow's status (`code`), final output (`stdout`), and error information (`stderr` if faulted).
+ * `none`: The task produces no output (evaluates to `null`).
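+
+For instance, a sketch that only needs the sub-workflow's terminal status might use `return: code` (the namespace, names, and exact code values below are illustrative; as noted above, specific codes may vary by runtime):
+
+```yaml
+do:
+  - runAuditSubflow:
+      run:
+        workflow:
+          namespace: compliance
+          name: audit-trail
+          version: '1.0.0'
+          input: { orderId: "${ .orderId }" }
+        return: code
+      then: checkAuditStatus
+  - checkAuditStatus:
+      switch:
+        - if: "${ . == 0 }"
+          then: continueProcessing
+        - then: escalateAuditFailure
+```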
+
+### Data Flow
+
+<CommonTaskDataFlow />
+
+**Note**:
+* The `transformedInput` to the `run: workflow` task is available for use within runtime expressions when defining the `run.workflow.input`.
+* The `run.workflow.input` becomes the initial raw input for the sub-workflow.
+* The `rawOutput` of the `run: workflow` task depends on the `run.await` and `run.return` settings, reflecting the outcome and final data of the sub-workflow.
+* Standard `output.as` and `export.as` process this resulting `rawOutput`.
+
+### Flow Control
+
+<CommonTaskFlowControl />
+
+**Note**: If `await` is `true` and the sub-workflow terminates in a `faulted` or `cancelled` state, a `Runtime` error (potentially containing the sub-workflow's error details in its `stderr` component if `return: all` or `return: stderr` is used) is typically raised by the `run: workflow` task, and the `then` directive is *not* followed (unless caught by `Try`).
\ No newline at end of file
diff --git a/src/content/docs/docs/set.mdx b/src/content/docs/docs/set.mdx
new file mode 100644
index 0000000..cd3bea8
--- /dev/null
+++ b/src/content/docs/docs/set.mdx
@@ -0,0 +1,320 @@
+---
+title: Set
+---
+
+import CommonTaskDataFlow from '../shared/common-task-data-flow.mdx';
+import CommonTaskFlowControl from '../shared/common-task-flow-control.mdx';
+
+## Purpose
+
+The `Set` task is used to dynamically create or modify data within the workflow's execution flow. Its primary function
+is to define the exact output of the task by evaluating a specified structure, often
+using [Runtime Expressions](/docs/core-concepts/runtime-expressions/) to incorporate data from the task's input or the workflow
+context.
+
+It's commonly used for:
+
+* Initializing data structures.
+* Assigning values to variables.
+* Transforming or restructuring data between tasks when the standard `input.from` or `output.as` transformations are
+ insufficient or less clear.
+* Explicitly defining the data to be passed forward.
+
+## Basic Usage
+
+Here's a simple example of using `Set` to create a new object:
+
+```yaml
+document:
+ dsl: '1.0.0'
+ namespace: examples
+ name: set-basic
+ version: '1.0.0'
+do:
+ - initialData:
+ set:
+ user:
+ name: "Alice"
+ id: 123
+ status: "active"
+ - processData: # Input to this task is the output of initialData
+ call: someFunction
+ with:
+ userData: "${ .user }"
+ currentStatus: "${ .status }"
+
+# Output of 'initialData' task: { "user": { "name": "Alice", "id": 123 }, "status": "active" }
+```
+
+In this example, the `initialData` task directly defines an object with `user` and `status` fields.
+This object becomes the output of `initialData` and is subsequently passed as input to the `processData` task.
+
+Here's another example using a runtime expression to combine input data:
+
+```yaml
+do:
+ - combineNames: # Assume input is { "firstName": "Bob", "lastName": "Smith" }
+ set:
+      fullName: '${ .firstName + " " + .lastName }'
+ originalInput: "${ . }" # Include original input if needed
+
+# Output of 'combineNames': { "fullName": "Bob Smith", "originalInput": { "firstName": "Bob", "lastName": "Smith" } }
+```
+
+## Configuration Options
+
+### `set` (Object, Required)
+
+This mandatory property defines the structure and content of the task's output.
+
+The value provided for `set` is evaluated as a template where [Runtime Expressions](/docs/core-concepts/runtime-expressions/) (e.g.,
+`${.fieldName}`, `${ $context.someValue }`) can be used.
+
+The result of evaluating this structure becomes the **raw output** of the `Set` task.
+
+```yaml
+set:
+ # Static values
+ configValue: 100
+ # Values from transformed input
+ inputValue: "${ .inputField }"
+ processedValue: "${ (.inputField * 2) + $context.offset }" # Combine input and context
+ # Nested structures
+ details:
+ timestamp: "${ $runtime.now.iso8601 }" # Using runtime variable
+ source: "workflowX"
+```
+
+## Data Flow
+
+<CommonTaskDataFlow />
+
+**Note**: The `Set` task's primary purpose is to generate its `rawOutput` based on the `set` configuration. Standard `output.as` and `export.as` then process this generated output.
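+
+As a quick sketch (field names are illustrative), `set` can build an intermediate object that `output.as` then trims down:
+
+```yaml
+do:
+  - buildSummary:
+      set:
+        subtotal: "${ .items | map(.price) | add }"
+        itemCount: "${ .items | length }"
+      output:
+        as: "${ { total: .subtotal, count: .itemCount } }"
+```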
+
+## Flow Control
+
+<CommonTaskFlowControl />
+
+
+## Set Task Examples
+
+### Basic Variable Assignment
+
+```yaml
+document:
+ dsl: '1.0.0'
+ namespace: examples
+ name: basic-variable-assignment
+ version: '1.0.0'
+do:
+ - setOrderDefaults:
+ set:
+ orderData:
+ id: ${ .input.orderId || uuid() }
+ createdAt: ${ new Date().toISOString() }
+ status: "PENDING"
+ customerName: ${ .input.customer.firstName + " " + .input.customer.lastName }
+ priority: ${ .input.priority || "normal" }
+```
+
+### Data Transformation
+
+```yaml
+document:
+ dsl: '1.0.0'
+ namespace: examples
+ name: data-transformation
+ version: '1.0.0'
+do:
+ - retrieveUserData:
+ call: function
+ with:
+ function: userService
+ args:
+ userId: ${ .input.userId }
+ result: userData
+
+ - transformUserProfile:
+ set:
+ profile:
+ displayName: ${ .userData.firstName + " " + .userData.lastName }
+ email: ${ .userData.email }
+ joined: ${ new Date(.userData.createdTimestamp).toLocaleDateString() }
+      membershipLevel: ${
+        if .userData.totalPurchases > 10000 then "platinum"
+        elif .userData.totalPurchases > 5000 then "gold"
+        elif .userData.totalPurchases > 1000 then "silver"
+        else "bronze"
+        end
+      }
+ tags: ${ .userData.interests || [] }
+```
+
+### Conditional Data Preparation
+
+```yaml
+document:
+ dsl: '1.0.0'
+ namespace: examples
+ name: conditional-data-preparation
+ version: '1.0.0'
+do:
+ - prepareApiRequest:
+ set:
+ apiRequest:
+ endpoint: ${ .input.environment == "production" ? "https://api.example.com" : "https://staging-api.example.com" }
+ headers:
+ Authorization: ${ "Bearer " + .input.authToken }
+ Content-Type: "application/json"
+ Accept: "application/json"
+ body: ${
+ {
+ "userId": .input.userId,
+ "operation": .input.operation,
+ "parameters": .input.parameters || {},
+ "tracingId": uuid(),
+ "timestamp": new Date().toISOString()
+ }
+ }
+```
+
+### Aggregating Results
+
+```yaml
+document:
+ dsl: '1.0.0'
+ namespace: examples
+ name: result-aggregation
+ version: '1.0.0'
+do:
+ - retrieveProductData:
+ call: function
+ with:
+ function: productService
+ args:
+ productId: ${ .input.productId }
+ result: productData
+
+ - retrievePricing:
+ call: function
+ with:
+ function: pricingService
+ args:
+ productId: ${ .input.productId }
+ region: ${ .input.region }
+ result: pricingData
+
+ - retrieveInventory:
+ call: function
+ with:
+ function: inventoryService
+ args:
+ productId: ${ .input.productId }
+ warehouseId: ${ .input.warehouseId }
+ result: inventoryData
+
+ - aggregateProductInfo:
+ set:
+ enrichedProduct: ${
+ {
+ "id": .productData.id,
+ "name": .productData.name,
+ "description": .productData.description,
+ "category": .productData.category,
+ "pricing": {
+ "basePrice": .pricingData.basePrice,
+ "currency": .pricingData.currency,
+ "discountPercentage": .pricingData.discountPercentage || 0,
+ "finalPrice": .pricingData.finalPrice,
+ "taxRate": .pricingData.taxRate
+ },
+ "inventory": {
+ "available": .inventoryData.quantityAvailable > 0,
+ "quantity": .inventoryData.quantityAvailable,
+ "location": .inventoryData.warehouseLocation,
+ "estimatedRestockDate": .inventoryData.quantityAvailable <= 0 ? .inventoryData.nextDeliveryDate : null
+ },
+ "lastUpdated": new Date().toISOString()
+ }
+ }
+```
+
+### Error Response Formatting
+
+```yaml
+document:
+ dsl: '1.0.0'
+ namespace: examples
+ name: error-response-formatting
+ version: '1.0.0'
+do:
+  - processOrderSafely:
+      try:
+        do:
+          - processOrder:
+              call: function
+              with:
+                function: orderProcessor
+                args:
+                  order: ${ .input.order }
+              result: processingResult
+      catch:
+        as: error
+        do:
+ - formatErrorResponse:
+ set:
+ errorResponse: ${
+ {
+ "success": false,
+ "errorCode": .error.code || "UNKNOWN_ERROR",
+ "message": .error.message || "An unexpected error occurred",
+ "details": .error.details || null,
+ "timestamp": new Date().toISOString(),
+ "requestId": .input.requestId || uuid(),
+ "suggestions": [
+ "Try again later",
+ "Contact support if the issue persists"
+ ]
+ }
+ }
+```
+
+## Working with jq Expressions
+
+The `Set` task leverages jq expressions for powerful data manipulation:
+
+```yaml
+document:
+ dsl: '1.0.0'
+ namespace: examples
+ name: json-path-examples
+ version: '1.0.0'
+do:
+ - manipulateData:
+ set:
+ # Access nested properties
+ customerEmail: ${ .input.order.customer.contactInfo.email }
+
+ # Array operations
+ firstItem: ${ .input.items[0] }
+ itemCount: ${ .input.items | length }
+
+      # Filtering arrays
+      expensiveItems: ${ [ .input.products[] | select(.price > 100) ] }
+
+      # Mapping arrays
+      productNames: ${ [ .input.products[].name ] }
+
+      # Combining data
+      combinedList: ${ .list1 + .list2 }
+
+      # Conditional assignment
+      status: ${ if .input.approved then "APPROVED" else "PENDING" end }
+
+      # String operations
+      upperCaseName: ${ .input.name | ascii_upcase }
+      # Left-pad the id to 10 characters with zeros
+      formattedId: ${ ("0000000000" + (.input.id | tostring))[-10:] }
+```
+
+Data tasks are essential for maintaining clean data flow throughout your workflows and preparing data in exactly the right format for each subsequent task. The Set task in particular offers significant flexibility in how you transform and manipulate data within your workflow's execution.
\ No newline at end of file
diff --git a/src/content/docs/docs/wait.mdx b/src/content/docs/docs/wait.mdx
new file mode 100644
index 0000000..8fd015a
--- /dev/null
+++ b/src/content/docs/docs/wait.mdx
@@ -0,0 +1,417 @@
+---
+title: Wait
+---
+
+import CommonTaskDataFlow from '../shared/common-task-data-flow.mdx';
+import CommonTaskFlowControl from '../shared/common-task-flow-control.mdx';
+
+## Purpose
+
+The `Wait` task is used to introduce a pause or delay into the workflow execution. It halts the workflow for a specified
+duration before proceeding to the next task.
+
+It's primarily used for:
+
+* Implementing timed delays between tasks.
+* Waiting for external processes or systems to complete, where a fixed delay is acceptable.
+* Rate limiting or throttling workflow execution.
+* Scheduling subsequent actions after a specific interval.
+
+## Basic Usage
+
+Here's a simple example of waiting for 5 seconds:
+
+```yaml
+document:
+ dsl: '1.0.0'
+ namespace: examples
+ name: wait-basic
+ version: '1.0.0'
+do:
+ - startProcess:
+ call: startLongRunningJob
+ # ...
+ - waitForCompletion:
+ wait:
+ duration: PT5S # Wait for 5 seconds
+ - checkStatus:
+ call: getJobStatus
+ # Input to checkStatus is the output of startProcess,
+ # as Wait just passes data through.
+ # ...
+```
+
+In this example, after the `startProcess` task completes, the `waitForCompletion` task pauses the workflow for exactly 5
+seconds before the `checkStatus` task is executed.
+
+## Configuration Options
+
+### `wait` (Object, Required)
+
+This mandatory object defines the duration of the pause.
+
+* **`duration`** (String | Object, Required):
+  The length of time to wait, expressed either as an [ISO 8601 duration](https://en.wikipedia.org/wiki/ISO_8601#Durations) string (e.g., `PT5S` for 5 seconds, `PT1H30M` for 1 hour and 30 minutes) or as a duration object (e.g., `{ minutes: 1, seconds: 30 }`).
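+
+For reference, a few common ISO-8601 duration strings:
+
+```yaml
+wait:
+  duration: PT45S # 45 seconds
+  # Other examples:
+  # PT2M30S -> 2 minutes, 30 seconds
+  # PT1H    -> 1 hour
+  # P1DT12H -> 1 day, 12 hours
+```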
+
+## Data Flow
+
+<CommonTaskDataFlow />
+
+**Note**: The `Wait` task typically acts as a pass-through for data; its `rawOutput` is identical to its `transformedInput` unless explicitly changed by `output.as`.
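+
+A small sketch illustrating the pass-through behavior (task names are illustrative):
+
+```yaml
+do:
+  - produceValue:
+      set:
+        value: 42
+  - pause:
+      wait:
+        duration: PT1S
+  - consumeValue:
+      # Receives { "value": 42 } unchanged from before the wait
+      set:
+        doubled: "${ .value * 2 }"
+```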
+
+## Flow Control
+
+<CommonTaskFlowControl />
+
+## Wait Task Examples
+
+### Basic Delay
+
+```yaml
+document:
+ dsl: '1.0.0'
+ namespace: examples
+ name: basic-delay
+ version: '1.0.0'
+do:
+ - sendNotification:
+ call: function
+ with:
+ function: notificationService
+ args:
+ user: ${ .input.userId }
+ message: "Your account verification process has started"
+
+ - waitForProcessing:
+ wait:
+ duration: PT30S
+
+ - checkVerificationStatus:
+ call: function
+ with:
+ function: verificationService
+ args:
+ userId: ${ .input.userId }
+ result: verificationStatus
+```
+
+### Wait Until Specific Time
+
+```yaml
+document:
+ dsl: '1.0.0'
+ namespace: examples
+ name: scheduled-execution
+ version: '1.0.0'
+do:
+ - scheduleMaintenanceWindow:
+ set:
+ maintenanceWindowStart: ${ .input.maintenanceTime || "2023-12-15T02:00:00Z" }
+
+ - notifyUsersAboutMaintenance:
+ call: function
+ with:
+ function: notificationService
+ args:
+ type: "MAINTENANCE_NOTIFICATION"
+ message: ${ "System maintenance scheduled for " + .maintenanceWindowStart }
+ users: ${ .input.affectedUsers }
+
+ - waitUntilMaintenanceWindow:
+ wait:
+ timestamp: ${ .maintenanceWindowStart }
+
+ - performMaintenance:
+ call: function
+ with:
+ function: maintenanceService
+ args:
+ systems: ${ .input.targetSystems }
+ operationType: "SCHEDULED_MAINTENANCE"
+```
+
+### Dynamic Wait Duration
+
+```yaml
+document:
+ dsl: '1.0.0'
+ namespace: examples
+ name: dynamic-delay
+ version: '1.0.0'
+do:
+ - determineWaitTime:
+ set:
+      # 5 seconds for high priority, 1 minute for medium, 5 minutes for low
+      waitTime: ${
+        if .input.priority == "high" then "PT5S"
+        elif .input.priority == "medium" then "PT1M"
+        else "PT5M"
+        end
+      }
+
+ - waitBasedOnPriority:
+ wait:
+ duration: ${ .waitTime }
+
+ - processItem:
+ call: function
+ with:
+ function: itemProcessor
+ args:
+ itemId: ${ .input.itemId }
+ processingType: ${ .input.processingType }
+```
+
+### Implementing Polling Pattern
+
+```yaml
+document:
+ dsl: '1.0.0'
+ namespace: examples
+ name: polling-pattern
+ version: '1.0.0'
+do:
+ - startLongRunningProcess:
+ call: function
+ with:
+ function: batchProcessor
+ args:
+ batchId: ${ .input.batchId }
+ operationType: "START"
+ result: processStatus
+
+ - initializePolling:
+ set:
+ maxAttempts: 10
+ currentAttempt: 0
+ complete: false
+
+ - pollForCompletion:
+ while: ${ !.complete && .currentAttempt < .maxAttempts }
+ do:
+ - waitForNextPoll:
+ wait:
+ duration: PT10S
+
+ - incrementAttempt:
+ set:
+ currentAttempt: ${ .currentAttempt + 1 }
+
+ - checkStatus:
+ call: function
+ with:
+ function: batchProcessor
+ args:
+ batchId: ${ .input.batchId }
+ operationType: "STATUS"
+ result: processStatus
+
+ - updateCompletionStatus:
+ set:
+ complete: ${ .processStatus.status == "COMPLETED" || .processStatus.status == "FAILED" }
+
+ - handleProcessResults:
+ if: ${ .complete }
+ then:
+ - returnResults:
+ set:
+ result:
+ batchId: ${ .input.batchId }
+ status: ${ .processStatus.status }
+ details: ${ .processStatus.details }
+ else:
+ - handleTimeout:
+ set:
+ result:
+ batchId: ${ .input.batchId }
+ status: "TIMED_OUT"
+ message: "Process did not complete within the maximum polling attempts"
+```
+
+### Cooldown Period
+
+```yaml
+document:
+ dsl: '1.0.0'
+ namespace: examples
+ name: rate-limiting
+ version: '1.0.0'
+do:
+ - processFirstBatch:
+ call: function
+ with:
+ function: dataProcessor
+ args:
+ batchId: ${ .input.batchIds[0] }
+ data: ${ .input.batches[0] }
+ result: firstBatchResult
+
+ - implementCooldown:
+ wait:
+ duration: PT30S
+ description: "API rate limiting cooldown period"
+
+ - processSecondBatch:
+ call: function
+ with:
+ function: dataProcessor
+ args:
+ batchId: ${ .input.batchIds[1] }
+ data: ${ .input.batches[1] }
+ result: secondBatchResult
+
+ - finalizeResults:
+ set:
+ result:
+ processedBatches: 2
+ results: [
+ ${ .firstBatchResult },
+ ${ .secondBatchResult }
+ ]
+ timing:
+ started: ${ .execution.startTime }
+ completed: ${ new Date().toISOString() }
+```
+
+### Business Hours Scheduling
+
+```yaml
+document:
+ dsl: '1.0.0'
+ namespace: examples
+ name: business-hours-scheduling
+ version: '1.0.0'
+do:
+ - receiveRequest:
+ set:
+ currentTime: ${ new Date() }
+ targetTime: ${
+ function getNextBusinessHourTime() {
+ const now = new Date();
+ const hour = now.getHours();
+ const isWeekend = now.getDay() === 0 || now.getDay() === 6;
+
+ if (isWeekend) {
+ // Move to Monday 9 AM
+ const monday = new Date(now);
+ monday.setDate(now.getDate() + (8 - now.getDay()) % 7);
+ monday.setHours(9, 0, 0, 0);
+ return monday.toISOString();
+ } else if (hour < 9) {
+ // Before business hours, schedule for 9 AM
+ const today9am = new Date(now);
+ today9am.setHours(9, 0, 0, 0);
+ return today9am.toISOString();
+ } else if (hour >= 17) {
+ // After business hours, schedule for next day 9 AM
+ const tomorrow9am = new Date(now);
+ tomorrow9am.setDate(now.getDate() + 1);
+ tomorrow9am.setHours(9, 0, 0, 0);
+ return tomorrow9am.toISOString();
+ } else {
+ // Within business hours, process now
+ return now.toISOString();
+ }
+ }
+ getNextBusinessHourTime()
+ }
+
+ - scheduleForBusinessHours:
+ if: ${ .targetTime != .currentTime }
+ then:
+ - waitUntilBusinessHours:
+ wait:
+ timestamp: ${ .targetTime }
+ description: "Waiting until business hours to process request"
+
+ - processRequest:
+ call: function
+ with:
+ function: requestProcessor
+ args:
+ requestId: ${ .input.requestId }
+ data: ${ .input.requestData }
+ result: processingResult
+```
+
+## Combining Wait with Other Tasks
+
+Wait tasks are often combined with other task types to create more complex temporal patterns:
+
+```yaml
+document:
+ dsl: '1.0.0'
+ namespace: examples
+ name: comprehensive-retry-with-backoff
+ version: '1.0.0'
+do:
+ - initializeState:
+ set:
+ retryCount: 0
+ maxRetries: 5
+ backoffFactor: 2
+ initialDelay: 1
+
+ - processWithRetries:
+ try:
+ do:
+ - callExternalService:
+ call: function
+ with:
+ function: externalService
+ args:
+ requestData: ${ .input.data }
+ result: serviceResponse
+ catch:
+ as: error
+ do:
+ - incrementRetryCount:
+ set:
+ retryCount: ${ .retryCount + 1 }
+ currentDelay: ${ .initialDelay * Math.pow(.backoffFactor, .retryCount - 1) }
+
+ - checkRetryLimit:
+ if: ${ .retryCount <= .maxRetries }
+ then:
+ - logRetryAttempt:
+ call: function
+ with:
+ function: logger
+ args:
+ level: "INFO"
+ message: ${ "Retrying operation (attempt " + .retryCount + " of " + .maxRetries + ")" }
+ error: ${ .error }
+ nextDelaySeconds: ${ .currentDelay }
+
+ - implementBackoff:
+ wait:
+ duration: ${ "PT" + .currentDelay + "S" }
+ description: ${ "Exponential backoff delay before retry attempt " + .retryCount }
+
+ - retry:
+ try:
+ do:
+ - callExternalServiceAgain:
+ call: function
+ with:
+ function: externalService
+ args:
+ requestData: ${ .input.data }
+ retryAttempt: ${ .retryCount }
+ result: serviceResponse
+ else:
+ - handleMaxRetriesExceeded:
+ set:
+ result:
+ success: false
+ error: "MAX_RETRIES_EXCEEDED"
+ message: "Operation failed after maximum retry attempts"
+ details: {
+ "originalError": ${ .error },
+ "retryAttempts": ${ .retryCount }
+ }
+```
\ No newline at end of file
diff --git a/src/content/docs/docs/workflow-definition-examples.mdx b/src/content/docs/docs/workflow-definition-examples.mdx
new file mode 100644
index 0000000..41090ca
--- /dev/null
+++ b/src/content/docs/docs/workflow-definition-examples.mdx
@@ -0,0 +1,378 @@
+---
+title: Workflow Definition Examples
+---
+
+## Introduction
+
+This page provides a collection of practical examples showing different approaches to defining Serverless Workflows. Each example demonstrates specific use cases, patterns, and combinations of features that can be employed when creating workflow definitions.
+
+These examples range from simple sequential task execution to complex event-driven workflows with parallel processing and error handling. They showcase how the various properties and constructs of the DSL can be combined to solve real-world orchestration challenges.
+
+For a complete reference of all top-level workflow properties, please see [Workflow Definition Structure](/docs/core-concepts/workflow-definition/).
+
+Feel free to use these examples as starting points or templates for your own workflow definitions. You can modify and extend them to suit your specific requirements.
+
+## Example 1: Basic Sequential Task Execution
+
+This example demonstrates a straightforward workflow with minimal properties and sequential task execution. It represents a simple order processing flow with three consecutive function calls.
+
+**Key features:**
+- Proper `document` structure with required workflow properties
+- Function definitions in the resource catalog (`use.functions`)
+- Sequential task execution with explicit `then` directives
+- Function calls with runtime expression arguments
+
+```yaml
+document:
+ dsl: '1.0.0'
+ namespace: com.example.orders
+ name: process-new-order
+ version: '1.1.0'
+ title: Process New Customer Order
+ description: Workflow to validate, reserve inventory, and initiate payment for new orders.
+
+# Define reusable function definitions
+use:
+ functions:
+ - name: validateOrderFunc
+ operation: https://order-service/validate
+ type: rest
+ - name: checkInventoryFunc
+ operation: https://inventory-service/check
+ type: rest
+ - name: processPaymentFunc
+ operation: https://payment-service/process
+ type: rest
+
+# Main execution block with sequential tasks
+do:
+ - validateOrder:
+ call: function
+ with:
+ function: validateOrderFunc
+ arguments:
+ order: "${ .inputOrder }"
+ then: checkInventory
+ - checkInventory:
+ call: function
+ with:
+ function: checkInventoryFunc
+ arguments:
+ items: "${ .order.items }"
+ then: processPayment
+ - processPayment:
+ call: function
+ with:
+ function: processPaymentFunc
+ arguments:
+ amount: "${ .order.total }"
+ customerId: "${ .order.customerId }"
+```
+
+## Example 2: Event-Driven Workflow with Resource Catalog
+
+This example shows a more sophisticated workflow that leverages the resource catalog for reusable definitions and is triggered by events. It implements a customer communication workflow that sends personalized welcome messages through either email or SMS based on customer preferences.
+
+**Key features:**
+- Proper `document` structure with required workflow properties
+- Resource catalog (`use`) with function definitions, authentication configurations, and event definitions
+- Event-based scheduling with the `schedule.when` property
+- Conditional logic using the `switch` task
+- Authentication reference in task definitions
+- Data extraction from triggering events
+- Custom metadata
+
+```yaml
+document:
+ dsl: '1.0.0'
+ namespace: com.example.notifications
+ name: customer-communication
+ version: '2.0.0'
+
+# Reusable definitions in the resource catalog
+use:
+ # Function definitions
+ functions:
+ - name: sendEmailFunc
+ operation: https://email-service/send
+ type: rest
+ - name: sendSmsFunc
+ operation: https://sms-service/send
+ type: rest
+
+ # Authentication definitions
+ auth:
+ - name: emailServiceAuth
+ scheme: oauth2
+ properties:
+ grantType: client_credentials
+ tokenUrl: https://auth.example.com/token
+ clientId: "${ $secrets.EMAIL_SERVICE_CLIENT_ID }"
+ clientSecret: "${ $secrets.EMAIL_SERVICE_CLIENT_SECRET }"
+
+ # Event definitions
+ events:
+ - name: CustomerSignupEvent
+ source: /customers/signup
+ type: com.example.customer.signup.v1
+
+# Event-based scheduling
+schedule:
+ - when:
+ event: CustomerSignupEvent
+ start: sendWelcomePackage
+
+# Main execution block
+do:
+ - sendWelcomePackage:
+ set:
+ customer: "${ .data }" # Extract customer data from the triggering event
+ then: determineChannel
+ - determineChannel:
+      switch:
+        - if: '${ .customer.preferences.contactMethod == "email" }'
+          then: sendEmail
+        - if: '${ .customer.preferences.contactMethod == "sms" }'
+          then: sendSms
+        - then: sendEmail # Default case
+ - sendEmail:
+ call: function
+ with:
+ function: sendEmailFunc
+ auth: emailServiceAuth
+ arguments:
+ to: "${ .customer.email }"
+ template: "welcome_email"
+ data:
+ name: "${ .customer.name }"
+ accountType: "${ .customer.accountType }"
+ - sendSms:
+ call: function
+ with:
+ function: sendSmsFunc
+ arguments:
+ to: "${ .customer.phone }"
+ message: "Welcome to our service, ${ .customer.name }!"
+
+# Custom metadata
+metadata:
+ owner: "Customer Engagement Team"
+ reviewedDate: "2023-09-15"
+ priority: "high"
+```
+
+## Example 3: Workflow with Parallel Execution and Error Handling
+
+This example illustrates a data processing workflow with parallel execution, comprehensive error handling, and timeout configuration. It fetches a batch of data, processes multiple records in parallel, aggregates the results, and handles any errors that might occur during processing.
+
+**Key features:**
+- Proper `document` structure with required workflow properties
+- Workflow-level timeout configuration
+- Parallel execution using the `fork` task
+- Retry policy with exponential backoff
+- Error handling with `try`/`catch`/`finally` blocks
+- Event emission for error notification
+- Data transformation and aggregation
+- Use of system variables like `$error` and `$now()`
+
+```yaml
+document:
+ dsl: '1.0.0'
+ namespace: com.example.dataprocessing
+ name: batch-data-processor
+ version: '1.0.0'
+
+# Define overall workflow timeout to ensure it doesn't run indefinitely
+timeout:
+ minutes: 30
+
+do:
+ - fetchData:
+ call: http
+ with:
+ method: GET
+ url: https://api.example.com/data/batch/${ .batchId }
+ then: processBatch
+ - processBatch:
+ # Process multiple data items concurrently
+ fork:
+ items: "${ .records }"
+ as: record
+ do:
+ call: http
+ with:
+ method: POST
+ url: https://processing-service.example.com/process
+ body: "${ .record }"
+ retry:
+ limit:
+          attempt:
+            count: 3
+ backoff:
+ exponential:
+ initial:
+ seconds: 1
+ multiplier: 2
+ max:
+ seconds: 8
+ then: aggregateResults
+ - aggregateResults:
+ try:
+ do:
+ call: function
+ with:
+ function: aggregateResultsFunc
+ arguments:
+ results: "${ .forkedResults }"
+ catch:
+ - error:
+ type: https://serverlessworkflow.io/spec/1.0.0/errors/validation
+ do:
+ call: function
+ with:
+ function: logValidationErrorFunc
+ arguments:
+ error: "${ $error }"
+ then: notifyFailure
+ finally:
+ do:
+ call: function
+ with:
+ function: cleanupTempDataFunc
+ then: storeFinalResults
+ - storeFinalResults:
+ call: function
+ with:
+ function: storeResultsFunc
+ arguments:
+ data: "${ .aggregatedData }"
+ metadata:
+ batchId: "${ .batchId }"
+ processedAt: "${ $now() }"
+ - notifyFailure:
+ emit:
+ event:
+ type: com.example.batch.processing.failed
+ source: /data/processing
+ data:
+ batchId: "${ .batchId }"
+ error: "${ $error }"
+```
+
+## Example 4: Time-based Scheduled Workflow
+
+This example shows a simple maintenance workflow that runs on a time-based schedule using cron syntax. It executes a cleanup container and then sends a report of the operation.
+
+**Key features:**
+- Proper `document` structure with required workflow properties
+- Time-based scheduling with cron syntax
+- Container execution with the `run` task
+- Command parameters as an array
+- Result handling from previous task executions
+
+```yaml
+document:
+ dsl: '1.0.0'
+ namespace: com.example.maintenance
+ name: daily-cleanup
+ version: '1.0.0'
+
+# Time-based scheduling
+schedule:
+ cron: '0 0 * * *' # Run at midnight every day
+
+do:
+ - cleanupTempFiles:
+ run: container
+ with:
+ image: maintenance-tools:v1.2
+ command: ["/bin/cleanup.sh", "--older-than", "7d", "--dir", "/tmp"]
+ then: sendReport
+ - sendReport:
+ call: function
+ with:
+ function: sendReportEmailFunc
+ arguments:
+ report: "${ .result }"
+ recipients: ["admin@example.com"]
+```
+
+## Example 5: Input/Output Transformation Workflow
+
+This example demonstrates a workflow with input validation, transformation, and output formatting. It processes payment data, ensuring it meets the required schema before processing and formats the output in a standardized way.
+
+**Key features:**
+- Proper `document` structure with required workflow properties
+- Input schema validation using JSON Schema
+- Input transformation with runtime expressions
+- Workflow-level output transformation
+- Error handling for validation failures
+
+```yaml
+document:
+ dsl: '1.0.0'
+ namespace: com.example.payments
+ name: process-payment
+ version: '1.0.0'
+
+# Input validation and transformation
+input:
+ schema:
+ type: object
+ required: ["paymentId", "amount", "currency"]
+ properties:
+ paymentId:
+ type: string
+ pattern: "^PAY-[A-Z0-9]{12}$"
+ amount:
+ type: number
+ minimum: 0.01
+ currency:
+ type: string
+ enum: ["USD", "EUR", "GBP"]
+ metadata:
+ type: object
+ from: "${ {paymentId: .paymentId, amount: .amount, currency: .currency, timestamp: $now()} }"
+
+# Output transformation
+output:
+ as: "${ {id: .paymentId, status: .status, processedAt: .timestamp, receipt: .receiptUrl} }"
+
+do:
+ - validatePayment:
+ try:
+ do:
+ call: function
+ with:
+ function: validatePaymentFunc
+ arguments:
+ payment: "${ . }"
+ catch:
+ - error:
+ type: https://serverlessworkflow.io/spec/1.0.0/errors/validation
+ do:
+ set:
+ status: "REJECTED"
+ reason: "${ $error.detail }"
+ then: end
+ then: processPayment
+ - processPayment:
+ call: function
+ with:
+ function: processPaymentFunc
+ arguments:
+ paymentId: "${ .paymentId }"
+ amount: "${ .amount }"
+ currency: "${ .currency }"
+ then: generateReceipt
+ - generateReceipt:
+ call: function
+ with:
+ function: generateReceiptFunc
+ arguments:
+ payment: "${ . }"
+      then: markCompleted
+  - markCompleted:
+      set:
+        status: "COMPLETED"
+        receiptUrl: "${ .receiptUrl }"
+```
\ No newline at end of file
diff --git a/src/content/docs/shared/common-task-data-flow.mdx b/src/content/docs/shared/common-task-data-flow.mdx
new file mode 100644
index 0000000..fc7102a
--- /dev/null
+++ b/src/content/docs/shared/common-task-data-flow.mdx
@@ -0,0 +1,9 @@
+---
+title: Common Task Data Flow
+---
+
+
+This task supports the standard [Data Flow](/docs/core-concepts/data-flow-management/) configuration
+(including `input`, `output`, `export`, and schemas).
+Refer to that page for details on these common properties and how they apply generally.
+
\ No newline at end of file
diff --git a/src/content/docs/shared/common-task-flow-control.mdx b/src/content/docs/shared/common-task-flow-control.mdx
new file mode 100644
index 0000000..d7e931b
--- /dev/null
+++ b/src/content/docs/shared/common-task-flow-control.mdx
@@ -0,0 +1,9 @@
+---
+title: Common Task Flow Control
+---
+
+
+This task supports the standard [Flow Control](/docs/core-concepts/flow-control/) configuration
+(including conditional execution with `if` and branching with `then`).
+Refer to that page for details on these common properties and how they apply generally.
+
\ No newline at end of file
diff --git a/src/overrides/ThemeSelect.astro b/src/overrides/ThemeSelect.astro
new file mode 100644
index 0000000..b0ee8c9
--- /dev/null
+++ b/src/overrides/ThemeSelect.astro
@@ -0,0 +1,7 @@
+---
+/** Overrides the Starlight theme selector component to include the version selector */
+import Default from '@astrojs/starlight/components/ThemeSelect.astro';
+---
+
+<!-- Render the version selector (custom element loaded from /webcomponents/version-select/version-select.js) alongside the default theme select -->
+<version-select></version-select>
+<Default><slot /></Default>
\ No newline at end of file
diff --git a/src/styles/docs.css b/src/styles/docs.css
new file mode 100644
index 0000000..69547ab
--- /dev/null
+++ b/src/styles/docs.css
@@ -0,0 +1,61 @@
+@layer base, starlight, theme, components, utilities;
+
+@import '@astrojs/starlight-tailwind';
+@import 'tailwindcss/theme.css' layer(theme);
+@import 'tailwindcss/utilities.css' layer(utilities);
+
+@plugin "daisyui";
+@plugin "daisyui/theme" {
+ name: "light";
+ default: true;
+ --color-primary: oklch(46.2% 0.0713 255.54);
+ --color-secondary: oklch(78.95% 0.0554 234.08);
+ --color-accent: oklch(68.35% 0.1676 34.7);
+}
+@plugin "daisyui/theme" {
+ name: "dark";
+ default: false;
+ --color-primary: oklch(96.89% 0.0281 199.41);
+ --color-primary-content: #293241;
+ --color-secondary: oklch(78.95% 0.0554 234.08);
+ --color-accent: oklch(68.35% 0.1676 34.7);
+ --color-accent-content: oklab(0.14 0.03 0.02);
+ --color-neutral: #293241;
+ --color-neutral-content: #A6ADBB;
+}
+
+html {
+ --sl-color-text-accent: var(--color-accent);
+ --sl-color-bg-nav: var(--color-base-100);
+ --sl-color-bg-sidebar: var(--color-base-100);
+}
+
+html[data-theme="dark"] {
+ .astro-code,
+ .astro-code span {
+ color: var(--shiki-dark) !important;
+ background-color: transparent !important;
+ }
+}
+
+/* header logo */
+.site-title {
+ gap: calc(var(--spacing) * 2);
+}
+.site-title img {
+ width: 24px;
+ height: 28px;
+ filter: brightness(0) saturate(100%);
+}
+
+html[data-theme="dark"] .site-title img {
+ filter: brightness(0) invert(1);
+}
+
+/* header title */
+.title-wrapper span {
+ color: var(--color-base-content);
+ font-size: var(--text-xl);
+ --tw-font-weight: var(--font-weight-bold);
+ font-weight: var(--font-weight-bold);
+}
\ No newline at end of file
diff --git a/src/styles/site.css b/src/styles/site.css
index 7d3a698..85bf0b5 100644
--- a/src/styles/site.css
+++ b/src/styles/site.css
@@ -20,11 +20,14 @@
--color-neutral-content: #A6ADBB;
}
-html[data-theme="dark"] .astro-code,
-html[data-theme="dark"] .astro-code span {
- color: var(--shiki-dark) !important;
- background-color: transparent !important;
- /*font-style: var(--shiki-dark-font-style) !important;
- font-weight: var(--shiki-dark-font-weight) !important;
- text-decoration: var(--shiki-dark-text-decoration) !important;*/
-}
\ No newline at end of file
+html[data-theme="dark"] {
+ .astro-code,
+ .astro-code span {
+ color: var(--shiki-dark) !important;
+ background-color: transparent !important;
+ }
+}
+
+.sl-anchor-link {
+ display: none;
+}
\ No newline at end of file